-rw-r--r--   ChangeLog.rss                                                      21
-rw-r--r--   ChangeLog.txt                                                       9
-rw-r--r--   FILELIST.TXT                                                       80
-rwxr-xr-x   recompress.sh                                                       2
-rwxr-xr-x   source/d/rust/rust.SlackBuild                                       8
-rw-r--r--   source/d/rust/rust.url                                             14
-rwxr-xr-x   source/xap/mozilla-firefox/mozilla-firefox.SlackBuild               3
-rw-r--r--   source/xap/mozilla-firefox/rust_1.47.0.patch                    30905
-rwxr-xr-x   source/xap/mozilla-thunderbird/mozilla-thunderbird.SlackBuild       3
-rw-r--r--   source/xap/mozilla-thunderbird/rust_1.47.0.patch                30905
-rwxr-xr-x   source/xap/seamonkey/seamonkey.SlackBuild                           4
11 files changed, 61900 insertions, 54 deletions
diff --git a/ChangeLog.rss b/ChangeLog.rss
index 25e4c91c..107b7964 100644
--- a/ChangeLog.rss
+++ b/ChangeLog.rss
@@ -11,10 +11,27 @@
<description>Tracking Slackware development in git.</description>
<language>en-us</language>
<id xmlns="http://www.w3.org/2005/Atom">urn:uuid:c964f45e-6732-11e8-bbe5-107b4450212f</id>
- <pubDate>Thu, 5 Nov 2020 20:40:51 GMT</pubDate>
- <lastBuildDate>Fri, 6 Nov 2020 07:59:48 GMT</lastBuildDate>
+ <pubDate>Fri, 6 Nov 2020 21:25:18 GMT</pubDate>
+ <lastBuildDate>Sat, 7 Nov 2020 07:59:46 GMT</lastBuildDate>
<generator>maintain_current_git.sh v 1.12</generator>
<item>
+ <title>Fri, 6 Nov 2020 21:25:18 GMT</title>
+ <pubDate>Fri, 6 Nov 2020 21:25:18 GMT</pubDate>
+ <link>https://git.slackware.nl/current/tag/?h=20201106212518</link>
+ <guid isPermaLink="false">20201106212518</guid>
+ <description>
+ <![CDATA[<pre>
+d/rust-1.47.0-x86_64-2.txz: Rebuilt.
+ Switch back to Rust 1.47.0 now that patches are available to fix Firefox
+ and Thunderbird.
+xap/mozilla-thunderbird-78.4.1-x86_64-1.txz: Upgraded.
+ This is a bugfix release.
+ For more information, see:
+ https://www.mozilla.org/en-US/thunderbird/78.4.1/releasenotes/
+ </pre>]]>
+ </description>
+ </item>
+ <item>
<title>Thu, 5 Nov 2020 20:40:51 GMT</title>
<pubDate>Thu, 5 Nov 2020 20:40:51 GMT</pubDate>
<link>https://git.slackware.nl/current/tag/?h=20201105204051</link>
diff --git a/ChangeLog.txt b/ChangeLog.txt
index a28834d0..d64e2015 100644
--- a/ChangeLog.txt
+++ b/ChangeLog.txt
@@ -1,3 +1,12 @@
+Fri Nov 6 21:25:18 UTC 2020
+d/rust-1.47.0-x86_64-2.txz: Rebuilt.
+ Switch back to Rust 1.47.0 now that patches are available to fix Firefox
+ and Thunderbird.
+xap/mozilla-thunderbird-78.4.1-x86_64-1.txz: Upgraded.
+ This is a bugfix release.
+ For more information, see:
+ https://www.mozilla.org/en-US/thunderbird/78.4.1/releasenotes/
++--------------------------+
Thu Nov 5 20:40:51 UTC 2020
a/hwdata-0.341-noarch-1.txz: Upgraded.
a/kernel-generic-5.4.75-x86_64-1.txz: Upgraded.
diff --git a/FILELIST.TXT b/FILELIST.TXT
index f5579cfd..06402b1a 100644
--- a/FILELIST.TXT
+++ b/FILELIST.TXT
@@ -1,20 +1,20 @@
-Thu Nov 5 20:47:14 UTC 2020
+Fri Nov 6 21:54:02 UTC 2020
Here is the file list for this directory. If you are using a
mirror site and find missing or extra files in the disk
subdirectories, please have the archive administrator refresh
the mirror.
-drwxr-xr-x 12 root root 4096 2020-11-05 20:40 .
+drwxr-xr-x 12 root root 4096 2020-11-06 21:28 .
-rw-r--r-- 1 root root 10064 2016-06-30 18:39 ./ANNOUNCE.14_2
-rw-r--r-- 1 root root 14975 2020-02-08 21:06 ./CHANGES_AND_HINTS.TXT
--rw-r--r-- 1 root root 1350030 2020-11-04 19:37 ./CHECKSUMS.md5
--rw-r--r-- 1 root root 163 2020-11-04 19:37 ./CHECKSUMS.md5.asc
+-rw-r--r-- 1 root root 1350290 2020-11-06 21:28 ./CHECKSUMS.md5
+-rw-r--r-- 1 root root 163 2020-11-06 21:28 ./CHECKSUMS.md5.asc
-rw-r--r-- 1 root root 17976 1994-06-10 02:28 ./COPYING
-rw-r--r-- 1 root root 35147 2007-06-30 04:21 ./COPYING3
-rw-r--r-- 1 root root 19573 2016-06-23 20:08 ./COPYRIGHT.TXT
-rw-r--r-- 1 root root 616 2006-10-02 04:37 ./CRYPTO_NOTICE.TXT
--rw-r--r-- 1 root root 1018366 2020-11-05 20:40 ./ChangeLog.txt
+-rw-r--r-- 1 root root 1018736 2020-11-06 21:25 ./ChangeLog.txt
drwxr-xr-x 3 root root 4096 2013-03-20 22:17 ./EFI
drwxr-xr-x 2 root root 4096 2020-11-05 20:39 ./EFI/BOOT
-rw-r--r-- 1 root root 1417216 2019-07-05 18:54 ./EFI/BOOT/bootx64.efi
@@ -25,9 +25,9 @@ drwxr-xr-x 2 root root 4096 2020-11-05 20:39 ./EFI/BOOT
-rwxr-xr-x 1 root root 2504 2019-07-05 18:54 ./EFI/BOOT/make-grub.sh
-rw-r--r-- 1 root root 10722 2013-09-21 19:02 ./EFI/BOOT/osdetect.cfg
-rw-r--r-- 1 root root 1273 2013-08-12 21:08 ./EFI/BOOT/tools.cfg
--rw-r--r-- 1 root root 1731047 2020-11-04 19:37 ./FILELIST.TXT
+-rw-r--r-- 1 root root 1731356 2020-11-06 21:28 ./FILELIST.TXT
-rw-r--r-- 1 root root 1572 2012-08-29 18:27 ./GPG-KEY
--rw-r--r-- 1 root root 766991 2020-11-05 20:46 ./PACKAGES.TXT
+-rw-r--r-- 1 root root 766991 2020-11-06 21:28 ./PACKAGES.TXT
-rw-r--r-- 1 root root 8564 2016-06-28 21:33 ./README.TXT
-rw-r--r-- 1 root root 3629 2020-11-05 20:27 ./README.initrd
-rw-r--r-- 1 root root 34412 2017-12-01 17:44 ./README_CRYPT.TXT
@@ -791,11 +791,11 @@ drwxr-xr-x 2 root root 4096 2012-09-20 18:06 ./patches
-rw-r--r-- 1 root root 575 2012-09-20 18:06 ./patches/FILE_LIST
-rw-r--r-- 1 root root 14 2012-09-20 18:06 ./patches/MANIFEST.bz2
-rw-r--r-- 1 root root 224 2012-09-20 18:06 ./patches/PACKAGES.TXT
-drwxr-xr-x 18 root root 4096 2020-11-05 20:46 ./slackware64
--rw-r--r-- 1 root root 302002 2020-11-05 20:46 ./slackware64/CHECKSUMS.md5
--rw-r--r-- 1 root root 163 2020-11-05 20:46 ./slackware64/CHECKSUMS.md5.asc
--rw-r--r-- 1 root root 375825 2020-11-05 20:45 ./slackware64/FILE_LIST
--rw-r--r-- 1 root root 3865286 2020-11-05 20:46 ./slackware64/MANIFEST.bz2
+drwxr-xr-x 18 root root 4096 2020-11-06 21:28 ./slackware64
+-rw-r--r-- 1 root root 302002 2020-11-06 21:28 ./slackware64/CHECKSUMS.md5
+-rw-r--r-- 1 root root 163 2020-11-06 21:28 ./slackware64/CHECKSUMS.md5.asc
+-rw-r--r-- 1 root root 375825 2020-11-06 21:27 ./slackware64/FILE_LIST
+-rw-r--r-- 1 root root 3877602 2020-11-06 21:27 ./slackware64/MANIFEST.bz2
lrwxrwxrwx 1 root root 15 2009-08-23 23:34 ./slackware64/PACKAGES.TXT -> ../PACKAGES.TXT
drwxr-xr-x 2 root root 20480 2020-11-05 20:45 ./slackware64/a
-rw-r--r-- 1 root root 327 2018-06-24 18:44 ./slackware64/a/aaa_base-14.2-x86_64-5.txt
@@ -1433,7 +1433,7 @@ drwxr-xr-x 2 root root 20480 2020-11-04 19:35 ./slackware64/ap
-rw-r--r-- 1 root root 506 2020-02-16 18:00 ./slackware64/ap/zsh-5.8-x86_64-1.txt
-rw-r--r-- 1 root root 3053312 2020-02-16 18:00 ./slackware64/ap/zsh-5.8-x86_64-1.txz
-rw-r--r-- 1 root root 163 2020-02-16 18:00 ./slackware64/ap/zsh-5.8-x86_64-1.txz.asc
-drwxr-xr-x 2 root root 16384 2020-11-05 20:45 ./slackware64/d
+drwxr-xr-x 2 root root 16384 2020-11-06 19:21 ./slackware64/d
-rw-r--r-- 1 root root 360 2020-10-24 22:39 ./slackware64/d/Cython-0.29.21-x86_64-2.txt
-rw-r--r-- 1 root root 2380688 2020-10-24 22:39 ./slackware64/d/Cython-0.29.21-x86_64-2.txz
-rw-r--r-- 1 root root 163 2020-10-24 22:39 ./slackware64/d/Cython-0.29.21-x86_64-2.txz.asc
@@ -1615,9 +1615,9 @@ drwxr-xr-x 2 root root 16384 2020-11-05 20:45 ./slackware64/d
-rw-r--r-- 1 root root 385 2020-10-02 19:53 ./slackware64/d/ruby-2.7.2-x86_64-1.txt
-rw-r--r-- 1 root root 6105584 2020-10-02 19:53 ./slackware64/d/ruby-2.7.2-x86_64-1.txz
-rw-r--r-- 1 root root 163 2020-10-02 19:53 ./slackware64/d/ruby-2.7.2-x86_64-1.txz.asc
--rw-r--r-- 1 root root 426 2020-10-16 19:40 ./slackware64/d/rust-1.46.0-x86_64-2.txt
--rw-r--r-- 1 root root 57264220 2020-10-16 19:40 ./slackware64/d/rust-1.46.0-x86_64-2.txz
--rw-r--r-- 1 root root 163 2020-10-16 19:40 ./slackware64/d/rust-1.46.0-x86_64-2.txz.asc
+-rw-r--r-- 1 root root 426 2020-10-13 19:53 ./slackware64/d/rust-1.47.0-x86_64-2.txt
+-rw-r--r-- 1 root root 65410800 2020-10-13 19:53 ./slackware64/d/rust-1.47.0-x86_64-2.txz
+-rw-r--r-- 1 root root 163 2020-10-13 19:53 ./slackware64/d/rust-1.47.0-x86_64-2.txz.asc
-rw-r--r-- 1 root root 456 2020-10-24 22:53 ./slackware64/d/scons-4.0.1-x86_64-2.txt
-rw-r--r-- 1 root root 1752500 2020-10-24 22:53 ./slackware64/d/scons-4.0.1-x86_64-2.txz
-rw-r--r-- 1 root root 163 2020-10-24 22:53 ./slackware64/d/scons-4.0.1-x86_64-2.txz.asc
@@ -4922,7 +4922,7 @@ drwxr-xr-x 2 root root 61440 2020-11-05 20:45 ./slackware64/x
-rw-r--r-- 1 root root 213 2018-04-13 06:10 ./slackware64/x/xwud-1.0.5-x86_64-2.txt
-rw-r--r-- 1 root root 25288 2018-04-13 06:10 ./slackware64/x/xwud-1.0.5-x86_64-2.txz
-rw-r--r-- 1 root root 163 2018-04-13 06:10 ./slackware64/x/xwud-1.0.5-x86_64-2.txz.asc
-drwxr-xr-x 2 root root 12288 2020-10-29 21:58 ./slackware64/xap
+drwxr-xr-x 2 root root 12288 2020-11-06 21:27 ./slackware64/xap
-rw-r--r-- 1 root root 625 2020-04-05 20:17 ./slackware64/xap/MPlayer-20200103-x86_64-2.txt
-rw-r--r-- 1 root root 2733304 2020-04-05 20:17 ./slackware64/xap/MPlayer-20200103-x86_64-2.txz
-rw-r--r-- 1 root root 163 2020-04-05 20:17 ./slackware64/xap/MPlayer-20200103-x86_64-2.txz.asc
@@ -4999,9 +4999,9 @@ drwxr-xr-x 2 root root 12288 2020-10-29 21:58 ./slackware64/xap
-rw-r--r-- 1 root root 570 2020-10-19 18:07 ./slackware64/xap/mozilla-firefox-78.4.0esr-x86_64-1.txt
-rw-r--r-- 1 root root 55346328 2020-10-19 18:07 ./slackware64/xap/mozilla-firefox-78.4.0esr-x86_64-1.txz
-rw-r--r-- 1 root root 163 2020-10-19 18:07 ./slackware64/xap/mozilla-firefox-78.4.0esr-x86_64-1.txz.asc
--rw-r--r-- 1 root root 663 2020-10-21 19:20 ./slackware64/xap/mozilla-thunderbird-78.4.0-x86_64-1.txt
--rw-r--r-- 1 root root 56893792 2020-10-21 19:20 ./slackware64/xap/mozilla-thunderbird-78.4.0-x86_64-1.txz
--rw-r--r-- 1 root root 163 2020-10-21 19:20 ./slackware64/xap/mozilla-thunderbird-78.4.0-x86_64-1.txz.asc
+-rw-r--r-- 1 root root 663 2020-11-06 19:18 ./slackware64/xap/mozilla-thunderbird-78.4.1-x86_64-1.txt
+-rw-r--r-- 1 root root 56951784 2020-11-06 19:18 ./slackware64/xap/mozilla-thunderbird-78.4.1-x86_64-1.txz
+-rw-r--r-- 1 root root 163 2020-11-06 19:18 ./slackware64/xap/mozilla-thunderbird-78.4.1-x86_64-1.txz.asc
-rw-r--r-- 1 root root 359 2020-06-23 18:24 ./slackware64/xap/network-manager-applet-1.18.0-x86_64-1.txt
-rw-r--r-- 1 root root 905824 2020-06-23 18:24 ./slackware64/xap/network-manager-applet-1.18.0-x86_64-1.txz
-rw-r--r-- 1 root root 163 2020-06-23 18:24 ./slackware64/xap/network-manager-applet-1.18.0-x86_64-1.txz.asc
@@ -5168,11 +5168,11 @@ drwxr-xr-x 2 root root 4096 2019-02-17 23:51 ./slackware64/y
-rw-r--r-- 1 root root 1147 2018-03-01 07:55 ./slackware64/y/maketag
-rw-r--r-- 1 root root 1147 2018-03-01 07:55 ./slackware64/y/maketag.ez
-rw-r--r-- 1 root root 14 2018-03-01 07:55 ./slackware64/y/tagfile
-drwxr-xr-x 19 root root 4096 2020-11-05 20:46 ./source
--rw-r--r-- 1 root root 503030 2020-11-05 20:46 ./source/CHECKSUMS.md5
--rw-r--r-- 1 root root 163 2020-11-05 20:46 ./source/CHECKSUMS.md5.asc
--rw-r--r-- 1 root root 708529 2020-11-05 20:46 ./source/FILE_LIST
--rw-r--r-- 1 root root 21899335 2020-11-05 20:46 ./source/MANIFEST.bz2
+drwxr-xr-x 19 root root 4096 2020-11-06 21:54 ./source
+-rw-r--r-- 1 root root 503188 2020-11-06 21:54 ./source/CHECKSUMS.md5
+-rw-r--r-- 1 root root 163 2020-11-06 21:54 ./source/CHECKSUMS.md5.asc
+-rw-r--r-- 1 root root 708714 2020-11-06 21:53 ./source/FILE_LIST
+-rw-r--r-- 1 root root 21872411 2020-11-06 21:53 ./source/MANIFEST.bz2
-rw-r--r-- 1 root root 1314 2006-10-02 04:40 ./source/README.TXT
drwxr-xr-x 119 root root 4096 2020-10-28 19:36 ./source/a
-rw-r--r-- 1 root root 1034 2019-05-04 17:56 ./source/a/FTBFSlog
@@ -6837,7 +6837,7 @@ drwxr-xr-x 2 root root 4096 2020-02-16 17:57 ./source/ap/zsh
-rwxr-xr-x 1 root root 4352 2020-02-16 17:58 ./source/ap/zsh/zsh.SlackBuild
-rw-r--r-- 1 root root 23 2018-09-14 17:50 ./source/ap/zsh/zsh.url
-rwxr-xr-x 1 root root 11975 2020-02-19 20:57 ./source/buildlist-from-changelog.sh
-drwxr-xr-x 60 root root 4096 2020-10-24 18:46 ./source/d
+drwxr-xr-x 60 root root 4096 2020-11-06 19:20 ./source/d
drwxr-xr-x 2 root root 4096 2020-10-24 18:47 ./source/d/Cython
-rw-r--r-- 1 root root 1602538 2020-07-08 21:56 ./source/d/Cython/Cython-0.29.21.tar.lz
-rwxr-xr-x 1 root root 3031 2020-10-24 18:47 ./source/d/Cython/Cython.SlackBuild
@@ -7192,11 +7192,11 @@ drwxr-xr-x 2 root root 4096 2020-10-02 19:51 ./source/d/ruby
-rw-r--r-- 1 root root 11694896 2020-10-02 09:28 ./source/d/ruby/ruby-2.7.2.tar.lz
-rwxr-xr-x 1 root root 4807 2019-12-25 18:16 ./source/d/ruby/ruby.SlackBuild
-rw-r--r-- 1 root root 837 2019-03-13 16:43 ./source/d/ruby/slack-desc
-drwxr-xr-x 2 root root 4096 2020-10-16 20:06 ./source/d/rust
+drwxr-xr-x 2 root root 4096 2020-10-08 18:05 ./source/d/rust
-rw-r--r-- 1 root root 300 2017-10-16 20:00 ./source/d/rust/link_libffi.diff.gz
--rwxr-xr-x 1 root root 9453 2020-10-16 18:54 ./source/d/rust/rust.SlackBuild
--rw-r--r-- 1 root root 1457 2020-08-27 17:59 ./source/d/rust/rust.url
--rw-r--r-- 1 root root 93811458 2020-08-27 15:49 ./source/d/rust/rustc-1.46.0-src.tar.lz
+-rwxr-xr-x 1 root root 9453 2020-10-13 19:10 ./source/d/rust/rust.SlackBuild
+-rw-r--r-- 1 root root 1457 2020-10-08 18:04 ./source/d/rust/rust.url
+-rw-r--r-- 1 root root 98424391 2020-10-08 13:24 ./source/d/rust/rustc-1.47.0-src.tar.lz
-rw-r--r-- 1 root root 910 2018-02-27 06:49 ./source/d/rust/slack-desc
drwxr-xr-x 2 root root 4096 2020-10-24 20:35 ./source/d/scons
-rw-r--r-- 1 root root 1274421 2020-07-17 02:00 ./source/d/scons/scons-4.0.1.tar.lz
@@ -13161,7 +13161,7 @@ drwxr-xr-x 2 root root 4096 2020-03-07 19:29 ./source/xap/libnma
-rw-r--r-- 1 root root 1366584 2020-03-06 19:49 ./source/xap/libnma/libnma-1.8.28.tar.xz
-rwxr-xr-x 1 root root 3570 2020-03-07 19:31 ./source/xap/libnma/libnma.SlackBuild
-rw-r--r-- 1 root root 792 2020-03-07 19:38 ./source/xap/libnma/slack-desc
-drwxr-xr-x 3 root root 4096 2020-10-19 17:46 ./source/xap/mozilla-firefox
+drwxr-xr-x 3 root root 4096 2020-11-06 21:53 ./source/xap/mozilla-firefox
drwxr-xr-x 5 root root 4096 2019-08-27 19:34 ./source/xap/mozilla-firefox/build-deps
-rwxr-xr-x 1 root root 1919 2019-07-09 19:35 ./source/xap/mozilla-firefox/build-deps.sh
drwxr-xr-x 2 root root 4096 2016-07-03 18:05 ./source/xap/mozilla-firefox/build-deps/autoconf
@@ -13182,11 +13182,12 @@ drwxr-xr-x 2 root root 4096 2020-07-03 19:10 ./source/xap/mozilla-firefox/
-rw-r--r-- 1 root root 327 2008-06-17 17:19 ./source/xap/mozilla-firefox/firefox.moz_plugin_path.diff.gz
-rw-r--r-- 1 root root 462 2009-07-01 06:05 ./source/xap/mozilla-firefox/mimeTypes.rdf.gz
-rw-r--r-- 1 root root 680 2009-07-01 13:28 ./source/xap/mozilla-firefox/mozilla-firefox-mimeTypes-fix.diff.gz
--rwxr-xr-x 1 root root 15166 2020-07-27 16:28 ./source/xap/mozilla-firefox/mozilla-firefox.SlackBuild
+-rwxr-xr-x 1 root root 15263 2020-11-06 19:25 ./source/xap/mozilla-firefox/mozilla-firefox.SlackBuild
-rw-r--r-- 1 root root 2748 2017-12-04 21:30 ./source/xap/mozilla-firefox/mozilla-firefox.desktop
+-rw-r--r-- 1 root root 173003 2020-10-20 13:21 ./source/xap/mozilla-firefox/rust_1.47.0.patch.gz
-rw-r--r-- 1 root root 1033 2020-07-07 18:08 ./source/xap/mozilla-firefox/slack-desc
-rw-r--r-- 1 root root 456 2019-07-08 18:54 ./source/xap/mozilla-firefox/unbreakdocs.diff.gz
-drwxr-xr-x 5 root root 4096 2020-10-21 19:24 ./source/xap/mozilla-thunderbird
+drwxr-xr-x 5 root root 4096 2020-11-06 19:26 ./source/xap/mozilla-thunderbird
drwxr-xr-x 2 root root 4096 2016-07-03 18:05 ./source/xap/mozilla-thunderbird/autoconf
-rw-r--r-- 1 root root 5869 2016-07-03 18:04 ./source/xap/mozilla-thunderbird/autoconf/autoconf-2.13-consolidated_fixes-1.patch.gz
-rw-r--r-- 1 root root 300116 1999-01-15 21:03 ./source/xap/mozilla-thunderbird/autoconf/autoconf-2.13.tar.xz
@@ -13210,12 +13211,13 @@ lrwxrwxrwx 1 root root 7 2019-08-28 16:42 ./source/xap/mozilla-thunderb
lrwxrwxrwx 1 root root 7 2019-08-28 16:40 ./source/xap/mozilla-thunderbird/gold/ld -> ld.gold
-rwxr-xr-x 1 root root 281 2019-08-28 19:15 ./source/xap/mozilla-thunderbird/gold/ld.gold
-rw-r--r-- 1 root root 312 2012-03-28 18:01 ./source/xap/mozilla-thunderbird/mozilla-firefox.xpcom_arm.patch.gz
--rwxr-xr-x 1 root root 13036 2020-08-25 16:18 ./source/xap/mozilla-thunderbird/mozilla-thunderbird.SlackBuild
+-rwxr-xr-x 1 root root 13133 2020-11-06 18:57 ./source/xap/mozilla-thunderbird/mozilla-thunderbird.SlackBuild
-rw-r--r-- 1 root root 3378 2005-03-08 05:13 ./source/xap/mozilla-thunderbird/mozilla-thunderbird.desktop
+-rw-r--r-- 1 root root 173003 2020-10-20 13:21 ./source/xap/mozilla-thunderbird/rust_1.47.0.patch.gz
-rw-r--r-- 1 root root 1130 2018-02-27 06:47 ./source/xap/mozilla-thunderbird/slack-desc
-rw-r--r-- 1 root root 330 2019-08-27 16:35 ./source/xap/mozilla-thunderbird/tb.ui.scrollToClick.diff.gz
--rw-r--r-- 1 root root 361176060 2020-10-21 14:38 ./source/xap/mozilla-thunderbird/thunderbird-78.4.0.source.tar.xz
--rw-r--r-- 1 root root 833 2020-10-21 14:38 ./source/xap/mozilla-thunderbird/thunderbird-78.4.0.source.tar.xz.asc
+-rw-r--r-- 1 root root 352778636 2020-11-06 13:00 ./source/xap/mozilla-thunderbird/thunderbird-78.4.1.source.tar.xz
+-rw-r--r-- 1 root root 833 2020-11-06 13:00 ./source/xap/mozilla-thunderbird/thunderbird-78.4.1.source.tar.xz.asc
-rw-r--r-- 1 root root 456 2019-07-08 18:54 ./source/xap/mozilla-thunderbird/unbreakdocs.diff.gz
drwxr-xr-x 2 root root 4096 2020-06-23 18:18 ./source/xap/network-manager-applet
-rw-r--r-- 1 root root 207 2012-08-10 23:59 ./source/xap/network-manager-applet/doinst.sh.gz
@@ -13260,7 +13262,7 @@ drwxr-xr-x 2 root root 4096 2020-08-24 19:02 ./source/xap/sane
-rwxr-xr-x 1 root root 6139 2020-08-24 19:02 ./source/xap/sane/sane.SlackBuild
-rw-r--r-- 1 root root 40 2020-05-17 19:09 ./source/xap/sane/sane.url
-rw-r--r-- 1 root root 811 2020-05-17 19:09 ./source/xap/sane/slack-desc
-drwxr-xr-x 3 root root 4096 2020-09-22 17:59 ./source/xap/seamonkey
+drwxr-xr-x 3 root root 4096 2020-11-06 20:41 ./source/xap/seamonkey
drwxr-xr-x 2 root root 4096 2016-07-03 18:05 ./source/xap/seamonkey/autoconf
-rw-r--r-- 1 root root 5869 2016-07-03 18:04 ./source/xap/seamonkey/autoconf/autoconf-2.13-consolidated_fixes-1.patch.gz
-rw-r--r-- 1 root root 300116 1999-01-15 21:03 ./source/xap/seamonkey/autoconf/autoconf-2.13.tar.xz
@@ -13270,7 +13272,7 @@ drwxr-xr-x 2 root root 4096 2016-07-03 18:05 ./source/xap/seamonkey/autoco
-rw-r--r-- 1 root root 12796 2006-02-09 03:17 ./source/xap/seamonkey/seamonkey-icon.png
-rw-r--r-- 1 root root 185 2006-02-09 03:40 ./source/xap/seamonkey/seamonkey-mail-icon.png
-rw-r--r-- 1 root root 207 2006-02-09 03:23 ./source/xap/seamonkey/seamonkey-mail.desktop
--rwxr-xr-x 1 root root 8571 2020-09-22 17:59 ./source/xap/seamonkey/seamonkey.SlackBuild
+-rwxr-xr-x 1 root root 8565 2020-11-06 20:24 ./source/xap/seamonkey/seamonkey.SlackBuild
-rw-r--r-- 1 root root 315 2013-08-13 03:23 ./source/xap/seamonkey/seamonkey.desktop
-rw-r--r-- 1 root root 849 2020-03-01 04:26 ./source/xap/seamonkey/slack-desc
-rw-r--r-- 1 root root 341 2017-07-21 16:21 ./source/xap/seamonkey/sm.ui.scrollToClick.diff.gz
@@ -17077,7 +17079,7 @@ drwxr-xr-x 2 root root 4096 2020-10-31 19:30 ./testing/source/vtown/kde/sr
-rw-r--r-- 1 root root 488 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdesdk-kioslaves-20.08.2.tar.xz.sig
-rw-r--r-- 1 root root 17700 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdesdk-thumbnailers-20.08.2.tar.xz
-rw-r--r-- 1 root root 488 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdesdk-thumbnailers-20.08.2.tar.xz.sig
--rw-r--r-- 1 root root 467300 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdf-20.08.2.tar.xz
+-rw-r--r-- 1 root root 467300 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdf-20.08.2.tar.xz
-rw-r--r-- 1 root root 488 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdf-20.08.2.tar.xz.sig
-rw-r--r-- 1 root root 107664 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdialog-20.08.2.tar.xz
-rw-r--r-- 1 root root 488 2020-10-06 04:30 ./testing/source/vtown/kde/src/applications/kdialog-20.08.2.tar.xz.sig
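
The FILELIST.TXT preamble above asks mirror users to report missing or extra files. A minimal sketch of how the signed checksum files listed in that tree (GPG-KEY, CHECKSUMS.md5, CHECKSUMS.md5.asc) could be used to spot-check the packages touched by this changeset; the grep pattern and the idea of filtering before md5sum are illustrative, not part of the changeset itself, and the commands assume they are run from the mirror root:

# Import the signing key shipped at the top of the tree and verify the
# detached signature on the checksum file (illustrative sketch).
gpg --import GPG-KEY
gpg --verify CHECKSUMS.md5.asc CHECKSUMS.md5
# CHECKSUMS.md5 carries a text header, so filter to the packages rebuilt or
# upgraded here before handing the lines to md5sum for verification.
grep -E 'rust-1\.47\.0|mozilla-thunderbird-78\.4\.1' CHECKSUMS.md5 | md5sum -c -
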
diff --git a/recompress.sh b/recompress.sh
index e896e61a..9657d639 100755
--- a/recompress.sh
+++ b/recompress.sh
@@ -1097,6 +1097,7 @@ gzip ./source/xap/mozilla-thunderbird/build-deps/autoconf/autoconf-2.13-consolid
gzip ./source/xap/mozilla-thunderbird/tb.ui.scrollToClick.diff
gzip ./source/xap/mozilla-thunderbird/mozilla-firefox.xpcom_arm.patch
gzip ./source/xap/mozilla-thunderbird/unbreakdocs.diff
+gzip ./source/xap/mozilla-thunderbird/rust_1.47.0.patch
gzip ./source/xap/mozilla-thunderbird/gkrust.a.no.networking.check.diff
gzip ./source/xap/xmms/xmms.alsa.default.diff
gzip ./source/xap/xmms/xmms.gtk.doublesize.diff
@@ -1211,6 +1212,7 @@ gzip ./source/xap/mozilla-firefox/build-deps/autoconf/autoconf-2.13-consolidated
gzip ./source/xap/mozilla-firefox/mimeTypes.rdf
gzip ./source/xap/mozilla-firefox/mozilla-firefox-mimeTypes-fix.diff
gzip ./source/xap/mozilla-firefox/unbreakdocs.diff
+gzip ./source/xap/mozilla-firefox/rust_1.47.0.patch
gzip ./source/xap/electricsheep/electricsheep.mplayer.diff
gzip ./source/installer/sources/initrd/etc/keymaps.tar
gzip ./source/x/liberation-fonts-ttf/doinst.sh
diff --git a/source/d/rust/rust.SlackBuild b/source/d/rust/rust.SlackBuild
index 8f3508a5..6e8e6af1 100755
--- a/source/d/rust/rust.SlackBuild
+++ b/source/d/rust/rust.SlackBuild
@@ -26,7 +26,7 @@ cd $(dirname $0) ; CWD=$(pwd)
PKGNAM=rust
SRCNAM="${PKGNAM}c"
-VERSION=${VERSION:-1.46.0}
+VERSION=${VERSION:-1.47.0}
BUILD=${BUILD:-2}
# Set this to YES to build with the system LLVM, or NO to use the bundled LLVM.
@@ -34,9 +34,9 @@ BUILD=${BUILD:-2}
SYSTEM_LLVM=${SYSTEM_LLVM:-YES}
# Bootstrap variables (might not be kept updated for latest Rust):
-RSTAGE0_VERSION=${RSTAGE0_VERSION:-1.45.2}
-RSTAGE0_DIR=${RSTAGE0_DIR:-2020-08-03}
-CSTAGE0_VERSION=${CSTAGE0_VERSION:-0.46.1}
+RSTAGE0_VERSION=${RSTAGE0_VERSION:-1.46.0}
+RSTAGE0_DIR=${RSTAGE0_DIR:-2020-08-27}
+CSTAGE0_VERSION=${CSTAGE0_VERSION:-0.47.0}
CSTAGE0_DIR=${CSTAGE0_DIR:-$RSTAGE0_DIR}
# Automatically determine the architecture we're building on:
diff --git a/source/d/rust/rust.url b/source/d/rust/rust.url
index e88857ae..c9982da0 100644
--- a/source/d/rust/rust.url
+++ b/source/d/rust/rust.url
@@ -1,5 +1,5 @@
# Source code (repacked to .tar.lz):
-VERSION=1.46.0
+VERSION=1.47.0
rm -f rustc-${VERSION}-src.tar.*
lftpget https://static.rust-lang.org/dist/rustc-${VERSION}-src.tar.gz
lftpget https://static.rust-lang.org/dist/rustc-${VERSION}-src.tar.gz.asc
@@ -21,12 +21,12 @@ fi
exit 0
# i686 bootstrap:
-lftpget https://static.rust-lang.org/dist/2020-08-03/cargo-0.46.1-i686-unknown-linux-gnu.tar.gz
-lftpget https://static.rust-lang.org/dist/2020-08-03/rust-std-1.45.2-i686-unknown-linux-gnu.tar.gz
-lftpget https://static.rust-lang.org/dist/2020-08-03/rustc-1.45.2-i686-unknown-linux-gnu.tar.gz
+lftpget https://static.rust-lang.org/dist/2020-08-27/cargo-0.47.0-i686-unknown-linux-gnu.tar.gz
+lftpget https://static.rust-lang.org/dist/2020-08-27/rust-std-1.46.0-i686-unknown-linux-gnu.tar.gz
+lftpget https://static.rust-lang.org/dist/2020-08-27/rustc-1.46.0-i686-unknown-linux-gnu.tar.gz
# x86_64 bootstrap:
-lftpget https://static.rust-lang.org/dist/2020-08-03/cargo-0.46.1-x86_64-unknown-linux-gnu.tar.gz
-lftpget https://static.rust-lang.org/dist/2020-08-03/rust-std-1.45.2-x86_64-unknown-linux-gnu.tar.gz
-lftpget https://static.rust-lang.org/dist/2020-08-03/rustc-1.45.2-x86_64-unknown-linux-gnu.tar.gz
+lftpget https://static.rust-lang.org/dist/2020-08-27/cargo-0.47.0-x86_64-unknown-linux-gnu.tar.gz
+lftpget https://static.rust-lang.org/dist/2020-08-27/rust-std-1.46.0-x86_64-unknown-linux-gnu.tar.gz
+lftpget https://static.rust-lang.org/dist/2020-08-27/rustc-1.46.0-x86_64-unknown-linux-gnu.tar.gz
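
The bootstrap URLs bumped above all follow one pattern, which is also what the RSTAGE0_*/CSTAGE0_* variables in the rust.SlackBuild hunk earlier in this diff encode. A small sketch of that mapping for the x86_64 case; the loop and the BARCH variable are illustrative conveniences, not lines from the SlackBuild, while the version and date values come from this changeset:

# Illustrative only: reproduce the three x86_64 bootstrap fetches from the
# stage0 values set in rust.SlackBuild as of this changeset.
RSTAGE0_VERSION=1.46.0        # rustc/rust-std release used to bootstrap 1.47.0
RSTAGE0_DIR=2020-08-27        # dist/ subdirectory that shipped that release
CSTAGE0_VERSION=0.47.0        # matching cargo release
BARCH=x86_64-unknown-linux-gnu
for f in rustc-${RSTAGE0_VERSION} rust-std-${RSTAGE0_VERSION} cargo-${CSTAGE0_VERSION}; do
  lftpget https://static.rust-lang.org/dist/${RSTAGE0_DIR}/${f}-${BARCH}.tar.gz
done
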
diff --git a/source/xap/mozilla-firefox/mozilla-firefox.SlackBuild b/source/xap/mozilla-firefox/mozilla-firefox.SlackBuild
index c8e20e3c..6f3dd999 100755
--- a/source/xap/mozilla-firefox/mozilla-firefox.SlackBuild
+++ b/source/xap/mozilla-firefox/mozilla-firefox.SlackBuild
@@ -198,6 +198,9 @@ zcat $CWD/ff.ui.scrollToClick.diff.gz | patch -p1 --verbose || exit 1
# Fix building with latest Rust:
zcat $CWD/unbreakdocs.diff.gz | patch -p1 --verbose || exit 1
+# Fix building with rust-1.47.0:
+zcat $CWD/rust_1.47.0.patch.gz | patch -p1 --verbose || exit 1
+
# Fetch localization, if requested
# https://bugzilla.mozilla.org/show_bug.cgi?id=1256955
if [ ! -z $MOZLOCALIZE ]; then
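
The new rust_1.47.0.patch is stored uncompressed in the tree (the 30905-line file added below), gzipped for distribution by the recompress.sh lines added earlier, and applied from the SlackBuilds with the zcat-into-patch idiom shown in the hunk above. A minimal sketch of that round trip, assuming the file layout in this changeset:

# In the source tree: recompress.sh gzips the plain-text patch for distribution.
gzip ./source/xap/mozilla-firefox/rust_1.47.0.patch
# At build time: the SlackBuild decompresses and applies it from $CWD, bailing
# out if any hunk fails (the same line added to mozilla-firefox.SlackBuild).
zcat $CWD/rust_1.47.0.patch.gz | patch -p1 --verbose || exit 1
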
diff --git a/source/xap/mozilla-firefox/rust_1.47.0.patch b/source/xap/mozilla-firefox/rust_1.47.0.patch
new file mode 100644
index 00000000..9471b7bd
--- /dev/null
+++ b/source/xap/mozilla-firefox/rust_1.47.0.patch
@@ -0,0 +1,30905 @@
+From 83fc2e3616ef15056be74f056a15e892038809b9 Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:10:20 +0200
+Subject: [PATCH 38/38] bmo#1663715: Update syn and proc-macro2 so that Firefox
+ can build on Rust nightly again
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ Cargo.lock | 8 +-
+ .../rust/lucet-wasi/.cargo-checksum.json | 2 +-
+ .../rust/packed_simd/.cargo-checksum.json | 2 +-
+ .../rust/proc-macro2/.cargo-checksum.json | 2 +-
+ third_party/rust/proc-macro2/Cargo.toml | 15 +-
+ third_party/rust/proc-macro2/README.md | 2 +-
+ third_party/rust/proc-macro2/build.rs | 20 +
+ third_party/rust/proc-macro2/src/detection.rs | 67 +
+ third_party/rust/proc-macro2/src/fallback.rs | 1010 ++----
+ third_party/rust/proc-macro2/src/lib.rs | 225 +-
+ third_party/rust/proc-macro2/src/marker.rs | 18 +
+ third_party/rust/proc-macro2/src/parse.rs | 849 +++++
+ third_party/rust/proc-macro2/src/strnom.rs | 391 ---
+ third_party/rust/proc-macro2/src/wrapper.rs | 258 +-
+ .../rust/proc-macro2/tests/comments.rs | 103 +
+ third_party/rust/proc-macro2/tests/marker.rs | 33 +
+ third_party/rust/proc-macro2/tests/test.rs | 240 +-
+ .../rust/proc-macro2/tests/test_fmt.rs | 26 +
+ .../spirv-cross-internal/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/Cargo.toml | 35 +-
+ third_party/rust/syn/README.md | 16 +-
+ third_party/rust/syn/benches/file.rs | 7 +
+ third_party/rust/syn/benches/rust.rs | 45 +-
+ third_party/rust/syn/build.rs | 38 +-
+ third_party/rust/syn/src/attr.rs | 126 +-
+ third_party/rust/syn/src/buffer.rs | 56 +-
+ third_party/rust/syn/src/custom_keyword.rs | 12 +-
+ .../rust/syn/src/custom_punctuation.rs | 50 +-
+ third_party/rust/syn/src/data.rs | 96 +-
+ third_party/rust/syn/src/derive.rs | 10 +-
+ third_party/rust/syn/src/discouraged.rs | 27 +-
+ third_party/rust/syn/src/error.rs | 33 +-
+ third_party/rust/syn/src/expr.rs | 826 +++--
+ third_party/rust/syn/src/ext.rs | 12 +-
+ third_party/rust/syn/src/file.rs | 4 +-
+ third_party/rust/syn/src/gen/clone.rs | 2051 ++++++++++++
+ third_party/rust/syn/src/gen/debug.rs | 2857 +++++++++++++++++
+ third_party/rust/syn/src/gen/eq.rs | 1930 +++++++++++
+ third_party/rust/syn/src/gen/fold.rs | 287 +-
+ third_party/rust/syn/src/gen/hash.rs | 2691 ++++++++++++++++
+ third_party/rust/syn/src/gen/visit.rs | 19 +-
+ third_party/rust/syn/src/gen/visit_mut.rs | 19 +-
+ third_party/rust/syn/src/generics.rs | 255 +-
+ third_party/rust/syn/src/item.rs | 1515 +++++----
+ third_party/rust/syn/src/keyword.rs | 0
+ third_party/rust/syn/src/lib.rs | 109 +-
+ third_party/rust/syn/src/lifetime.rs | 13 +-
+ third_party/rust/syn/src/lit.rs | 581 ++--
+ third_party/rust/syn/src/mac.rs | 55 +-
+ third_party/rust/syn/src/macros.rs | 61 +-
+ third_party/rust/syn/src/op.rs | 6 +-
+ third_party/rust/syn/src/parse.rs | 211 +-
+ third_party/rust/syn/src/parse_macro_input.rs | 32 +-
+ third_party/rust/syn/src/parse_quote.rs | 15 +-
+ third_party/rust/syn/src/pat.rs | 313 +-
+ third_party/rust/syn/src/path.rs | 33 +-
+ third_party/rust/syn/src/punctuated.rs | 123 +-
+ third_party/rust/syn/src/reserved.rs | 42 +
+ third_party/rust/syn/src/spanned.rs | 4 +-
+ third_party/rust/syn/src/stmt.rs | 141 +-
+ third_party/rust/syn/src/token.rs | 99 +-
+ third_party/rust/syn/src/tt.rs | 6 +-
+ third_party/rust/syn/src/ty.rs | 364 ++-
+ third_party/rust/syn/src/verbatim.rs | 15 +
+ third_party/rust/syn/src/whitespace.rs | 65 +
+ third_party/rust/syn/tests/clone.sh | 16 -
+ third_party/rust/syn/tests/common/eq.rs | 247 +-
+ third_party/rust/syn/tests/common/mod.rs | 13 +
+ third_party/rust/syn/tests/common/parse.rs | 24 +-
+ third_party/rust/syn/tests/debug/gen.rs | 50 +-
+ third_party/rust/syn/tests/debug/mod.rs | 17 +-
+ third_party/rust/syn/tests/features/error.rs | 1 -
+ third_party/rust/syn/tests/features/mod.rs | 22 -
+ third_party/rust/syn/tests/macros/mod.rs | 8 +-
+ third_party/rust/syn/tests/repo/mod.rs | 137 +-
+ third_party/rust/syn/tests/repo/progress.rs | 37 +
+ third_party/rust/syn/tests/test_asyncness.rs | 38 +-
+ third_party/rust/syn/tests/test_attribute.rs | 452 +--
+ .../rust/syn/tests/test_derive_input.rs | 1321 ++++----
+ third_party/rust/syn/tests/test_expr.rs | 314 +-
+ third_party/rust/syn/tests/test_generics.rs | 371 ++-
+ third_party/rust/syn/tests/test_grouping.rs | 53 +-
+ third_party/rust/syn/tests/test_ident.rs | 5 -
+ third_party/rust/syn/tests/test_item.rs | 45 +
+ third_party/rust/syn/tests/test_iterators.rs | 7 +-
+ third_party/rust/syn/tests/test_lit.rs | 75 +-
+ third_party/rust/syn/tests/test_meta.rs | 498 ++-
+ .../rust/syn/tests/test_parse_buffer.rs | 41 +-
+ .../rust/syn/tests/test_parse_stream.rs | 12 +
+ third_party/rust/syn/tests/test_pat.rs | 27 +-
+ third_party/rust/syn/tests/test_path.rs | 52 +
+ third_party/rust/syn/tests/test_precedence.rs | 196 +-
+ third_party/rust/syn/tests/test_receiver.rs | 127 +
+ third_party/rust/syn/tests/test_round_trip.rs | 41 +-
+ third_party/rust/syn/tests/test_shebang.rs | 59 +
+ .../rust/syn/tests/test_should_parse.rs | 4 -
+ third_party/rust/syn/tests/test_size.rs | 2 -
+ third_party/rust/syn/tests/test_stmt.rs | 44 +
+ .../rust/syn/tests/test_token_trees.rs | 12 +-
+ third_party/rust/syn/tests/test_ty.rs | 53 +
+ third_party/rust/syn/tests/test_visibility.rs | 145 +
+ third_party/rust/syn/tests/zzz_stable.rs | 4 +-
+ 103 files changed, 17319 insertions(+), 5831 deletions(-)
+ create mode 100644 third_party/rust/proc-macro2/src/detection.rs
+ create mode 100644 third_party/rust/proc-macro2/src/marker.rs
+ create mode 100644 third_party/rust/proc-macro2/src/parse.rs
+ delete mode 100644 third_party/rust/proc-macro2/src/strnom.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/comments.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/test_fmt.rs
+ create mode 100644 third_party/rust/syn/src/gen/clone.rs
+ create mode 100644 third_party/rust/syn/src/gen/debug.rs
+ create mode 100644 third_party/rust/syn/src/gen/eq.rs
+ create mode 100644 third_party/rust/syn/src/gen/hash.rs
+ delete mode 100644 third_party/rust/syn/src/keyword.rs
+ create mode 100644 third_party/rust/syn/src/reserved.rs
+ create mode 100644 third_party/rust/syn/src/verbatim.rs
+ create mode 100644 third_party/rust/syn/src/whitespace.rs
+ delete mode 100755 third_party/rust/syn/tests/clone.sh
+ delete mode 100644 third_party/rust/syn/tests/features/error.rs
+ delete mode 100644 third_party/rust/syn/tests/features/mod.rs
+ create mode 100644 third_party/rust/syn/tests/repo/progress.rs
+ create mode 100644 third_party/rust/syn/tests/test_item.rs
+ create mode 100644 third_party/rust/syn/tests/test_parse_stream.rs
+ create mode 100644 third_party/rust/syn/tests/test_path.rs
+ create mode 100644 third_party/rust/syn/tests/test_receiver.rs
+ create mode 100644 third_party/rust/syn/tests/test_shebang.rs
+ create mode 100644 third_party/rust/syn/tests/test_stmt.rs
+ create mode 100644 third_party/rust/syn/tests/test_ty.rs
+ create mode 100644 third_party/rust/syn/tests/test_visibility.rs
+
+diff --git a/Cargo.lock b/Cargo.lock
+index 19117e8368..d5fe0f6457 100644
+--- a/Cargo.lock
++++ b/Cargo.lock
+@@ -3717,9 +3717,9 @@ dependencies = [
+
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
++version = "1.0.24"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+ dependencies = [
+ "unicode-xid",
+ ]
+@@ -4647,9 +4647,9 @@ dependencies = [
+
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+ "proc-macro2",
+ "quote",
+diff --git a/third_party/rust/lucet-wasi/.cargo-checksum.json b/third_party/rust/lucet-wasi/.cargo-checksum.json
+index 229fc9978c..2c8c0a3c22 100644
+--- a/third_party/rust/lucet-wasi/.cargo-checksum.json
++++ b/third_party/rust/lucet-wasi/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/.gitignore":"44575cf5b28512d75644bf54a517dcef304ff809fd511747621b4d64f19aac66","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429e
c799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429ec799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d
0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/packed_simd/.cargo-checksum.json b/third_party/rust/packed_simd/.cargo-checksum.json
+index 01afcc1efd..c727a10006 100644
+--- a/third_party/rust/packed_simd/.cargo-checksum.json
++++ b/third_party/rust/packed_simd/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/.gitignore":"fe82c7da551079d832cf74200b0b359b4df9828cb4a0416fa7384f07a2ae6a13","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e
62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cbfe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b
6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef189314f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/a
pi/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/shuffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e",
"src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
++{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cb
fe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef18931
4f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/api/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/s
huffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e","src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e
9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+index eeef4120af..e7849f2896 100644
+--- a/third_party/rust/proc-macro2/.cargo-checksum.json
++++ b/third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"302d447d62c8d091d6241cf62bdad607c0d4ed8ff9f43d9b254c9d99c253ee8e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"a71283fbc495095eebbbf46753df3fe2c19505c745b508dea157f65796b64dd7","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"b114e013695260f6066395c8712cea112ec2a386010397a80f15a60f8b986444","src/lib.rs":"7f528764a958587f007f0c2a330a6a414bae2c8e73d5ed9fb64ff1b42b1805b1","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"1d2253eacbd40eb3a2a933be2adcee356af922bdb48cc89ff266252a41fd98a1","src/wrapper.rs":"f52646ce1705c1f6265516f30d4c43297b5f529dd31fb91f4c806be89d5a4122","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"652db9f25c69ffc65baa60cdca8f195aa2e254d4de0a9ddc85de4dc2470544b6","tests/test.rs":"5f30a704eeb2b9198b57f416d622da72d25cb9bf8d8b12e6d0e90aa2cb0e43fc","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+index 95d653633d..22150c516a 100644
+--- a/third_party/rust/proc-macro2/Cargo.toml
++++ b/third_party/rust/proc-macro2/Cargo.toml
+@@ -13,21 +13,22 @@
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.24"
++authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+@@ -39,5 +40,3 @@ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+index 19b0c3b5f8..3d05e871a7 100644
+--- a/third_party/rust/proc-macro2/README.md
++++ b/third_party/rust/proc-macro2/README.md
+@@ -1,6 +1,6 @@
+ # proc-macro2
+
+-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
++[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
+ [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
+ [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
+
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+index deb9b92719..b247d874f6 100644
+--- a/third_party/rust/proc-macro2/build.rs
++++ b/third_party/rust/proc-macro2/build.rs
+@@ -14,6 +14,10 @@
+ // procmacro2_semver_exempt surface area is implemented by using the
+ // nightly-only proc_macro API.
+ //
++// "hygiene"
++// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++// and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ // Enable non-dummy behavior of Span::start and Span::end methods which
+ // requires an unstable compiler feature. Enabled when building with
+@@ -57,6 +61,22 @@ fn main() {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
++ if version.minor < 32 {
++ println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
++ }
++
++ if version.minor < 39 {
++ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++ }
++
++ if version.minor >= 44 {
++ println!("cargo:rustc-cfg=lexerror_display");
++ }
++
++ if version.minor >= 45 {
++ println!("cargo:rustc-cfg=hygiene");
++ }
++
+ let target = env::var("TARGET").unwrap();
+ if !enable_use_proc_macro(&target) {
+ return;
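The build.rs hunk above gates optional behavior on the detected rustc version by emitting cargo:rustc-cfg flags, for example "hygiene" on Rust 1.45+ so that Span::mixed_site() becomes available. As an aside, not part of the patch, here is a minimal sketch of that mechanism for a hypothetical my_feature flag in some crate's own build.rs:

// build.rs -- minimal sketch of version-gated cfg emission (the
// `my_feature` flag is hypothetical, not part of the proc-macro2 patch).
use std::process::Command;

fn rustc_minor_version() -> Option<u32> {
    // Ask the compiler named by $RUSTC (fallback "rustc") for its version
    // string, e.g. "rustc 1.47.0 (18bf6b4f0 2020-10-07)", and parse the
    // minor number out of it.
    let rustc = std::env::var("RUSTC").unwrap_or_else(|_| "rustc".to_string());
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = String::from_utf8(output.stdout).ok()?;
    version.split('.').nth(1)?.parse().ok()
}

fn main() {
    if let Some(minor) = rustc_minor_version() {
        if minor >= 45 {
            // Downstream code can now be guarded with `#[cfg(my_feature)]`,
            // the same way the patch guards Span::mixed_site() with
            // `#[cfg(hygiene)]`.
            println!("cargo:rustc-cfg=my_feature");
        }
    }
}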
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+index 0000000000..c597bc99c6
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++ match WORKS.load(Ordering::SeqCst) {
++ 1 => return false,
++ 2 => return true,
++ _ => {}
++ }
++
++ INIT.call_once(initialize);
++ inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++ WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++ initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++// thread 1 calls take_hook, gets the user's original hook
++// thread 1 calls set_hook with the null hook
++// thread 2 calls take_hook, thinks null hook is the original hook
++// thread 2 calls set_hook with the null hook
++// thread 1 calls set_hook with the actual original hook
++// thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++ let sanity_check = &*null_hook as *const PanicHook;
++ let original_hook = panic::take_hook();
++ panic::set_hook(null_hook);
++
++ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++ WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++ let hopefully_null_hook = panic::take_hook();
++ panic::set_hook(original_hook);
++ if sanity_check != &*hopefully_null_hook {
++ panic!("observed race condition in proc_macro2::inside_proc_macro");
++ }
++}
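The new detection.rs above probes whether the compiler's proc_macro API is usable by swapping in a silent panic hook, calling into the API under catch_unwind, and caching the answer behind a Once. The sketch below is an illustration of that probe-and-cache pattern applied to an arbitrary closure using only the standard library; probe_once is a hypothetical name, not proc-macro2 API:

// Probe-and-cache sketch in the style of detection.rs (illustration only).
use std::panic;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Once;

static WORKS: AtomicUsize = AtomicUsize::new(0); // 0 = unknown, 1 = broken, 2 = ok
static INIT: Once = Once::new();

fn probe_once(probe: fn()) -> bool {
    INIT.call_once(|| {
        // Silence the default "thread panicked" output while probing.
        let original_hook = panic::take_hook();
        panic::set_hook(Box::new(|_| {}));
        let ok = panic::catch_unwind(probe).is_ok();
        panic::set_hook(original_hook);
        WORKS.store(if ok { 2 } else { 1 }, Ordering::SeqCst);
    });
    WORKS.load(Ordering::SeqCst) == 2
}

fn main() {
    // The first call runs the probe; later calls return the cached answer.
    println!("probe ok: {}", probe_once(|| assert!(1 + 1 == 2)));
    println!("cached:   {}", probe_once(|| panic!("never runs")));
}

The caveat spelled out in the original comment carries over: the hook swap is global, so a panic on another thread during the probe window can slip past its own hook, which is why the patch suggests touching proc_macro2::Span::call_site() from the main thread before spawning other threads.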
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+index fe582b3b5f..8900c5ff0f 100644
+--- a/third_party/rust/proc-macro2/src/fallback.rs
++++ b/third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,27 +1,41 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+- inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++ pub(crate) inner: Vec<TokenTree>,
+ }
+
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+@@ -31,6 +45,72 @@ impl TokenStream {
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
++
++ fn take_inner(&mut self) -> Vec<TokenTree> {
++ mem::replace(&mut self.inner, Vec::new())
++ }
++
++ fn push_token(&mut self, token: TokenTree) {
++ // https://github.com/alexcrichton/proc-macro2/issues/235
++ match token {
++ #[cfg(not(no_bind_by_move_pattern_guard))]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) if literal.text.starts_with('-') => {
++ push_negative_literal(self, literal);
++ }
++ #[cfg(no_bind_by_move_pattern_guard)]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) => {
++ if literal.text.starts_with('-') {
++ push_negative_literal(self, literal);
++ } else {
++ self.inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++ _ => self.inner.push(token),
++ }
++
++ #[cold]
++ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++ literal.text.remove(0);
++ let mut punct = crate::Punct::new('-', Spacing::Alone);
++ punct.set_span(crate::Span::_new_stable(literal.span));
++ stream.inner.push(TokenTree::Punct(punct));
++ stream
++ .inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++ fn drop(&mut self) {
++ while let Some(token) = self.inner.pop() {
++ let group = match token {
++ TokenTree::Group(group) => group.inner,
++ _ => continue,
++ };
++ #[cfg(wrap_proc_macro)]
++ let group = match group {
++ crate::imp::Group::Fallback(group) => group,
++ _ => continue,
++ };
++ let mut group = group;
++ self.inner.extend(group.stream.take_inner());
++ }
++ }
+ }
+
+ #[cfg(span_locations)]
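Two behavioral details in the fallback TokenStream hunk above are worth noting: push_token splits a literal whose text starts with '-' into a Punct('-') followed by the positive literal (see the issue link in the hunk), and Drop is written iteratively so that dropping a deeply nested stream of groups cannot overflow the stack. The standalone sketch below shows the iterative-drop idea in isolation; Node and Stream are hypothetical stand-ins, not the patched types:

// Iterative Drop sketch (illustration only).
enum Node {
    Leaf(u64),
    Group(Vec<Node>),
}

struct Stream {
    inner: Vec<Node>,
}

impl Drop for Stream {
    fn drop(&mut self) {
        // Flatten children onto the work list instead of letting the
        // compiler-generated recursive drop walk the full nesting depth.
        while let Some(node) = self.inner.pop() {
            if let Node::Group(children) = node {
                self.inner.extend(children);
            }
        }
    }
}

fn main() {
    // Build a stream nested one million groups deep; a naive recursive
    // drop would overflow the stack here.
    let mut node = Node::Leaf(0);
    for _ in 0..1_000_000 {
        node = Node::Group(vec![node]);
    }
    drop(Stream { inner: vec![node] });
    println!("dropped without recursion");
}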
+@@ -59,20 +139,22 @@ impl FromStr for TokenStream {
+ // Create a dummy file & add it to the source map
+ let cursor = get_cursor(src);
+
+- match token_stream(cursor) {
+- Ok((input, output)) => {
+- if skip_whitespace(input).len() != 0 {
+- Err(LexError)
+- } else {
+- Ok(output)
+- }
+- }
+- Err(LexError) => Err(LexError),
++ let (rest, tokens) = token_stream(cursor)?;
++ if rest.is_empty() {
++ Ok(tokens)
++ } else {
++ Err(LexError)
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++impl Display for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ f.write_str("cannot parse string into token stream")
++ }
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+@@ -80,37 +162,22 @@ impl fmt::Display for TokenStream {
+ write!(f, " ")?;
+ }
+ joint = false;
+- match *tt {
+- TokenTree::Group(ref tt) => {
+- let (start, end) = match tt.delimiter() {
+- Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
+- Delimiter::Bracket => ("[", "]"),
+- Delimiter::None => ("", ""),
+- };
+- if tt.stream().into_iter().next().is_none() {
+- write!(f, "{} {}", start, end)?
+- } else {
+- write!(f, "{} {} {}", start, tt.stream(), end)?
+- }
+- }
+- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+- TokenTree::Punct(ref tt) => {
+- write!(f, "{}", tt.as_char())?;
+- match tt.spacing() {
+- Spacing::Alone => {}
+- Spacing::Joint => joint = true,
+- }
++ match tt {
++ TokenTree::Group(tt) => Display::fmt(tt, f),
++ TokenTree::Ident(tt) => Display::fmt(tt, f),
++ TokenTree::Punct(tt) => {
++ joint = tt.spacing() == Spacing::Joint;
++ Display::fmt(tt, f)
+ }
+- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+- }
++ TokenTree::Literal(tt) => Display::fmt(tt, f),
++ }?
+ }
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+@@ -139,28 +206,26 @@ impl From<TokenStream> for proc_macro::TokenStream {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+- TokenStream { inner: vec![tree] }
++ let mut stream = TokenStream::new();
++ stream.push_token(tree);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
+- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+- let mut v = Vec::new();
+-
+- for token in streams.into_iter() {
+- v.push(token);
+- }
+-
+- TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++ let mut stream = TokenStream::new();
++ stream.extend(tokens);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+- for stream in streams.into_iter() {
+- v.extend(stream.inner);
++ for mut stream in streams {
++ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v }
+@@ -168,31 +233,30 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+- self.inner.extend(streams);
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++ tokens.into_iter().for_each(|token| self.push_token(token));
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+- self.inner
+- .extend(streams.into_iter().flat_map(|stream| stream));
++ self.inner.extend(streams.into_iter().flatten());
+ }
+ }
+
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+- fn into_iter(self) -> TokenTreeIter {
+- self.inner.into_iter()
++ fn into_iter(mut self) -> TokenTreeIter {
++ self.take_inner().into_iter()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+ path: PathBuf,
+ }
+
+@@ -208,7 +272,7 @@ impl SourceFile {
+ }
+ }
+
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+@@ -218,7 +282,7 @@ impl fmt::Debug for SourceFile {
+ }
+
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+@@ -228,23 +292,11 @@ thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+- files: vec![{
++ files: vec![FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+- {
+- FileInfo {
+- name: "<unspecified>".to_owned(),
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
+-
+- #[cfg(not(procmacro2_semver_exempt))]
+- {
+- FileInfo {
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
++ name: "<unspecified>".to_owned(),
++ span: Span { lo: 0, hi: 0 },
++ lines: vec![0],
+ }],
+ });
+ }
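The hunk above keeps the SOURCE_MAP as a thread-local registry whose index 0 is a dummy file that call_site() and def_site() spans point at. As an aside, not part of the patch, a minimal sketch of that thread-local registry pattern with a simplified FileInfo stand-in:

// Thread-local registry sketch (illustration only; this FileInfo is a
// simplified stand-in for the one in fallback.rs).
use std::cell::RefCell;

#[derive(Debug)]
struct FileInfo {
    name: String,
    span: (u32, u32), // lo, hi
}

thread_local! {
    static SOURCE_MAP: RefCell<Vec<FileInfo>> = RefCell::new(vec![FileInfo {
        // Index 0 is a dummy entry that "empty" spans can reference.
        name: "<unspecified>".to_owned(),
        span: (0, 0),
    }]);
}

fn add_file(name: &str, lo: u32, hi: u32) -> usize {
    SOURCE_MAP.with(|map| {
        let mut map = map.borrow_mut();
        map.push(FileInfo { name: name.to_owned(), span: (lo, hi) });
        map.len() - 1
    })
}

fn main() {
    let idx = add_file("lib.rs", 0, 120);
    SOURCE_MAP.with(|map| println!("{:?}", map.borrow()[idx]));
}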
+@@ -282,16 +334,21 @@ impl FileInfo {
+ }
+ }
+
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+- let mut prev = 0;
+- while let Some(len) = s[prev..].find('\n') {
+- prev += len + 1;
+- lines.push(prev);
++ let mut total = 0;
++
++ for ch in s.chars() {
++ total += 1;
++ if ch == '\n' {
++ lines.push(total);
++ }
+ }
+- lines
++
++ (total, lines)
+ }
+
+ #[cfg(span_locations)]
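The lines_offsets change above makes the function walk the source by characters and return both the per-line start offsets and the total character count, which add_file then uses for the span's hi bound instead of the byte length. As an illustration, not part of the patch, the same bookkeeping plus a simple offset-to-position lookup (line_column is a hypothetical helper, not proc-macro2 API) might look like:

// Character-based line offsets and a line/column lookup (illustration only).
fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
    let mut lines = vec![0];
    let mut total = 0;
    for ch in s.chars() {
        total += 1;
        if ch == '\n' {
            lines.push(total);
        }
    }
    (total, lines)
}

fn line_column(lines: &[usize], offset: usize) -> (usize, usize) {
    // 1-based line and column of the character at `offset`.
    match lines.binary_search(&offset) {
        Ok(i) => (i + 1, 1),
        Err(i) => (i, offset - lines[i - 1] + 1),
    }
}

fn main() {
    let (total, lines) = lines_offsets("let x = 1;\nlet y = 2;\n");
    assert_eq!(total, 22);
    assert_eq!(lines, vec![0, 11, 22]);
    assert_eq!(line_column(&lines, 15), (2, 5)); // the 'y' on line 2
}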
+@@ -310,23 +367,22 @@ impl SourceMap {
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+- let lines = lines_offsets(src);
++ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo,
+- hi: lo + (src.len() as u32),
++ hi: lo + (len as u32),
+ };
+
+- #[cfg(procmacro2_semver_exempt)]
+ self.files.push(FileInfo {
++ #[cfg(procmacro2_semver_exempt)]
+ name: name.to_owned(),
+ span,
+ lines,
+ });
+
+ #[cfg(not(procmacro2_semver_exempt))]
+- self.files.push(FileInfo { span, lines });
+ let _ = name;
+
+ span
+@@ -343,11 +399,11 @@ impl SourceMap {
+ }
+
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+ #[cfg(span_locations)]
+- lo: u32,
++ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+- hi: u32,
++ pub(crate) hi: u32,
+ }
+
+ impl Span {
+@@ -361,12 +417,16 @@ impl Span {
+ Span { lo: 0, hi: 0 }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::call_site()
++ }
++
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+@@ -374,7 +434,6 @@ impl Span {
+ *self
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+@@ -427,26 +486,59 @@ impl Span {
+ })
+ })
+ }
++
++ #[cfg(not(span_locations))]
++ fn first_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn first_byte(self) -> Self {
++ Span {
++ lo: self.lo,
++ hi: cmp::min(self.lo.saturating_add(1), self.hi),
++ }
++ }
++
++ #[cfg(not(span_locations))]
++ fn last_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn last_byte(self) -> Self {
++ Span {
++ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++ hi: self.hi,
++ }
++ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+- if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++ #[cfg(span_locations)]
++ {
++ if span.lo == 0 && span.hi == 0 {
++ return;
++ }
++ }
++
++ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+@@ -474,11 +566,11 @@ impl Group {
+ }
+
+ pub fn span_open(&self) -> Span {
+- self.span
++ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+- self.span
++ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+@@ -486,36 +578,45 @@ impl Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
++ // We attempt to match libproc_macro's formatting.
++ // Empty parens: ()
++ // Nonempty parens: (...)
++ // Empty brackets: []
++ // Nonempty brackets: [...]
++ // Empty braces: { }
++ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- let (left, right) = match self.delimiter {
++ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
++ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+- f.write_str(left)?;
+- self.stream.fmt(f)?;
+- f.write_str(right)?;
++ f.write_str(open)?;
++ Display::fmt(&self.stream, f)?;
++ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++ f.write_str(" ")?;
++ }
++ f.write_str(close)?;
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+ sym: String,
+ span: Span,
+ raw: bool,
+@@ -549,16 +650,14 @@ impl Ident {
+ }
+ }
+
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+@@ -615,18 +714,18 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+- "r#".fmt(f)?;
++ f.write_str("r#")?;
+ }
+- self.sym.fmt(f)
++ Display::fmt(&self.sym, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+@@ -637,17 +736,17 @@ impl fmt::Debug for Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+ text: String,
+ span: Span,
+ }
+@@ -669,7 +768,7 @@ macro_rules! unsuffixed_numbers {
+ }
+
+ impl Literal {
+- fn _new(text: String) -> Literal {
++ pub(crate) fn _new(text: String) -> Literal {
+ Literal {
+ text,
+ span: Span::call_site(),
+@@ -711,7 +810,7 @@ impl Literal {
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -719,7 +818,7 @@ impl Literal {
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -730,10 +829,10 @@ impl Literal {
+ text.push('"');
+ for c in t.chars() {
+ if c == '\'' {
+- // escape_default turns this into "\'" which is unnecessary.
++ // escape_debug turns this into "\'" which is unnecessary.
+ text.push(c);
+ } else {
+- text.extend(c.escape_default());
++ text.extend(c.escape_debug());
+ }
+ }
+ text.push('"');
+@@ -744,10 +843,10 @@ impl Literal {
+ let mut text = String::new();
+ text.push('\'');
+ if t == '"' {
+- // escape_default turns this into '\"' which is unnecessary.
++ // escape_debug turns this into '\"' which is unnecessary.
+ text.push(t);
+ } else {
+- text.extend(t.escape_default());
++ text.extend(t.escape_debug());
+ }
+ text.push('\'');
+ Literal::_new(text)
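The two hunks above switch Literal::string and Literal::character from char::escape_default to char::escape_debug. The practical difference, shown here as an editorial example rather than patch content, is that escape_debug leaves printable non-ASCII characters alone while escape_default rewrites them as \u{...} escapes, so literal text containing such characters stays readable:

// Illustration (not part of the patch): escape_default vs escape_debug.
fn main() {
    let ch = 'é';
    let default: String = ch.escape_default().collect();
    let debug: String = ch.escape_debug().collect();
    assert!(default.starts_with("\\u{")); // hex-escaped by escape_default
    assert_eq!(debug, "é");               // kept printable by escape_debug
    println!("escape_default: {}\nescape_debug:   {}", default, debug);
}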
+@@ -756,6 +855,7 @@ impl Literal {
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
++ #[allow(clippy::match_overlapping_arm)]
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+@@ -784,651 +884,17 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.text.fmt(f)
++ Display::fmt(&self.text, f)
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Literal");
+ debug.field("lit", &format_args!("{}", self.text));
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+- let mut trees = Vec::new();
+- loop {
+- let input_no_ws = skip_whitespace(input);
+- if input_no_ws.rest.len() == 0 {
+- break;
+- }
+- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+- input = a;
+- trees.extend(tokens);
+- continue;
+- }
+-
+- let (a, tt) = match token_tree(input_no_ws) {
+- Ok(p) => p,
+- Err(_) => break,
+- };
+- trees.push(tt);
+- input = a;
+- }
+- Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let (a, b) = f(skip_whitespace(input))?;
+- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let input = skip_whitespace(input);
+- let lo = input.off;
+- let (a, b) = f(input)?;
+- let hi = a.off;
+- let span = crate::Span::_new_stable(Span { lo, hi });
+- Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+- tt.set_span(span);
+- Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+- |
+- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+- |
+- map!(op, TokenTree::Punct)
+- |
+- symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+- delimited!(
+- punct!("("),
+- token_stream,
+- punct!(")")
+- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+- |
+- delimited!(
+- punct!("["),
+- token_stream,
+- punct!("]")
+- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+- |
+- delimited!(
+- punct!("{"),
+- token_stream,
+- punct!("}")
+- ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+- symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+- let raw = input.starts_with("r#");
+- let rest = input.advance((raw as usize) << 1);
+-
+- let (rest, sym) = symbol_not_raw(rest)?;
+-
+- if !raw {
+- let ident = crate::Ident::new(sym, crate::Span::call_site());
+- return Ok((rest, ident.into()));
+- }
+-
+- if sym == "_" {
+- return Err(LexError);
+- }
+-
+- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+- Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+- let mut chars = input.char_indices();
+-
+- match chars.next() {
+- Some((_, ch)) if is_ident_start(ch) => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut end = input.len();
+- for (i, ch) in chars {
+- if !is_ident_continue(ch) {
+- end = i;
+- break;
+- }
+- }
+-
+- Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+- let input_no_ws = skip_whitespace(input);
+-
+- match literal_nocapture(input_no_ws) {
+- Ok((a, ())) => {
+- let start = input.len() - input_no_ws.len();
+- let len = input_no_ws.len() - a.len();
+- let end = start + len;
+- Ok((a, Literal::_new(input.rest[start..end].to_string())))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-named!(literal_nocapture -> (), alt!(
+- string
+- |
+- byte_string
+- |
+- byte
+- |
+- character
+- |
+- float
+- |
+- int
+-));
+-
+-named!(string -> (), alt!(
+- quoted_string
+- |
+- preceded!(
+- punct!("r"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+- punct!("\"") >>
+- cooked_string >>
+- tag!("\"") >>
+- option!(symbol_not_raw) >>
+- (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices().peekable();
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- return Ok((input.advance(byte_offset), ()));
+- }
+- '\r' => {
+- if let Some((_, '\n')) = chars.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- '\\' => match chars.next() {
+- Some((_, 'x')) => {
+- if !backslash_x_char(&mut chars) {
+- break;
+- }
+- }
+- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+- Some((_, 'u')) => {
+- if !backslash_u(&mut chars) {
+- break;
+- }
+- }
+- Some((_, '\n')) | Some((_, '\r')) => {
+- while let Some(&(_, ch)) = chars.peek() {
+- if ch.is_whitespace() {
+- chars.next();
+- } else {
+- break;
+- }
+- }
+- }
+- _ => break,
+- },
+- _ch => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+- delimited!(
+- punct!("b\""),
+- cooked_byte_string,
+- tag!("\"")
+- ) => { |_| () }
+- |
+- preceded!(
+- punct!("br"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- 'outer: while let Some((offset, b)) = bytes.next() {
+- match b {
+- b'"' => {
+- return Ok((input.advance(offset), ()));
+- }
+- b'\r' => {
+- if let Some((_, b'\n')) = bytes.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- b'\\' => match bytes.next() {
+- Some((_, b'x')) => {
+- if !backslash_x_byte(&mut bytes) {
+- break;
+- }
+- }
+- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+- Some((newline, b'\n')) | Some((newline, b'\r')) => {
+- let rest = input.advance(newline + 1);
+- for (offset, ch) in rest.char_indices() {
+- if !ch.is_whitespace() {
+- input = rest.advance(offset);
+- bytes = input.bytes().enumerate();
+- continue 'outer;
+- }
+- }
+- break;
+- }
+- _ => break,
+- },
+- b if b < 0x80 => {}
+- _ => break,
+- }
+- }
+- Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let mut n = 0;
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- n = byte_offset;
+- break;
+- }
+- '#' => {}
+- _ => return Err(LexError),
+- }
+- }
+- for (byte_offset, ch) in chars {
+- match ch {
+- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+- let rest = input.advance(byte_offset + 1 + n);
+- return Ok((rest, ()));
+- }
+- '\r' => {}
+- _ => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+- punct!("b") >>
+- tag!("'") >>
+- cooked_byte >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- let ok = match bytes.next().map(|(_, b)| b) {
+- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+- Some(b'x') => backslash_x_byte(&mut bytes),
+- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+- | Some(b'"') => true,
+- _ => false,
+- },
+- b => b.is_some(),
+- };
+- if ok {
+- match bytes.next() {
+- Some((offset, _)) => {
+- if input.chars().as_str().is_char_boundary(offset) {
+- Ok((input.advance(offset), ()))
+- } else {
+- Err(LexError)
+- }
+- }
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-named!(character -> (), do_parse!(
+- punct!("'") >>
+- cooked_char >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let ok = match chars.next().map(|(_, ch)| ch) {
+- Some('\\') => match chars.next().map(|(_, ch)| ch) {
+- Some('x') => backslash_x_char(&mut chars),
+- Some('u') => backslash_u(&mut chars),
+- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+- true
+- }
+- _ => false,
+- },
+- ch => ch.is_some(),
+- };
+- if ok {
+- match chars.next() {
+- Some((idx, _)) => Ok((input.advance(idx), ())),
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! next_ch {
+- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+- match $chars.next() {
+- Some((_, ch)) => match ch {
+- $pat $(| $rest)* => ch,
+- _ => return false,
+- },
+- None => return false
+- }
+- };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '0'..='7');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, u8)>,
+-{
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '{');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- loop {
+- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+- if c == '}' {
+- return true;
+- }
+- }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = float_digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+- let mut chars = input.chars().peekable();
+- match chars.next() {
+- Some(ch) if ch >= '0' && ch <= '9' => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut len = 1;
+- let mut has_dot = false;
+- let mut has_exp = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '0'..='9' | '_' => {
+- chars.next();
+- len += 1;
+- }
+- '.' => {
+- if has_dot {
+- break;
+- }
+- chars.next();
+- if chars
+- .peek()
+- .map(|&ch| ch == '.' || is_ident_start(ch))
+- .unwrap_or(false)
+- {
+- return Err(LexError);
+- }
+- len += 1;
+- has_dot = true;
+- }
+- 'e' | 'E' => {
+- chars.next();
+- len += 1;
+- has_exp = true;
+- break;
+- }
+- _ => break,
+- }
+- }
+-
+- let rest = input.advance(len);
+- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+- return Err(LexError);
+- }
+-
+- if has_exp {
+- let mut has_exp_value = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '+' | '-' => {
+- if has_exp_value {
+- break;
+- }
+- chars.next();
+- len += 1;
+- }
+- '0'..='9' => {
+- chars.next();
+- len += 1;
+- has_exp_value = true;
+- }
+- '_' => {
+- chars.next();
+- len += 1;
+- }
+- _ => break,
+- }
+- }
+- if !has_exp_value {
+- return Err(LexError);
+- }
+- }
+-
+- Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+- let base = if input.starts_with("0x") {
+- input = input.advance(2);
+- 16
+- } else if input.starts_with("0o") {
+- input = input.advance(2);
+- 8
+- } else if input.starts_with("0b") {
+- input = input.advance(2);
+- 2
+- } else {
+- 10
+- };
+-
+- let mut len = 0;
+- let mut empty = true;
+- for b in input.bytes() {
+- let digit = match b {
+- b'0'..=b'9' => (b - b'0') as u64,
+- b'a'..=b'f' => 10 + (b - b'a') as u64,
+- b'A'..=b'F' => 10 + (b - b'A') as u64,
+- b'_' => {
+- if empty && base == 10 {
+- return Err(LexError);
+- }
+- len += 1;
+- continue;
+- }
+- _ => break,
+- };
+- if digit >= base {
+- return Err(LexError);
+- }
+- len += 1;
+- empty = false;
+- }
+- if empty {
+- Err(LexError)
+- } else {
+- Ok((input.advance(len), ()))
+- }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+- let input = skip_whitespace(input);
+- match op_char(input) {
+- Ok((rest, '\'')) => {
+- symbol(rest)?;
+- Ok((rest, Punct::new('\'', Spacing::Joint)))
+- }
+- Ok((rest, ch)) => {
+- let kind = match op_char(rest) {
+- Ok(_) => Spacing::Joint,
+- Err(LexError) => Spacing::Alone,
+- };
+- Ok((rest, Punct::new(ch, kind)))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+- if input.starts_with("//") || input.starts_with("/*") {
+- // Do not accept `/` of a comment as an op.
+- return Err(LexError);
+- }
+-
+- let mut chars = input.chars();
+- let first = match chars.next() {
+- Some(ch) => ch,
+- None => {
+- return Err(LexError);
+- }
+- };
+- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+- if recognized.contains(first) {
+- Ok((input.advance(first.len_utf8()), first))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+- let mut trees = Vec::new();
+- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+- if inner {
+- trees.push(Punct::new('!', Spacing::Alone).into());
+- }
+- let mut stream = vec![
+- TokenTree::Ident(crate::Ident::new("doc", span)),
+- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+- TokenTree::Literal(crate::Literal::string(comment)),
+- ];
+- for tt in stream.iter_mut() {
+- tt.set_span(span);
+- }
+- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+- trees.push(crate::Group::_new_stable(group).into());
+- for tt in trees.iter_mut() {
+- tt.set_span(span);
+- }
+- Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+- do_parse!(
+- punct!("//!") >>
+- s: take_until_newline_or_eof!() >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tag!("/*!")) >>
+- s: block_comment >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- punct!("///") >>
+- not!(tag!("/")) >>
+- s: take_until_newline_or_eof!() >>
+- ((s, false))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+- s: block_comment >>
+- ((s, false))
+- )
+-));
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+index a08be3e815..c20fb50d4a 100644
+--- a/third_party/rust/proc-macro2/src/lib.rs
++++ b/third_party/rust/proc-macro2/src/lib.rs
+@@ -78,27 +78,24 @@
+ //! a different thread.
+
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.24")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+
+-use std::cmp::Ordering;
+-use std::fmt;
+-use std::hash::{Hash, Hasher};
+-use std::iter::FromIterator;
+-use std::marker;
+-use std::ops::RangeBounds;
+-#[cfg(procmacro2_semver_exempt)]
+-use std::path::PathBuf;
+-use std::rc::Rc;
+-use std::str::FromStr;
++mod marker;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
+
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+@@ -106,6 +103,17 @@ use crate::fallback as imp;
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+
++use crate::marker::Marker;
++use std::cmp::Ordering;
++use std::error::Error;
++use std::fmt::{self, Debug, Display};
++use std::hash::{Hash, Hasher};
++use std::iter::FromIterator;
++use std::ops::RangeBounds;
++#[cfg(procmacro2_semver_exempt)]
++use std::path::PathBuf;
++use std::str::FromStr;
++
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+ ///
+ /// This type provides interfaces for iterating over token trees and for
+@@ -116,27 +124,27 @@ mod imp;
+ #[derive(Clone)]
+ pub struct TokenStream {
+ inner: imp::TokenStream,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ /// Error returned from `TokenStream::from_str`.
+ pub struct LexError {
+ inner: imp::LexError,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl TokenStream {
+ fn _new(inner: imp::TokenStream) -> TokenStream {
+ TokenStream {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
+ TokenStream {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -173,7 +181,7 @@ impl FromStr for TokenStream {
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let e = src.parse().map_err(|e| LexError {
+ inner: e,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ })?;
+ Ok(TokenStream::_new(e))
+ }
+@@ -228,25 +236,33 @@ impl FromIterator<TokenStream> for TokenStream {
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ Debug::fmt(&self.inner, f)
++ }
++}
++
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
++impl Error for LexError {}
++
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+@@ -254,7 +270,7 @@ impl fmt::Debug for LexError {
+ #[derive(Clone, PartialEq, Eq)]
+ pub struct SourceFile {
+ inner: imp::SourceFile,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+@@ -262,7 +278,7 @@ impl SourceFile {
+ fn _new(inner: imp::SourceFile) -> Self {
+ SourceFile {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -291,9 +307,9 @@ impl SourceFile {
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -311,25 +327,41 @@ pub struct LineColumn {
+ pub column: usize,
+ }
+
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++ fn cmp(&self, other: &Self) -> Ordering {
++ self.line
++ .cmp(&other.line)
++ .then(self.column.cmp(&other.column))
++ }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ Some(self.cmp(other))
++ }
++}
++
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+ inner: imp::Span,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Span {
+ fn _new(inner: imp::Span) -> Span {
+ Span {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Span) -> Span {
+ Span {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -342,6 +374,16 @@ impl Span {
+ Span::_new(imp::Span::call_site())
+ }
+
++ /// The span located at the invocation of the procedural macro, but with
++ /// local variables, labels, and `$crate` resolved at the definition site
++ /// of the macro. This is the same hygiene behavior as `macro_rules`.
++ ///
++ /// This function requires Rust 1.45 or later.
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::_new(imp::Span::mixed_site())
++ }
++
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+@@ -352,18 +394,12 @@ impl Span {
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+@@ -439,9 +475,9 @@ impl Span {
+ }
+
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -462,11 +498,11 @@ impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+- match *self {
+- TokenTree::Group(ref t) => t.span(),
+- TokenTree::Ident(ref t) => t.span(),
+- TokenTree::Punct(ref t) => t.span(),
+- TokenTree::Literal(ref t) => t.span(),
++ match self {
++ TokenTree::Group(t) => t.span(),
++ TokenTree::Ident(t) => t.span(),
++ TokenTree::Punct(t) => t.span(),
++ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+@@ -476,11 +512,11 @@ impl TokenTree {
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+- match *self {
+- TokenTree::Group(ref mut t) => t.set_span(span),
+- TokenTree::Ident(ref mut t) => t.set_span(span),
+- TokenTree::Punct(ref mut t) => t.set_span(span),
+- TokenTree::Literal(ref mut t) => t.set_span(span),
++ match self {
++ TokenTree::Group(t) => t.set_span(span),
++ TokenTree::Ident(t) => t.set_span(span),
++ TokenTree::Punct(t) => t.set_span(span),
++ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+ }
+@@ -513,32 +549,32 @@ impl From<Literal> for TokenTree {
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => t.fmt(f),
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ match self {
++ TokenTree::Group(t) => Display::fmt(t, f),
++ TokenTree::Ident(t) => Display::fmt(t, f),
++ TokenTree::Punct(t) => Display::fmt(t, f),
++ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => {
++ match self {
++ TokenTree::Group(t) => Debug::fmt(t, f),
++ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ TokenTree::Punct(t) => Debug::fmt(t, f),
++ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+@@ -651,15 +687,15 @@ impl Group {
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Display::fmt(&self.inner, formatter)
++ Display::fmt(&self.inner, formatter)
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Debug::fmt(&self.inner, formatter)
++ Debug::fmt(&self.inner, formatter)
+ }
+ }
+
+@@ -669,7 +705,7 @@ impl fmt::Debug for Group {
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+ pub struct Punct {
+- op: char,
++ ch: char,
+ spacing: Spacing,
+ span: Span,
+ }
+@@ -695,9 +731,9 @@ impl Punct {
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+- pub fn new(op: char, spacing: Spacing) -> Punct {
++ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct {
+- op,
++ ch,
+ spacing,
+ span: Span::call_site(),
+ }
+@@ -705,7 +741,7 @@ impl Punct {
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+- self.op
++ self.ch
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether
+@@ -730,16 +766,16 @@ impl Punct {
+
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.op.fmt(f)
++ Display::fmt(&self.ch, f)
+ }
+ }
+
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+- debug.field("op", &self.op);
++ debug.field("char", &self.ch);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+@@ -813,14 +849,14 @@ impl fmt::Debug for Punct {
+ #[derive(Clone)]
+ pub struct Ident {
+ inner: imp::Ident,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Ident {
+ fn _new(inner: imp::Ident) -> Ident {
+ Ident {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -920,15 +956,15 @@ impl Hash for Ident {
+
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -941,7 +977,7 @@ impl fmt::Debug for Ident {
+ #[derive(Clone)]
+ pub struct Literal {
+ inner: imp::Literal,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ macro_rules! suffixed_int_literals {
+@@ -988,14 +1024,14 @@ impl Literal {
+ fn _new(inner: imp::Literal) -> Literal {
+ Literal {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Literal) -> Literal {
+ Literal {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -1140,26 +1176,25 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+- use std::fmt;
+- use std::marker;
+- use std::rc::Rc;
++ use crate::marker::Marker;
++ use crate::{imp, TokenTree};
++ use std::fmt::{self, Debug};
+
+ pub use crate::TokenStream;
+- use crate::{imp, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+@@ -1168,7 +1203,7 @@ pub mod token_stream {
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Iterator for IntoIter {
+@@ -1179,9 +1214,9 @@ pub mod token_stream {
+ }
+ }
+
+- impl fmt::Debug for IntoIter {
++ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -1192,7 +1227,7 @@ pub mod token_stream {
+ fn into_iter(self) -> IntoIter {
+ IntoIter {
+ inner: self.inner.into_iter(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+ }
+diff --git a/third_party/rust/proc-macro2/src/marker.rs b/third_party/rust/proc-macro2/src/marker.rs
+new file mode 100644
+index 0000000000..58729baf4a
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/marker.rs
+@@ -0,0 +1,18 @@
++use std::marker::PhantomData;
++use std::panic::{RefUnwindSafe, UnwindSafe};
++use std::rc::Rc;
++
++// Zero sized marker with the correct set of autotrait impls we want all proc
++// macro types to have.
++pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
++
++pub(crate) use self::value::*;
++
++mod value {
++ pub(crate) use std::marker::PhantomData as Marker;
++}
++
++pub(crate) struct ProcMacroAutoTraits(Rc<()>);
++
++impl UnwindSafe for ProcMacroAutoTraits {}
++impl RefUnwindSafe for ProcMacroAutoTraits {}
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+index 0000000000..365fe0484d
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,849 @@
++use crate::fallback::{
++ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::char;
++use std::str::{Bytes, CharIndices, Chars};
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++ pub rest: &'a str,
++ #[cfg(span_locations)]
++ pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++ fn advance(&self, bytes: usize) -> Cursor<'a> {
++ let (_front, rest) = self.rest.split_at(bytes);
++ Cursor {
++ rest,
++ #[cfg(span_locations)]
++ off: self.off + _front.chars().count() as u32,
++ }
++ }
++
++ fn starts_with(&self, s: &str) -> bool {
++ self.rest.starts_with(s)
++ }
++
++ pub(crate) fn is_empty(&self) -> bool {
++ self.rest.is_empty()
++ }
++
++ fn len(&self) -> usize {
++ self.rest.len()
++ }
++
++ fn as_bytes(&self) -> &'a [u8] {
++ self.rest.as_bytes()
++ }
++
++ fn bytes(&self) -> Bytes<'a> {
++ self.rest.bytes()
++ }
++
++ fn chars(&self) -> Chars<'a> {
++ self.rest.chars()
++ }
++
++ fn char_indices(&self) -> CharIndices<'a> {
++ self.rest.char_indices()
++ }
++
++ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++ if self.starts_with(tag) {
++ Ok(self.advance(tag.len()))
++ } else {
++ Err(LexError)
++ }
++ }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++ let mut s = input;
++
++ while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ let (cursor, _) = take_until_newline_or_eof(s);
++ s = cursor;
++ continue;
++ } else if s.starts_with("/**/") {
++ s = s.advance(4);
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ match block_comment(s) {
++ Ok((rest, _)) => {
++ s = rest;
++ continue;
++ }
++ Err(LexError) => return s,
++ }
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = s.advance(1);
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = s.advance(ch.len_utf8());
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++ if !input.starts_with("/*") {
++ return Err(LexError);
++ }
++
++ let mut depth = 0;
++ let bytes = input.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++
++ Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++ match input.chars().next() {
++ Some(ch) if is_ident_continue(ch) => Err(LexError),
++ Some(_) | None => Ok(input),
++ }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++ let mut trees = Vec::new();
++ let mut stack = Vec::new();
++
++ loop {
++ input = skip_whitespace(input);
++
++ if let Ok((rest, tt)) = doc_comment(input) {
++ trees.extend(tt);
++ input = rest;
++ continue;
++ }
++
++ #[cfg(span_locations)]
++ let lo = input.off;
++
++ let first = match input.bytes().next() {
++ Some(first) => first,
++ None => break,
++ };
++
++ if let Some(open_delimiter) = match first {
++ b'(' => Some(Delimiter::Parenthesis),
++ b'[' => Some(Delimiter::Bracket),
++ b'{' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = (open_delimiter, trees);
++ #[cfg(span_locations)]
++ let frame = (lo, frame);
++ stack.push(frame);
++ trees = Vec::new();
++ } else if let Some(close_delimiter) = match first {
++ b')' => Some(Delimiter::Parenthesis),
++ b']' => Some(Delimiter::Bracket),
++ b'}' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = stack.pop().ok_or(LexError)?;
++ #[cfg(span_locations)]
++ let (lo, frame) = frame;
++ let (open_delimiter, outer) = frame;
++ if open_delimiter != close_delimiter {
++ return Err(LexError);
++ }
++ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++ g.set_span(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: input.off,
++ });
++ trees = outer;
++ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++ } else {
++ let (rest, mut tt) = leaf_token(input)?;
++ tt.set_span(crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ }));
++ trees.push(tt);
++ input = rest;
++ }
++ }
++
++ if stack.is_empty() {
++ Ok((input, TokenStream { inner: trees }))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++ if let Ok((input, l)) = literal(input) {
++ // must be parsed before ident
++ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++ } else if let Ok((input, p)) = punct(input) {
++ Ok((input, TokenTree::Punct(p)))
++ } else if let Ok((input, i)) = ident(input) {
++ Ok((input, TokenTree::Ident(i)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++ if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
++ .iter()
++ .any(|prefix| input.starts_with(prefix))
++ {
++ Err(LexError)
++ } else {
++ ident_any(input)
++ }
++}
++
++fn ident_any(input: Cursor) -> PResult<crate::Ident> {
++ let raw = input.starts_with("r#");
++ let rest = input.advance((raw as usize) << 1);
++
++ let (rest, sym) = ident_not_raw(rest)?;
++
++ if !raw {
++ let ident = crate::Ident::new(sym, crate::Span::call_site());
++ return Ok((rest, ident));
++ }
++
++ if sym == "_" {
++ return Err(LexError);
++ }
++
++ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++ Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++ let mut chars = input.char_indices();
++
++ match chars.next() {
++ Some((_, ch)) if is_ident_start(ch) => {}
++ _ => return Err(LexError),
++ }
++
++ let mut end = input.len();
++ for (i, ch) in chars {
++ if !is_ident_continue(ch) {
++ end = i;
++ break;
++ }
++ }
++
++ Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++ match literal_nocapture(input) {
++ Ok(a) => {
++ let end = input.len() - a.len();
++ Ok((a, Literal::_new(input.rest[..end].to_string())))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(ok) = string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte_string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte(input) {
++ Ok(ok)
++ } else if let Ok(ok) = character(input) {
++ Ok(ok)
++ } else if let Ok(ok) = float(input) {
++ Ok(ok)
++ } else if let Ok(ok) = int(input) {
++ Ok(ok)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++ match ident_not_raw(input) {
++ Ok((input, _)) => input,
++ Err(LexError) => input,
++ }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("\"") {
++ cooked_string(input)
++ } else if let Ok(input) = input.parse("r") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices().peekable();
++
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ let input = input.advance(i + 1);
++ return Ok(literal_suffix(input));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ '\\' => match chars.next() {
++ Some((_, 'x')) => {
++ if !backslash_x_char(&mut chars) {
++ break;
++ }
++ }
++ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++ Some((_, 'u')) => {
++ if !backslash_u(&mut chars) {
++ break;
++ }
++ }
++ Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
++ let mut last = ch;
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.peek() {
++ Some((_, ch)) if ch.is_whitespace() => {
++ last = *ch;
++ chars.next();
++ }
++ _ => break,
++ }
++ }
++ }
++ _ => break,
++ },
++ _ch => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("b\"") {
++ cooked_byte_string(input)
++ } else if let Ok(input) = input.parse("br") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++ let mut bytes = input.bytes().enumerate();
++ while let Some((offset, b)) = bytes.next() {
++ match b {
++ b'"' => {
++ let input = input.advance(offset + 1);
++ return Ok(literal_suffix(input));
++ }
++ b'\r' => match bytes.next() {
++ Some((_, b'\n')) => {}
++ _ => break,
++ },
++ b'\\' => match bytes.next() {
++ Some((_, b'x')) => {
++ if !backslash_x_byte(&mut bytes) {
++ break;
++ }
++ }
++ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++ Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
++ let mut last = b as char;
++ let rest = input.advance(newline + 1);
++ let mut chars = rest.char_indices();
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.next() {
++ Some((_, ch)) if ch.is_whitespace() => last = ch,
++ Some((offset, _)) => {
++ input = rest.advance(offset);
++ bytes = input.bytes().enumerate();
++ break;
++ }
++ None => return Err(LexError),
++ }
++ }
++ }
++ _ => break,
++ },
++ b if b < 0x80 => {}
++ _ => break,
++ }
++ }
++ Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices();
++ let mut n = 0;
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ n = i;
++ break;
++ }
++ '#' => {}
++ _ => return Err(LexError),
++ }
++ }
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++ let rest = input.advance(i + 1 + n);
++ return Ok(literal_suffix(rest));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ _ => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("b'")?;
++ let mut bytes = input.bytes().enumerate();
++ let ok = match bytes.next().map(|(_, b)| b) {
++ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++ Some(b'x') => backslash_x_byte(&mut bytes),
++ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++ | Some(b'"') => true,
++ _ => false,
++ },
++ b => b.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (offset, _) = bytes.next().ok_or(LexError)?;
++ if !input.chars().as_str().is_char_boundary(offset) {
++ return Err(LexError);
++ }
++ let input = input.advance(offset).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("'")?;
++ let mut chars = input.char_indices();
++ let ok = match chars.next().map(|(_, ch)| ch) {
++ Some('\\') => match chars.next().map(|(_, ch)| ch) {
++ Some('x') => backslash_x_char(&mut chars),
++ Some('u') => backslash_u(&mut chars),
++ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++ true
++ }
++ _ => false,
++ },
++ ch => ch.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (idx, _) = chars.next().ok_or(LexError)?;
++ let input = input.advance(idx).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++ match $chars.next() {
++ Some((_, ch)) => match ch {
++ $pat $(| $rest)* => ch,
++ _ => return false,
++ },
++ None => return false,
++ }
++ };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '0'..='7');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, u8)>,
++{
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '{');
++ let mut value = 0;
++ let mut len = 0;
++ while let Some((_, ch)) = chars.next() {
++ let digit = match ch {
++ '0'..='9' => ch as u8 - b'0',
++ 'a'..='f' => 10 + ch as u8 - b'a',
++ 'A'..='F' => 10 + ch as u8 - b'A',
++ '_' if len > 0 => continue,
++ '}' if len > 0 => return char::from_u32(value).is_some(),
++ _ => return false,
++ };
++ if len == 6 {
++ return false;
++ }
++ value *= 0x10;
++ value += u32::from(digit);
++ len += 1;
++ }
++ false
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = float_digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.chars().peekable();
++ match chars.next() {
++ Some(ch) if ch >= '0' && ch <= '9' => {}
++ _ => return Err(LexError),
++ }
++
++ let mut len = 1;
++ let mut has_dot = false;
++ let mut has_exp = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '0'..='9' | '_' => {
++ chars.next();
++ len += 1;
++ }
++ '.' => {
++ if has_dot {
++ break;
++ }
++ chars.next();
++ if chars
++ .peek()
++ .map(|&ch| ch == '.' || is_ident_start(ch))
++ .unwrap_or(false)
++ {
++ return Err(LexError);
++ }
++ len += 1;
++ has_dot = true;
++ }
++ 'e' | 'E' => {
++ chars.next();
++ len += 1;
++ has_exp = true;
++ break;
++ }
++ _ => break,
++ }
++ }
++
++ if !(has_dot || has_exp) {
++ return Err(LexError);
++ }
++
++ if has_exp {
++ let token_before_exp = if has_dot {
++ Ok(input.advance(len - 1))
++ } else {
++ Err(LexError)
++ };
++ let mut has_sign = false;
++ let mut has_exp_value = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '+' | '-' => {
++ if has_exp_value {
++ break;
++ }
++ if has_sign {
++ return token_before_exp;
++ }
++ chars.next();
++ len += 1;
++ has_sign = true;
++ }
++ '0'..='9' => {
++ chars.next();
++ len += 1;
++ has_exp_value = true;
++ }
++ '_' => {
++ chars.next();
++ len += 1;
++ }
++ _ => break,
++ }
++ }
++ if !has_exp_value {
++ return token_before_exp;
++ }
++ }
++
++ Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++ let base = if input.starts_with("0x") {
++ input = input.advance(2);
++ 16
++ } else if input.starts_with("0o") {
++ input = input.advance(2);
++ 8
++ } else if input.starts_with("0b") {
++ input = input.advance(2);
++ 2
++ } else {
++ 10
++ };
++
++ let mut len = 0;
++ let mut empty = true;
++ for b in input.bytes() {
++ match b {
++ b'0'..=b'9' => {
++ let digit = (b - b'0') as u64;
++ if digit >= base {
++ return Err(LexError);
++ }
++ }
++ b'a'..=b'f' => {
++ let digit = 10 + (b - b'a') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'A'..=b'F' => {
++ let digit = 10 + (b - b'A') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'_' => {
++ if empty && base == 10 {
++ return Err(LexError);
++ }
++ len += 1;
++ continue;
++ }
++ _ => break,
++ };
++ len += 1;
++ empty = false;
++ }
++ if empty {
++ Err(LexError)
++ } else {
++ Ok(input.advance(len))
++ }
++}
++
++fn punct(input: Cursor) -> PResult<Punct> {
++ match punct_char(input) {
++ Ok((rest, '\'')) => {
++ if ident_any(rest)?.0.starts_with("'") {
++ Err(LexError)
++ } else {
++ Ok((rest, Punct::new('\'', Spacing::Joint)))
++ }
++ }
++ Ok((rest, ch)) => {
++ let kind = match punct_char(rest) {
++ Ok(_) => Spacing::Joint,
++ Err(LexError) => Spacing::Alone,
++ };
++ Ok((rest, Punct::new(ch, kind)))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn punct_char(input: Cursor) -> PResult<char> {
++ if input.starts_with("//") || input.starts_with("/*") {
++ // Do not accept `/` of a comment as a punct.
++ return Err(LexError);
++ }
++
++ let mut chars = input.chars();
++ let first = match chars.next() {
++ Some(ch) => ch,
++ None => {
++ return Err(LexError);
++ }
++ };
++ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++ if recognized.contains(first) {
++ Ok((input.advance(first.len_utf8()), first))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++ #[cfg(span_locations)]
++ let lo = input.off;
++ let (rest, (comment, inner)) = doc_comment_contents(input)?;
++ let span = crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ });
++
++ let mut scan_for_bare_cr = comment;
++ while let Some(cr) = scan_for_bare_cr.find('\r') {
++ let rest = &scan_for_bare_cr[cr + 1..];
++ if !rest.starts_with('\n') {
++ return Err(LexError);
++ }
++ scan_for_bare_cr = rest;
++ }
++
++ let mut trees = Vec::new();
++ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++ if inner {
++ trees.push(Punct::new('!', Spacing::Alone).into());
++ }
++ let mut stream = vec![
++ TokenTree::Ident(crate::Ident::new("doc", span)),
++ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++ TokenTree::Literal(crate::Literal::string(comment)),
++ ];
++ for tt in stream.iter_mut() {
++ tt.set_span(span);
++ }
++ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++ trees.push(crate::Group::_new_stable(group).into());
++ for tt in trees.iter_mut() {
++ tt.set_span(span);
++ }
++ Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++ if input.starts_with("//!") {
++ let input = input.advance(3);
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, true)))
++ } else if input.starts_with("/*!") {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], true)))
++ } else if input.starts_with("///") {
++ let input = input.advance(3);
++ if input.starts_with("/") {
++ return Err(LexError);
++ }
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, false)))
++ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], false)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++ let chars = input.char_indices();
++
++ for (i, ch) in chars {
++ if ch == '\n' {
++ return (input.advance(i), &input.rest[..i]);
++ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++ return (input.advance(i + 1), &input.rest[..i]);
++ }
++ }
++
++ (input.advance(input.len()), input.rest)
++}
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+index eb7d0b8a8e..0000000000
+--- a/third_party/rust/proc-macro2/src/strnom.rs
++++ /dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+- pub rest: &'a str,
+- #[cfg(span_locations)]
+- pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+- #[cfg(not(span_locations))]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- }
+- }
+- #[cfg(span_locations)]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- off: self.off + (amt as u32),
+- }
+- }
+-
+- pub fn find(&self, p: char) -> Option<usize> {
+- self.rest.find(p)
+- }
+-
+- pub fn starts_with(&self, s: &str) -> bool {
+- self.rest.starts_with(s)
+- }
+-
+- pub fn is_empty(&self) -> bool {
+- self.rest.is_empty()
+- }
+-
+- pub fn len(&self) -> usize {
+- self.rest.len()
+- }
+-
+- pub fn as_bytes(&self) -> &'a [u8] {
+- self.rest.as_bytes()
+- }
+-
+- pub fn bytes(&self) -> Bytes<'a> {
+- self.rest.bytes()
+- }
+-
+- pub fn chars(&self) -> Chars<'a> {
+- self.rest.chars()
+- }
+-
+- pub fn char_indices(&self) -> CharIndices<'a> {
+- self.rest.char_indices()
+- }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+- if input.is_empty() {
+- return Err(LexError);
+- }
+-
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- while i < bytes.len() {
+- let s = input.advance(i);
+- if bytes[i] == b'/' {
+- if s.starts_with("//")
+- && (!s.starts_with("///") || s.starts_with("////"))
+- && !s.starts_with("//!")
+- {
+- if let Some(len) = s.find('\n') {
+- i += len + 1;
+- continue;
+- }
+- break;
+- } else if s.starts_with("/**/") {
+- i += 4;
+- continue;
+- } else if s.starts_with("/*")
+- && (!s.starts_with("/**") || s.starts_with("/***"))
+- && !s.starts_with("/*!")
+- {
+- let (_, com) = block_comment(s)?;
+- i += com.len();
+- continue;
+- }
+- }
+- match bytes[i] {
+- b' ' | 0x09..=0x0d => {
+- i += 1;
+- continue;
+- }
+- b if b <= 0x7f => {}
+- _ => {
+- let ch = s.chars().next().unwrap();
+- if is_whitespace(ch) {
+- i += ch.len_utf8();
+- continue;
+- }
+- }
+- }
+- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+- }
+- Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+- if !input.starts_with("/*") {
+- return Err(LexError);
+- }
+-
+- let mut depth = 0;
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- let upper = bytes.len() - 1;
+- while i < upper {
+- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+- depth += 1;
+- i += 1; // eat '*'
+- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+- depth -= 1;
+- if depth == 0 {
+- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+- }
+- i += 1; // eat '/'
+- }
+- i += 1;
+- }
+- Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+- match whitespace(input) {
+- Ok((rest, _)) => rest,
+- Err(LexError) => input,
+- }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+- // Rust treats left-to-right mark and right-to-left mark as whitespace
+- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+- match input.chars().next() {
+- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+- Some(_) | None => Ok((input, ())),
+- }
+-}
+-
+-macro_rules! named {
+- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+- $submac!(i, $($args)*)
+- }
+- };
+-}
+-
+-macro_rules! alt {
+- ($i:expr, $e:ident | $($rest:tt)*) => {
+- alt!($i, call!($e) | $($rest)*)
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+- match $subrule!($i, $($args)*) {
+- res @ Ok(_) => res,
+- _ => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+- alt!($i, call!($e) => { $gen } | $($rest)*)
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr }) => {
+- alt!($i, call!($e) => { $gen })
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $e:ident) => {
+- alt!($i, call!($e))
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+- $subrule!($i, $($args)*)
+- };
+-}
+-
+-macro_rules! do_parse {
+- ($i:expr, ( $($rest:expr),* )) => {
+- Ok(($i, ( $($rest),* )))
+- };
+-
+- ($i:expr, $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, _)) => do_parse!(i, $($rest)*),
+- }
+- };
+-
+- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, $field: call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => {
+- let $field = o;
+- do_parse!(i, $($rest)*)
+- },
+- }
+- };
+-}
+-
+-macro_rules! peek {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, o)) => Ok(($i, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-}
+-
+-macro_rules! call {
+- ($i:expr, $fun:expr $(, $args:expr)*) => {
+- $fun($i $(, $args)*)
+- };
+-}
+-
+-macro_rules! option {
+- ($i:expr, $f:expr) => {
+- match $f($i) {
+- Ok((i, o)) => Ok((i, Some(o))),
+- Err(LexError) => Ok(($i, None)),
+- }
+- };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+- ($i:expr,) => {{
+- if $i.len() == 0 {
+- Ok(($i, ""))
+- } else {
+- match $i.find('\n') {
+- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+- }
+- }
+- }};
+-}
+-
+-macro_rules! tuple {
+- ($i:expr, $($rest:tt)*) => {
+- tuple_parser!($i, (), $($rest)*)
+- };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt),*), $e:ident) => {
+- tuple_parser!($i, ($($parsed),*), call!($e))
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+- $submac!($i, $($args)*)
+- };
+-
+- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+- }
+- };
+-
+- ($i:expr, ($($parsed:expr),*)) => {
+- Ok(($i, ($($parsed),*)))
+- };
+-}
+-
+-macro_rules! not {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, _)) => Err(LexError),
+- Err(LexError) => Ok(($i, ())),
+- }
+- };
+-}
+-
+-macro_rules! tag {
+- ($i:expr, $tag:expr) => {
+- if $i.starts_with($tag) {
+- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+- } else {
+- Err(LexError)
+- }
+- };
+-}
+-
+-macro_rules! punct {
+- ($i:expr, $punct:expr) => {
+- $crate::strnom::punct($i, $punct)
+- };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+- let input = skip_whitespace(input);
+- if input.starts_with(token) {
+- Ok((input.advance(token.len()), token))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! preceded {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+- Ok((remaining, (_, o))) => Ok((remaining, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- preceded!($i, $submac!($($args)*), call!($g))
+- };
+-}
+-
+-macro_rules! delimited {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i1, (_, o, _))) => Ok((i1, o))
+- }
+- };
+-}
+-
+-macro_rules! map {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, call!(o, $g)))
+- }
+- };
+-
+- ($i:expr, $f:expr, $g:expr) => {
+- map!($i, call!($f), $g)
+- };
+-}
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+index 552b9381cf..3df044af17 100644
+--- a/third_party/rust/proc-macro2/src/wrapper.rs
++++ b/third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,15 +1,15 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+ }
+@@ -19,73 +19,16 @@ pub enum TokenStream {
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+ }
+
+-pub enum LexError {
++pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+ }
+
+-fn nightly_works() -> bool {
+- use std::sync::atomic::*;
+- use std::sync::Once;
+-
+- static WORKS: AtomicUsize = AtomicUsize::new(0);
+- static INIT: Once = Once::new();
+-
+- match WORKS.load(Ordering::SeqCst) {
+- 1 => return false,
+- 2 => return true,
+- _ => {}
+- }
+-
+- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+- // then use catch_unwind to determine whether the compiler's proc_macro is
+- // working. When proc-macro2 is used from outside of a procedural macro all
+- // of the proc_macro crate's APIs currently panic.
+- //
+- // The Once is to prevent the possibility of this ordering:
+- //
+- // thread 1 calls take_hook, gets the user's original hook
+- // thread 1 calls set_hook with the null hook
+- // thread 2 calls take_hook, thinks null hook is the original hook
+- // thread 2 calls set_hook with the null hook
+- // thread 1 calls set_hook with the actual original hook
+- // thread 2 calls set_hook with what it thinks is the original hook
+- //
+- // in which the user's hook has been lost.
+- //
+- // There is still a race condition where a panic in a different thread can
+- // happen during the interval that the user's original panic hook is
+- // unregistered such that their hook is incorrectly not called. This is
+- // sufficiently unlikely and less bad than printing panic messages to stderr
+- // on correct use of this crate. Maybe there is a libstd feature request
+- // here. For now, if a user needs to guarantee that this failure mode does
+- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+- // the main thread before launching any other threads.
+- INIT.call_once(|| {
+- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+- let sanity_check = &*null_hook as *const PanicHook;
+- let original_hook = panic::take_hook();
+- panic::set_hook(null_hook);
+-
+- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+- WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+- let hopefully_null_hook = panic::take_hook();
+- panic::set_hook(original_hook);
+- if sanity_check != &*hopefully_null_hook {
+- panic!("observed race condition in proc_macro2::nightly_works");
+- }
+- });
+- nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+ panic!("stable/nightly mismatch")
+ }
+@@ -103,7 +46,12 @@ impl DeferredTokenStream {
+ }
+
+ fn evaluate_now(&mut self) {
+- self.stream.extend(self.extra.drain(..));
++ // If-check provides a fast short circuit for the common case of `extra`
++ // being empty, which saves a round trip over the proc macro bridge.
++ // Improves macro expansion time in winrt by 6% in debug mode.
++ if !self.extra.is_empty() {
++ self.stream.extend(self.extra.drain(..));
++ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+@@ -114,7 +62,7 @@ impl DeferredTokenStream {
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+@@ -147,9 +95,9 @@ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+- src.parse()?,
++ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+@@ -157,11 +105,17 @@ impl FromStr for TokenStream {
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+ }
+@@ -187,7 +141,7 @@ impl From<fallback::TokenStream> for TokenStream {
+ }
+ }
+
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+@@ -196,9 +150,9 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+- let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
+- op.set_span(tt.span().inner.unwrap_nightly());
+- op.into()
++ let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
++ punct.set_span(tt.span().inner.unwrap_nightly());
++ punct.into()
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+@@ -207,7 +161,7 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+@@ -215,9 +169,9 @@ impl From<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+@@ -227,7 +181,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+@@ -252,14 +206,15 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+- tts.extra
+- .extend(streams.into_iter().map(into_compiler_token));
++ for token in stream {
++ tts.extra.push(into_compiler_token(token));
++ }
+ }
+- TokenStream::Fallback(tts) => tts.extend(streams),
++ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+ }
+@@ -270,20 +225,20 @@ impl Extend<TokenStream> for TokenStream {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+ }
+@@ -300,17 +255,29 @@ impl From<fallback::LexError> for LexError {
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ LexError::Compiler(e) => Debug::fmt(e, f),
++ LexError::Fallback(e) => Debug::fmt(e, f),
++ }
++ }
++}
++
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- LexError::Compiler(e) => e.fmt(f),
+- LexError::Fallback(e) => e.fmt(f),
++ #[cfg(lexerror_display)]
++ LexError::Compiler(e) => Display::fmt(e, f),
++ #[cfg(not(lexerror_display))]
++ LexError::Compiler(_e) => Display::fmt(&fallback::LexError, f),
++ LexError::Fallback(e) => Display::fmt(e, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+ }
+@@ -361,7 +328,7 @@ impl Iterator for TokenTreeIter {
+ }
+ }
+
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TokenTreeIter").finish()
+ }
+@@ -369,7 +336,7 @@ impl fmt::Debug for TokenTreeIter {
+
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+ }
+@@ -397,58 +364,77 @@ impl SourceFile {
+ }
+
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- SourceFile::Compiler(a) => a.fmt(f),
+- SourceFile::Fallback(a) => a.fmt(f),
++ SourceFile::Compiler(a) => Debug::fmt(a, f),
++ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+ }
+
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+ }
+
+ impl Span {
+ pub fn call_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ if inside_proc_macro() {
++ Span::Compiler(proc_macro::Span::mixed_site())
++ } else {
++ Span::Fallback(fallback::Span::mixed_site())
++ }
++ }
++
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => other,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ _ => mismatch(),
+ }
+@@ -542,16 +528,16 @@ impl From<fallback::Span> for Span {
+ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Span::Compiler(s) => s.fmt(f),
+- Span::Fallback(s) => s.fmt(f),
++ Span::Compiler(s) => Debug::fmt(s, f),
++ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+@@ -561,7 +547,7 @@ pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span)
+ }
+
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+ }
+@@ -652,26 +638,26 @@ impl From<fallback::Group> for Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Display::fmt(group, formatter),
++ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Debug::fmt(group, formatter),
++ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Ident {
++pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+ }
+@@ -747,26 +733,26 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Display::fmt(t, f),
++ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Debug::fmt(t, f),
++ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Literal {
++pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+ }
+@@ -774,7 +760,7 @@ pub enum Literal {
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -786,7 +772,7 @@ macro_rules! suffixed_numbers {
+ macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -830,7 +816,7 @@ impl Literal {
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+@@ -838,7 +824,7 @@ impl Literal {
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+@@ -846,7 +832,7 @@ impl Literal {
+ }
+
+ pub fn string(t: &str) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(t))
+ } else {
+ Literal::Fallback(fallback::Literal::string(t))
+@@ -854,7 +840,7 @@ impl Literal {
+ }
+
+ pub fn character(t: char) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(t))
+ } else {
+ Literal::Fallback(fallback::Literal::character(t))
+@@ -862,7 +848,7 @@ impl Literal {
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+@@ -908,20 +894,20 @@ impl From<fallback::Literal> for Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Display::fmt(t, f),
++ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Debug::fmt(t, f),
++ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
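Two renamings run through the file above: every nightly_works() test became inside_proc_macro(), and parsing now goes through proc_macro_parse(), which wraps the compiler's lexer in catch_unwind so a panicking parse degrades into LexError::Fallback instead of aborting (the workaround for rust-lang/rust issue 58736 noted in the hunk). A minimal standard-library-only sketch of that catch-and-fall-back pattern; parse_or_err and its ASCII check are illustrative, not the patch's code:

use std::panic;

// Stand-in for a parser that can panic on some inputs, like the proc_macro
// lexer bug the patch works around.
fn parse_or_err(src: &str) -> Result<usize, String> {
    panic::catch_unwind(|| {
        assert!(src.is_ascii(), "lexer bug: non-ASCII input");
        src.len()
    })
    // A caught panic becomes an ordinary Err, mirroring the
    // LexError::Fallback path in proc_macro_parse() above.
    .map_err(|_| String::from("lexer panicked; using fallback"))
}

fn main() {
    assert_eq!(parse_or_err("fn main() {}"), Ok(12));
    // The panic is caught, though the default hook still prints it to stderr;
    // avoiding exactly that noise is what the removed panic-hook juggling at
    // the top of this file used to be for.
    assert!(parse_or_err("fn máin() {}").is_err());
}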
+diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
+new file mode 100644
+index 0000000000..708cccb880
+--- /dev/null
++++ b/third_party/rust/proc-macro2/tests/comments.rs
+@@ -0,0 +1,103 @@
++use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
++
++// #[doc = "..."] -> "..."
++fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, false)
++}
++
++// #![doc = "..."] -> "..."
++fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, true)
++}
++
++fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
++ let mut iter = tokens.clone().into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '#');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ if inner {
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '!');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ }
++ iter = match iter.next().unwrap() {
++ TokenTree::Group(group) => {
++ assert_eq!(group.delimiter(), Delimiter::Bracket);
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ group.stream().into_iter()
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ };
++ match iter.next().unwrap() {
++ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '=');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ literal
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++}
++
++#[test]
++fn closed_immediately() {
++ let stream = "/**/".parse::<TokenStream>().unwrap();
++ let tokens = stream.into_iter().collect::<Vec<_>>();
++ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
++}
++
++#[test]
++fn incomplete() {
++ assert!("/*/".parse::<TokenStream>().is_err());
++}
++
++#[test]
++fn lit() {
++ let stream = "/// doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "//! doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "/** doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++
++ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++}
++
++#[test]
++fn carriage_return() {
++ let stream = "///\r\n".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\"");
++
++ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\\r\\n\"");
++
++ "///\r".parse::<TokenStream>().unwrap_err();
++ "///\r \n".parse::<TokenStream>().unwrap_err();
++ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
++}
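The new comments.rs test walks doc comments token by token, relying on the fact that an outer doc comment lexes as the attribute tokens #[doc = "..."]. A small sketch of that round trip against proc-macro2's public API, with the assertions kept loose so exact printer whitespace does not matter; the variable names are illustrative:

use proc_macro2::TokenStream;

fn main() {
    let tokens: TokenStream = "/// hello".parse().unwrap();
    let rendered: Vec<String> = tokens.into_iter().map(|tt| tt.to_string()).collect();

    // An outer doc comment becomes two tokens: the '#' punct and a
    // bracketed group containing doc = " hello".
    assert_eq!(rendered.len(), 2);
    assert_eq!(rendered[0], "#");
    assert!(rendered[1].contains("doc"));
    assert!(rendered[1].contains(" hello"));
}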
+diff --git a/third_party/rust/proc-macro2/tests/marker.rs b/third_party/rust/proc-macro2/tests/marker.rs
+index 7af2539c1a..70e57677cd 100644
+--- a/third_party/rust/proc-macro2/tests/marker.rs
++++ b/third_party/rust/proc-macro2/tests/marker.rs
+@@ -57,3 +57,36 @@ mod semver_exempt {
+
+ assert_impl!(SourceFile is not Send or Sync);
+ }
++
++#[cfg(not(no_libprocmacro_unwind_safe))]
++mod unwind_safe {
++ use super::*;
++ use std::panic::{RefUnwindSafe, UnwindSafe};
++
++ macro_rules! assert_unwind_safe {
++ ($($types:ident)*) => {
++ $(
++ assert_impl!($types is UnwindSafe and RefUnwindSafe);
++ )*
++ };
++ }
++
++ assert_unwind_safe! {
++ Delimiter
++ Group
++ Ident
++ LexError
++ Literal
++ Punct
++ Spacing
++ Span
++ TokenStream
++ TokenTree
++ }
++
++ #[cfg(procmacro2_semver_exempt)]
++ assert_unwind_safe! {
++ LineColumn
++ SourceFile
++ }
++}
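assert_impl! (defined earlier in marker.rs) and the new assert_unwind_safe! wrapper are compile-time checks: if one of the listed types stopped implementing the marker traits, the test crate would no longer build. The underlying trick is just a generic function with the required bounds; a minimal standalone sketch, where assert_unwind_safe is an illustrative stand-in rather than the macro above:

use std::panic::{RefUnwindSafe, UnwindSafe};

// Compiles only when T implements both unwind-safety marker traits.
fn assert_unwind_safe<T: UnwindSafe + RefUnwindSafe>() {}

fn main() {
    assert_unwind_safe::<String>();
    assert_unwind_safe::<Vec<u8>>();
    // assert_unwind_safe::<std::cell::RefCell<u8>>(); // would not compile:
    // RefCell has interior mutability and is not RefUnwindSafe.
}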
+diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
+index 7528388138..1e9f633944 100644
+--- a/third_party/rust/proc-macro2/tests/test.rs
++++ b/third_party/rust/proc-macro2/tests/test.rs
+@@ -1,7 +1,6 @@
++use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+ use std::str::{self, FromStr};
+
+-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+-
+ #[test]
+ fn idents() {
+ assert_eq!(
+@@ -84,6 +83,11 @@ fn literal_string() {
+ assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
+ }
+
++#[test]
++fn literal_raw_string() {
++ "r\"\r\n\"".parse::<TokenStream>().unwrap();
++}
++
+ #[test]
+ fn literal_character() {
+ assert_eq!(Literal::character('x').to_string(), "'x'");
+@@ -110,6 +114,37 @@ fn literal_suffix() {
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
++ assert_eq!(token_count("r\"\"r"), 1);
++ assert_eq!(token_count("b\"\"b"), 1);
++ assert_eq!(token_count("br\"\"br"), 1);
++ assert_eq!(token_count("r#\"\"#r"), 1);
++ assert_eq!(token_count("'c'c"), 1);
++ assert_eq!(token_count("b'b'b"), 1);
++ assert_eq!(token_count("0E"), 1);
++ assert_eq!(token_count("0o0A"), 1);
++ assert_eq!(token_count("0E--0"), 4);
++ assert_eq!(token_count("0.0ECMA"), 1);
++}
++
++#[test]
++fn literal_iter_negative() {
++ let negative_literal = Literal::i32_suffixed(-3);
++ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
++ let mut iter = tokens.into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '-');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert_eq!(literal.to_string(), "3i32");
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ assert!(iter.next().is_none());
+ }
+
+ #[test]
+@@ -161,41 +196,21 @@ fn fail() {
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
++ fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
++ fail("\"\\u{999999}\""); // outside of valid range of char
++ fail("\"\\u{_0}\""); // leading underscore
++ fail("\"\\u{}\""); // empty
++ fail("b\"\r\""); // bare carriage return in byte string
++ fail("r\"\r\""); // bare carriage return in raw string
++ fail("\"\\\r \""); // backslash carriage return
++ fail("'aa'aa");
++ fail("br##\"\"#");
++ fail("\"\\\n\u{85}\r\"");
+ }
+
+ #[cfg(span_locations)]
+ #[test]
+ fn span_test() {
+- use proc_macro2::TokenTree;
+-
+- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+- let ts = p.parse::<TokenStream>().unwrap();
+- check_spans_internal(ts, &mut lines);
+- }
+-
+- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+- for i in ts {
+- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+- *lines = rest;
+-
+- let start = i.span().start();
+- assert_eq!(start.line, sline, "sline did not match for {}", i);
+- assert_eq!(start.column, scol, "scol did not match for {}", i);
+-
+- let end = i.span().end();
+- assert_eq!(end.line, eline, "eline did not match for {}", i);
+- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+-
+- match i {
+- TokenTree::Group(ref g) => {
+- check_spans_internal(g.stream().clone(), lines);
+- }
+- _ => {}
+- }
+- }
+- }
+- }
+-
+ check_spans(
+ "\
+ /// This is a document comment
+@@ -274,53 +289,11 @@ fn span_join() {
+ #[test]
+ fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
++ assert!(s.parse::<TokenStream>().is_err());
+ }
+
+ #[test]
+-fn tricky_doc_comment() {
+- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+-
+- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+- match tokens[0] {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
+- _ => panic!("wrong token {:?}", tokens[0]),
+- }
+- let mut tokens = match tokens[1] {
+- proc_macro2::TokenTree::Group(ref tt) => {
+- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+- tt.stream().into_iter()
+- }
+- _ => panic!("wrong token {:?}", tokens[0]),
+- };
+-
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Literal(ref tt) => {
+- assert_eq!(tt.to_string(), "\" doc\"");
+- }
+- t => panic!("wrong token {:?}", t),
+- }
+- assert!(tokens.next().is_none());
+-
+- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
+-}
+-
+-#[test]
+-fn op_before_comment() {
++fn punct_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+@@ -331,6 +304,22 @@ fn op_before_comment() {
+ }
+ }
+
++#[test]
++fn joint_last_token() {
++ // This test verifies that we match the behavior of libproc_macro *not* in
++ // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
++ // behavior was temporarily broken.
++ // See https://github.com/rust-lang/rust/issues/76399
++
++ let joint_punct = Punct::new(':', Spacing::Joint);
++ let stream = TokenStream::from(TokenTree::Punct(joint_punct));
++ let punct = match stream.into_iter().next().unwrap() {
++ TokenTree::Punct(punct) => punct,
++ _ => unreachable!(),
++ };
++ assert_eq!(punct.spacing(), Spacing::Joint);
++}
++
+ #[test]
+ fn raw_identifier() {
+ let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
+@@ -345,11 +334,11 @@ fn raw_identifier() {
+ fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+- #[cfg(procmacro2_semver_exempt)]
+- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
++ #[cfg(span_locations)]
++ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+ }
+@@ -358,7 +347,7 @@ fn test_debug_ident() {
+ fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -368,7 +357,7 @@ TokenStream [
+ sym: a,
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ },
+ Literal {
+@@ -379,7 +368,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -389,7 +378,7 @@ TokenStream [
+ sym: a
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone
+ },
+ Literal {
+@@ -400,7 +389,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -411,7 +400,7 @@ TokenStream [
+ span: bytes(2..3),
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5),
+ },
+@@ -425,7 +414,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -436,7 +425,7 @@ TokenStream [
+ span: bytes(2..3)
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5)
+ },
+@@ -464,3 +453,80 @@ fn default_tokenstream_is_empty() {
+
+ assert!(default_token_stream.is_empty());
+ }
++
++#[test]
++fn tuple_indexing() {
++ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
++ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
++ assert_eq!("tuple", tokens.next().unwrap().to_string());
++ assert_eq!(".", tokens.next().unwrap().to_string());
++ assert_eq!("0.0", tokens.next().unwrap().to_string());
++ assert!(tokens.next().is_none());
++}
++
++#[cfg(span_locations)]
++#[test]
++fn non_ascii_tokens() {
++ check_spans("// abc", &[]);
++ check_spans("// ábc", &[]);
++ check_spans("// abc x", &[]);
++ check_spans("// ábc x", &[]);
++ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
++ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
++ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
++ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
++ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
++ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
++ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("'a'", &[(1, 0, 1, 3)]);
++ check_spans("'á'", &[(1, 0, 1, 3)]);
++ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("abc", &[(1, 0, 1, 3)]);
++ check_spans("ábc", &[(1, 0, 1, 3)]);
++ check_spans("ábć", &[(1, 0, 1, 3)]);
++ check_spans("abc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
++ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
++ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
++}
++
++#[cfg(span_locations)]
++fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
++ let ts = p.parse::<TokenStream>().unwrap();
++ check_spans_internal(ts, &mut lines);
++ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
++}
++
++#[cfg(span_locations)]
++fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
++ for i in ts {
++ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
++ *lines = rest;
++
++ let start = i.span().start();
++ assert_eq!(start.line, sline, "sline did not match for {}", i);
++ assert_eq!(start.column, scol, "scol did not match for {}", i);
++
++ let end = i.span().end();
++ assert_eq!(end.line, eline, "eline did not match for {}", i);
++ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
++
++ if let TokenTree::Group(g) = i {
++ check_spans_internal(g.stream().clone(), lines);
++ }
++ }
++ }
++}
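Several of the new test.rs cases, literal_suffix in particular, boil down to counting how many tokens a snippet lexes into: suffixed literals such as r""r or 'c'c must come back as a single Literal rather than two tokens. A minimal version of that check against proc-macro2's public API; token_count here is a local illustration (the test file uses its own helper of the same name):

use proc_macro2::TokenStream;

fn token_count(src: &str) -> usize {
    src.parse::<TokenStream>().unwrap().into_iter().count()
}

fn main() {
    assert_eq!(token_count("1 + 2"), 3);
    // Suffixed literals lex as one token on the proc-macro2 version this
    // patch vendors, which is exactly what the updated test asserts.
    assert_eq!(token_count("\"\"s"), 1);
    assert_eq!(token_count("'c'c"), 1);
}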
+diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
+new file mode 100644
+index 0000000000..99a0aee5c8
+--- /dev/null
++++ b/third_party/rust/proc-macro2/tests/test_fmt.rs
+@@ -0,0 +1,26 @@
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use std::iter::{self, FromIterator};
++
++#[test]
++fn test_fmt_group() {
++ let ident = Ident::new("x", Span::call_site());
++ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
++ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
++ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
++ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
++ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
++ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
++ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
++ let none_empty = Group::new(Delimiter::None, TokenStream::new());
++ let none_nonempty = Group::new(Delimiter::None, inner.clone());
++
++ // Matches libproc_macro.
++ assert_eq!("()", parens_empty.to_string());
++ assert_eq!("(x)", parens_nonempty.to_string());
++ assert_eq!("[]", brackets_empty.to_string());
++ assert_eq!("[x]", brackets_nonempty.to_string());
++ assert_eq!("{ }", braces_empty.to_string());
++ assert_eq!("{ x }", braces_nonempty.to_string());
++ assert_eq!("", none_empty.to_string());
++ assert_eq!("x", none_nonempty.to_string());
++}
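test_fmt.rs pins down how Group renders under each delimiter, including the invisible Delimiter::None case. The same Display and FromIterator impls touched earlier in this patch are what make hand-built streams printable; a small sketch that assembles foo(x) from individual tokens, comparing with whitespace stripped because spacing in the rendered form is a printer detail:

use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use std::iter::FromIterator;

fn main() {
    // Build the argument list and the call out of explicit token trees.
    let arg = TokenStream::from_iter(vec![TokenTree::Ident(Ident::new("x", Span::call_site()))]);
    let call = TokenStream::from_iter(vec![
        TokenTree::Ident(Ident::new("foo", Span::call_site())),
        TokenTree::Group(Group::new(Delimiter::Parenthesis, arg)),
    ]);
    assert_eq!(call.to_string().replace(' ', ""), "foo(x)");
}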
+diff --git a/third_party/rust/spirv-cross-internal/.cargo-checksum.json b/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+index 3c732d6d0e..014aa640e1 100644
+--- a/third_party/rust/spirv-cross-internal/.cargo-checksum.json
++++ b/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.gitignore":"7f23cc92ddb5e1f584447e98d3e8ab6543fc182f1543f0f6ec29856f9250cdd6","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148c
fc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b196567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44e
df18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148cfc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b1
96567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44edf18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5
a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
+index 77939d8fc6..704f2ed200 100644
+--- a/third_party/rust/syn/.cargo-checksum.json
++++ b/third_party/rust/syn/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffab5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835
f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97da41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f23da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5
fa54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465bea613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae3068309998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d
01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
+index 7a5c962f06..20277fc461 100644
+--- a/third_party/rust/syn/Cargo.toml
++++ b/third_party/rust/syn/Cargo.toml
+@@ -13,7 +13,7 @@
+ [package]
+ edition = "2018"
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ authors = ["David Tolnay <dtolnay@gmail.com>"]
+ include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+ description = "Parser for Rust source code"
+@@ -24,25 +24,21 @@ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/dtolnay/syn"
+ [package.metadata.docs.rs]
+ all-features = true
++targets = ["x86_64-unknown-linux-gnu"]
+
+ [package.metadata.playground]
+-all-features = true
+-
+-[lib]
+-name = "syn"
++features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+
+ [[bench]]
+ name = "rust"
+ harness = false
+ required-features = ["full", "parsing"]
+-edition = "2018"
+
+ [[bench]]
+ name = "file"
+ required-features = ["full", "parsing"]
+-edition = "2018"
+ [dependencies.proc-macro2]
+-version = "1.0"
++version = "1.0.13"
+ default-features = false
+
+ [dependencies.quote]
+@@ -52,18 +48,34 @@ default-features = false
+
+ [dependencies.unicode-xid]
+ version = "0.2"
++[dev-dependencies.anyhow]
++version = "1.0"
++
++[dev-dependencies.flate2]
++version = "1.0"
++
+ [dev-dependencies.insta]
+-version = "0.9"
++version = "0.16"
+
+ [dev-dependencies.rayon]
+ version = "1.0"
+
+ [dev-dependencies.ref-cast]
+-version = "0.2"
++version = "1.0"
+
+ [dev-dependencies.regex]
+ version = "1.0"
+
++[dev-dependencies.reqwest]
++version = "0.10"
++features = ["blocking"]
++
++[dev-dependencies.syn-test-suite]
++version = "0"
++
++[dev-dependencies.tar]
++version = "0.4"
++
+ [dev-dependencies.termcolor]
+ version = "1.0"
+
+@@ -80,7 +92,6 @@ full = []
+ parsing = []
+ printing = ["quote"]
+ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
++test = ["syn-test-suite/all-features"]
+ visit = []
+ visit-mut = []
+-[badges.travis-ci]
+-repository = "dtolnay/syn"
+diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
+index 29a7f32a46..12b5f45b3d 100644
+--- a/third_party/rust/syn/README.md
++++ b/third_party/rust/syn/README.md
+@@ -1,10 +1,10 @@
+ Parser for Rust source code
+ ===========================
+
+-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
+-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
+-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
+-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
++[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
++[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
++[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
++[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+ Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+ of Rust source code.
+@@ -46,10 +46,6 @@ contains some APIs that may be useful more generally.
+ [`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+ [parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+
+-If you get stuck with anything involving procedural macros in Rust I am happy to
+-provide help even if the issue is not related to Syn. Please file a ticket in
+-this repo.
+-
+ *Version requirement: Syn supports rustc 1.31 and up.*
+
+ [*Release notes*](https://github.com/dtolnay/syn/releases)
+@@ -88,8 +84,6 @@ proc-macro = true
+ ```
+
+ ```rust
+-extern crate proc_macro;
+-
+ use proc_macro::TokenStream;
+ use quote::quote;
+ use syn::{parse_macro_input, DeriveInput};
+@@ -271,7 +265,7 @@ points, which are required by the language to use `proc_macro::TokenStream`.
+ The proc-macro2 crate will automatically detect and use the compiler's data
+ structures when a procedural macro is active.
+
+-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
++[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+ <br>
+
+diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
+index 08ecd90960..58ab8df297 100644
+--- a/third_party/rust/syn/benches/file.rs
++++ b/third_party/rust/syn/benches/file.rs
+@@ -1,9 +1,16 @@
+ // $ cargo bench --features full --bench file
+
+ #![feature(rustc_private, test)]
++#![recursion_limit = "1024"]
+
+ extern crate test;
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ pub mod repo;
+
+diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
+index e3d9cd29ba..50e1a7f601 100644
+--- a/third_party/rust/syn/benches/rust.rs
++++ b/third_party/rust/syn/benches/rust.rs
+@@ -4,7 +4,14 @@
+ // $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+
+ #![cfg_attr(not(syn_only), feature(rustc_private))]
++#![recursion_limit = "1024"]
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ mod repo;
+
+@@ -28,31 +35,35 @@ mod syn_parse {
+ }
+
+ #[cfg(not(syn_only))]
+-mod libsyntax_parse {
++mod librustc_parse {
+ extern crate rustc_data_structures;
+- extern crate syntax;
+- extern crate syntax_pos;
++ extern crate rustc_errors;
++ extern crate rustc_parse;
++ extern crate rustc_session;
++ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+- use syntax::edition::Edition;
+- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
+- use syntax::parse::ParseSess;
+- use syntax::source_map::{FilePathMapping, SourceMap};
+- use syntax_pos::FileName;
++ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
++ use rustc_session::parse::ParseSess;
++ use rustc_span::source_map::{FilePathMapping, SourceMap};
++ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
++ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
++ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
++ None
++ }
+ }
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(Edition::Edition2018, || {
+ let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = Handler::with_emitter(false, None, emitter);
+ let sess = ParseSess::with_span_handler(handler, cm);
+- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
++ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+@@ -104,11 +115,11 @@ fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+- ($($(#[$cfg:meta])* $name:path,)*) => {
++ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ vec![
+ $(
+ $(#[$cfg])*
+- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
++ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+@@ -128,12 +139,12 @@ fn main() {
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+- read_from_disk::bench,
++ read_from_disk,
+ #[cfg(not(syn_only))]
+- tokenstream_parse::bench,
+- syn_parse::bench,
++ tokenstream_parse,
++ syn_parse,
+ #[cfg(not(syn_only))]
+- libsyntax_parse::bench,
++ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
+diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
+index c0f9ed3406..cf7681c3f9 100644
+--- a/third_party/rust/syn/build.rs
++++ b/third_party/rust/syn/build.rs
+@@ -1,6 +1,6 @@
+ use std::env;
+ use std::process::Command;
+-use std::str::{self, FromStr};
++use std::str;
+
+ // The rustc-cfg strings below are *not* public API. Please let us know by
+ // opening a GitHub issue if your build environment requires some way to enable
+@@ -26,38 +26,14 @@ struct Compiler {
+ }
+
+ fn rustc_version() -> Option<Compiler> {
+- let rustc = match env::var_os("RUSTC") {
+- Some(rustc) => rustc,
+- None => return None,
+- };
+-
+- let output = match Command::new(rustc).arg("--version").output() {
+- Ok(output) => output,
+- Err(_) => return None,
+- };
+-
+- let version = match str::from_utf8(&output.stdout) {
+- Ok(version) => version,
+- Err(_) => return None,
+- };
+-
++ let rustc = env::var_os("RUSTC")?;
++ let output = Command::new(rustc).arg("--version").output().ok()?;
++ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+-
+- let next = match pieces.next() {
+- Some(next) => next,
+- None => return None,
+- };
+-
+- let minor = match u32::from_str(next) {
+- Ok(minor) => minor,
+- Err(_) => return None,
+- };
+-
+- Some(Compiler {
+- minor: minor,
+- nightly: version.contains("nightly"),
+- })
++ let minor = pieces.next()?.parse().ok()?;
++ let nightly = version.contains("nightly");
++ Some(Compiler { minor, nightly })
+ }
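// A minimal sketch of the pattern used in the build.rs rewrite above: inside a
// function returning Option, the `?` operator replaces each
// `match ... { Some(x) => x, None => return None }` ladder. Standalone version
// with illustrative names; assumes only the standard library.
use std::env;
use std::process::Command;
use std::str;

fn rustc_minor_version() -> Option<(u32, bool)> {
    // Every fallible step short-circuits to None via `?`.
    let rustc = env::var_os("RUSTC")?;
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = str::from_utf8(&output.stdout).ok()?;
    let mut pieces = version.split('.');
    if pieces.next() != Some("rustc 1") {
        return None;
    }
    let minor = pieces.next()?.parse().ok()?;
    let nightly = version.contains("nightly");
    Some((minor, nightly))
}

fn main() {
    if let Some((minor, nightly)) = rustc_minor_version() {
        println!("rustc 1.{} (nightly: {})", minor, nightly);
    }
}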
+diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
+index 34009deabc..fa4f1cb2a3 100644
+--- a/third_party/rust/syn/src/attr.rs
++++ b/third_party/rust/syn/src/attr.rs
+@@ -9,15 +9,11 @@ use proc_macro2::TokenStream;
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
+ #[cfg(feature = "parsing")]
+ use crate::punctuated::Pair;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// An attribute like `#[repr(transparent)]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -111,7 +107,46 @@ ast_struct! {
+ ///
+ /// [`parse_meta()`]: Attribute::parse_meta
+ /// [`parse_args()`]: Attribute::parse_args
+- pub struct Attribute #manual_extra_traits {
++ ///
++ /// <p><br></p>
++ ///
++ /// # Doc comments
++ ///
++ /// The compiler transforms doc comments, such as `/// comment` and `/*!
++ /// comment */`, into attributes before macros are expanded. Each comment is
++ /// expanded into an attribute of the form `#[doc = r"comment"]`.
++ ///
++ /// As an example, the following `mod` items are expanded identically:
++ ///
++ /// ```
++ /// # use syn::{ItemMod, parse_quote};
++ /// let doc: ItemMod = parse_quote! {
++ /// /// Single line doc comments
++ /// /// We write so many!
++ /// /**
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// */
++ /// mod example {
++ /// //! Of course, they can be inner too
++ /// /*! And fit in a single line */
++ /// }
++ /// };
++ /// let attr: ItemMod = parse_quote! {
++ /// #[doc = r" Single line doc comments"]
++ /// #[doc = r" We write so many!"]
++ /// #[doc = r"
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// "]
++ /// mod example {
++ /// #![doc = r" Of course, they can be inner too"]
++ /// #![doc = r" And fit in a single line "]
++ /// }
++ /// };
++ /// assert_eq!(doc, attr);
++ /// ```
++ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+@@ -120,39 +155,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Attribute {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Attribute {
+- fn eq(&self, other: &Self) -> bool {
+- self.style == other.style
+- && self.pound_token == other.pound_token
+- && self.bracket_token == other.bracket_token
+- && self.path == other.path
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Attribute {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.style.hash(state);
+- self.pound_token.hash(state);
+- self.bracket_token.hash(state);
+- self.path.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ impl Attribute {
+ /// Parses the content of the attribute, consisting of the path and tokens,
+ /// as a [`Meta`] if possible.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+@@ -199,7 +206,7 @@ impl Attribute {
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+@@ -208,7 +215,7 @@ impl Attribute {
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+@@ -221,7 +228,7 @@ impl Attribute {
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+@@ -234,7 +241,7 @@ impl Attribute {
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+@@ -247,7 +254,7 @@ impl Attribute {
+ }
+
+ #[cfg(feature = "parsing")]
+-fn error_expected_args(attr: &Attribute) -> Error {
++fn expected_parentheses(attr: &Attribute) -> String {
+ let style = match attr.style {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+@@ -261,19 +268,23 @@ fn error_expected_args(attr: &Attribute) -> Error {
+ path += &segment.ident.to_string();
+ }
+
+- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
+-
+- #[cfg(feature = "printing")]
+- return Error::new_spanned(attr, msg);
+-
+- #[cfg(not(feature = "printing"))]
+- return Error::new(attr.bracket_token.span, msg);
++ format!("{}[{}(...)]", style, path)
+ }
+
+ #[cfg(feature = "parsing")]
+ fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
+ if input.is_empty() {
+- return Err(error_expected_args(attr));
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected attribute arguments in parentheses: {}", expected);
++ return Err(crate::error::new2(
++ attr.pound_token.span,
++ attr.bracket_token.span,
++ msg,
++ ));
++ } else if input.peek(Token![=]) {
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected parentheses: {}", expected);
++ return Err(input.error(msg));
+ };
+
+ let content;
+@@ -298,7 +309,7 @@ ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Outer attributes
+@@ -312,7 +323,6 @@ ast_enum! {
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+@@ -322,7 +332,7 @@ ast_enum! {
+ ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Path
+@@ -360,7 +370,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaList {
+ pub path: Path,
+@@ -372,7 +382,7 @@ ast_struct! {
+ ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaNameValue {
+ pub path: Path,
+@@ -398,7 +408,7 @@ impl Meta {
+ ast_enum_of_structs! {
+ /// Element of a compile-time attribute list.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum NestedMeta {
+ /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
+@@ -429,8 +439,8 @@ ast_enum_of_structs! {
+ /// as type `AttributeArgs`.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -464,7 +474,7 @@ where
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+- _ => false,
++ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.into_iter().filter(is_outer)
+@@ -474,7 +484,7 @@ where
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+- _ => false,
++ AttrStyle::Outer => false,
+ }
+ }
+ self.into_iter().filter(is_inner)
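// A minimal sketch, assuming syn 1.x with the "full", "parsing" and "printing"
// features, of the Attribute behavior documented above: doc comments arrive as
// `#[doc = r"..."]` attributes, and parse_meta() gives structured access to
// list-style attributes. The struct and attribute text are illustrative.
use syn::{parse_quote, ItemStruct, Meta};

fn main() {
    let item: ItemStruct = parse_quote! {
        /// A point in 2D space.
        #[derive(Clone)]
        struct Point { x: f64, y: f64 }
    };
    // The doc comment and the derive are both attributes on the struct.
    assert_eq!(item.attrs.len(), 2);

    // `#[derive(Clone)]` parses as a Meta::List whose path is `derive`.
    match item.attrs[1].parse_meta().unwrap() {
        Meta::List(list) => assert!(list.path.is_ident("derive")),
        _ => unreachable!("derive is a list-style attribute"),
    }
}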
+diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
+index 551a5ac816..a461cc49ea 100644
+--- a/third_party/rust/syn/src/buffer.rs
++++ b/third_party/rust/syn/src/buffer.rs
+@@ -1,7 +1,7 @@
+ //! A stably addressed token buffer supporting efficient traversal based on a
+ //! cheaply copyable cursor.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ // This module is heavily commented as it contains most of the unsafe code in
+ // Syn, and caution should be used when editing it. The public-facing interface
+@@ -36,7 +36,7 @@ enum Entry {
+ /// `TokenStream` which requires a deep copy in order to traverse more than
+ /// once.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct TokenBuffer {
+ // NOTE: Do not derive clone on this - there are raw pointers inside which
+ // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
+@@ -98,7 +98,7 @@ impl TokenBuffer {
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `TokenStream`.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -133,8 +133,7 @@ impl TokenBuffer {
+ /// Two cursors are equal if they have the same location in the same input
+ /// stream, and have the same scope.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
+-#[derive(Copy, Clone, Eq, PartialEq)]
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+@@ -201,13 +200,13 @@ impl<'a> Cursor<'a> {
+ Cursor::create(self.ptr.offset(1), self.scope)
+ }
+
+- /// If the cursor is looking at a `None`-delimited group, move it to look at
+- /// the first token inside instead. If the group is empty, this will move
++ /// While the cursor is looking at a `None`-delimited group, move it to look
++ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+- if let Entry::Group(group, buf) = self.entry() {
++ while let Entry::Group(group, buf) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ // NOTE: We call `Cursor::create` here to make sure that
+ // situations where we should immediately exit the span after
+@@ -215,13 +214,14 @@ impl<'a> Cursor<'a> {
+ unsafe {
+ *self = Cursor::create(&buf.data[0], self.scope);
+ }
++ } else {
++ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+- #[inline]
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+@@ -342,6 +342,44 @@ impl<'a> Cursor<'a> {
+ Entry::End(..) => Span::call_site(),
+ }
+ }
++
++ /// Skip over the next token without cloning it. Returns `None` if this
++ /// cursor points to eof.
++ ///
++ /// This method treats `'lifetimes` as a single token.
++ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
++ match self.entry() {
++ Entry::End(..) => None,
++
++ // Treat lifetimes as a single tt for the purposes of 'skip'.
++ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
++ let next = unsafe { self.bump() };
++ match next.entry() {
++ Entry::Ident(_) => Some(unsafe { next.bump() }),
++ _ => Some(next),
++ }
++ }
++ _ => Some(unsafe { self.bump() }),
++ }
++ }
++}
++
++impl<'a> Copy for Cursor<'a> {}
++
++impl<'a> Clone for Cursor<'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
++impl<'a> Eq for Cursor<'a> {}
++
++impl<'a> PartialEq for Cursor<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ let Cursor { ptr, scope, marker } = self;
++ let _ = marker;
++ *ptr == other.ptr && *scope == other.scope
++ }
+ }
+
+ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
+diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
+index 200e8478ef..a33044a564 100644
+--- a/third_party/rust/syn/src/custom_keyword.rs
++++ b/third_party/rust/syn/src/custom_keyword.rs
+@@ -86,7 +86,7 @@
+ /// }
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+@@ -95,7 +95,7 @@ macro_rules! custom_keyword {
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
++ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
+ span: __S,
+ ) -> $ident {
+@@ -112,10 +112,10 @@ macro_rules! custom_keyword {
+ }
+ }
+
+- impl_parse_for_custom_keyword!($ident);
+- impl_to_tokens_for_custom_keyword!($ident);
+- impl_clone_for_custom_keyword!($ident);
+- impl_extra_traits_for_custom_keyword!($ident);
++ $crate::impl_parse_for_custom_keyword!($ident);
++ $crate::impl_to_tokens_for_custom_keyword!($ident);
++ $crate::impl_clone_for_custom_keyword!($ident);
++ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
+index 29fa448bd8..70dff42851 100644
+--- a/third_party/rust/syn/src/custom_punctuation.rs
++++ b/third_party/rust/syn/src/custom_punctuation.rs
+@@ -74,19 +74,19 @@
+ /// let _: PathSegments = syn::parse_str(input).unwrap();
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+- pub spans: custom_punctuation_repr!($($tt)+),
++ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
+- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
++ #[allow(dead_code, non_snake_case)]
++ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
++ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::export::IntoSpans::into_spans(spans)
+ }
+@@ -98,33 +98,33 @@ macro_rules! custom_punctuation {
+ }
+ }
+
+- impl_parse_for_custom_punctuation!($ident, $($tt)+);
+- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+- impl_clone_for_custom_punctuation!($ident, $($tt)+);
+- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::token::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> bool {
+- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
++ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::export::str {
+- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
++ concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+- let spans: custom_punctuation_repr!($($tt)+) =
+- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
++ let spans: $crate::custom_punctuation_repr!($($tt)+) =
++ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+@@ -142,12 +142,12 @@ macro_rules! impl_parse_for_custom_punctuation {
+ // Not public API.
+ #[cfg(feature = "printing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::export::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
+- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
++ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+@@ -221,16 +221,16 @@ macro_rules! impl_extra_traits_for_custom_punctuation {
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
++ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ #[rustfmt::skip]
+ macro_rules! custom_punctuation_len {
+ ($mode:ident, +) => { 1 };
+@@ -279,7 +279,7 @@ macro_rules! custom_punctuation_len {
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
++ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+ }
+
+ // Not public API.
+@@ -297,13 +297,3 @@ macro_rules! stringify_punct {
+ concat!($(stringify!($tt)),+)
+ };
+ }
+-
+-// Not public API.
+-// Without this, local_inner_macros breaks when looking for concat!
+-#[doc(hidden)]
+-#[macro_export]
+-macro_rules! custom_punctuation_concat {
+- ($($tt:tt)*) => {
+- concat!($($tt)*)
+- };
+-}
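// A minimal sketch, assuming syn 1.x with its default "parsing" and "derive"
// features, of how the custom keyword/punctuation macros patched above are
// used downstream; after this change they expand through `$crate::` paths, so
// no helper macros need to be in scope at the call site. The `meters` keyword
// and the grammar are illustrative.
mod kw {
    syn::custom_keyword!(meters);
}

// Parses input of the form `42 meters`.
struct Distance {
    value: syn::LitInt,
    meters_token: kw::meters,
}

impl syn::parse::Parse for Distance {
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        Ok(Distance {
            value: input.parse()?,
            meters_token: input.parse()?,
        })
    }
}

fn main() {
    let d: Distance = syn::parse_str("42 meters").unwrap();
    assert_eq!(d.value.base10_parse::<u32>().unwrap(), 42);
    let _ = d.meters_token;
}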
+diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
+index be43679874..b217b8ca6f 100644
+--- a/third_party/rust/syn/src/data.rs
++++ b/third_party/rust/syn/src/data.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// An enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variant {
+ /// Attributes tagged on the variant.
+@@ -24,7 +24,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -52,7 +52,7 @@ ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+@@ -63,7 +63,7 @@ ast_struct! {
+ ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+@@ -93,6 +93,24 @@ impl Fields {
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
++
++ /// Returns the number of fields.
++ pub fn len(&self) -> usize {
++ match self {
++ Fields::Unit => 0,
++ Fields::Named(f) => f.named.len(),
++ Fields::Unnamed(f) => f.unnamed.len(),
++ }
++ }
++
++ /// Returns `true` if there are zero fields.
++ pub fn is_empty(&self) -> bool {
++ match self {
++ Fields::Unit => true,
++ Fields::Named(f) => f.named.is_empty(),
++ Fields::Unnamed(f) => f.unnamed.is_empty(),
++ }
++ }
+ }
+
+ impl IntoIterator for Fields {
+@@ -129,7 +147,7 @@ impl<'a> IntoIterator for &'a mut Fields {
+ ast_struct! {
+ /// A field of a struct or enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Field {
+ /// Attributes tagged on the field.
+@@ -154,7 +172,7 @@ ast_enum_of_structs! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -184,7 +202,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A public visibility level: `pub`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisPublic {
+ pub pub_token: Token![pub],
+@@ -194,7 +212,7 @@ ast_struct! {
+ ast_struct! {
+ /// A crate-level visibility: `crate`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisCrate {
+ pub crate_token: Token![crate],
+@@ -205,7 +223,7 @@ ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+@@ -220,12 +238,15 @@ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
++ let _visibility: Visibility = input.parse()?;
+ Ok(Variant {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ ident: input.parse()?,
+ fields: {
+ if input.peek(token::Brace) {
+@@ -295,6 +316,17 @@ pub mod parsing {
+
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
++ // Recognize an empty None-delimited group, as produced by a $:vis
++ // matcher that matched no tokens.
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if group.content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Inherited);
++ }
++ }
++
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else if input.peek(Token![crate]) {
+@@ -310,27 +342,39 @@ pub mod parsing {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+- // TODO: optimize using advance_to
+ let ahead = input.fork();
+- let mut content;
+- parenthesized!(content in ahead);
+
++ let content;
++ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
+- return Ok(Visibility::Restricted(VisRestricted {
+- pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: None,
+- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
+- }));
++ let path = content.call(Ident::parse_any)?;
++
++ // Ensure there are no additional tokens within `content`.
++ // Without explicitly checking, we may misinterpret a tuple
++ // field as a restricted visibility, causing a parse error.
++ // e.g. `pub (crate::A, crate::B)` (Issue #720).
++ if content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Restricted(VisRestricted {
++ pub_token,
++ paren_token,
++ in_token: None,
++ path: Box::new(Path::from(path)),
++ }));
++ }
+ } else if content.peek(Token![in]) {
++ let in_token: Token![in] = content.parse()?;
++ let path = content.call(Path::parse_mod_style)?;
++
++ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: Some(content.parse()?),
+- path: Box::new(content.call(Path::parse_mod_style)?),
++ paren_token,
++ in_token: Some(in_token),
++ path: Box::new(path),
+ }));
+ }
+ }
+@@ -347,6 +391,14 @@ pub mod parsing {
+ }))
+ }
+ }
++
++ #[cfg(feature = "full")]
++ pub(crate) fn is_some(&self) -> bool {
++ match self {
++ Visibility::Inherited => false,
++ _ => true,
++ }
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
+index 8cb9cf7b6d..3fa9d89a93 100644
+--- a/third_party/rust/syn/src/derive.rs
++++ b/third_party/rust/syn/src/derive.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ pub struct DeriveInput {
+ /// Attributes tagged on the whole struct or enum.
+ pub attrs: Vec<Attribute>,
+@@ -26,7 +26,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -53,7 +53,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+@@ -65,7 +65,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+@@ -77,7 +77,7 @@ ast_struct! {
+ ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataUnion {
+ pub union_token: Token![union],
+diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
+index 4d9ff93728..76c9fce6f8 100644
+--- a/third_party/rust/syn/src/discouraged.rs
++++ b/third_party/rust/syn/src/discouraged.rs
+@@ -16,7 +16,7 @@ pub trait Speculative {
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+- /// an `A`, or that the `A`s are finished and its time to start parsing
++ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+@@ -72,7 +72,6 @@ pub trait Speculative {
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+- /// || input.peek(Token![extern])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+@@ -164,6 +163,30 @@ impl<'a> Speculative for ParseBuffer<'a> {
+ panic!("Fork was not derived from the advancing parse stream");
+ }
+
++ let (self_unexp, self_sp) = inner_unexpected(self);
++ let (fork_unexp, fork_sp) = inner_unexpected(fork);
++ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
++ match (fork_sp, self_sp) {
++ // Unexpected set on the fork, but not on `self`, copy it over.
++ (Some(span), None) => {
++ self_unexp.set(Unexpected::Some(span));
++ }
++ // Unexpected unset. Use chain to propagate errors from fork.
++ (None, None) => {
++ fork_unexp.set(Unexpected::Chain(self_unexp));
++
++ // Ensure toplevel 'unexpected' tokens from the fork don't
++ // bubble up the chain by replacing the root `unexpected`
++ // pointer, only 'unexpected' tokens from existing group
++ // parsers should bubble.
++ fork.unexpected
++ .set(Some(Rc::new(Cell::new(Unexpected::None))));
++ }
++ // Unexpected has been set on `self`. No changes needed.
++ (_, Some(_)) => {}
++ }
++ }
++
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
+diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
+index 146d652299..dba34f9254 100644
+--- a/third_party/rust/syn/src/error.rs
++++ b/third_party/rust/syn/src/error.rs
+@@ -1,4 +1,3 @@
+-use std;
+ use std::fmt::{self, Debug, Display};
+ use std::iter::FromIterator;
+ use std::slice;
+@@ -32,8 +31,8 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// conversion to `compile_error!` automatically.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -82,7 +81,6 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// # }
+ /// # }
+ /// ```
+-#[derive(Clone)]
+ pub struct Error {
+ messages: Vec<ErrorMessage>,
+ }
+@@ -250,6 +248,17 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
+ }
+ }
+
++#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
++pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
++ Error {
++ messages: vec![ErrorMessage {
++ start_span: ThreadBound::new(start),
++ end_span: ThreadBound::new(end),
++ message: message.to_string(),
++ }],
++ }
++}
++
+ impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+@@ -278,6 +287,14 @@ impl Display for Error {
+ }
+ }
+
++impl Clone for Error {
++ fn clone(&self) -> Self {
++ Error {
++ messages: self.messages.clone(),
++ }
++ }
++}
++
+ impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ let start = self
+@@ -355,3 +372,11 @@ impl<'a> Iterator for Iter<'a> {
+ })
+ }
+ }
++
++impl Extend<Error> for Error {
++ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
++ for err in iter {
++ self.combine(err);
++ }
++ }
++}
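// A minimal sketch, assuming syn 1.x (1.0.40 or later for the Extend impl
// added above, which folds errors together via Error::combine) and
// proc-macro2 1.x, showing several parse errors merged into one value.
use proc_macro2::Span;
use syn::Error;

fn main() {
    let mut combined = Error::new(Span::call_site(), "first problem");
    combined.extend(vec![
        Error::new(Span::call_site(), "second problem"),
        Error::new(Span::call_site(), "third problem"),
    ]);
    // Each accumulated message renders as its own compile_error! invocation.
    let rendered = combined.to_compile_error().to_string();
    assert_eq!(rendered.matches("compile_error").count(), 3);
}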
+diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
+index 2874a463aa..2fe0e0b5d8 100644
+--- a/third_party/rust/syn/src/expr.rs
++++ b/third_party/rust/syn/src/expr.rs
+@@ -1,18 +1,21 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
++#[cfg(feature = "full")]
++use crate::reserved::Reserved;
+ use proc_macro2::{Span, TokenStream};
+-#[cfg(feature = "extra-traits")]
++#[cfg(feature = "printing")]
++use quote::IdentFragment;
++#[cfg(feature = "printing")]
++use std::fmt::{self, Display};
+ use std::hash::{Hash, Hasher};
+-#[cfg(all(feature = "parsing", feature = "full"))]
++#[cfg(feature = "parsing")]
+ use std::mem;
+
+ ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
++ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+@@ -83,7 +86,7 @@ ast_enum_of_structs! {
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+- pub enum Expr #manual_extra_traits {
++ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+@@ -228,7 +231,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -239,7 +242,7 @@ ast_struct! {
+ ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -251,7 +254,7 @@ ast_struct! {
+ ast_struct! {
+ /// A compound assignment expression: `counter += 1`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssignOp #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -263,7 +266,7 @@ ast_struct! {
+ ast_struct! {
+ /// An async block: `async { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+@@ -275,7 +278,7 @@ ast_struct! {
+ ast_struct! {
+ /// An await expression: `fut.await`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -287,7 +290,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binary operation: `a + b`, `a * b`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+@@ -300,7 +303,7 @@ ast_struct! {
+ ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -311,7 +314,7 @@ ast_struct! {
+ ast_struct! {
+ /// A box expression: `box f`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBox #full {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -323,7 +326,7 @@ ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+@@ -335,7 +338,7 @@ ast_struct! {
+ ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+@@ -348,7 +351,7 @@ ast_struct! {
+ ast_struct! {
+ /// A cast expression: `foo as f64`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+@@ -361,7 +364,7 @@ ast_struct! {
+ ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub asyncness: Option<Token![async]>,
+@@ -378,7 +381,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `continue`, with an optional label.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+@@ -390,7 +393,7 @@ ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -402,7 +405,7 @@ ast_struct! {
+ ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -421,7 +424,7 @@ ast_struct! {
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprGroup #full {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+@@ -436,7 +439,7 @@ ast_struct! {
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+@@ -449,7 +452,7 @@ ast_struct! {
+ ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+@@ -462,7 +465,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -475,7 +478,7 @@ ast_struct! {
+ ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+@@ -486,7 +489,7 @@ ast_struct! {
+ ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -498,7 +501,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMacro #full {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -508,7 +511,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+@@ -521,7 +524,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMethodCall #full {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+@@ -536,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -550,7 +553,7 @@ ast_struct! {
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+@@ -562,7 +565,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub from: Option<Box<Expr>>,
+@@ -574,7 +577,7 @@ ast_struct! {
+ ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReference #full {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -587,7 +590,7 @@ ast_struct! {
+ ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -600,7 +603,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+@@ -614,7 +617,7 @@ ast_struct! {
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprStruct #full {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -628,7 +631,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try-expression: `expr?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -639,7 +642,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try block: `try { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+@@ -650,7 +653,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -661,7 +664,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription expression: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprType #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -673,7 +676,7 @@ ast_struct! {
+ ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+@@ -685,7 +688,7 @@ ast_struct! {
+ ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+@@ -696,7 +699,7 @@ ast_struct! {
+ ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -709,7 +712,7 @@ ast_struct! {
+ ast_struct! {
+ /// A yield expression: `yield expr`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+@@ -717,232 +720,6 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Expr {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Expr {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Expr::Array(this), Expr::Array(other)) => this == other,
+- (Expr::Assign(this), Expr::Assign(other)) => this == other,
+- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
+- (Expr::Async(this), Expr::Async(other)) => this == other,
+- (Expr::Await(this), Expr::Await(other)) => this == other,
+- (Expr::Binary(this), Expr::Binary(other)) => this == other,
+- (Expr::Block(this), Expr::Block(other)) => this == other,
+- (Expr::Box(this), Expr::Box(other)) => this == other,
+- (Expr::Break(this), Expr::Break(other)) => this == other,
+- (Expr::Call(this), Expr::Call(other)) => this == other,
+- (Expr::Cast(this), Expr::Cast(other)) => this == other,
+- (Expr::Closure(this), Expr::Closure(other)) => this == other,
+- (Expr::Continue(this), Expr::Continue(other)) => this == other,
+- (Expr::Field(this), Expr::Field(other)) => this == other,
+- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
+- (Expr::Group(this), Expr::Group(other)) => this == other,
+- (Expr::If(this), Expr::If(other)) => this == other,
+- (Expr::Index(this), Expr::Index(other)) => this == other,
+- (Expr::Let(this), Expr::Let(other)) => this == other,
+- (Expr::Lit(this), Expr::Lit(other)) => this == other,
+- (Expr::Loop(this), Expr::Loop(other)) => this == other,
+- (Expr::Macro(this), Expr::Macro(other)) => this == other,
+- (Expr::Match(this), Expr::Match(other)) => this == other,
+- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
+- (Expr::Paren(this), Expr::Paren(other)) => this == other,
+- (Expr::Path(this), Expr::Path(other)) => this == other,
+- (Expr::Range(this), Expr::Range(other)) => this == other,
+- (Expr::Reference(this), Expr::Reference(other)) => this == other,
+- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
+- (Expr::Return(this), Expr::Return(other)) => this == other,
+- (Expr::Struct(this), Expr::Struct(other)) => this == other,
+- (Expr::Try(this), Expr::Try(other)) => this == other,
+- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
+- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
+- (Expr::Type(this), Expr::Type(other)) => this == other,
+- (Expr::Unary(this), Expr::Unary(other)) => this == other,
+- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
+- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Expr::While(this), Expr::While(other)) => this == other,
+- (Expr::Yield(this), Expr::Yield(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Expr {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Expr::Array(expr) => {
+- hash.write_u8(0);
+- expr.hash(hash);
+- }
+- Expr::Assign(expr) => {
+- hash.write_u8(1);
+- expr.hash(hash);
+- }
+- Expr::AssignOp(expr) => {
+- hash.write_u8(2);
+- expr.hash(hash);
+- }
+- Expr::Async(expr) => {
+- hash.write_u8(3);
+- expr.hash(hash);
+- }
+- Expr::Await(expr) => {
+- hash.write_u8(4);
+- expr.hash(hash);
+- }
+- Expr::Binary(expr) => {
+- hash.write_u8(5);
+- expr.hash(hash);
+- }
+- Expr::Block(expr) => {
+- hash.write_u8(6);
+- expr.hash(hash);
+- }
+- Expr::Box(expr) => {
+- hash.write_u8(7);
+- expr.hash(hash);
+- }
+- Expr::Break(expr) => {
+- hash.write_u8(8);
+- expr.hash(hash);
+- }
+- Expr::Call(expr) => {
+- hash.write_u8(9);
+- expr.hash(hash);
+- }
+- Expr::Cast(expr) => {
+- hash.write_u8(10);
+- expr.hash(hash);
+- }
+- Expr::Closure(expr) => {
+- hash.write_u8(11);
+- expr.hash(hash);
+- }
+- Expr::Continue(expr) => {
+- hash.write_u8(12);
+- expr.hash(hash);
+- }
+- Expr::Field(expr) => {
+- hash.write_u8(13);
+- expr.hash(hash);
+- }
+- Expr::ForLoop(expr) => {
+- hash.write_u8(14);
+- expr.hash(hash);
+- }
+- Expr::Group(expr) => {
+- hash.write_u8(15);
+- expr.hash(hash);
+- }
+- Expr::If(expr) => {
+- hash.write_u8(16);
+- expr.hash(hash);
+- }
+- Expr::Index(expr) => {
+- hash.write_u8(17);
+- expr.hash(hash);
+- }
+- Expr::Let(expr) => {
+- hash.write_u8(18);
+- expr.hash(hash);
+- }
+- Expr::Lit(expr) => {
+- hash.write_u8(19);
+- expr.hash(hash);
+- }
+- Expr::Loop(expr) => {
+- hash.write_u8(20);
+- expr.hash(hash);
+- }
+- Expr::Macro(expr) => {
+- hash.write_u8(21);
+- expr.hash(hash);
+- }
+- Expr::Match(expr) => {
+- hash.write_u8(22);
+- expr.hash(hash);
+- }
+- Expr::MethodCall(expr) => {
+- hash.write_u8(23);
+- expr.hash(hash);
+- }
+- Expr::Paren(expr) => {
+- hash.write_u8(24);
+- expr.hash(hash);
+- }
+- Expr::Path(expr) => {
+- hash.write_u8(25);
+- expr.hash(hash);
+- }
+- Expr::Range(expr) => {
+- hash.write_u8(26);
+- expr.hash(hash);
+- }
+- Expr::Reference(expr) => {
+- hash.write_u8(27);
+- expr.hash(hash);
+- }
+- Expr::Repeat(expr) => {
+- hash.write_u8(28);
+- expr.hash(hash);
+- }
+- Expr::Return(expr) => {
+- hash.write_u8(29);
+- expr.hash(hash);
+- }
+- Expr::Struct(expr) => {
+- hash.write_u8(30);
+- expr.hash(hash);
+- }
+- Expr::Try(expr) => {
+- hash.write_u8(31);
+- expr.hash(hash);
+- }
+- Expr::TryBlock(expr) => {
+- hash.write_u8(32);
+- expr.hash(hash);
+- }
+- Expr::Tuple(expr) => {
+- hash.write_u8(33);
+- expr.hash(hash);
+- }
+- Expr::Type(expr) => {
+- hash.write_u8(34);
+- expr.hash(hash);
+- }
+- Expr::Unary(expr) => {
+- hash.write_u8(35);
+- expr.hash(hash);
+- }
+- Expr::Unsafe(expr) => {
+- hash.write_u8(36);
+- expr.hash(hash);
+- }
+- Expr::Verbatim(expr) => {
+- hash.write_u8(37);
+- TokenStreamHelper(expr).hash(hash);
+- }
+- Expr::While(expr) => {
+- hash.write_u8(38);
+- expr.hash(hash);
+- }
+- Expr::Yield(expr) => {
+- hash.write_u8(39);
+- expr.hash(hash);
+- }
+- Expr::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ impl Expr {
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+@@ -996,7 +773,7 @@ ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum Member {
+ /// A named field like `self.x`.
+@@ -1006,12 +783,50 @@ ast_enum! {
+ }
+ }
+
++impl Eq for Member {}
++
++impl PartialEq for Member {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Member::Named(this), Member::Named(other)) => this == other,
++ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
++ _ => false,
++ }
++ }
++}
++
++impl Hash for Member {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ match self {
++ Member::Named(m) => m.hash(state),
++ Member::Unnamed(m) => m.hash(state),
++ }
++ }
++}
++
++#[cfg(feature = "printing")]
++impl IdentFragment for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(m) => Display::fmt(m, formatter),
++ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
++ }
++ }
++
++ fn span(&self) -> Option<Span> {
++ match self {
++ Member::Named(m) => Some(m.span()),
++ Member::Unnamed(m) => Some(m.span),
++ }
++ }
++}
++
+ ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Index #manual_extra_traits {
++ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
+@@ -1027,28 +842,28 @@ impl From<usize> for Index {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Eq for Index {}
+
+-#[cfg(feature = "extra-traits")]
+ impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+ }
+
+-#[cfg(feature = "full")]
+-ast_struct! {
+- #[derive(Default)]
+- pub struct Reserved {
+- private: (),
++#[cfg(feature = "printing")]
++impl IdentFragment for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ Display::fmt(&self.index, formatter)
++ }
++
++ fn span(&self) -> Option<Span> {
++ Some(self.span)
+ }
+ }
+
+@@ -1057,7 +872,7 @@ ast_struct! {
+ /// The `::<>` explicit type parameters passed to a method call:
+ /// `parse::<u64>()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct MethodTurbofish {
+ pub colon2_token: Token![::],
+ pub lt_token: Token![<],
+@@ -1070,7 +885,7 @@ ast_struct! {
+ ast_enum! {
+ /// An individual generic argument to a method, like `T`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum GenericMethodArgument {
+ /// A type argument.
+ Type(Type),
+@@ -1086,7 +901,7 @@ ast_enum! {
+ ast_struct! {
+ /// A field-value pair in a struct literal.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldValue {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+@@ -1107,7 +922,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+@@ -1134,7 +949,7 @@ ast_struct! {
+ /// # }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+@@ -1149,8 +964,7 @@ ast_struct! {
+ ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+@@ -1162,7 +976,7 @@ ast_enum! {
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ #[cfg(feature = "full")]
+ pub(crate) fn requires_terminator(expr: &Expr) -> bool {
+- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
++ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
+ match *expr {
+ Expr::Unsafe(..)
+ | Expr::Block(..)
+@@ -1183,16 +997,17 @@ pub(crate) mod parsing {
+
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use std::cmp::Ordering;
++
++ crate::custom_keyword!(raw);
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+- #[derive(Copy, Clone)]
+ pub struct AllowStruct(bool);
+
+- #[derive(Copy, Clone, PartialEq, PartialOrd)]
+ enum Precedence {
+ Any,
+ Assign,
+@@ -1246,9 +1061,121 @@ pub(crate) mod parsing {
+ }
+ }
+
+- #[cfg(feature = "full")]
+- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
+- ambiguous_expr(input, AllowStruct(false))
++ impl Expr {
++ /// An alternative to the primary `Expr::parse` parser (from the
++ /// [`Parse`] trait) for ambiguous syntactic positions in which a
++ /// trailing brace should not be taken as part of the expression.
++ ///
++ /// Rust grammar has an ambiguity where braces sometimes turn a path
++ /// expression into a struct initialization and sometimes do not. In the
++ /// following code, the expression `S {}` is one expression. Presumably
++ /// there is an empty struct `struct S {}` defined somewhere which it is
++ /// instantiating.
++ ///
++ /// ```
++ /// # struct S;
++ /// # impl std::ops::Deref for S {
++ /// # type Target = bool;
++ /// # fn deref(&self) -> &Self::Target {
++ /// # &true
++ /// # }
++ /// # }
++ /// let _ = *S {};
++ ///
++ /// // parsed by rustc as: `*(S {})`
++ /// ```
++ ///
++ /// We would want to parse the above using `Expr::parse` after the `=`
++ /// token.
++ ///
++ /// But in the following, `S {}` is *not* a struct init expression.
++ ///
++ /// ```
++ /// # const S: &bool = &true;
++ /// if *S {} {}
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (*S) {
++ /// // /* empty block */
++ /// // }
++ /// // {
++ /// // /* another empty block */
++ /// // }
++ /// ```
++ ///
++ /// For that reason we would want to parse if-conditions using
++ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
++ /// similar syntactic positions such as the condition expr after a
++ /// `while` token or the expr at the top of a `match`.
++ ///
++ /// The Rust grammar's choices around which way this ambiguity is
++ /// resolved at various syntactic positions is fairly arbitrary. Really
++ /// either parse behavior could work in most positions, and language
++ /// designers just decide each case based on which is more likely to be
++ /// what the programmer had in mind most of the time.
++ ///
++ /// ```
++ /// # struct S;
++ /// # fn doc() -> S {
++ /// if return S {} {}
++ /// # unreachable!()
++ /// # }
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (return (S {})) {
++ /// // }
++ /// //
++ /// // but could equally well have been this other arbitrary choice:
++ /// //
++ /// // if (return S) {
++ /// // }
++ /// // {}
++ /// ```
++ ///
++ /// Note the grammar ambiguity on trailing braces is distinct from
++ /// precedence and is not captured by assigning a precedence level to
++ /// the braced struct init expr in relation to other operators. This can
++ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
++ /// parses as `return (0..(S {}))` implying tighter precedence for
++ /// struct init than `..`, while the latter parses as `match (0..S) {}`
++ /// implying tighter precedence for `..` than struct init, a
++ /// contradiction.
++ #[cfg(feature = "full")]
++ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
++ ambiguous_expr(input, AllowStruct(false))
++ }
++ }
++
++ impl Copy for AllowStruct {}
++
++ impl Clone for AllowStruct {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl Copy for Precedence {}
++
++ impl Clone for Precedence {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl PartialEq for Precedence {
++ fn eq(&self, other: &Self) -> bool {
++ *self as u8 == *other as u8
++ }
++ }
++
++ impl PartialOrd for Precedence {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ let this = *self as u8;
++ let other = *other as u8;
++ Some(this.cmp(&other))
++ }
+ }
+
+ #[cfg(feature = "full")]
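// Illustrative sketch, not part of the patch above: the hunk above documents the public
// entry point Expr::parse_without_eager_brace for ambiguous positions where a trailing
// brace must not be taken as a struct literal. A minimal downstream Parse impl using it,
// assuming syn 1.x with the "parsing" and "full" features; the IfLike type is hypothetical.
use syn::parse::{Parse, ParseStream, Result};
use syn::{Block, Expr, Token};

struct IfLike {
    if_token: Token![if],
    cond: Expr,
    then_branch: Block,
}

impl Parse for IfLike {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(IfLike {
            if_token: input.parse()?,
            // A trailing brace after the condition belongs to the block that follows,
            // not to a struct literal inside the condition.
            cond: input.call(Expr::parse_without_eager_brace)?,
            then_branch: input.parse()?,
        })
    }
}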
+@@ -1430,56 +1357,84 @@ pub(crate) mod parsing {
+ parse_expr(input, lhs, allow_struct, Precedence::Any)
+ }
+
++ #[cfg(feature = "full")]
++ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
++ let mut attrs = Vec::new();
++ loop {
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
++ break;
++ }
++ let attr = group.content.call(attr::parsing::single_parse_outer)?;
++ if !group.content.is_empty() {
++ break;
++ }
++ attrs.push(attr);
++ } else if input.peek(Token![#]) {
++ attrs.push(input.call(attr::parsing::single_parse_outer)?);
++ } else {
++ break;
++ }
++ }
++ Ok(attrs)
++ }
++
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![&])
+- || ahead.peek(Token![box])
+- || ahead.peek(Token![*])
+- || ahead.peek(Token![!])
+- || ahead.peek(Token![-])
+- {
+- let attrs = input.call(Attribute::parse_outer)?;
+- if input.peek(Token![&]) {
++ let begin = input.fork();
++ let attrs = input.call(expr_attrs)?;
++ if input.peek(Token![&]) {
++ let and_token: Token![&] = input.parse()?;
++ let raw: Option<raw> =
++ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
++ Some(input.parse()?)
++ } else {
++ None
++ };
++ let mutability: Option<Token![mut]> = input.parse()?;
++ if raw.is_some() && mutability.is_none() {
++ input.parse::<Token![const]>()?;
++ }
++ let expr = Box::new(unary_expr(input, allow_struct)?);
++ if raw.is_some() {
++ Ok(Expr::Verbatim(verbatim::between(begin, input)))
++ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+- and_token: input.parse()?,
++ and_token,
+ raw: Reserved::default(),
+- mutability: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else if input.peek(Token![box]) {
+- Ok(Expr::Box(ExprBox {
+- attrs,
+- box_token: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else {
+- Ok(Expr::Unary(ExprUnary {
+- attrs,
+- op: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
++ mutability,
++ expr,
+ }))
+ }
++ } else if input.peek(Token![box]) {
++ Ok(Expr::Box(ExprBox {
++ attrs,
++ box_token: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
++ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
++ Ok(Expr::Unary(ExprUnary {
++ attrs,
++ op: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
+ } else {
+- trailer_expr(input, allow_struct)
++ trailer_expr(attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
++ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ }))
+@@ -1495,13 +1450,11 @@ pub(crate) mod parsing {
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
+- return input.call(expr_group).map(Expr::Group);
+- }
+-
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+-
++ fn trailer_expr(
++ outer_attrs: Vec<Attribute>,
++ input: ParseStream,
++ allow_struct: AllowStruct,
++ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+@@ -1523,18 +1476,26 @@ pub(crate) mod parsing {
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) {
+- let dot_token: Token![.] = input.parse()?;
++ let mut dot_token: Token![.] = input.parse()?;
+
+- if input.peek(token::Await) {
++ let await_token: Option<token::Await> = input.parse()?;
++ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+- await_token: input.parse()?,
++ await_token,
+ });
+ continue;
+ }
+
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
++
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(MethodTurbofish {
+@@ -1620,10 +1581,17 @@ pub(crate) mod parsing {
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
+ {
++ let mut dot_token: Token![.] = input.parse()?;
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+- dot_token: input.parse()?,
++ dot_token,
+ member: input.parse()?,
+ });
+ } else if input.peek(token::Bracket) {
+@@ -1646,7 +1614,11 @@ pub(crate) mod parsing {
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group)
++ && !input.peek2(Token![::])
++ && !input.peek2(Token![!])
++ && !input.peek2(token::Brace)
++ {
+ input.call(expr_group).map(Expr::Group)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+@@ -1668,7 +1640,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input, allow_struct)
+@@ -1740,7 +1711,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ input.parse().map(Expr::Path)
+@@ -1878,7 +1848,7 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+@@ -1905,7 +1875,7 @@ pub(crate) mod parsing {
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ };
+
+- if input.peek(Token![.]) || input.peek(Token![?]) {
++ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+@@ -1951,7 +1921,16 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
+- // TODO parse const generics as well
++ if input.peek(Lit) {
++ let lit = input.parse()?;
++ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
++ }
++
++ if input.peek(token::Brace) {
++ let block = input.call(expr::parsing::expr_block)?;
++ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
++ }
++
+ input.parse().map(GenericMethodArgument::Type)
+ }
+
+@@ -1960,44 +1939,20 @@ pub(crate) mod parsing {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ eq_token: input.parse()?,
+- expr: Box::new(input.call(expr_no_struct)?),
++ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ Ok(ExprIf {
+- attrs: Vec::new(),
++ attrs,
+ if_token: input.parse()?,
+- cond: Box::new(input.call(expr_no_struct)?),
++ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ then_branch: input.parse()?,
+ else_branch: {
+ if input.peek(Token![else]) {
+@@ -2033,29 +1988,14 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+- let expr: Expr = input.call(expr_no_struct)?;
++ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2063,7 +2003,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ for_token,
+ pat,
+@@ -2077,6 +2017,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+@@ -2086,7 +2027,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+@@ -2097,8 +2038,9 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+- let expr = expr_no_struct(input)?;
++ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2110,7 +2052,7 @@ pub(crate) mod parsing {
+ }
+
+ Ok(ExprMatch {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+@@ -2305,9 +2247,10 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+- let cond = expr_no_struct(input)?;
++ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2315,7 +2258,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ while_token,
+ cond: Box::new(cond),
+@@ -2399,6 +2342,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+@@ -2416,7 +2360,7 @@ pub(crate) mod parsing {
+ };
+
+ Ok(FieldValue {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token,
+ expr: value,
+@@ -2433,46 +2377,36 @@ pub(crate) mod parsing {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let attrs = private::attrs(outer_attrs, inner_attrs);
+
+ let mut fields = Punctuated::new();
+- loop {
+- let attrs = content.call(Attribute::parse_outer)?;
+- // TODO: optimize using advance_to
+- if content.fork().parse::<Member>().is_err() {
+- if attrs.is_empty() {
+- break;
+- } else {
+- return Err(content.error("expected struct field"));
+- }
++ while !content.is_empty() {
++ if content.peek(Token![..]) {
++ return Ok(ExprStruct {
++ attrs,
++ brace_token,
++ path,
++ fields,
++ dot2_token: Some(content.parse()?),
++ rest: Some(Box::new(content.parse()?)),
++ });
+ }
+
+- fields.push(FieldValue {
+- attrs,
+- ..content.parse()?
+- });
+-
+- if !content.peek(Token![,]) {
++ fields.push(content.parse()?);
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
+- let dot2_token: Token![..] = content.parse()?;
+- let rest: Expr = content.parse()?;
+- (Some(dot2_token), Some(Box::new(rest)))
+- } else {
+- (None, None)
+- };
+-
+ Ok(ExprStruct {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ brace_token,
+ path,
+ fields,
+- dot2_token,
+- rest,
++ dot2_token: None,
++ rest: None,
+ })
+ }
+
+@@ -2577,27 +2511,7 @@ pub(crate) mod parsing {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+@@ -2641,6 +2555,26 @@ pub(crate) mod parsing {
+ }
+ }
+
++ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
++ let mut float_repr = float.to_string();
++ let trailing_dot = float_repr.ends_with('.');
++ if trailing_dot {
++ float_repr.truncate(float_repr.len() - 1);
++ }
++ for part in float_repr.split('.') {
++ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
++ let base = mem::replace(e, Expr::__Nonexhaustive);
++ *e = Expr::Field(ExprField {
++ attrs: Vec::new(),
++ base: Box::new(base),
++ dot_token: Token![.](dot_token.span),
++ member: Member::Unnamed(index),
++ });
++ *dot_token = Token![.](float.span());
++ }
++ Ok(!trailing_dot)
++ }
++
+ #[cfg(feature = "full")]
+ impl Member {
+ fn is_named(&self) -> bool {
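// Illustrative sketch, not part of the patch: the multi_index() helper added above splits a
// float token such as the `0.1` in `a.0.1` (which the lexer hands over as `a`, `.`, `0.1`)
// into nested unnamed field accesses. Assuming syn 1.x with the "parsing" and "full"
// features, the resulting tree looks like this:
fn main() {
    let expr: syn::Expr = syn::parse_str("a.0.1").unwrap();
    if let syn::Expr::Field(outer) = expr {
        if let syn::Member::Unnamed(i) = &outer.member {
            assert_eq!(i.index, 1); // outermost node is the `.1` access
        }
        if let syn::Expr::Field(inner) = outer.base.as_ref() {
            if let syn::Member::Unnamed(i) = &inner.member {
                assert_eq!(i.index, 0); // its base is the `.0` access on `a`
            }
        }
    }
}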
+diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
+index d09577a27a..4f9bc145d9 100644
+--- a/third_party/rust/syn/src/ext.rs
++++ b/third_party/rust/syn/src/ext.rs
+@@ -1,6 +1,6 @@
+ //! Extension traits to provide parsing methods on foreign types.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ use proc_macro2::Ident;
+
+@@ -16,7 +16,7 @@ use crate::token::CustomToken;
+ /// This trait is sealed and cannot be implemented for types outside of Syn. It
+ /// is implemented only for `proc_macro2::Ident`.
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+@@ -129,7 +129,13 @@ mod private {
+
+ impl Sealed for Ident {}
+
+- #[derive(Copy, Clone)]
+ pub struct PeekFn;
+ pub struct IdentAny;
++
++ impl Copy for PeekFn {}
++ impl Clone for PeekFn {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
+ }
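// Illustrative sketch, not part of the patch: the IdentExt trait documented above lets a
// parser accept any identifier, including keywords. Assuming syn 1.x with the "parsing"
// feature; AnyIdent is a hypothetical wrapper type.
use proc_macro2::Ident;
use syn::ext::IdentExt;
use syn::parse::{Parse, ParseStream, Result};

struct AnyIdent(Ident);

impl Parse for AnyIdent {
    fn parse(input: ParseStream) -> Result<Self> {
        // `input.parse::<Ident>()` would reject keywords such as `struct`;
        // `Ident::parse_any` accepts them.
        Ok(AnyIdent(input.call(Ident::parse_any)?))
    }
}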
+diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
+index 88c02fe832..c8fab63cd9 100644
+--- a/third_party/rust/syn/src/file.rs
++++ b/third_party/rust/syn/src/file.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Example
+ ///
+@@ -37,6 +37,8 @@ ast_struct! {
+ /// file.read_to_string(&mut src).expect("Unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("Unable to parse file");
++ ///
++ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
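// Illustrative sketch, not part of the patch: the doc line added above notes that the Debug
// output of a parsed syn::File depends on the "extra-traits" feature. A minimal
// self-contained variant of that example, assuming syn 1.x with the "full", "parsing",
// and "extra-traits" features enabled:
fn main() {
    let src = "fn main() { println!(\"hello\"); }";
    let syntax: syn::File = syn::parse_file(src).expect("unable to parse source");
    println!("{:#?}", syntax); // Debug formatting requires "extra-traits"
}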
+diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
+new file mode 100644
+index 0000000000..bea3887013
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/clone.rs
+@@ -0,0 +1,2051 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Abi {
++ fn clone(&self) -> Self {
++ Abi {
++ extern_token: self.extern_token.clone(),
++ name: self.name.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AngleBracketedGenericArguments {
++ fn clone(&self) -> Self {
++ AngleBracketedGenericArguments {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Arm {
++ fn clone(&self) -> Self {
++ Arm {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ guard: self.guard.clone(),
++ fat_arrow_token: self.fat_arrow_token.clone(),
++ body: self.body.clone(),
++ comma: self.comma.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AttrStyle {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Attribute {
++ fn clone(&self) -> Self {
++ Attribute {
++ pound_token: self.pound_token.clone(),
++ style: self.style.clone(),
++ bracket_token: self.bracket_token.clone(),
++ path: self.path.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BareFnArg {
++ fn clone(&self) -> Self {
++ BareFnArg {
++ attrs: self.attrs.clone(),
++ name: self.name.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BinOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Binding {
++ fn clone(&self) -> Self {
++ Binding {
++ ident: self.ident.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Block {
++ fn clone(&self) -> Self {
++ Block {
++ brace_token: self.brace_token.clone(),
++ stmts: self.stmts.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BoundLifetimes {
++ fn clone(&self) -> Self {
++ BoundLifetimes {
++ for_token: self.for_token.clone(),
++ lt_token: self.lt_token.clone(),
++ lifetimes: self.lifetimes.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ConstParam {
++ fn clone(&self) -> Self {
++ ConstParam {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Constraint {
++ fn clone(&self) -> Self {
++ Constraint {
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for Data {
++ fn clone(&self) -> Self {
++ match self {
++ Data::Struct(v0) => Data::Struct(v0.clone()),
++ Data::Enum(v0) => Data::Enum(v0.clone()),
++ Data::Union(v0) => Data::Union(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataEnum {
++ fn clone(&self) -> Self {
++ DataEnum {
++ enum_token: self.enum_token.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataStruct {
++ fn clone(&self) -> Self {
++ DataStruct {
++ struct_token: self.struct_token.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataUnion {
++ fn clone(&self) -> Self {
++ DataUnion {
++ union_token: self.union_token.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DeriveInput {
++ fn clone(&self) -> Self {
++ DeriveInput {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ data: self.data.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Expr {
++ fn clone(&self) -> Self {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => Expr::Array(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => Expr::Assign(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => Expr::Async(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => Expr::Await(v0.clone()),
++ Expr::Binary(v0) => Expr::Binary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => Expr::Block(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => Expr::Box(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => Expr::Break(v0.clone()),
++ Expr::Call(v0) => Expr::Call(v0.clone()),
++ Expr::Cast(v0) => Expr::Cast(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => Expr::Closure(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => Expr::Continue(v0.clone()),
++ Expr::Field(v0) => Expr::Field(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => Expr::Group(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::If(v0) => Expr::If(v0.clone()),
++ Expr::Index(v0) => Expr::Index(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => Expr::Let(v0.clone()),
++ Expr::Lit(v0) => Expr::Lit(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => Expr::Loop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => Expr::Macro(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => Expr::Match(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
++ Expr::Paren(v0) => Expr::Paren(v0.clone()),
++ Expr::Path(v0) => Expr::Path(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => Expr::Range(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => Expr::Reference(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => Expr::Return(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => Expr::Struct(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => Expr::Try(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => Expr::Type(v0.clone()),
++ Expr::Unary(v0) => Expr::Unary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
++ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::While(v0) => Expr::While(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => Expr::Yield(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprArray {
++ fn clone(&self) -> Self {
++ ExprArray {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssign {
++ fn clone(&self) -> Self {
++ ExprAssign {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ eq_token: self.eq_token.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssignOp {
++ fn clone(&self) -> Self {
++ ExprAssignOp {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAsync {
++ fn clone(&self) -> Self {
++ ExprAsync {
++ attrs: self.attrs.clone(),
++ async_token: self.async_token.clone(),
++ capture: self.capture.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAwait {
++ fn clone(&self) -> Self {
++ ExprAwait {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ await_token: self.await_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprBinary {
++ fn clone(&self) -> Self {
++ ExprBinary {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBlock {
++ fn clone(&self) -> Self {
++ ExprBlock {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBox {
++ fn clone(&self) -> Self {
++ ExprBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBreak {
++ fn clone(&self) -> Self {
++ ExprBreak {
++ attrs: self.attrs.clone(),
++ break_token: self.break_token.clone(),
++ label: self.label.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCall {
++ fn clone(&self) -> Self {
++ ExprCall {
++ attrs: self.attrs.clone(),
++ func: self.func.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCast {
++ fn clone(&self) -> Self {
++ ExprCast {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ as_token: self.as_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprClosure {
++ fn clone(&self) -> Self {
++ ExprClosure {
++ attrs: self.attrs.clone(),
++ asyncness: self.asyncness.clone(),
++ movability: self.movability.clone(),
++ capture: self.capture.clone(),
++ or1_token: self.or1_token.clone(),
++ inputs: self.inputs.clone(),
++ or2_token: self.or2_token.clone(),
++ output: self.output.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprContinue {
++ fn clone(&self) -> Self {
++ ExprContinue {
++ attrs: self.attrs.clone(),
++ continue_token: self.continue_token.clone(),
++ label: self.label.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprField {
++ fn clone(&self) -> Self {
++ ExprField {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ member: self.member.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprForLoop {
++ fn clone(&self) -> Self {
++ ExprForLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ for_token: self.for_token.clone(),
++ pat: self.pat.clone(),
++ in_token: self.in_token.clone(),
++ expr: self.expr.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprGroup {
++ fn clone(&self) -> Self {
++ ExprGroup {
++ attrs: self.attrs.clone(),
++ group_token: self.group_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprIf {
++ fn clone(&self) -> Self {
++ ExprIf {
++ attrs: self.attrs.clone(),
++ if_token: self.if_token.clone(),
++ cond: self.cond.clone(),
++ then_branch: self.then_branch.clone(),
++ else_branch: self.else_branch.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprIndex {
++ fn clone(&self) -> Self {
++ ExprIndex {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ bracket_token: self.bracket_token.clone(),
++ index: self.index.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLet {
++ fn clone(&self) -> Self {
++ ExprLet {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprLit {
++ fn clone(&self) -> Self {
++ ExprLit {
++ attrs: self.attrs.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLoop {
++ fn clone(&self) -> Self {
++ ExprLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ loop_token: self.loop_token.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMacro {
++ fn clone(&self) -> Self {
++ ExprMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMatch {
++ fn clone(&self) -> Self {
++ ExprMatch {
++ attrs: self.attrs.clone(),
++ match_token: self.match_token.clone(),
++ expr: self.expr.clone(),
++ brace_token: self.brace_token.clone(),
++ arms: self.arms.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMethodCall {
++ fn clone(&self) -> Self {
++ ExprMethodCall {
++ attrs: self.attrs.clone(),
++ receiver: self.receiver.clone(),
++ dot_token: self.dot_token.clone(),
++ method: self.method.clone(),
++ turbofish: self.turbofish.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprParen {
++ fn clone(&self) -> Self {
++ ExprParen {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprPath {
++ fn clone(&self) -> Self {
++ ExprPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRange {
++ fn clone(&self) -> Self {
++ ExprRange {
++ attrs: self.attrs.clone(),
++ from: self.from.clone(),
++ limits: self.limits.clone(),
++ to: self.to.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReference {
++ fn clone(&self) -> Self {
++ ExprReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ raw: self.raw.clone(),
++ mutability: self.mutability.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRepeat {
++ fn clone(&self) -> Self {
++ ExprRepeat {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReturn {
++ fn clone(&self) -> Self {
++ ExprReturn {
++ attrs: self.attrs.clone(),
++ return_token: self.return_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprStruct {
++ fn clone(&self) -> Self {
++ ExprStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ rest: self.rest.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTry {
++ fn clone(&self) -> Self {
++ ExprTry {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ question_token: self.question_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTryBlock {
++ fn clone(&self) -> Self {
++ ExprTryBlock {
++ attrs: self.attrs.clone(),
++ try_token: self.try_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTuple {
++ fn clone(&self) -> Self {
++ ExprTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprType {
++ fn clone(&self) -> Self {
++ ExprType {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprUnary {
++ fn clone(&self) -> Self {
++ ExprUnary {
++ attrs: self.attrs.clone(),
++ op: self.op.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprUnsafe {
++ fn clone(&self) -> Self {
++ ExprUnsafe {
++ attrs: self.attrs.clone(),
++ unsafe_token: self.unsafe_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprWhile {
++ fn clone(&self) -> Self {
++ ExprWhile {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ while_token: self.while_token.clone(),
++ cond: self.cond.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprYield {
++ fn clone(&self) -> Self {
++ ExprYield {
++ attrs: self.attrs.clone(),
++ yield_token: self.yield_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Field {
++ fn clone(&self) -> Self {
++ Field {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldPat {
++ fn clone(&self) -> Self {
++ FieldPat {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldValue {
++ fn clone(&self) -> Self {
++ FieldValue {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Fields {
++ fn clone(&self) -> Self {
++ match self {
++ Fields::Named(v0) => Fields::Named(v0.clone()),
++ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
++ Fields::Unit => Fields::Unit,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsNamed {
++ fn clone(&self) -> Self {
++ FieldsNamed {
++ brace_token: self.brace_token.clone(),
++ named: self.named.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsUnnamed {
++ fn clone(&self) -> Self {
++ FieldsUnnamed {
++ paren_token: self.paren_token.clone(),
++ unnamed: self.unnamed.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for File {
++ fn clone(&self) -> Self {
++ File {
++ shebang: self.shebang.clone(),
++ attrs: self.attrs.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FnArg {
++ fn clone(&self) -> Self {
++ match self {
++ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
++ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItem {
++ fn clone(&self) -> Self {
++ match self {
++ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
++ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
++ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
++ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
++ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemFn {
++ fn clone(&self) -> Self {
++ ForeignItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemMacro {
++ fn clone(&self) -> Self {
++ ForeignItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemStatic {
++ fn clone(&self) -> Self {
++ ForeignItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemType {
++ fn clone(&self) -> Self {
++ ForeignItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
++ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
++ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
++ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
++ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for GenericMethodArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
++ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericParam {
++ fn clone(&self) -> Self {
++ match self {
++ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
++ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
++ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Generics {
++ fn clone(&self) -> Self {
++ Generics {
++ lt_token: self.lt_token.clone(),
++ params: self.params.clone(),
++ gt_token: self.gt_token.clone(),
++ where_clause: self.where_clause.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItem {
++ fn clone(&self) -> Self {
++ match self {
++ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
++ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
++ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
++ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
++ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemConst {
++ fn clone(&self) -> Self {
++ ImplItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMacro {
++ fn clone(&self) -> Self {
++ ImplItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMethod {
++ fn clone(&self) -> Self {
++ ImplItemMethod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemType {
++ fn clone(&self) -> Self {
++ ImplItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Index {
++ fn clone(&self) -> Self {
++ Index {
++ index: self.index.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Item {
++ fn clone(&self) -> Self {
++ match self {
++ Item::Const(v0) => Item::Const(v0.clone()),
++ Item::Enum(v0) => Item::Enum(v0.clone()),
++ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
++ Item::Fn(v0) => Item::Fn(v0.clone()),
++ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
++ Item::Impl(v0) => Item::Impl(v0.clone()),
++ Item::Macro(v0) => Item::Macro(v0.clone()),
++ Item::Macro2(v0) => Item::Macro2(v0.clone()),
++ Item::Mod(v0) => Item::Mod(v0.clone()),
++ Item::Static(v0) => Item::Static(v0.clone()),
++ Item::Struct(v0) => Item::Struct(v0.clone()),
++ Item::Trait(v0) => Item::Trait(v0.clone()),
++ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
++ Item::Type(v0) => Item::Type(v0.clone()),
++ Item::Union(v0) => Item::Union(v0.clone()),
++ Item::Use(v0) => Item::Use(v0.clone()),
++ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemConst {
++ fn clone(&self) -> Self {
++ ItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemEnum {
++ fn clone(&self) -> Self {
++ ItemEnum {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ enum_token: self.enum_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemExternCrate {
++ fn clone(&self) -> Self {
++ ItemExternCrate {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ extern_token: self.extern_token.clone(),
++ crate_token: self.crate_token.clone(),
++ ident: self.ident.clone(),
++ rename: self.rename.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemFn {
++ fn clone(&self) -> Self {
++ ItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemForeignMod {
++ fn clone(&self) -> Self {
++ ItemForeignMod {
++ attrs: self.attrs.clone(),
++ abi: self.abi.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemImpl {
++ fn clone(&self) -> Self {
++ ItemImpl {
++ attrs: self.attrs.clone(),
++ defaultness: self.defaultness.clone(),
++ unsafety: self.unsafety.clone(),
++ impl_token: self.impl_token.clone(),
++ generics: self.generics.clone(),
++ trait_: self.trait_.clone(),
++ self_ty: self.self_ty.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro {
++ fn clone(&self) -> Self {
++ ItemMacro {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro2 {
++ fn clone(&self) -> Self {
++ ItemMacro2 {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ macro_token: self.macro_token.clone(),
++ ident: self.ident.clone(),
++ rules: self.rules.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMod {
++ fn clone(&self) -> Self {
++ ItemMod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ mod_token: self.mod_token.clone(),
++ ident: self.ident.clone(),
++ content: self.content.clone(),
++ semi: self.semi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStatic {
++ fn clone(&self) -> Self {
++ ItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStruct {
++ fn clone(&self) -> Self {
++ ItemStruct {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ struct_token: self.struct_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTrait {
++ fn clone(&self) -> Self {
++ ItemTrait {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ unsafety: self.unsafety.clone(),
++ auto_token: self.auto_token.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ supertraits: self.supertraits.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTraitAlias {
++ fn clone(&self) -> Self {
++ ItemTraitAlias {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ bounds: self.bounds.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemType {
++ fn clone(&self) -> Self {
++ ItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUnion {
++ fn clone(&self) -> Self {
++ ItemUnion {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ union_token: self.union_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUse {
++ fn clone(&self) -> Self {
++ ItemUse {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ use_token: self.use_token.clone(),
++ leading_colon: self.leading_colon.clone(),
++ tree: self.tree.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Label {
++ fn clone(&self) -> Self {
++ Label {
++ name: self.name.clone(),
++ colon_token: self.colon_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for LifetimeDef {
++ fn clone(&self) -> Self {
++ LifetimeDef {
++ attrs: self.attrs.clone(),
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++impl Clone for Lit {
++ fn clone(&self) -> Self {
++ match self {
++ Lit::Str(v0) => Lit::Str(v0.clone()),
++ Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()),
++ Lit::Byte(v0) => Lit::Byte(v0.clone()),
++ Lit::Char(v0) => Lit::Char(v0.clone()),
++ Lit::Int(v0) => Lit::Int(v0.clone()),
++ Lit::Float(v0) => Lit::Float(v0.clone()),
++ Lit::Bool(v0) => Lit::Bool(v0.clone()),
++ Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()),
++ }
++ }
++}
++impl Clone for LitBool {
++ fn clone(&self) -> Self {
++ LitBool {
++ value: self.value.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Local {
++ fn clone(&self) -> Self {
++ Local {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ init: self.init.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Macro {
++ fn clone(&self) -> Self {
++ Macro {
++ path: self.path.clone(),
++ bang_token: self.bang_token.clone(),
++ delimiter: self.delimiter.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MacroDelimiter {
++ fn clone(&self) -> Self {
++ match self {
++ MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()),
++ MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()),
++ MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Member {
++ fn clone(&self) -> Self {
++ match self {
++ Member::Named(v0) => Member::Named(v0.clone()),
++ Member::Unnamed(v0) => Member::Unnamed(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Meta {
++ fn clone(&self) -> Self {
++ match self {
++ Meta::Path(v0) => Meta::Path(v0.clone()),
++ Meta::List(v0) => Meta::List(v0.clone()),
++ Meta::NameValue(v0) => Meta::NameValue(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaList {
++ fn clone(&self) -> Self {
++ MetaList {
++ path: self.path.clone(),
++ paren_token: self.paren_token.clone(),
++ nested: self.nested.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaNameValue {
++ fn clone(&self) -> Self {
++ MetaNameValue {
++ path: self.path.clone(),
++ eq_token: self.eq_token.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for MethodTurbofish {
++ fn clone(&self) -> Self {
++ MethodTurbofish {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for NestedMeta {
++ fn clone(&self) -> Self {
++ match self {
++ NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
++ NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ParenthesizedGenericArguments {
++ fn clone(&self) -> Self {
++ ParenthesizedGenericArguments {
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Pat {
++ fn clone(&self) -> Self {
++ match self {
++ Pat::Box(v0) => Pat::Box(v0.clone()),
++ Pat::Ident(v0) => Pat::Ident(v0.clone()),
++ Pat::Lit(v0) => Pat::Lit(v0.clone()),
++ Pat::Macro(v0) => Pat::Macro(v0.clone()),
++ Pat::Or(v0) => Pat::Or(v0.clone()),
++ Pat::Path(v0) => Pat::Path(v0.clone()),
++ Pat::Range(v0) => Pat::Range(v0.clone()),
++ Pat::Reference(v0) => Pat::Reference(v0.clone()),
++ Pat::Rest(v0) => Pat::Rest(v0.clone()),
++ Pat::Slice(v0) => Pat::Slice(v0.clone()),
++ Pat::Struct(v0) => Pat::Struct(v0.clone()),
++ Pat::Tuple(v0) => Pat::Tuple(v0.clone()),
++ Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()),
++ Pat::Type(v0) => Pat::Type(v0.clone()),
++ Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
++ Pat::Wild(v0) => Pat::Wild(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatBox {
++ fn clone(&self) -> Self {
++ PatBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatIdent {
++ fn clone(&self) -> Self {
++ PatIdent {
++ attrs: self.attrs.clone(),
++ by_ref: self.by_ref.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ subpat: self.subpat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatLit {
++ fn clone(&self) -> Self {
++ PatLit {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatMacro {
++ fn clone(&self) -> Self {
++ PatMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatOr {
++ fn clone(&self) -> Self {
++ PatOr {
++ attrs: self.attrs.clone(),
++ leading_vert: self.leading_vert.clone(),
++ cases: self.cases.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatPath {
++ fn clone(&self) -> Self {
++ PatPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRange {
++ fn clone(&self) -> Self {
++ PatRange {
++ attrs: self.attrs.clone(),
++ lo: self.lo.clone(),
++ limits: self.limits.clone(),
++ hi: self.hi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatReference {
++ fn clone(&self) -> Self {
++ PatReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ mutability: self.mutability.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRest {
++ fn clone(&self) -> Self {
++ PatRest {
++ attrs: self.attrs.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatSlice {
++ fn clone(&self) -> Self {
++ PatSlice {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatStruct {
++ fn clone(&self) -> Self {
++ PatStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTuple {
++ fn clone(&self) -> Self {
++ PatTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTupleStruct {
++ fn clone(&self) -> Self {
++ PatTupleStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatType {
++ fn clone(&self) -> Self {
++ PatType {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatWild {
++ fn clone(&self) -> Self {
++ PatWild {
++ attrs: self.attrs.clone(),
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Path {
++ fn clone(&self) -> Self {
++ Path {
++ leading_colon: self.leading_colon.clone(),
++ segments: self.segments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathArguments {
++ fn clone(&self) -> Self {
++ match self {
++ PathArguments::None => PathArguments::None,
++ PathArguments::AngleBracketed(v0) => PathArguments::AngleBracketed(v0.clone()),
++ PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathSegment {
++ fn clone(&self) -> Self {
++ PathSegment {
++ ident: self.ident.clone(),
++ arguments: self.arguments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateEq {
++ fn clone(&self) -> Self {
++ PredicateEq {
++ lhs_ty: self.lhs_ty.clone(),
++ eq_token: self.eq_token.clone(),
++ rhs_ty: self.rhs_ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateLifetime {
++ fn clone(&self) -> Self {
++ PredicateLifetime {
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateType {
++ fn clone(&self) -> Self {
++ PredicateType {
++ lifetimes: self.lifetimes.clone(),
++ bounded_ty: self.bounded_ty.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for QSelf {
++ fn clone(&self) -> Self {
++ QSelf {
++ lt_token: self.lt_token.clone(),
++ ty: self.ty.clone(),
++ position: self.position.clone(),
++ as_token: self.as_token.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Copy for RangeLimits {}
++#[cfg(feature = "full")]
++impl Clone for RangeLimits {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Receiver {
++ fn clone(&self) -> Self {
++ Receiver {
++ attrs: self.attrs.clone(),
++ reference: self.reference.clone(),
++ mutability: self.mutability.clone(),
++ self_token: self.self_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ReturnType {
++ fn clone(&self) -> Self {
++ match self {
++ ReturnType::Default => ReturnType::Default,
++ ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Signature {
++ fn clone(&self) -> Self {
++ Signature {
++ constness: self.constness.clone(),
++ asyncness: self.asyncness.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Stmt {
++ fn clone(&self) -> Self {
++ match self {
++ Stmt::Local(v0) => Stmt::Local(v0.clone()),
++ Stmt::Item(v0) => Stmt::Item(v0.clone()),
++ Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
++ Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBound {
++ fn clone(&self) -> Self {
++ TraitBound {
++ paren_token: self.paren_token.clone(),
++ modifier: self.modifier.clone(),
++ lifetimes: self.lifetimes.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBoundModifier {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItem {
++ fn clone(&self) -> Self {
++ match self {
++ TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
++ TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
++ TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
++ TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
++ TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemConst {
++ fn clone(&self) -> Self {
++ TraitItemConst {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMacro {
++ fn clone(&self) -> Self {
++ TraitItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMethod {
++ fn clone(&self) -> Self {
++ TraitItemMethod {
++ attrs: self.attrs.clone(),
++ sig: self.sig.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemType {
++ fn clone(&self) -> Self {
++ TraitItemType {
++ attrs: self.attrs.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Type {
++ fn clone(&self) -> Self {
++ match self {
++ Type::Array(v0) => Type::Array(v0.clone()),
++ Type::BareFn(v0) => Type::BareFn(v0.clone()),
++ Type::Group(v0) => Type::Group(v0.clone()),
++ Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()),
++ Type::Infer(v0) => Type::Infer(v0.clone()),
++ Type::Macro(v0) => Type::Macro(v0.clone()),
++ Type::Never(v0) => Type::Never(v0.clone()),
++ Type::Paren(v0) => Type::Paren(v0.clone()),
++ Type::Path(v0) => Type::Path(v0.clone()),
++ Type::Ptr(v0) => Type::Ptr(v0.clone()),
++ Type::Reference(v0) => Type::Reference(v0.clone()),
++ Type::Slice(v0) => Type::Slice(v0.clone()),
++ Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
++ Type::Tuple(v0) => Type::Tuple(v0.clone()),
++ Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeArray {
++ fn clone(&self) -> Self {
++ TypeArray {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeBareFn {
++ fn clone(&self) -> Self {
++ TypeBareFn {
++ lifetimes: self.lifetimes.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeGroup {
++ fn clone(&self) -> Self {
++ TypeGroup {
++ group_token: self.group_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeImplTrait {
++ fn clone(&self) -> Self {
++ TypeImplTrait {
++ impl_token: self.impl_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeInfer {
++ fn clone(&self) -> Self {
++ TypeInfer {
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeMacro {
++ fn clone(&self) -> Self {
++ TypeMacro {
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeNever {
++ fn clone(&self) -> Self {
++ TypeNever {
++ bang_token: self.bang_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParam {
++ fn clone(&self) -> Self {
++ TypeParam {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParamBound {
++ fn clone(&self) -> Self {
++ match self {
++ TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
++ TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParen {
++ fn clone(&self) -> Self {
++ TypeParen {
++ paren_token: self.paren_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePath {
++ fn clone(&self) -> Self {
++ TypePath {
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePtr {
++ fn clone(&self) -> Self {
++ TypePtr {
++ star_token: self.star_token.clone(),
++ const_token: self.const_token.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeReference {
++ fn clone(&self) -> Self {
++ TypeReference {
++ and_token: self.and_token.clone(),
++ lifetime: self.lifetime.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeSlice {
++ fn clone(&self) -> Self {
++ TypeSlice {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTraitObject {
++ fn clone(&self) -> Self {
++ TypeTraitObject {
++ dyn_token: self.dyn_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTuple {
++ fn clone(&self) -> Self {
++ TypeTuple {
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for UnOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGlob {
++ fn clone(&self) -> Self {
++ UseGlob {
++ star_token: self.star_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGroup {
++ fn clone(&self) -> Self {
++ UseGroup {
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseName {
++ fn clone(&self) -> Self {
++ UseName {
++ ident: self.ident.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UsePath {
++ fn clone(&self) -> Self {
++ UsePath {
++ ident: self.ident.clone(),
++ colon2_token: self.colon2_token.clone(),
++ tree: self.tree.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseRename {
++ fn clone(&self) -> Self {
++ UseRename {
++ ident: self.ident.clone(),
++ as_token: self.as_token.clone(),
++ rename: self.rename.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseTree {
++ fn clone(&self) -> Self {
++ match self {
++ UseTree::Path(v0) => UseTree::Path(v0.clone()),
++ UseTree::Name(v0) => UseTree::Name(v0.clone()),
++ UseTree::Rename(v0) => UseTree::Rename(v0.clone()),
++ UseTree::Glob(v0) => UseTree::Glob(v0.clone()),
++ UseTree::Group(v0) => UseTree::Group(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variadic {
++ fn clone(&self) -> Self {
++ Variadic {
++ attrs: self.attrs.clone(),
++ dots: self.dots.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variant {
++ fn clone(&self) -> Self {
++ Variant {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ fields: self.fields.clone(),
++ discriminant: self.discriminant.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisCrate {
++ fn clone(&self) -> Self {
++ VisCrate {
++ crate_token: self.crate_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisPublic {
++ fn clone(&self) -> Self {
++ VisPublic {
++ pub_token: self.pub_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisRestricted {
++ fn clone(&self) -> Self {
++ VisRestricted {
++ pub_token: self.pub_token.clone(),
++ paren_token: self.paren_token.clone(),
++ in_token: self.in_token.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Visibility {
++ fn clone(&self) -> Self {
++ match self {
++ Visibility::Public(v0) => Visibility::Public(v0.clone()),
++ Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
++ Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
++ Visibility::Inherited => Visibility::Inherited,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WhereClause {
++ fn clone(&self) -> Self {
++ WhereClause {
++ where_token: self.where_token.clone(),
++ predicates: self.predicates.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WherePredicate {
++ fn clone(&self) -> Self {
++ match self {
++ WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
++ WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
++ WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs
+new file mode 100644
+index 0000000000..72baab05f4
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/debug.rs
+@@ -0,0 +1,2857 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++use crate::*;
++use std::fmt::{self, Debug};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Abi {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Abi");
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("name", &self.name);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AngleBracketedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Arm {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Arm");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("guard", &self.guard);
++ formatter.field("fat_arrow_token", &self.fat_arrow_token);
++ formatter.field("body", &self.body);
++ formatter.field("comma", &self.comma);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AttrStyle {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ AttrStyle::Outer => formatter.write_str("Outer"),
++ AttrStyle::Inner(v0) => {
++ let mut formatter = formatter.debug_tuple("Inner");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Attribute {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Attribute");
++ formatter.field("pound_token", &self.pound_token);
++ formatter.field("style", &self.style);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("path", &self.path);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BareFnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BareFnArg");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("name", &self.name);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BinOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ BinOp::Add(v0) => {
++ let mut formatter = formatter.debug_tuple("Add");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Sub(v0) => {
++ let mut formatter = formatter.debug_tuple("Sub");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Mul(v0) => {
++ let mut formatter = formatter.debug_tuple("Mul");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Div(v0) => {
++ let mut formatter = formatter.debug_tuple("Div");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Rem(v0) => {
++ let mut formatter = formatter.debug_tuple("Rem");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::And(v0) => {
++ let mut formatter = formatter.debug_tuple("And");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXor(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXor");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAnd(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAnd");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOr(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shl(v0) => {
++ let mut formatter = formatter.debug_tuple("Shl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shr(v0) => {
++ let mut formatter = formatter.debug_tuple("Shr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Lt(v0) => {
++ let mut formatter = formatter.debug_tuple("Lt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Le(v0) => {
++ let mut formatter = formatter.debug_tuple("Le");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ne(v0) => {
++ let mut formatter = formatter.debug_tuple("Ne");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ge(v0) => {
++ let mut formatter = formatter.debug_tuple("Ge");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Gt(v0) => {
++ let mut formatter = formatter.debug_tuple("Gt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::AddEq(v0) => {
++ let mut formatter = formatter.debug_tuple("AddEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::SubEq(v0) => {
++ let mut formatter = formatter.debug_tuple("SubEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::MulEq(v0) => {
++ let mut formatter = formatter.debug_tuple("MulEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::DivEq(v0) => {
++ let mut formatter = formatter.debug_tuple("DivEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::RemEq(v0) => {
++ let mut formatter = formatter.debug_tuple("RemEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXorEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXorEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAndEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAndEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShlEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShlEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Binding {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Binding");
++ formatter.field("ident", &self.ident);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Block {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Block");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("stmts", &self.stmts);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BoundLifetimes {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BoundLifetimes");
++ formatter.field("for_token", &self.for_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ConstParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ConstParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Constraint {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Constraint");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for Data {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Data::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataEnum");
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataStruct");
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataUnion");
++ formatter.field("union_token", &self.union_token);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DeriveInput {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DeriveInput");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("data", &self.data);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Expr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ let mut formatter = formatter.debug_tuple("Assign");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ let mut formatter = formatter.debug_tuple("AssignOp");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ let mut formatter = formatter.debug_tuple("Async");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ let mut formatter = formatter.debug_tuple("Await");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Binary(v0) => {
++ let mut formatter = formatter.debug_tuple("Binary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ let mut formatter = formatter.debug_tuple("Block");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ let mut formatter = formatter.debug_tuple("Break");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Call(v0) => {
++ let mut formatter = formatter.debug_tuple("Call");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Cast(v0) => {
++ let mut formatter = formatter.debug_tuple("Cast");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ let mut formatter = formatter.debug_tuple("Closure");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ let mut formatter = formatter.debug_tuple("Continue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Field(v0) => {
++ let mut formatter = formatter.debug_tuple("Field");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ let mut formatter = formatter.debug_tuple("ForLoop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ let mut formatter = formatter.debug_tuple("If");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Index(v0) => {
++ let mut formatter = formatter.debug_tuple("Index");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ let mut formatter = formatter.debug_tuple("Let");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ let mut formatter = formatter.debug_tuple("Loop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ let mut formatter = formatter.debug_tuple("Match");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ let mut formatter = formatter.debug_tuple("MethodCall");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ let mut formatter = formatter.debug_tuple("Repeat");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ let mut formatter = formatter.debug_tuple("Return");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ let mut formatter = formatter.debug_tuple("Try");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ let mut formatter = formatter.debug_tuple("TryBlock");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Unary(v0) => {
++ let mut formatter = formatter.debug_tuple("Unary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ let mut formatter = formatter.debug_tuple("Unsafe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ let mut formatter = formatter.debug_tuple("While");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ let mut formatter = formatter.debug_tuple("Yield");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprArray");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssign {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssign");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssignOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssignOp");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAsync {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAsync");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("async_token", &self.async_token);
++ formatter.field("capture", &self.capture);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAwait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAwait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("await_token", &self.await_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprBinary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBinary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBreak {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBreak");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("break_token", &self.break_token);
++ formatter.field("label", &self.label);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("func", &self.func);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCast {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCast");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprClosure {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprClosure");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("movability", &self.movability);
++ formatter.field("capture", &self.capture);
++ formatter.field("or1_token", &self.or1_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("or2_token", &self.or2_token);
++ formatter.field("output", &self.output);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprContinue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprContinue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("continue_token", &self.continue_token);
++ formatter.field("label", &self.label);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprField {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprField");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("member", &self.member);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprForLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprForLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("for_token", &self.for_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprGroup");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("group_token", &self.group_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprIf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIf");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("if_token", &self.if_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("then_branch", &self.then_branch);
++ formatter.field("else_branch", &self.else_branch);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprIndex {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIndex");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("index", &self.index);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLet {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLet");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("loop_token", &self.loop_token);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMatch {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMatch");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("match_token", &self.match_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("arms", &self.arms);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMethodCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMethodCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("receiver", &self.receiver);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("method", &self.method);
++ formatter.field("turbofish", &self.turbofish);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprParen");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("from", &self.from);
++ formatter.field("limits", &self.limits);
++ formatter.field("to", &self.to);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("raw", &self.raw);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRepeat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRepeat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReturn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReturn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("return_token", &self.return_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.field("rest", &self.rest);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTry {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTry");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("question_token", &self.question_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTryBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTryBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("try_token", &self.try_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprUnary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("op", &self.op);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprUnsafe {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnsafe");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("unsafe_token", &self.unsafe_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprWhile {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprWhile");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("while_token", &self.while_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprYield {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprYield");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("yield_token", &self.yield_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Field {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Field");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldPat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldPat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldValue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Fields {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Fields::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unit => formatter.write_str("Unit"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsNamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsNamed");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("named", &self.named);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsUnnamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsUnnamed");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("unnamed", &self.unnamed);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for File {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("File");
++ formatter.field("shebang", &self.shebang);
++ formatter.field("attrs", &self.attrs);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ FnArg::Receiver(v0) => {
++ let mut formatter = formatter.debug_tuple("Receiver");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ FnArg::Typed(v0) => {
++ let mut formatter = formatter.debug_tuple("Typed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ForeignItem::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Binding(v0) => {
++ let mut formatter = formatter.debug_tuple("Binding");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Constraint(v0) => {
++ let mut formatter = formatter.debug_tuple("Constraint");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for GenericMethodArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericMethodArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericParam::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Generics {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Generics");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("params", &self.params);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.field("where_clause", &self.where_clause);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ImplItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Index");
++ formatter.field("index", &self.index);
++ formatter.field("span", &self.span);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Item {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Item::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ExternCrate(v0) => {
++ let mut formatter = formatter.debug_tuple("ExternCrate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ForeignMod(v0) => {
++ let mut formatter = formatter.debug_tuple("ForeignMod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Impl(v0) => {
++ let mut formatter = formatter.debug_tuple("Impl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro2(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro2");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Mod(v0) => {
++ let mut formatter = formatter.debug_tuple("Mod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::TraitAlias(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitAlias");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Use(v0) => {
++ let mut formatter = formatter.debug_tuple("Use");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemEnum");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemExternCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemExternCrate");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("crate_token", &self.crate_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rename", &self.rename);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemForeignMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemForeignMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("abi", &self.abi);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemImpl {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemImpl");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("generics", &self.generics);
++ formatter.field("trait_", &self.trait_);
++ formatter.field("self_ty", &self.self_ty);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro2 {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro2");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("macro_token", &self.macro_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rules", &self.rules);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("mod_token", &self.mod_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("content", &self.content);
++ formatter.field("semi", &self.semi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTrait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("auto_token", &self.auto_token);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("supertraits", &self.supertraits);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTraitAlias {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTraitAlias");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUnion");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("union_token", &self.union_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUse {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUse");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("use_token", &self.use_token);
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("tree", &self.tree);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Label {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Label");
++ formatter.field("name", &self.name);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.finish()
++ }
++}
++impl Debug for Lifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Lifetime");
++ formatter.field("apostrophe", &self.apostrophe);
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for LifetimeDef {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("LifetimeDef");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++impl Debug for Lit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Lit::Str(v0) => {
++ let mut formatter = formatter.debug_tuple("Str");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::ByteStr(v0) => {
++ let mut formatter = formatter.debug_tuple("ByteStr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Byte(v0) => {
++ let mut formatter = formatter.debug_tuple("Byte");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Char(v0) => {
++ let mut formatter = formatter.debug_tuple("Char");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Int(v0) => {
++ let mut formatter = formatter.debug_tuple("Int");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Float(v0) => {
++ let mut formatter = formatter.debug_tuple("Float");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Bool(v0) => {
++ let mut formatter = formatter.debug_tuple("Bool");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Local {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Local");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("init", &self.init);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Macro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Macro");
++ formatter.field("path", &self.path);
++ formatter.field("bang_token", &self.bang_token);
++ formatter.field("delimiter", &self.delimiter);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MacroDelimiter {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ MacroDelimiter::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Brace(v0) => {
++ let mut formatter = formatter.debug_tuple("Brace");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Bracket(v0) => {
++ let mut formatter = formatter.debug_tuple("Bracket");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Member::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Meta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Meta::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::List(v0) => {
++ let mut formatter = formatter.debug_tuple("List");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::NameValue(v0) => {
++ let mut formatter = formatter.debug_tuple("NameValue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaList {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaList");
++ formatter.field("path", &self.path);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("nested", &self.nested);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaNameValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaNameValue");
++ formatter.field("path", &self.path);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for MethodTurbofish {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MethodTurbofish");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for NestedMeta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ NestedMeta::Meta(v0) => {
++ let mut formatter = formatter.debug_tuple("Meta");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ NestedMeta::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ParenthesizedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Pat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Pat::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Ident(v0) => {
++ let mut formatter = formatter.debug_tuple("Ident");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Rest(v0) => {
++ let mut formatter = formatter.debug_tuple("Rest");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::TupleStruct(v0) => {
++ let mut formatter = formatter.debug_tuple("TupleStruct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Wild(v0) => {
++ let mut formatter = formatter.debug_tuple("Wild");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatIdent {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatIdent");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("by_ref", &self.by_ref);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("subpat", &self.subpat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatOr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatOr");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("leading_vert", &self.leading_vert);
++ formatter.field("cases", &self.cases);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lo", &self.lo);
++ formatter.field("limits", &self.limits);
++ formatter.field("hi", &self.hi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRest {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRest");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatSlice");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTupleStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTupleStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatWild {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatWild");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Path {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Path");
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("segments", &self.segments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ PathArguments::None => formatter.write_str("None"),
++ PathArguments::AngleBracketed(v0) => {
++ let mut formatter = formatter.debug_tuple("AngleBracketed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ PathArguments::Parenthesized(v0) => {
++ let mut formatter = formatter.debug_tuple("Parenthesized");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathSegment {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PathSegment");
++ formatter.field("ident", &self.ident);
++ formatter.field("arguments", &self.arguments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateEq {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateEq");
++ formatter.field("lhs_ty", &self.lhs_ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("rhs_ty", &self.rhs_ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateLifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateLifetime");
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateType");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("bounded_ty", &self.bounded_ty);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for QSelf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("QSelf");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("position", &self.position);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for RangeLimits {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ RangeLimits::HalfOpen(v0) => {
++ let mut formatter = formatter.debug_tuple("HalfOpen");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ RangeLimits::Closed(v0) => {
++ let mut formatter = formatter.debug_tuple("Closed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Receiver {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Receiver");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("reference", &self.reference);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("self_token", &self.self_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ReturnType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ReturnType::Default => formatter.write_str("Default"),
++ ReturnType::Type(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Signature {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Signature");
++ formatter.field("constness", &self.constness);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Stmt {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Stmt::Local(v0) => {
++ let mut formatter = formatter.debug_tuple("Local");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Item(v0) => {
++ let mut formatter = formatter.debug_tuple("Item");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Expr(v0) => {
++ let mut formatter = formatter.debug_tuple("Expr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Semi(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Semi");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitBound");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("modifier", &self.modifier);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBoundModifier {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitBoundModifier::None => formatter.write_str("None"),
++ TraitBoundModifier::Maybe(v0) => {
++ let mut formatter = formatter.debug_tuple("Maybe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("sig", &self.sig);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Type {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Type::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::BareFn(v0) => {
++ let mut formatter = formatter.debug_tuple("BareFn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::ImplTrait(v0) => {
++ let mut formatter = formatter.debug_tuple("ImplTrait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Infer(v0) => {
++ let mut formatter = formatter.debug_tuple("Infer");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Never(v0) => {
++ let mut formatter = formatter.debug_tuple("Never");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Ptr(v0) => {
++ let mut formatter = formatter.debug_tuple("Ptr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::TraitObject(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitObject");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeArray");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeBareFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeBareFn");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeGroup");
++ formatter.field("group_token", &self.group_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeImplTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeImplTrait");
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeInfer {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeInfer");
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeMacro");
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeNever {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeNever");
++ formatter.field("bang_token", &self.bang_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParamBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TypeParamBound::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParen");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePath");
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePtr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePtr");
++ formatter.field("star_token", &self.star_token);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeReference");
++ formatter.field("and_token", &self.and_token);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeSlice");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTraitObject {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTraitObject");
++ formatter.field("dyn_token", &self.dyn_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTuple");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for UnOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UnOp::Deref(v0) => {
++ let mut formatter = formatter.debug_tuple("Deref");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Not(v0) => {
++ let mut formatter = formatter.debug_tuple("Not");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Neg(v0) => {
++ let mut formatter = formatter.debug_tuple("Neg");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGlob {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGlob");
++ formatter.field("star_token", &self.star_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGroup");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseName {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseName");
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UsePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UsePath");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("tree", &self.tree);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseRename {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseRename");
++ formatter.field("ident", &self.ident);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("rename", &self.rename);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseTree {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UseTree::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Name(v0) => {
++ let mut formatter = formatter.debug_tuple("Name");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Rename(v0) => {
++ let mut formatter = formatter.debug_tuple("Rename");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Glob(v0) => {
++ let mut formatter = formatter.debug_tuple("Glob");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variadic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variadic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dots", &self.dots);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variant {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variant");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("fields", &self.fields);
++ formatter.field("discriminant", &self.discriminant);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisCrate");
++ formatter.field("crate_token", &self.crate_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisPublic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisPublic");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisRestricted {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisRestricted");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Visibility {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Visibility::Public(v0) => {
++ let mut formatter = formatter.debug_tuple("Public");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Crate(v0) => {
++ let mut formatter = formatter.debug_tuple("Crate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Restricted(v0) => {
++ let mut formatter = formatter.debug_tuple("Restricted");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Inherited => formatter.write_str("Inherited"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WhereClause {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("WhereClause");
++ formatter.field("where_token", &self.where_token);
++ formatter.field("predicates", &self.predicates);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WherePredicate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ WherePredicate::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs
+new file mode 100644
+index 0000000000..15b2bcbbde
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/eq.rs
+@@ -0,0 +1,1930 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Abi {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Abi {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AngleBracketedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AngleBracketedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.colon2_token == other.colon2_token && self.args == other.args
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Arm {}
++#[cfg(feature = "full")]
++impl PartialEq for Arm {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.pat == other.pat
++ && self.guard == other.guard
++ && self.body == other.body
++ && self.comma == other.comma
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AttrStyle {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (AttrStyle::Outer, AttrStyle::Outer) => true,
++ (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Attribute {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Attribute {
++ fn eq(&self, other: &Self) -> bool {
++ self.style == other.style
++ && self.path == other.path
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BareFnArg {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BareFnArg {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BinOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (BinOp::Add(_), BinOp::Add(_)) => true,
++ (BinOp::Sub(_), BinOp::Sub(_)) => true,
++ (BinOp::Mul(_), BinOp::Mul(_)) => true,
++ (BinOp::Div(_), BinOp::Div(_)) => true,
++ (BinOp::Rem(_), BinOp::Rem(_)) => true,
++ (BinOp::And(_), BinOp::And(_)) => true,
++ (BinOp::Or(_), BinOp::Or(_)) => true,
++ (BinOp::BitXor(_), BinOp::BitXor(_)) => true,
++ (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true,
++ (BinOp::BitOr(_), BinOp::BitOr(_)) => true,
++ (BinOp::Shl(_), BinOp::Shl(_)) => true,
++ (BinOp::Shr(_), BinOp::Shr(_)) => true,
++ (BinOp::Eq(_), BinOp::Eq(_)) => true,
++ (BinOp::Lt(_), BinOp::Lt(_)) => true,
++ (BinOp::Le(_), BinOp::Le(_)) => true,
++ (BinOp::Ne(_), BinOp::Ne(_)) => true,
++ (BinOp::Ge(_), BinOp::Ge(_)) => true,
++ (BinOp::Gt(_), BinOp::Gt(_)) => true,
++ (BinOp::AddEq(_), BinOp::AddEq(_)) => true,
++ (BinOp::SubEq(_), BinOp::SubEq(_)) => true,
++ (BinOp::MulEq(_), BinOp::MulEq(_)) => true,
++ (BinOp::DivEq(_), BinOp::DivEq(_)) => true,
++ (BinOp::RemEq(_), BinOp::RemEq(_)) => true,
++ (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
++ (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
++ (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
++ (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
++ (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Binding {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Binding {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Block {}
++#[cfg(feature = "full")]
++impl PartialEq for Block {
++ fn eq(&self, other: &Self) -> bool {
++ self.stmts == other.stmts
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BoundLifetimes {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BoundLifetimes {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ConstParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ConstParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Constraint {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Constraint {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for Data {}
++#[cfg(feature = "derive")]
++impl PartialEq for Data {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Data::Struct(self0), Data::Struct(other0)) => self0 == other0,
++ (Data::Enum(self0), Data::Enum(other0)) => self0 == other0,
++ (Data::Union(self0), Data::Union(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataEnum {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.variants == other.variants
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataStruct {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataUnion {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DeriveInput {}
++#[cfg(feature = "derive")]
++impl PartialEq for DeriveInput {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.data == other.data
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Expr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Expr {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ #[cfg(feature = "full")]
++ (Expr::Array(self0), Expr::Array(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
++ (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
++ (Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
++ (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
++ (Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::If(self0), Expr::If(other0)) => self0 == other0,
++ (Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
++ (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
++ (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
++ (Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
++ (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
++ (Expr::Verbatim(self0), Expr::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ #[cfg(feature = "full")]
++ (Expr::While(self0), Expr::While(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprArray {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssign {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssign {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.left == other.left && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssignOp {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssignOp {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAsync {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAsync {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.capture == other.capture && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAwait {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAwait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprBinary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprBinary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBox {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBreak {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBreak {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCall {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.func == other.func && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCast {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCast {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprClosure {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprClosure {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.asyncness == other.asyncness
++ && self.movability == other.movability
++ && self.capture == other.capture
++ && self.inputs == other.inputs
++ && self.output == other.output
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprContinue {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprContinue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprField {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprField {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base && self.member == other.member
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprForLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprForLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.pat == other.pat
++ && self.expr == other.expr
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprIf {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprIf {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.cond == other.cond
++ && self.then_branch == other.then_branch
++ && self.else_branch == other.else_branch
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprIndex {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprIndex {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLet {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLet {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprLit {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMatch {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMatch {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMethodCall {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMethodCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.receiver == other.receiver
++ && self.method == other.method
++ && self.turbofish == other.turbofish
++ && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprPath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRange {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.from == other.from
++ && self.limits == other.limits
++ && self.to == other.to
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReference {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRepeat {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRepeat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReturn {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReturn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ && self.rest == other.rest
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTry {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTry {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTryBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTryBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprType {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprUnary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprUnary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprUnsafe {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprUnsafe {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprWhile {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprWhile {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.cond == other.cond
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprYield {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprYield {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Field {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Field {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldPat {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldPat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldValue {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Fields {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Fields {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Fields::Named(self0), Fields::Named(other0)) => self0 == other0,
++ (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0,
++ (Fields::Unit, Fields::Unit) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsNamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsNamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.named == other.named
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsUnnamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsUnnamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.unnamed == other.unnamed
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for File {}
++#[cfg(feature = "full")]
++impl PartialEq for File {
++ fn eq(&self, other: &Self) -> bool {
++ self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FnArg {}
++#[cfg(feature = "full")]
++impl PartialEq for FnArg {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0,
++ (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0,
++ (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0,
++ (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0,
++ (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0,
++ (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericArgument {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Type(self0), GenericArgument::Type(other0)) => self0 == other0,
++ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => self0 == other0,
++ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for GenericMethodArgument {}
++#[cfg(feature = "full")]
++impl PartialEq for GenericMethodArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
++ self0 == other0
++ }
++ (GenericMethodArgument::Const(self0), GenericMethodArgument::Const(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericParam {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
++ (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => self0 == other0,
++ (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Generics {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Generics {
++ fn eq(&self, other: &Self) -> bool {
++ self.lt_token == other.lt_token
++ && self.params == other.params
++ && self.gt_token == other.gt_token
++ && self.where_clause == other.where_clause
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
++ (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
++ (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
++ (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
++ (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Item {}
++#[cfg(feature = "full")]
++impl PartialEq for Item {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Item::Const(self0), Item::Const(other0)) => self0 == other0,
++ (Item::Enum(self0), Item::Enum(other0)) => self0 == other0,
++ (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0,
++ (Item::Fn(self0), Item::Fn(other0)) => self0 == other0,
++ (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
++ (Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
++ (Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
++ (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
++ (Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
++ (Item::Static(self0), Item::Static(other0)) => self0 == other0,
++ (Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
++ (Item::Trait(self0), Item::Trait(other0)) => self0 == other0,
++ (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0,
++ (Item::Type(self0), Item::Type(other0)) => self0 == other0,
++ (Item::Union(self0), Item::Union(other0)) => self0 == other0,
++ (Item::Use(self0), Item::Use(other0)) => self0 == other0,
++ (Item::Verbatim(self0), Item::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemEnum {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.variants == other.variants
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemExternCrate {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemExternCrate {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemForeignMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemForeignMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemImpl {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemImpl {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.defaultness == other.defaultness
++ && self.unsafety == other.unsafety
++ && self.generics == other.generics
++ && self.trait_ == other.trait_
++ && self.self_ty == other.self_ty
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.mac == other.mac
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro2 {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro2 {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.content == other.content
++ && self.semi == other.semi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTrait {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.unsafety == other.unsafety
++ && self.auto_token == other.auto_token
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.supertraits == other.supertraits
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTraitAlias {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTraitAlias {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUnion {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUse {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUse {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.leading_colon == other.leading_colon
++ && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Label {}
++#[cfg(feature = "full")]
++impl PartialEq for Label {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for LifetimeDef {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for LifetimeDef {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lifetime == other.lifetime
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ }
++}
++impl Eq for Lit {}
++impl PartialEq for Lit {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Lit::Str(self0), Lit::Str(other0)) => self0 == other0,
++ (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0,
++ (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0,
++ (Lit::Char(self0), Lit::Char(other0)) => self0 == other0,
++ (Lit::Int(self0), Lit::Int(other0)) => self0 == other0,
++ (Lit::Float(self0), Lit::Float(other0)) => self0 == other0,
++ (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0,
++ (Lit::Verbatim(self0), Lit::Verbatim(other0)) => {
++ self0.to_string() == other0.to_string()
++ }
++ _ => false,
++ }
++ }
++}
++impl Eq for LitBool {}
++impl PartialEq for LitBool {
++ fn eq(&self, other: &Self) -> bool {
++ self.value == other.value
++ }
++}
++impl Eq for LitByte {}
++impl Eq for LitByteStr {}
++impl Eq for LitChar {}
++impl Eq for LitFloat {}
++impl Eq for LitInt {}
++impl Eq for LitStr {}
++#[cfg(feature = "full")]
++impl Eq for Local {}
++#[cfg(feature = "full")]
++impl PartialEq for Local {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Macro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Macro {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path
++ && self.delimiter == other.delimiter
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MacroDelimiter {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MacroDelimiter {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true,
++ (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true,
++ (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Meta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Meta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Meta::Path(self0), Meta::Path(other0)) => self0 == other0,
++ (Meta::List(self0), Meta::List(other0)) => self0 == other0,
++ (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaList {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaList {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.nested == other.nested
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaNameValue {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaNameValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for MethodTurbofish {}
++#[cfg(feature = "full")]
++impl PartialEq for MethodTurbofish {
++ fn eq(&self, other: &Self) -> bool {
++ self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for NestedMeta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for NestedMeta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
++ (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ParenthesizedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ParenthesizedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.inputs == other.inputs && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Pat {}
++#[cfg(feature = "full")]
++impl PartialEq for Pat {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
++ (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
++ (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
++ (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
++ (Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
++ (Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
++ (Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
++ (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
++ (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0,
++ (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0,
++ (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0,
++ (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0,
++ (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0,
++ (Pat::Type(self0), Pat::Type(other0)) => self0 == other0,
++ (Pat::Verbatim(self0), Pat::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatBox {}
++#[cfg(feature = "full")]
++impl PartialEq for PatBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatIdent {}
++#[cfg(feature = "full")]
++impl PartialEq for PatIdent {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.by_ref == other.by_ref
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.subpat == other.subpat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatLit {}
++#[cfg(feature = "full")]
++impl PartialEq for PatLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for PatMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatOr {}
++#[cfg(feature = "full")]
++impl PartialEq for PatOr {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.leading_vert == other.leading_vert
++ && self.cases == other.cases
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatPath {}
++#[cfg(feature = "full")]
++impl PartialEq for PatPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRange {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lo == other.lo
++ && self.limits == other.limits
++ && self.hi == other.hi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatReference {}
++#[cfg(feature = "full")]
++impl PartialEq for PatReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRest {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRest {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatSlice {}
++#[cfg(feature = "full")]
++impl PartialEq for PatSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTupleStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTupleStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatType {}
++#[cfg(feature = "full")]
++impl PartialEq for PatType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatWild {}
++#[cfg(feature = "full")]
++impl PartialEq for PatWild {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Path {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Path {
++ fn eq(&self, other: &Self) -> bool {
++ self.leading_colon == other.leading_colon && self.segments == other.segments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathArguments {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (PathArguments::None, PathArguments::None) => true,
++ (PathArguments::AngleBracketed(self0), PathArguments::AngleBracketed(other0)) => {
++ self0 == other0
++ }
++ (PathArguments::Parenthesized(self0), PathArguments::Parenthesized(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathSegment {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathSegment {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.arguments == other.arguments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateEq {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateEq {
++ fn eq(&self, other: &Self) -> bool {
++ self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateLifetime {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateLifetime {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateType {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.bounded_ty == other.bounded_ty
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for QSelf {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for QSelf {
++ fn eq(&self, other: &Self) -> bool {
++ self.ty == other.ty && self.position == other.position && self.as_token == other.as_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for RangeLimits {}
++#[cfg(feature = "full")]
++impl PartialEq for RangeLimits {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true,
++ (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Receiver {}
++#[cfg(feature = "full")]
++impl PartialEq for Receiver {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.reference == other.reference
++ && self.mutability == other.mutability
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ReturnType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ReturnType {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ReturnType::Default, ReturnType::Default) => true,
++ (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Signature {}
++#[cfg(feature = "full")]
++impl PartialEq for Signature {
++ fn eq(&self, other: &Self) -> bool {
++ self.constness == other.constness
++ && self.asyncness == other.asyncness
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Stmt {}
++#[cfg(feature = "full")]
++impl PartialEq for Stmt {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
++ (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
++ (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
++ (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBound {
++ fn eq(&self, other: &Self) -> bool {
++ self.paren_token == other.paren_token
++ && self.modifier == other.modifier
++ && self.lifetimes == other.lifetimes
++ && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBoundModifier {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitBoundModifier::None, TraitBoundModifier::None) => true,
++ (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItem {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
++ (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
++ (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
++ (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
++ (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.default == other.default
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.sig == other.sig
++ && self.default == other.default
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Type {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Type {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Type::Array(self0), Type::Array(other0)) => self0 == other0,
++ (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0,
++ (Type::Group(self0), Type::Group(other0)) => self0 == other0,
++ (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0,
++ (Type::Infer(self0), Type::Infer(other0)) => self0 == other0,
++ (Type::Macro(self0), Type::Macro(other0)) => self0 == other0,
++ (Type::Never(self0), Type::Never(other0)) => self0 == other0,
++ (Type::Paren(self0), Type::Paren(other0)) => self0 == other0,
++ (Type::Path(self0), Type::Path(other0)) => self0 == other0,
++ (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0,
++ (Type::Reference(self0), Type::Reference(other0)) => self0 == other0,
++ (Type::Slice(self0), Type::Slice(other0)) => self0 == other0,
++ (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0,
++ (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0,
++ (Type::Verbatim(self0), Type::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeArray {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem && self.len == other.len
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeBareFn {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeBareFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeGroup {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeImplTrait {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeImplTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeInfer {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeInfer {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeMacro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.mac == other.mac
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeNever {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeNever {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParamBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParamBound {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => self0 == other0,
++ (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePtr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePtr {
++ fn eq(&self, other: &Self) -> bool {
++ self.const_token == other.const_token
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeReference {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeSlice {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTraitObject {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTraitObject {
++ fn eq(&self, other: &Self) -> bool {
++ self.dyn_token == other.dyn_token && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTuple {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.elems == other.elems
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for UnOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UnOp::Deref(_), UnOp::Deref(_)) => true,
++ (UnOp::Not(_), UnOp::Not(_)) => true,
++ (UnOp::Neg(_), UnOp::Neg(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGlob {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGlob {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseName {}
++#[cfg(feature = "full")]
++impl PartialEq for UseName {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UsePath {}
++#[cfg(feature = "full")]
++impl PartialEq for UsePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseRename {}
++#[cfg(feature = "full")]
++impl PartialEq for UseRename {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseTree {}
++#[cfg(feature = "full")]
++impl PartialEq for UseTree {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0,
++ (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0,
++ (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0,
++ (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0,
++ (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variadic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variadic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variant {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variant {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.fields == other.fields
++ && self.discriminant == other.discriminant
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisCrate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisCrate {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisPublic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisPublic {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisRestricted {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisRestricted {
++ fn eq(&self, other: &Self) -> bool {
++ self.in_token == other.in_token && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Visibility {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Visibility {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
++ (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
++ (Visibility::Restricted(self0), Visibility::Restricted(other0)) => self0 == other0,
++ (Visibility::Inherited, Visibility::Inherited) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WhereClause {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WhereClause {
++ fn eq(&self, other: &Self) -> bool {
++ self.predicates == other.predicates
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WherePredicate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WherePredicate {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (WherePredicate::Type(self0), WherePredicate::Type(other0)) => self0 == other0,
++ (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => self0 == other0,
++ (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
+index f51218b78c..d9dd32a420 100644
+--- a/third_party/rust/syn/src/gen/fold.rs
++++ b/third_party/rust/syn/src/gen/fold.rs
+@@ -2,6 +2,7 @@
+ // It is not intended for manual editing.
+
+ #![allow(unreachable_code, unused_variables)]
++#![allow(clippy::match_wildcard_for_single_variants)]
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::fold::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -26,7 +27,7 @@ macro_rules! full {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"fold"` feature.*
++/// *This trait is available only if Syn is built with the `"fold"` feature.*
+ pub trait Fold {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_abi(&mut self, i: Abi) -> Abi {
+@@ -433,35 +434,27 @@ pub trait Fold {
+ fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
+ fold_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit(&mut self, i: Lit) -> Lit {
+ fold_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
+ fold_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
+ fold_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
+ fold_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
+ fold_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
+ fold_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
+ fold_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
+ fold_lit_str(self, i)
+ }
+@@ -799,10 +792,10 @@ where
+ F: Fold + ?Sized,
+ {
+ AngleBracketedGenericArguments {
+- colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -819,9 +812,9 @@ where
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
++ fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
+ body: Box::new(f.fold_expr(*node.body)),
+- comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
++ comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -842,7 +835,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Attribute {
+- pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
++ pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
+ style: f.fold_attr_style(node.style),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ path: f.fold_path(node.path),
+@@ -859,7 +852,7 @@ where
+ name: (node.name).map(|it| {
+ (
+ f.fold_ident((it).0),
+- Token ! [ : ](tokens_helper(f, &(it).1.spans)),
++ Token ! [:](tokens_helper(f, &(it).1.spans)),
+ )
+ }),
+ ty: f.fold_type(node.ty),
+@@ -871,59 +864,47 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
++ BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXor(_binding_0) => {
+- BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAnd(_binding_0) => {
+- BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::BitOr(_binding_0) => {
+- BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::AddEq(_binding_0) => {
+- BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::SubEq(_binding_0) => {
+- BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::MulEq(_binding_0) => {
+- BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::DivEq(_binding_0) => {
+- BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::RemEq(_binding_0) => {
+- BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
+ }
++ BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXorEq(_binding_0) => {
+- BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAndEq(_binding_0) => {
+- BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitOrEq(_binding_0) => {
+- BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShlEq(_binding_0) => {
+- BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShrEq(_binding_0) => {
+- BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -934,7 +915,7 @@ where
+ {
+ Binding {
+ ident: f.fold_ident(node.ident),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -955,9 +936,9 @@ where
+ {
+ BoundLifetimes {
+ for_token: Token![for](tokens_helper(f, &node.for_token.span)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -969,9 +950,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_expr(it)),
+ }
+ }
+@@ -982,7 +963,7 @@ where
+ {
+ Constraint {
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -1016,7 +997,7 @@ where
+ DataStruct {
+ struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "derive")]
+@@ -1112,7 +1093,7 @@ where
+ ExprAssign {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ left: Box::new(f.fold_expr(*node.left)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+ }
+@@ -1148,7 +1129,7 @@ where
+ ExprAwait {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
+ }
+ }
+@@ -1232,9 +1213,9 @@ where
+ asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
+ movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
+ capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
+- or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
++ or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
+ inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
+- or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
++ or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
+ output: f.fold_return_type(node.output),
+ body: Box::new(f.fold_expr(*node.body)),
+ }
+@@ -1258,7 +1239,7 @@ where
+ ExprField {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ member: f.fold_member(node.member),
+ }
+ }
+@@ -1327,7 +1308,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ let_token: Token![let](tokens_helper(f, &node.let_token.span)),
+ pat: f.fold_pat(node.pat),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+ }
+@@ -1384,7 +1365,7 @@ where
+ ExprMethodCall {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ receiver: Box::new(f.fold_expr(*node.receiver)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ method: f.fold_ident(node.method),
+ turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
+ paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
+@@ -1432,7 +1413,7 @@ where
+ {
+ ExprReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ raw: node.raw,
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ expr: Box::new(f.fold_expr(*node.expr)),
+@@ -1447,7 +1428,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: Box::new(f.fold_expr(*node.len)),
+ }
+ }
+@@ -1484,7 +1465,7 @@ where
+ ExprTry {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
++ question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1517,7 +1498,7 @@ where
+ ExprType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -1576,7 +1557,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -1588,7 +1569,7 @@ where
+ FieldPat {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+ }
+@@ -1600,7 +1581,7 @@ where
+ FieldValue {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ expr: f.fold_expr(node.expr),
+ }
+ }
+@@ -1681,7 +1662,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1692,7 +1673,7 @@ where
+ ForeignItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1706,9 +1687,9 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1721,7 +1702,7 @@ where
+ vis: f.fold_visibility(node.vis),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1779,9 +1760,9 @@ where
+ F: Fold + ?Sized,
+ {
+ Generics {
+- lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
++ lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
+ params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
+- gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
++ gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
+ where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
+ }
+ }
+@@ -1819,11 +1800,11 @@ where
+ defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: f.fold_expr(node.expr),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1834,7 +1815,7 @@ where
+ ImplItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1862,9 +1843,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1913,11 +1894,11 @@ where
+ vis: f.fold_visibility(node.vis),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1952,7 +1933,7 @@ where
+ f.fold_ident((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2011,7 +1992,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2043,7 +2024,7 @@ where
+ FoldHelper::lift((it).1, |it| f.fold_item(it)),
+ )
+ }),
+- semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2057,11 +2038,11 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2076,7 +2057,7 @@ where
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2092,7 +2073,7 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
+ brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
+ items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
+@@ -2109,9 +2090,9 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2125,9 +2106,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2153,9 +2134,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ use_token: Token![use](tokens_helper(f, &node.use_token.span)),
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ tree: f.fold_use_tree(node.tree),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2165,7 +2146,7 @@ where
+ {
+ Label {
+ name: f.fold_lifetime(node.name),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ }
+ }
+ pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
+@@ -2185,11 +2166,10 @@ where
+ LifetimeDef {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
+ where
+ F: Fold + ?Sized,
+@@ -2205,7 +2185,6 @@ where
+ Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
+ where
+ F: Fold + ?Sized,
+@@ -2215,7 +2194,6 @@ where
+ span: f.fold_span(node.span),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
+ where
+ F: Fold + ?Sized,
+@@ -2225,7 +2203,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
+ where
+ F: Fold + ?Sized,
+@@ -2235,7 +2212,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
+ where
+ F: Fold + ?Sized,
+@@ -2245,7 +2221,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
+ where
+ F: Fold + ?Sized,
+@@ -2255,7 +2230,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
+ where
+ F: Fold + ?Sized,
+@@ -2265,7 +2239,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
+ where
+ F: Fold + ?Sized,
+@@ -2286,11 +2259,11 @@ where
+ pat: f.fold_pat(node.pat),
+ init: (node.init).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2361,7 +2334,7 @@ where
+ {
+ MetaNameValue {
+ path: f.fold_path(node.path),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ lit: f.fold_lit(node.lit),
+ }
+ }
+@@ -2371,10 +2344,10 @@ where
+ F: Fold + ?Sized,
+ {
+ MethodTurbofish {
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2449,7 +2422,7 @@ where
+ ident: f.fold_ident(node.ident),
+ subpat: (node.subpat).map(|it| {
+ (
+- Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
++ Token ! [@](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_pat(*(it).1)),
+ )
+ }),
+@@ -2482,7 +2455,7 @@ where
+ {
+ PatOr {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
++ leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
+ cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
+ }
+ }
+@@ -2516,7 +2489,7 @@ where
+ {
+ PatReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+@@ -2585,7 +2558,7 @@ where
+ PatType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ pat: Box::new(f.fold_pat(*node.pat)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -2605,7 +2578,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Path {
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
+ }
+ }
+@@ -2641,7 +2614,7 @@ where
+ {
+ PredicateEq {
+ lhs_ty: f.fold_type(node.lhs_ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ rhs_ty: f.fold_type(node.rhs_ty),
+ }
+ }
+@@ -2652,7 +2625,7 @@ where
+ {
+ PredicateLifetime {
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+@@ -2664,7 +2637,7 @@ where
+ PredicateType {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ bounded_ty: f.fold_type(node.bounded_ty),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -2674,11 +2647,11 @@ where
+ F: Fold + ?Sized,
+ {
+ QSelf {
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ position: node.position,
+ as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2691,7 +2664,7 @@ where
+ RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
+ }
+ RangeLimits::Closed(_binding_0) => {
+- RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
++ RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2704,7 +2677,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ reference: (node.reference).map(|it| {
+ (
+- Token ! [ & ](tokens_helper(f, &(it).0.spans)),
++ Token ! [&](tokens_helper(f, &(it).0.spans)),
+ ((it).1).map(|it| f.fold_lifetime(it)),
+ )
+ }),
+@@ -2720,7 +2693,7 @@ where
+ match node {
+ ReturnType::Default => ReturnType::Default,
+ ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
+- Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
++ Token ! [->](tokens_helper(f, &_binding_0.spans)),
+ Box::new(f.fold_type(*_binding_1)),
+ ),
+ }
+@@ -2761,7 +2734,7 @@ where
+ Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
+ Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
+ f.fold_expr(_binding_0),
+- Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
++ Token ! [;](tokens_helper(f, &_binding_1.spans)),
+ ),
+ }
+ }
+@@ -2785,7 +2758,7 @@ where
+ match node {
+ TraitBoundModifier::None => TraitBoundModifier::None,
+ TraitBoundModifier::Maybe(_binding_0) => {
+- TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
++ TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2812,15 +2785,15 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2831,7 +2804,7 @@ where
+ TraitItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2843,7 +2816,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ sig: f.fold_signature(node.sig),
+ default: (node.default).map(|it| f.fold_block(it)),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2856,15 +2829,15 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_type((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2899,7 +2872,7 @@ where
+ TypeArray {
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ elem: Box::new(f.fold_type(*node.elem)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: f.fold_expr(node.len),
+ }
+ }
+@@ -2974,9 +2947,9 @@ where
+ TypeParam {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: f.fold_ident(node.ident),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_type(it)),
+ }
+ }
+@@ -3018,7 +2991,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypePtr {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3030,7 +3003,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypeReference {
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3072,9 +3045,9 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
+ UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
+- UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3083,7 +3056,7 @@ where
+ F: Fold + ?Sized,
+ {
+ UseGlob {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3112,7 +3085,7 @@ where
+ {
+ UsePath {
+ ident: f.fold_ident(node.ident),
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
+ tree: Box::new(f.fold_use_tree(*node.tree)),
+ }
+ }
+@@ -3147,7 +3120,7 @@ where
+ {
+ Variadic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
++ dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -3161,7 +3134,7 @@ where
+ fields: f.fold_fields(node.fields),
+ discriminant: (node.discriminant).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs
+new file mode 100644
+index 0000000000..9e9e84a7af
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/hash.rs
+@@ -0,0 +1,2691 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++use std::hash::{Hash, Hasher};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Abi {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AngleBracketedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.colon2_token.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Arm {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.guard.hash(state);
++ self.body.hash(state);
++ self.comma.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AttrStyle {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ AttrStyle::Outer => {
++ state.write_u8(0u8);
++ }
++ AttrStyle::Inner(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Attribute {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.style.hash(state);
++ self.path.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BareFnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.name.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BinOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ BinOp::Add(_) => {
++ state.write_u8(0u8);
++ }
++ BinOp::Sub(_) => {
++ state.write_u8(1u8);
++ }
++ BinOp::Mul(_) => {
++ state.write_u8(2u8);
++ }
++ BinOp::Div(_) => {
++ state.write_u8(3u8);
++ }
++ BinOp::Rem(_) => {
++ state.write_u8(4u8);
++ }
++ BinOp::And(_) => {
++ state.write_u8(5u8);
++ }
++ BinOp::Or(_) => {
++ state.write_u8(6u8);
++ }
++ BinOp::BitXor(_) => {
++ state.write_u8(7u8);
++ }
++ BinOp::BitAnd(_) => {
++ state.write_u8(8u8);
++ }
++ BinOp::BitOr(_) => {
++ state.write_u8(9u8);
++ }
++ BinOp::Shl(_) => {
++ state.write_u8(10u8);
++ }
++ BinOp::Shr(_) => {
++ state.write_u8(11u8);
++ }
++ BinOp::Eq(_) => {
++ state.write_u8(12u8);
++ }
++ BinOp::Lt(_) => {
++ state.write_u8(13u8);
++ }
++ BinOp::Le(_) => {
++ state.write_u8(14u8);
++ }
++ BinOp::Ne(_) => {
++ state.write_u8(15u8);
++ }
++ BinOp::Ge(_) => {
++ state.write_u8(16u8);
++ }
++ BinOp::Gt(_) => {
++ state.write_u8(17u8);
++ }
++ BinOp::AddEq(_) => {
++ state.write_u8(18u8);
++ }
++ BinOp::SubEq(_) => {
++ state.write_u8(19u8);
++ }
++ BinOp::MulEq(_) => {
++ state.write_u8(20u8);
++ }
++ BinOp::DivEq(_) => {
++ state.write_u8(21u8);
++ }
++ BinOp::RemEq(_) => {
++ state.write_u8(22u8);
++ }
++ BinOp::BitXorEq(_) => {
++ state.write_u8(23u8);
++ }
++ BinOp::BitAndEq(_) => {
++ state.write_u8(24u8);
++ }
++ BinOp::BitOrEq(_) => {
++ state.write_u8(25u8);
++ }
++ BinOp::ShlEq(_) => {
++ state.write_u8(26u8);
++ }
++ BinOp::ShrEq(_) => {
++ state.write_u8(27u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Binding {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Block {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.stmts.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BoundLifetimes {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ConstParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Constraint {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for Data {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Data::Struct(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Data::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Data::Union(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DeriveInput {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.data.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Expr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Expr::Binary(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Expr::Call(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Expr::Cast(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Expr::Field(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ state.write_u8(16u8);
++ v0.hash(state);
++ }
++ Expr::Index(v0) => {
++ state.write_u8(17u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ state.write_u8(18u8);
++ v0.hash(state);
++ }
++ Expr::Lit(v0) => {
++ state.write_u8(19u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ state.write_u8(20u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ state.write_u8(21u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ state.write_u8(22u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ state.write_u8(23u8);
++ v0.hash(state);
++ }
++ Expr::Paren(v0) => {
++ state.write_u8(24u8);
++ v0.hash(state);
++ }
++ Expr::Path(v0) => {
++ state.write_u8(25u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ state.write_u8(26u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ state.write_u8(27u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ state.write_u8(28u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ state.write_u8(29u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ state.write_u8(30u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ state.write_u8(31u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ state.write_u8(32u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ state.write_u8(33u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ state.write_u8(34u8);
++ v0.hash(state);
++ }
++ Expr::Unary(v0) => {
++ state.write_u8(35u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ state.write_u8(36u8);
++ v0.hash(state);
++ }
++ Expr::Verbatim(v0) => {
++ state.write_u8(37u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ state.write_u8(38u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ state.write_u8(39u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssign {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssignOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAsync {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.capture.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAwait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprBinary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBreak {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.func.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCast {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprClosure {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.asyncness.hash(state);
++ self.movability.hash(state);
++ self.capture.hash(state);
++ self.inputs.hash(state);
++ self.output.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprContinue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprField {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ self.member.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprForLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprIf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.cond.hash(state);
++ self.then_branch.hash(state);
++ self.else_branch.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprIndex {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.index.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLet {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMatch {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.arms.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMethodCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.receiver.hash(state);
++ self.method.hash(state);
++ self.turbofish.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.from.hash(state);
++ self.limits.hash(state);
++ self.to.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRepeat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReturn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ self.rest.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTry {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTryBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprUnary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.op.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprUnsafe {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprWhile {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.cond.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprYield {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Field {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldPat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Fields {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Fields::Named(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Fields::Unnamed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Fields::Unit => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsNamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.named.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsUnnamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.unnamed.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for File {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.shebang.hash(state);
++ self.attrs.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ FnArg::Receiver(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ FnArg::Typed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ForeignItem::Fn(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ForeignItem::Static(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ForeignItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ForeignItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ForeignItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericArgument::Type(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericArgument::Binding(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ GenericArgument::Constraint(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ GenericArgument::Const(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for GenericMethodArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericMethodArgument::Const(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericParam::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericParam::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericParam::Const(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Generics {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lt_token.hash(state);
++ self.params.hash(state);
++ self.gt_token.hash(state);
++ self.where_clause.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ImplItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ImplItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ImplItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ImplItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ImplItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Item {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Item::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Item::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Item::ExternCrate(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Item::Fn(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Item::ForeignMod(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Item::Impl(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Item::Macro(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Item::Macro2(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Item::Mod(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Item::Static(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Item::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Item::Trait(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Item::TraitAlias(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Item::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Item::Union(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ Item::Use(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ Item::Verbatim(v0) => {
++ state.write_u8(16u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemExternCrate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemForeignMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.abi.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemImpl {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.defaultness.hash(state);
++ self.unsafety.hash(state);
++ self.generics.hash(state);
++ self.trait_.hash(state);
++ self.self_ty.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro2 {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ TokenStreamHelper(&self.rules).hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.content.hash(state);
++ self.semi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.unsafety.hash(state);
++ self.auto_token.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.supertraits.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTraitAlias {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUse {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.leading_colon.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Label {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for LifetimeDef {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lifetime.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++impl Hash for Lit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Lit::Str(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Lit::ByteStr(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Lit::Byte(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Lit::Char(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Lit::Int(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Lit::Float(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Lit::Bool(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Lit::Verbatim(v0) => {
++ state.write_u8(7u8);
++ v0.to_string().hash(state);
++ }
++ }
++ }
++}
++impl Hash for LitBool {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.value.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Local {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.init.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Macro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.delimiter.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MacroDelimiter {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ MacroDelimiter::Paren(_) => {
++ state.write_u8(0u8);
++ }
++ MacroDelimiter::Brace(_) => {
++ state.write_u8(1u8);
++ }
++ MacroDelimiter::Bracket(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Meta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Meta::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Meta::List(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Meta::NameValue(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaList {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.nested.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaNameValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for MethodTurbofish {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for NestedMeta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ NestedMeta::Meta(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ NestedMeta::Lit(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ParenthesizedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.inputs.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Pat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Pat::Box(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Pat::Ident(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Pat::Lit(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Pat::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Pat::Or(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Pat::Path(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Pat::Range(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Pat::Reference(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Pat::Rest(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Pat::Slice(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Pat::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Pat::Tuple(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Pat::TupleStruct(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Pat::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Pat::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ Pat::Wild(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatIdent {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.by_ref.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.subpat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatOr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.leading_vert.hash(state);
++ self.cases.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lo.hash(state);
++ self.limits.hash(state);
++ self.hi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRest {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTupleStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatWild {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Path {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.leading_colon.hash(state);
++ self.segments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ PathArguments::None => {
++ state.write_u8(0u8);
++ }
++ PathArguments::AngleBracketed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ PathArguments::Parenthesized(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathSegment {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.arguments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateEq {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lhs_ty.hash(state);
++ self.rhs_ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateLifetime {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.bounded_ty.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for QSelf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ty.hash(state);
++ self.position.hash(state);
++ self.as_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for RangeLimits {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ RangeLimits::HalfOpen(_) => {
++ state.write_u8(0u8);
++ }
++ RangeLimits::Closed(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Receiver {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.reference.hash(state);
++ self.mutability.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ReturnType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ReturnType::Default => {
++ state.write_u8(0u8);
++ }
++ ReturnType::Type(_, v1) => {
++ state.write_u8(1u8);
++ v1.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Signature {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.constness.hash(state);
++ self.asyncness.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Stmt {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Stmt::Local(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Stmt::Item(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Stmt::Expr(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Stmt::Semi(v0, _) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.paren_token.hash(state);
++ self.modifier.hash(state);
++ self.lifetimes.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBoundModifier {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitBoundModifier::None => {
++ state.write_u8(0u8);
++ }
++ TraitBoundModifier::Maybe(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TraitItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ TraitItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ TraitItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ TraitItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.sig.hash(state);
++ self.default.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Type {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Type::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Type::BareFn(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Type::Group(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Type::ImplTrait(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Type::Infer(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Type::Macro(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Type::Never(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Type::Paren(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Type::Path(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Type::Ptr(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Type::Reference(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Type::Slice(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Type::TraitObject(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Type::Tuple(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Type::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeBareFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeImplTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeInfer {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.mac.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeNever {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParamBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TypeParamBound::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePtr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.const_token.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTraitObject {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.dyn_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elems.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for UnOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UnOp::Deref(_) => {
++ state.write_u8(0u8);
++ }
++ UnOp::Not(_) => {
++ state.write_u8(1u8);
++ }
++ UnOp::Neg(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGlob {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseName {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UsePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseRename {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseTree {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UseTree::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ UseTree::Name(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ UseTree::Rename(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ UseTree::Glob(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ UseTree::Group(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variadic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variant {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.fields.hash(state);
++ self.discriminant.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisCrate {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisPublic {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisRestricted {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.in_token.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Visibility {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Visibility::Public(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Visibility::Crate(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Visibility::Restricted(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Visibility::Inherited => {
++ state.write_u8(3u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WhereClause {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.predicates.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WherePredicate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ WherePredicate::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ WherePredicate::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ WherePredicate::Eq(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
+index b667f530c3..24d34b7480 100644
+--- a/third_party/rust/syn/src/gen/visit.rs
++++ b/third_party/rust/syn/src/gen/visit.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -30,7 +29,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit"` feature.*
++/// *This trait is available only if Syn is built with the `"visit"` feature.*
+ pub trait Visit<'ast> {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi(&mut self, i: &'ast Abi) {
+@@ -434,35 +433,27 @@ pub trait Visit<'ast> {
+ fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
+ visit_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit(&mut self, i: &'ast Lit) {
+ visit_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool(&mut self, i: &'ast LitBool) {
+ visit_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte(&mut self, i: &'ast LitByte) {
+ visit_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
+ visit_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char(&mut self, i: &'ast LitChar) {
+ visit_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float(&mut self, i: &'ast LitFloat) {
+ visit_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int(&mut self, i: &'ast LitInt) {
+ visit_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str(&mut self, i: &'ast LitStr) {
+ visit_lit_str(self, i)
+ }
+@@ -2537,7 +2528,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2569,7 +2559,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2577,37 +2566,31 @@ where
+ skip!(node.value);
+ v.visit_span(&node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
+ where
+ V: Visit<'ast> + ?Sized,
+diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
+index 5cddb827c6..5ce11f0b2e 100644
+--- a/third_party/rust/syn/src/gen/visit_mut.rs
++++ b/third_party/rust/syn/src/gen/visit_mut.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -31,7 +30,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
++/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
+ pub trait VisitMut {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi_mut(&mut self, i: &mut Abi) {
+@@ -438,35 +437,27 @@ pub trait VisitMut {
+ fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
+ visit_lifetime_def_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_mut(&mut self, i: &mut Lit) {
+ visit_lit_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
+ visit_lit_bool_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
+ visit_lit_byte_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
+ visit_lit_byte_str_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
+ visit_lit_char_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
+ visit_lit_float_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
+ visit_lit_int_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
+ visit_lit_str_mut(self, i)
+ }
+@@ -2543,7 +2534,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
+ where
+ V: VisitMut + ?Sized,
+@@ -2575,7 +2565,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
+ where
+ V: VisitMut + ?Sized,
+@@ -2583,37 +2572,31 @@ where
+ skip!(node.value);
+ v.visit_span_mut(&mut node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
+ where
+ V: VisitMut + ?Sized,
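[Editor's note, not part of the patch: the visit_mut.rs hunks mirror the visit.rs changes for the mutable visitor. As a hedged aside, here is a minimal sketch of the `VisitMut` trait in use; it assumes syn 1.0.x with the non-default "full" and "visit-mut" features plus the quote crate, and the upper-casing transform is purely illustrative.]

```rust
// Hedged sketch: rewriting string literals in place with VisitMut.
// Assumed Cargo.toml: syn = { version = "1.0", features = ["full", "visit-mut"] }, quote = "1.0"
use syn::visit_mut::VisitMut;

struct Shout;

impl VisitMut for Shout {
    fn visit_lit_str_mut(&mut self, lit: &mut syn::LitStr) {
        // Replace each string literal with its upper-cased value, keeping its span.
        *lit = syn::LitStr::new(&lit.value().to_uppercase(), lit.span());
    }
}

fn main() {
    let mut file: syn::File =
        syn::parse_str(r#"fn greet() -> &'static str { "hi" }"#).expect("valid Rust source");
    Shout.visit_file_mut(&mut file);
    // Re-print the mutated tree; the literal should now read "HI".
    println!("{}", quote::quote!(#file));
}
```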
+diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
+index 95ab2e404a..05e8ef5cdf 100644
+--- a/third_party/rust/syn/src/generics.rs
++++ b/third_party/rust/syn/src/generics.rs
+@@ -1,13 +1,16 @@
+ use super::*;
+ use crate::punctuated::{Iter, IterMut, Punctuated};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::fmt::{self, Debug};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// Lifetimes and type parameters attached to a declaration of a function,
+ /// enum, trait, etc.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct Generics {
+ pub lt_token: Option<Token![<]>,
+ pub params: Punctuated<GenericParam, Token![,]>,
+@@ -20,7 +23,7 @@ ast_enum_of_structs! {
+ /// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
+ /// `'a: 'b`, `const LEN: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -28,9 +31,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum GenericParam {
+ /// A generic type parameter: `T: Into<String>`.
+ Type(TypeParam),
+@@ -46,7 +46,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A generic type parameter: `T: Into<String>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParam {
+ pub attrs: Vec<Attribute>,
+@@ -61,7 +61,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct LifetimeDef {
+ pub attrs: Vec<Attribute>,
+@@ -74,7 +74,7 @@ ast_struct! {
+ ast_struct! {
+ /// A const generic parameter: `const LENGTH: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ConstParam {
+ pub attrs: Vec<Attribute>,
+@@ -87,6 +87,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for Generics {
++ fn default() -> Self {
++ Generics {
++ lt_token: None,
++ params: Punctuated::new(),
++ gt_token: None,
++ where_clause: None,
++ }
++ }
++}
++
+ impl Generics {
+ /// Returns an
+ /// <code
+@@ -280,29 +291,23 @@ impl<'a> Iterator for ConstParamsMut<'a> {
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct ImplGenerics<'a>(&'a Generics);
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct TypeGenerics<'a>(&'a Generics);
+
+ /// Returned by `TypeGenerics::as_turbofish`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Turbofish<'a>(&'a Generics);
+
+ #[cfg(feature = "printing")]
+@@ -314,9 +319,8 @@ impl Generics {
+ /// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
+ /// #
+- /// # fn main() {
+- /// # let generics: syn::Generics = Default::default();
+- /// # let name = Ident::new("MyType", Span::call_site());
++ /// # let generics: syn::Generics = Default::default();
++ /// # let name = Ident::new("MyType", Span::call_site());
+ /// #
+ /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ /// quote! {
+@@ -324,11 +328,10 @@ impl Generics {
+ /// // ...
+ /// }
+ /// }
+- /// # ;
+- /// # }
++ /// # ;
+ /// ```
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
+ (
+@@ -339,11 +342,57 @@ impl Generics {
+ }
+ }
+
++#[cfg(feature = "printing")]
++macro_rules! generics_wrapper_impls {
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl<'a> Clone for $ty<'a> {
++ fn clone(&self) -> Self {
++ $ty(self.0)
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Debug for $ty<'a> {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter
++ .debug_tuple(stringify!($ty))
++ .field(self.0)
++ .finish()
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Eq for $ty<'a> {}
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> PartialEq for $ty<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ self.0 == other.0
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Hash for $ty<'a> {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ self.0.hash(state);
++ }
++ }
++ };
++}
++
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(ImplGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(TypeGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(Turbofish);
++
+ #[cfg(feature = "printing")]
+ impl<'a> TypeGenerics<'a> {
+ /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn as_turbofish(&self) -> Turbofish {
+ Turbofish(self.0)
+@@ -353,9 +402,8 @@ impl<'a> TypeGenerics<'a> {
+ ast_struct! {
+ /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct BoundLifetimes {
+ pub for_token: Token![for],
+ pub lt_token: Token![<],
+@@ -364,6 +412,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for BoundLifetimes {
++ fn default() -> Self {
++ BoundLifetimes {
++ for_token: Default::default(),
++ lt_token: Default::default(),
++ lifetimes: Punctuated::new(),
++ gt_token: Default::default(),
++ }
++ }
++}
++
+ impl LifetimeDef {
+ pub fn new(lifetime: Lifetime) -> Self {
+ LifetimeDef {
+@@ -391,7 +450,7 @@ impl From<Ident> for TypeParam {
+ ast_enum_of_structs! {
+ /// A trait or lifetime used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum TypeParamBound {
+ Trait(TraitBound),
+@@ -402,7 +461,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A trait used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct TraitBound {
+ pub paren_token: Option<token::Paren>,
+@@ -418,9 +477,8 @@ ast_enum! {
+ /// A modifier on a trait bound, currently only used for the `?` in
+ /// `?Sized`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum TraitBoundModifier {
+ None,
+ Maybe(Token![?]),
+@@ -431,7 +489,7 @@ ast_struct! {
+ /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
+ /// 'static`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct WhereClause {
+ pub where_token: Token![where],
+@@ -442,7 +500,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -450,9 +508,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum WherePredicate {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ Type(PredicateType),
+@@ -468,7 +523,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateType {
+ /// Any lifetimes from a `for` binding
+@@ -484,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateLifetime {
+ pub lifetime: Lifetime,
+@@ -496,7 +551,7 @@ ast_struct! {
+ ast_struct! {
+ /// An equality predicate in a `where` clause (unsupported).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateEq {
+ pub lhs_ty: Type,
+@@ -521,7 +576,6 @@ pub mod parsing {
+
+ let mut params = Punctuated::new();
+ let mut allow_lifetime_param = true;
+- let mut allow_type_param = true;
+ loop {
+ if input.peek(Token![>]) {
+ break;
+@@ -534,7 +588,7 @@ pub mod parsing {
+ attrs,
+ ..input.parse()?
+ }));
+- } else if allow_type_param && lookahead.peek(Ident) {
++ } else if lookahead.peek(Ident) {
+ allow_lifetime_param = false;
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+@@ -542,7 +596,6 @@ pub mod parsing {
+ }));
+ } else if lookahead.peek(Token![const]) {
+ allow_lifetime_param = false;
+- allow_type_param = false;
+ params.push_value(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+@@ -665,57 +718,53 @@ pub mod parsing {
+
+ impl Parse for TypeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let has_colon;
+- let has_default;
+- Ok(TypeParam {
+- attrs: input.call(Attribute::parse_outer)?,
+- ident: input.parse()?,
+- colon_token: {
+- if input.peek(Token![:]) {
+- has_colon = true;
+- Some(input.parse()?)
+- } else {
+- has_colon = false;
+- None
+- }
+- },
+- bounds: {
+- let mut bounds = Punctuated::new();
+- if has_colon {
+- loop {
+- if input.peek(Token![,])
+- || input.peek(Token![>])
+- || input.peek(Token![=])
+- {
+- break;
+- }
+- let value = input.parse()?;
+- bounds.push_value(value);
+- if !input.peek(Token![+]) {
+- break;
+- }
+- let punct = input.parse()?;
+- bounds.push_punct(punct);
+- }
++ let attrs = input.call(Attribute::parse_outer)?;
++ let ident: Ident = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++
++ let begin_bound = input.fork();
++ let mut is_maybe_const = false;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
++ break;
+ }
+- bounds
+- },
+- eq_token: {
+- if input.peek(Token![=]) {
+- has_default = true;
+- Some(input.parse()?)
+- } else {
+- has_default = false;
+- None
++ if input.peek(Token![?]) && input.peek2(Token![const]) {
++ input.parse::<Token![?]>()?;
++ input.parse::<Token![const]>()?;
++ is_maybe_const = true;
+ }
+- },
+- default: {
+- if has_default {
+- Some(input.parse()?)
+- } else {
+- None
++ let value: TypeParamBound = input.parse()?;
++ bounds.push_value(value);
++ if !input.peek(Token![+]) {
++ break;
+ }
+- },
++ let punct: Token![+] = input.parse()?;
++ bounds.push_punct(punct);
++ }
++ }
++
++ let mut eq_token: Option<Token![=]> = input.parse()?;
++ let mut default = if eq_token.is_some() {
++ Some(input.parse::<Type>()?)
++ } else {
++ None
++ };
++
++ if is_maybe_const {
++ bounds.clear();
++ eq_token = None;
++ default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
++ }
++
++ Ok(TypeParam {
++ attrs,
++ ident,
++ colon_token,
++ bounds,
++ eq_token,
++ default,
+ })
+ }
+ }
+@@ -898,6 +947,8 @@ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
++ #[cfg(feature = "full")]
++ use proc_macro2::TokenTree;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+@@ -1080,9 +1131,25 @@ mod printing {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
++ #[cfg(feature = "full")]
++ {
++ if self.eq_token.is_none() {
++ if let Type::Verbatim(default) = default {
++ let mut iter = default.clone().into_iter();
++ match (iter.next(), iter.next()) {
++ (Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
++ if q.as_char() == '?' && c == "const" =>
++ {
++ return default.to_tokens(tokens);
++ }
++ _ => {}
++ }
++ }
++ }
++ }
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+@@ -1117,9 +1184,9 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
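[Editor's note, not part of the patch: the generics.rs hunks above replace derived impls with explicit `Default` impls and the `generics_wrapper_impls!` macro, and they update the doc example for `Generics::split_for_impl`. As a hedged aside, the sketch below exercises that same `split_for_impl` API outside a doctest; it assumes syn 1.0.x with its default features plus the quote crate, and `MyTrait` is only a placeholder token inside `quote!`.]

```rust
// Hedged sketch: the split_for_impl pattern documented in the hunk above.
// Assumed Cargo.toml: syn = "1.0", quote = "1.0"
use quote::quote;

fn main() {
    // Parse a derive-style input and reuse its generics when emitting an impl.
    let ast: syn::DeriveInput =
        syn::parse_str("struct MyType<T: Clone>(T);").expect("valid struct definition");
    let name = &ast.ident;
    let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();

    let tokens = quote! {
        impl #impl_generics MyTrait for #name #ty_generics #where_clause {
            // ...
        }
    };
    // Prints: impl < T : Clone > MyTrait for MyType < T > { }
    println!("{}", tokens);
}
```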
+diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
+index ff4485ace9..0d8f7d3ddc 100644
+--- a/third_party/rust/syn/src/item.rs
++++ b/third_party/rust/syn/src/item.rs
+@@ -1,17 +1,15 @@
+ use super::*;
+-use crate::derive::{Data, DeriveInput};
++use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+ use crate::punctuated::Punctuated;
+ use proc_macro2::TokenStream;
+
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
++#[cfg(feature = "parsing")]
++use std::mem;
+
+ ast_enum_of_structs! {
+ /// Things that can appear directly inside of a module or scope.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -21,7 +19,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Item #manual_extra_traits {
++ pub enum Item {
+ /// A constant item: `const MAX: u16 = 65535`.
+ Const(ItemConst),
+
+@@ -83,7 +81,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A constant item: `const MAX: u16 = 65535`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -100,7 +98,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemEnum {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -115,7 +113,7 @@ ast_struct! {
+ ast_struct! {
+ /// An `extern crate` item: `extern crate serde`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemExternCrate {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -131,7 +129,7 @@ ast_struct! {
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ...
+ /// }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -143,7 +141,7 @@ ast_struct! {
+ ast_struct! {
+ /// A block of foreign items: `extern "C" { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemForeignMod {
+ pub attrs: Vec<Attribute>,
+ pub abi: Abi,
+@@ -156,7 +154,7 @@ ast_struct! {
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemImpl {
+ pub attrs: Vec<Attribute>,
+ pub defaultness: Option<Token![default]>,
+@@ -175,7 +173,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMacro {
+ pub attrs: Vec<Attribute>,
+ /// The `example` in `macro_rules! example { ... }`.
+@@ -188,8 +186,8 @@ ast_struct! {
+ ast_struct! {
+ /// A 2.0-style declarative macro introduced by the `macro` keyword.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- pub struct ItemMacro2 #manual_extra_traits {
++ /// *This type is available only if Syn is built with the `"full"` feature.*
++ pub struct ItemMacro2 {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub macro_token: Token![macro],
+@@ -201,7 +199,7 @@ ast_struct! {
+ ast_struct! {
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -215,7 +213,7 @@ ast_struct! {
+ ast_struct! {
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -233,7 +231,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStruct {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -248,7 +246,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait definition: `pub trait Iterator { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTrait {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -267,7 +265,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTraitAlias {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -283,7 +281,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -299,7 +297,7 @@ ast_struct! {
+ ast_struct! {
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUnion {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -313,7 +311,7 @@ ast_struct! {
+ ast_struct! {
+ /// A use declaration: `use std::collections::HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUse {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -324,145 +322,32 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Item {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Item {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Item::Const(this), Item::Const(other)) => this == other,
+- (Item::Enum(this), Item::Enum(other)) => this == other,
+- (Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
+- (Item::Fn(this), Item::Fn(other)) => this == other,
+- (Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
+- (Item::Impl(this), Item::Impl(other)) => this == other,
+- (Item::Macro(this), Item::Macro(other)) => this == other,
+- (Item::Macro2(this), Item::Macro2(other)) => this == other,
+- (Item::Mod(this), Item::Mod(other)) => this == other,
+- (Item::Static(this), Item::Static(other)) => this == other,
+- (Item::Struct(this), Item::Struct(other)) => this == other,
+- (Item::Trait(this), Item::Trait(other)) => this == other,
+- (Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
+- (Item::Type(this), Item::Type(other)) => this == other,
+- (Item::Union(this), Item::Union(other)) => this == other,
+- (Item::Use(this), Item::Use(other)) => this == other,
+- (Item::Verbatim(this), Item::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Item {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
++impl Item {
++ #[cfg(feature = "parsing")]
++ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+- Item::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- Item::Enum(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- Item::ExternCrate(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- Item::Fn(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- Item::ForeignMod(item) => {
+- state.write_u8(4);
+- item.hash(state);
+- }
+- Item::Impl(item) => {
+- state.write_u8(5);
+- item.hash(state);
+- }
+- Item::Macro(item) => {
+- state.write_u8(6);
+- item.hash(state);
+- }
+- Item::Macro2(item) => {
+- state.write_u8(7);
+- item.hash(state);
+- }
+- Item::Mod(item) => {
+- state.write_u8(8);
+- item.hash(state);
+- }
+- Item::Static(item) => {
+- state.write_u8(9);
+- item.hash(state);
+- }
+- Item::Struct(item) => {
+- state.write_u8(10);
+- item.hash(state);
+- }
+- Item::Trait(item) => {
+- state.write_u8(11);
+- item.hash(state);
+- }
+- Item::TraitAlias(item) => {
+- state.write_u8(12);
+- item.hash(state);
+- }
+- Item::Type(item) => {
+- state.write_u8(13);
+- item.hash(state);
+- }
+- Item::Union(item) => {
+- state.write_u8(14);
+- item.hash(state);
+- }
+- Item::Use(item) => {
+- state.write_u8(15);
+- item.hash(state);
+- }
+- Item::Verbatim(item) => {
+- state.write_u8(16);
+- TokenStreamHelper(item).hash(state);
+- }
++ Item::ExternCrate(ItemExternCrate { attrs, .. })
++ | Item::Use(ItemUse { attrs, .. })
++ | Item::Static(ItemStatic { attrs, .. })
++ | Item::Const(ItemConst { attrs, .. })
++ | Item::Fn(ItemFn { attrs, .. })
++ | Item::Mod(ItemMod { attrs, .. })
++ | Item::ForeignMod(ItemForeignMod { attrs, .. })
++ | Item::Type(ItemType { attrs, .. })
++ | Item::Struct(ItemStruct { attrs, .. })
++ | Item::Enum(ItemEnum { attrs, .. })
++ | Item::Union(ItemUnion { attrs, .. })
++ | Item::Trait(ItemTrait { attrs, .. })
++ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
++ | Item::Impl(ItemImpl { attrs, .. })
++ | Item::Macro(ItemMacro { attrs, .. })
++ | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
++ Item::Verbatim(_) => Vec::new(),
+ Item::__Nonexhaustive => unreachable!(),
+ }
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ItemMacro2 {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ItemMacro2 {
+- fn eq(&self, other: &Self) -> bool {
+- self.attrs == other.attrs
+- && self.vis == other.vis
+- && self.macro_token == other.macro_token
+- && self.ident == other.ident
+- && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ItemMacro2 {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.attrs.hash(state);
+- self.vis.hash(state);
+- self.macro_token.hash(state);
+- self.ident.hash(state);
+- TokenStreamHelper(&self.rules).hash(state);
+- }
+-}
+-
+ impl From<DeriveInput> for Item {
+ fn from(input: DeriveInput) -> Item {
+ match input.data {
+@@ -496,10 +381,57 @@ impl From<DeriveInput> for Item {
+ }
+ }
+
++impl From<ItemStruct> for DeriveInput {
++ fn from(input: ItemStruct) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Struct(DataStruct {
++ struct_token: input.struct_token,
++ fields: input.fields,
++ semi_token: input.semi_token,
++ }),
++ }
++ }
++}
++
++impl From<ItemEnum> for DeriveInput {
++ fn from(input: ItemEnum) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Enum(DataEnum {
++ enum_token: input.enum_token,
++ brace_token: input.brace_token,
++ variants: input.variants,
++ }),
++ }
++ }
++}
++
++impl From<ItemUnion> for DeriveInput {
++ fn from(input: ItemUnion) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Union(DataUnion {
++ union_token: input.union_token,
++ fields: input.fields,
++ }),
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -530,7 +462,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UsePath {
+ pub ident: Ident,
+ pub colon2_token: Token![::],
+@@ -541,7 +473,7 @@ ast_struct! {
+ ast_struct! {
+ /// An identifier imported by a `use` item: `HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseName {
+ pub ident: Ident,
+ }
+@@ -550,7 +482,7 @@ ast_struct! {
+ ast_struct! {
+ /// An renamed identifier imported by a `use` item: `HashMap as Map`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseRename {
+ pub ident: Ident,
+ pub as_token: Token![as],
+@@ -561,7 +493,7 @@ ast_struct! {
+ ast_struct! {
+ /// A glob import in a `use` item: `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGlob {
+ pub star_token: Token![*],
+ }
+@@ -570,7 +502,7 @@ ast_struct! {
+ ast_struct! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGroup {
+ pub brace_token: token::Brace,
+ pub items: Punctuated<UseTree, Token![,]>,
+@@ -580,7 +512,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// An item within an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -590,7 +522,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ForeignItem #manual_extra_traits {
++ pub enum ForeignItem {
+ /// A foreign function in an `extern` block.
+ Fn(ForeignItemFn),
+
+@@ -614,7 +546,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A foreign function in an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -626,7 +558,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -642,7 +574,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign type in an `extern` block: `type void`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -655,7 +587,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an extern block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -663,61 +595,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ForeignItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ForeignItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
+- (ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
+- (ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
+- (ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
+- (ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ForeignItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ForeignItem::Fn(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ForeignItem::Static(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ForeignItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ForeignItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ForeignItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ForeignItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item declaration within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -727,7 +608,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum TraitItem #manual_extra_traits {
++ pub enum TraitItem {
+ /// An associated constant within the definition of a trait.
+ Const(TraitItemConst),
+
+@@ -751,7 +632,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemConst {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+@@ -766,7 +647,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait method within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub sig: Signature,
+@@ -778,7 +659,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemType {
+ pub attrs: Vec<Attribute>,
+ pub type_token: Token![type],
+@@ -794,7 +675,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -802,61 +683,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for TraitItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for TraitItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (TraitItem::Const(this), TraitItem::Const(other)) => this == other,
+- (TraitItem::Method(this), TraitItem::Method(other)) => this == other,
+- (TraitItem::Type(this), TraitItem::Type(other)) => this == other,
+- (TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
+- (TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for TraitItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- TraitItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- TraitItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- TraitItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- TraitItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- TraitItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- TraitItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -866,7 +696,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ImplItem #manual_extra_traits {
++ pub enum ImplItem {
+ /// An associated constant within an impl block.
+ Const(ImplItemConst),
+
+@@ -890,7 +720,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -908,7 +738,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -921,7 +751,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -938,7 +768,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -946,62 +776,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ImplItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ImplItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ImplItem::Const(this), ImplItem::Const(other)) => this == other,
+- (ImplItem::Method(this), ImplItem::Method(other)) => this == other,
+- (ImplItem::Type(this), ImplItem::Type(other)) => this == other,
+- (ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
+- (ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ImplItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ImplItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ImplItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ImplItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ImplItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ImplItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ImplItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// A function signature in a trait or implementation: `unsafe fn
+ /// initialize(&self)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Signature {
+ pub constness: Option<Token![const]>,
+ pub asyncness: Option<Token![async]>,
+@@ -1017,13 +796,34 @@ ast_struct! {
+ }
+ }
+
++impl Signature {
++ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
++ pub fn receiver(&self) -> Option<&FnArg> {
++ let arg = self.inputs.first()?;
++ match arg {
++ FnArg::Receiver(_) => Some(arg),
++ FnArg::Typed(PatType { pat, .. }) => {
++ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
++ if ident == "self" {
++ return Some(arg);
++ }
++ }
++ None
++ }
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum FnArg {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
++ ///
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
+ Receiver(Receiver),
+
+ /// A function argument accepted by pattern and type.
+@@ -1035,7 +835,10 @@ ast_struct! {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Receiver {
+ pub attrs: Vec<Attribute>,
+ pub reference: Option<(Token![&], Option<Lifetime>)>,
+@@ -1056,7 +859,8 @@ pub mod parsing {
+
+ use crate::ext::IdentExt;
+ use crate::parse::discouraged::Speculative;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
++ use crate::token::Brace;
+ use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
+ use std::iter::{self, FromIterator};
+
+@@ -1064,18 +868,26 @@ pub mod parsing {
+
+ impl Parse for Item {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![extern]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
++ }
++ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+@@ -1083,8 +895,6 @@ pub mod parsing {
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1094,18 +904,61 @@ pub mod parsing {
+ } else if lookahead.peek(Token![use]) {
+ input.parse().map(Item::Use)
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(Item::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Static(ItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+- input.parse().map(Item::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let vis = input.parse()?;
++ let const_token = input.parse()?;
++ let ident = {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ };
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Const(ItemConst {
++ attrs: Vec::new(),
++ vis,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1117,21 +970,19 @@ pub mod parsing {
+ {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
+- input.parse().map(Item::Impl)
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(Item::Type)
++ parse_item_type(begin, input)
+ } else if lookahead.peek(existential) {
+ input.call(item_existential).map(Item::Verbatim)
+ } else if lookahead.peek(Token![struct]) {
+@@ -1147,14 +998,18 @@ pub mod parsing {
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+- input.parse().map(Item::Impl)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Token![macro]) {
+ input.parse().map(Item::Macro2)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1163,32 +1018,64 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- Item::ExternCrate(item) => &mut item.attrs,
+- Item::Use(item) => &mut item.attrs,
+- Item::Static(item) => &mut item.attrs,
+- Item::Const(item) => &mut item.attrs,
+- Item::Fn(item) => &mut item.attrs,
+- Item::Mod(item) => &mut item.attrs,
+- Item::ForeignMod(item) => &mut item.attrs,
+- Item::Type(item) => &mut item.attrs,
+- Item::Struct(item) => &mut item.attrs,
+- Item::Enum(item) => &mut item.attrs,
+- Item::Union(item) => &mut item.attrs,
+- Item::Trait(item) => &mut item.attrs,
+- Item::TraitAlias(item) => &mut item.attrs,
+- Item::Impl(item) => &mut item.attrs,
+- Item::Macro(item) => &mut item.attrs,
+- Item::Macro2(item) => &mut item.attrs,
+- Item::Verbatim(_) => return Ok(item),
+- Item::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(item)
++ }
++ }
++
++ struct FlexibleItemType {
++ vis: Visibility,
++ defaultness: Option<Token![default]>,
++ type_token: Token![type],
++ ident: Ident,
++ generics: Generics,
++ colon_token: Option<Token![:]>,
++ bounds: Punctuated<TypeParamBound, Token![+]>,
++ ty: Option<(Token![=], Type)>,
++ semi_token: Token![;],
++ }
++
++ impl Parse for FlexibleItemType {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let type_token: Token![type] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let mut generics: Generics = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ bounds.push_value(input.parse::<TypeParamBound>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ bounds.push_punct(input.parse::<Token![+]>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ }
+ }
++ generics.where_clause = input.parse()?;
++ let ty = if let Some(eq_token) = input.parse()? {
++ Some((eq_token, input.parse::<Type>()?))
++ } else {
++ None
++ };
++ let semi_token: Token![;] = input.parse()?;
+
+- Ok(item)
++ Ok(FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ })
+ }
+ }
+
+@@ -1310,7 +1197,6 @@ pub mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+- || lookahead.peek(Token![extern])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ if input.peek(Token![::]) {
+@@ -1392,69 +1278,126 @@ pub mod parsing {
+ }
+ }
+
+- impl Parse for ItemFn {
+- fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let vis: Visibility = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
++ fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
++ let trailing_punct = args.trailing_punct();
+
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+- let variadic = inputs.last().as_ref().and_then(get_variadic);
+-
+- fn get_variadic(input: &&FnArg) -> Option<Variadic> {
+- if let FnArg::Typed(PatType { ty, .. }) = input {
+- if let Type::Verbatim(tokens) = &**ty {
+- if let Ok(dots) = parse2(tokens.clone()) {
+- return Some(Variadic {
+- attrs: Vec::new(),
+- dots,
+- });
+- }
+- }
+- }
+- None
++ let last = match args.last_mut()? {
++ FnArg::Typed(last) => last,
++ _ => return None,
++ };
++
++ let ty = match last.ty.as_ref() {
++ Type::Verbatim(ty) => ty,
++ _ => return None,
++ };
++
++ let mut variadic = Variadic {
++ attrs: Vec::new(),
++ dots: parse2(ty.clone()).ok()?,
++ };
++
++ if let Pat::Verbatim(pat) = last.pat.as_ref() {
++ if pat.to_string() == "..." && !trailing_punct {
++ variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
++ args.pop();
+ }
++ }
+
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ Some(variadic)
++ }
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
++ TokenStream::from_iter(vec![
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[0]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[1]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Alone);
++ dot.set_span(dots.spans[2]);
++ dot
++ }),
++ ])
++ }
+
+- Ok(ItemFn {
+- attrs: private::attrs(outer_attrs, inner_attrs),
+- vis,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Box::new(Block { brace_token, stmts }),
+- })
++ fn peek_signature(input: ParseStream) -> bool {
++ let fork = input.fork();
++ fork.parse::<Option<Token![const]>>().is_ok()
++ && fork.parse::<Option<Token![async]>>().is_ok()
++ && fork.parse::<Option<Token![unsafe]>>().is_ok()
++ && fork.parse::<Option<Abi>>().is_ok()
++ && fork.peek(Token![fn])
++ }
++
++ fn parse_signature(input: ParseStream) -> Result<Signature> {
++ let constness: Option<Token![const]> = input.parse()?;
++ let asyncness: Option<Token![async]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let abi: Option<Abi> = input.parse()?;
++ let fn_token: Token![fn] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let generics: Generics = input.parse()?;
++
++ let content;
++ let paren_token = parenthesized!(content in input);
++ let mut inputs = parse_fn_args(&content)?;
++ let variadic = pop_variadic(&mut inputs);
++
++ let output: ReturnType = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ Ok(Signature {
++ constness,
++ asyncness,
++ unsafety,
++ abi,
++ fn_token,
++ ident,
++ paren_token,
++ inputs,
++ output,
++ variadic,
++ generics: Generics {
++ where_clause,
++ ..generics
++ },
++ })
++ }
++
++ impl Parse for ItemFn {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ parse_rest_of_fn(input, outer_attrs, vis, sig)
+ }
+ }
+
++ fn parse_rest_of_fn(
++ input: ParseStream,
++ outer_attrs: Vec<Attribute>,
++ vis: Visibility,
++ sig: Signature,
++ ) -> Result<ItemFn> {
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let stmts = content.call(Block::parse_within)?;
++
++ Ok(ItemFn {
++ attrs: private::attrs(outer_attrs, inner_attrs),
++ vis,
++ sig,
++ block: Box::new(Block { brace_token, stmts }),
++ })
++ }
++
+ impl Parse for FnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1491,26 +1434,79 @@ pub mod parsing {
+ }
+ }
+
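++ // Parse the comma-separated argument list. A bare `...` becomes a verbatim
++ // typed argument so the caller can recover it, and a `self` receiver is
++ // rejected unless it is the first argument.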
++ fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
++ let mut args = Punctuated::new();
++ let mut has_receiver = false;
++
++ while !input.is_empty() {
++ let attrs = input.call(Attribute::parse_outer)?;
++
++ let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
++ FnArg::Typed(PatType {
++ attrs,
++ pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
++ colon_token: Token![:](dots.spans[0]),
++ ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
++ })
++ } else {
++ let mut arg: FnArg = input.parse()?;
++ match &mut arg {
++ FnArg::Receiver(receiver) if has_receiver => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected second method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) if !args.is_empty() => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) => {
++ has_receiver = true;
++ receiver.attrs = attrs;
++ }
++ FnArg::Typed(arg) => arg.attrs = attrs,
++ }
++ arg
++ };
++ args.push_value(arg);
++
++ if input.is_empty() {
++ break;
++ }
++
++ let comma: Token![,] = input.parse()?;
++ args.push_punct(comma);
++ }
++
++ Ok(args)
++ }
++
+ fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
++ // Hack to parse pre-2018 syntax in
++ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
++ // because the rest of the test case is valuable.
++ if input.peek(Ident) && input.peek2(Token![<]) {
++ let span = input.fork().parse::<Ident>()?.span();
++ return Ok(PatType {
++ attrs: Vec::new(),
++ pat: Box::new(Pat::Wild(PatWild {
++ attrs: Vec::new(),
++ underscore_token: Token![_](span),
++ })),
++ colon_token: Token![:](span),
++ ty: input.parse()?,
++ });
++ }
++
+ Ok(PatType {
+ attrs: Vec::new(),
+- pat: input.parse()?,
++ pat: Box::new(pat::parsing::multi_pat(input)?),
+ colon_token: input.parse()?,
+ ty: Box::new(match input.parse::<Option<Token![...]>>()? {
+- Some(dot3) => {
+- let args = vec![
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Alone)),
+- ];
+- let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
+- |(mut arg, span)| {
+- arg.set_span(*span);
+- arg
+- },
+- ));
+- Type::Verbatim(tokens)
+- }
++ Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
+ None => input.parse()?,
+ }),
+ })
+@@ -1581,22 +1577,60 @@ pub mod parsing {
+
+ impl Parse for ForeignItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![fn]) {
+- input.parse().map(ForeignItem::Fn)
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(token::Brace) {
++ let content;
++ braced!(content in input);
++ content.call(Attribute::parse_inner)?;
++ content.call(Block::parse_within)?;
++
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Fn(ForeignItemFn {
++ attrs: Vec::new(),
++ vis,
++ sig,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(ForeignItem::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![=]) {
++ input.parse::<Token![=]>()?;
++ input.parse::<Expr>()?;
++ input.parse::<Token![;]>()?;
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Static(ForeignItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ForeignItem::Type)
++ parse_foreign_item_type(begin, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1605,17 +1639,16 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- ForeignItem::Fn(item) => &mut item.attrs,
+- ForeignItem::Static(item) => &mut item.attrs,
+- ForeignItem::Type(item) => &mut item.attrs,
+- ForeignItem::Macro(item) => &mut item.attrs,
+- ForeignItem::Verbatim(_) | ForeignItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
+- }
++ let item_attrs = match &mut item {
++ ForeignItem::Fn(item) => &mut item.attrs,
++ ForeignItem::Static(item) => &mut item.attrs,
++ ForeignItem::Type(item) => &mut item.attrs,
++ ForeignItem::Macro(item) => &mut item.attrs,
++ ForeignItem::Verbatim(_) => return Ok(item),
++ ForeignItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+
+ Ok(item)
+ }
+@@ -1625,55 +1658,12 @@ pub mod parsing {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let mut inputs = Punctuated::new();
+- let mut variadic = None;
+- while !content.is_empty() {
+- let attrs = content.call(Attribute::parse_outer)?;
+-
+- if let Some(dots) = content.parse()? {
+- variadic = Some(Variadic { attrs, dots });
+- break;
+- }
+-
+- let mut arg = content.call(fn_arg_typed)?;
+- arg.attrs = attrs;
+- inputs.push_value(FnArg::Typed(arg));
+- if content.is_empty() {
+- break;
+- }
+-
+- inputs.push_punct(content.parse()?);
+- }
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+ let semi_token: Token![;] = input.parse()?;
+-
+ Ok(ForeignItemFn {
+ attrs,
+ vis,
+- sig: Signature {
+- constness: None,
+- asyncness: None,
+- unsafety: None,
+- abi: None,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ semi_token,
+ })
+ }
+@@ -1706,6 +1696,37 @@ pub mod parsing {
+ }
+ }
+
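++ // Anything richer than a plain `type Ident;` (defaultness, generics, a
++ // where-clause, bounds, or a default) is preserved as ForeignItem::Verbatim.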
++ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some()
++ || generics.lt_token.is_some()
++ || generics.where_clause.is_some()
++ || colon_token.is_some()
++ || ty.is_some()
++ {
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Type(ForeignItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ForeignItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1742,6 +1763,36 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(Item::Type(ItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty: Box::new(ty),
++ semi_token,
++ }))
++ }
++ }
++
+ #[cfg(not(feature = "printing"))]
+ fn item_existential(input: ParseStream) -> Result<TokenStream> {
+ Err(input.error("existential type is not supported"))
+@@ -1887,7 +1938,7 @@ pub mod parsing {
+
+ impl Parse for ItemTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let attrs = input.call(Attribute::parse_outer)?;
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let auto_token: Option<Token![auto]> = input.parse()?;
+@@ -1896,7 +1947,7 @@ pub mod parsing {
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+- attrs,
++ outer_attrs,
+ vis,
+ unsafety,
+ auto_token,
+@@ -1909,7 +1960,7 @@ pub mod parsing {
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+- attrs: Vec<Attribute>,
++ outer_attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+@@ -1937,13 +1988,14 @@ pub mod parsing {
+
+ let content;
+ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemTrait {
+- attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ vis,
+ unsafety,
+ auto_token,
+@@ -2014,14 +2066,19 @@ pub mod parsing {
+
+ impl Parse for TraitItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
+ let ahead = input.fork();
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![const]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(TraitItem::Method)
++ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.parse().map(TraitItem::Const)
+ } else if lookahead.peek(Token![async])
+ || lookahead.peek(Token![unsafe])
+@@ -2032,18 +2089,11 @@ pub mod parsing {
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(TraitItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(TraitItem::Type)
++ parse_trait_item_type(begin.fork(), input)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::])
+ {
+@@ -2052,18 +2102,20 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- TraitItem::Const(item) => &mut item.attrs,
+- TraitItem::Method(item) => &mut item.attrs,
+- TraitItem::Type(item) => &mut item.attrs,
+- TraitItem::Macro(item) => &mut item.attrs,
+- TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ match (vis, defaultness) {
++ (Visibility::Inherited, None) => {}
++ _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
+ }
+
++ let item_attrs = match &mut item {
++ TraitItem::Const(item) => &mut item.attrs,
++ TraitItem::Method(item) => &mut item.attrs,
++ TraitItem::Type(item) => &mut item.attrs,
++ TraitItem::Macro(item) => &mut item.attrs,
++ TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+ Ok(item)
+ }
+ }
+@@ -2073,7 +2125,14 @@ pub mod parsing {
+ Ok(TraitItemConst {
+ attrs: input.call(Attribute::parse_outer)?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ default: {
+@@ -2093,20 +2152,7 @@ pub mod parsing {
+ impl Parse for TraitItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+
+ let lookahead = input.lookahead1();
+ let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
+@@ -2124,22 +2170,7 @@ pub mod parsing {
+
+ Ok(TraitItemMethod {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ default: brace_token.map(|brace_token| Block { brace_token, stmts }),
+ semi_token,
+ })
+@@ -2188,6 +2219,35 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || vis.is_some() {
++ Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(TraitItem::Type(TraitItemType {
++ attrs: Vec::new(),
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ default: ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for TraitItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2207,52 +2267,67 @@ pub mod parsing {
+
+ impl Parse for ItemImpl {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let defaultness: Option<Token![default]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let impl_token: Token![impl] = input.parse()?;
+-
+- let has_generics = input.peek(Token![<])
+- && (input.peek2(Token![>])
+- || input.peek2(Token![#])
+- || (input.peek2(Ident) || input.peek2(Lifetime))
+- && (input.peek3(Token![:])
+- || input.peek3(Token![,])
+- || input.peek3(Token![>])));
+- let generics: Generics = if has_generics {
+- input.parse()?
+- } else {
+- Generics::default()
+- };
+-
+- let trait_ = {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- if ahead.parse::<Option<Token![!]>>().is_ok()
+- && ahead.parse::<Path>().is_ok()
+- && ahead.parse::<Token![for]>().is_ok()
+- {
+- let polarity: Option<Token![!]> = input.parse()?;
+- let path: Path = input.parse()?;
+- let for_token: Token![for] = input.parse()?;
+- Some((polarity, path, for_token))
+- } else {
+- None
+- }
+- };
+- let self_ty: Type = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let allow_const_impl = false;
++ parse_impl(input, allow_const_impl).map(Option::unwrap)
++ }
++ }
++
++ fn parse_impl(input: ParseStream, allow_const_impl: bool) -> Result<Option<ItemImpl>> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let impl_token: Token![impl] = input.parse()?;
++
++ let has_generics = input.peek(Token![<])
++ && (input.peek2(Token![>])
++ || input.peek2(Token![#])
++ || (input.peek2(Ident) || input.peek2(Lifetime))
++ && (input.peek3(Token![:])
++ || input.peek3(Token![,])
++ || input.peek3(Token![>]))
++ || input.peek2(Token![const]));
++ let generics: Generics = if has_generics {
++ input.parse()?
++ } else {
++ Generics::default()
++ };
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
++ let is_const_impl = allow_const_impl
++ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
++ if is_const_impl {
++ input.parse::<Option<Token![?]>>()?;
++ input.parse::<Token![const]>()?;
++ }
+
+- let mut items = Vec::new();
+- while !content.is_empty() {
+- items.push(content.parse()?);
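++ // Speculatively parse `Trait for` / `!Trait for` on a fork and commit with
++ // advance_to only if the `for` token is present; parenthesized arguments
++ // such as `Fn(A) -> B` are folded into the last path segment.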
++ let trait_ = (|| -> Option<_> {
++ let ahead = input.fork();
++ let polarity: Option<Token![!]> = ahead.parse().ok()?;
++ let mut path: Path = ahead.parse().ok()?;
++ if path.segments.last().unwrap().arguments.is_empty() && ahead.peek(token::Paren) {
++ let parenthesized = PathArguments::Parenthesized(ahead.parse().ok()?);
++ path.segments.last_mut().unwrap().arguments = parenthesized;
+ }
++ let for_token: Token![for] = ahead.parse().ok()?;
++ input.advance_to(&ahead);
++ Some((polarity, path, for_token))
++ })();
++
++ let self_ty: Type = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++
++ let mut items = Vec::new();
++ while !content.is_empty() {
++ items.push(content.parse()?);
++ }
+
+- Ok(ItemImpl {
++ if is_const_impl {
++ Ok(None)
++ } else {
++ Ok(Some(ItemImpl {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+ defaultness,
+ unsafety,
+@@ -2265,12 +2340,13 @@ pub mod parsing {
+ self_ty: Box::new(self_ty),
+ brace_token,
+ items,
+- })
++ }))
+ }
+ }
+
+ impl Parse for ImplItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+@@ -2284,28 +2360,38 @@ pub mod parsing {
+ None
+ };
+
+- let mut item = if lookahead.peek(Token![const]) {
+- ahead.parse::<Token![const]>()?;
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(ImplItem::Method)
++ } else if lookahead.peek(Token![const]) {
++ let const_token: Token![const] = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
+- input.parse().map(ImplItem::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.advance_to(&ahead);
++ let ident: Ident = input.call(Ident::parse_any)?;
++ let colon_token: Token![:] = input.parse()?;
++ let ty: Type = input.parse()?;
++ if let Some(eq_token) = input.parse()? {
++ return Ok(ImplItem::Const(ImplItemConst {
++ attrs,
++ vis,
++ defaultness,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }));
++ } else {
++ input.parse::<Token![;]>()?;
++ return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ImplItem::Type)
++ parse_impl_item_type(begin, input)
+ } else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
+ input.call(item_existential).map(ImplItem::Verbatim)
+ } else if vis.is_inherited()
+@@ -2313,7 +2399,6 @@ pub mod parsing {
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -2346,7 +2431,14 @@ pub mod parsing {
+ vis: input.parse()?,
+ defaultness: input.parse()?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+@@ -2358,50 +2450,39 @@ pub mod parsing {
+
+ impl Parse for ImplItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
+-
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ let sig = parse_signature(input)?;
++
++ let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ // Accept methods without a body in an impl block because
++ // rustc's *parser* does not reject them (the compilation error
++ // is emitted later than parsing) and it can be useful for macro
++ // DSLs.
++ let mut punct = Punct::new(';', Spacing::Alone);
++ punct.set_span(semi.span);
++ let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
++ Block {
++ brace_token: Brace::default(),
++ stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
++ }
++ } else {
++ let content;
++ let brace_token = braced!(content in input);
++ attrs.extend(content.call(Attribute::parse_inner)?);
++ Block {
++ brace_token,
++ stmts: content.call(Block::parse_within)?,
++ }
++ };
+
+ Ok(ImplItemMethod {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ vis,
+ defaultness,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Block { brace_token, stmts },
++ sig,
++ block,
+ })
+ }
+ }
+@@ -2426,6 +2507,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if colon_token.is_some() || ty.is_none() {
++ Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(ImplItem::Type(ImplItemType {
++ attrs: Vec::new(),
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ImplItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2471,6 +2583,7 @@ mod printing {
+
+ use crate::attr::FilterAttrs;
+ use crate::print::TokensOrDefault;
++ use crate::punctuated::Pair;
+
+ impl ToTokens for ItemExternCrate {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+@@ -2835,6 +2948,14 @@ mod printing {
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
++ if self.block.stmts.len() == 1 {
++ if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
++ if verbatim.to_string() == ";" {
++ verbatim.to_tokens(tokens);
++ return;
++ }
++ }
++ }
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+@@ -2905,6 +3026,33 @@ mod printing {
+ }
+ }
+
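++ // Print a single argument and report whether it was the `...` placeholder,
++ // so the Signature printer does not emit the variadic twice.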
++ fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
++ let arg = match arg {
++ FnArg::Typed(arg) => arg,
++ FnArg::Receiver(receiver) => {
++ receiver.to_tokens(tokens);
++ return false;
++ }
++ };
++
++ match arg.ty.as_ref() {
++ Type::Verbatim(ty) if ty.to_string() == "..." => {
++ match arg.pat.as_ref() {
++ Pat::Verbatim(pat) if pat.to_string() == "..." => {
++ tokens.append_all(arg.attrs.outer());
++ pat.to_tokens(tokens);
++ }
++ _ => arg.to_tokens(tokens),
++ }
++ true
++ }
++ _ => {
++ arg.to_tokens(tokens);
++ false
++ }
++ }
++ }
++
+ impl ToTokens for Signature {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.constness.to_tokens(tokens);
+@@ -2915,11 +3063,24 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+- self.inputs.to_tokens(tokens);
+- if self.variadic.is_some() && !self.inputs.empty_or_trailing() {
+- <Token![,]>::default().to_tokens(tokens);
++ let mut last_is_variadic = false;
++ for input in self.inputs.pairs() {
++ match input {
++ Pair::Punctuated(input, comma) => {
++ maybe_variadic_to_tokens(input, tokens);
++ comma.to_tokens(tokens);
++ }
++ Pair::End(input) => {
++ last_is_variadic = maybe_variadic_to_tokens(input, tokens);
++ }
++ }
++ }
++ if self.variadic.is_some() && !last_is_variadic {
++ if !self.inputs.empty_or_trailing() {
++ <Token![,]>::default().to_tokens(tokens);
++ }
++ self.variadic.to_tokens(tokens);
+ }
+- self.variadic.to_tokens(tokens);
+ });
+ self.output.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+diff --git a/third_party/rust/syn/src/keyword.rs b/third_party/rust/syn/src/keyword.rs
+deleted file mode 100644
+index e69de29bb2..0000000000
+diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs
+index c8ada7e638..3da506731e 100644
+--- a/third_party/rust/syn/src/lib.rs
++++ b/third_party/rust/syn/src/lib.rs
+@@ -1,3 +1,11 @@
++//! [![github]](https://github.com/dtolnay/syn)&ensp;[![crates-io]](https://crates.io/crates/syn)&ensp;[![docs-rs]](https://docs.rs/syn)
++//!
++//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
++//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
++//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
++//!
++//! <br>
++//!
+ //! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
+ //! tree of Rust source code.
+ //!
+@@ -62,8 +70,8 @@
+ //! ```
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use quote::quote;
+ //! use syn::{parse_macro_input, DeriveInput};
+@@ -242,35 +250,48 @@
+ //! dynamic library libproc_macro from rustc toolchain.
+
+ // Syn types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
+ #![deny(clippy::all, clippy::pedantic)]
+ // Ignored clippy lints.
+ #![allow(
+- clippy::block_in_if_condition_stmt,
++ clippy::blocks_in_if_conditions,
+ clippy::cognitive_complexity,
+ clippy::doc_markdown,
+ clippy::eval_order_dependence,
+ clippy::inherent_to_string,
+ clippy::large_enum_variant,
++ clippy::manual_non_exhaustive,
++ clippy::match_like_matches_macro,
++ clippy::match_on_vec_items,
++ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::never_loop,
+ clippy::suspicious_op_assign_impl,
+ clippy::too_many_arguments,
+- clippy::trivially_copy_pass_by_ref
++ clippy::trivially_copy_pass_by_ref,
++ clippy::unnecessary_unwrap
+ )]
+ // Ignored clippy_pedantic lints.
+ #![allow(
+ clippy::cast_possible_truncation,
++ clippy::default_trait_access,
+ clippy::empty_enum,
++ clippy::expl_impl_clone_on_copy,
+ clippy::if_not_else,
+ clippy::items_after_statements,
++ clippy::match_same_arms,
++ clippy::missing_errors_doc,
+ clippy::module_name_repetitions,
++ clippy::must_use_candidate,
++ clippy::option_if_let_else,
+ clippy::shadow_unrelated,
+ clippy::similar_names,
+ clippy::single_match_else,
++ clippy::too_many_lines,
+ clippy::unseparated_literal_suffix,
+ clippy::use_self,
+- clippy::used_underscore_binding
++ clippy::used_underscore_binding,
++ clippy::wildcard_imports
+ )]
+
+ #[cfg(all(
+@@ -284,7 +305,6 @@ extern crate unicode_xid;
+ #[cfg(feature = "printing")]
+ extern crate quote;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[macro_use]
+ mod macros;
+
+@@ -307,7 +327,6 @@ pub use crate::attr::{
+ AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
+ };
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod bigint;
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -364,9 +383,7 @@ pub use crate::file::File;
+ mod lifetime;
+ pub use crate::lifetime::Lifetime;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod lit;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::lit::{
+ Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
+ };
+@@ -441,6 +458,9 @@ pub mod parse_macro_input;
+ #[cfg(all(feature = "parsing", feature = "printing"))]
+ pub mod spanned;
+
++#[cfg(all(feature = "parsing", feature = "full"))]
++mod whitespace;
++
+ mod gen {
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+@@ -482,7 +502,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit"` feature.*
++ /// *This module is available only if Syn is built with the `"visit"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -603,7 +623,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit-mut"`
++ /// *This module is available only if Syn is built with the `"visit-mut"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -702,7 +722,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"fold"` feature.*
++ /// *This module is available only if Syn is built with the `"fold"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -744,6 +764,22 @@ mod gen {
+ #[rustfmt::skip]
+ pub mod fold;
+
++ #[cfg(feature = "clone-impls")]
++ #[rustfmt::skip]
++ mod clone;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod eq;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod hash;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod debug;
++
+ #[cfg(any(feature = "full", feature = "derive"))]
+ #[path = "../gen_helper.rs"]
+ mod helper;
+@@ -757,6 +793,8 @@ pub mod export;
+ mod custom_keyword;
+ mod custom_punctuation;
+ mod sealed;
++mod span;
++mod thread;
+
+ #[cfg(feature = "parsing")]
+ mod lookahead;
+@@ -764,13 +802,15 @@ mod lookahead;
+ #[cfg(feature = "parsing")]
+ pub mod parse;
+
+-mod span;
++#[cfg(feature = "full")]
++mod reserved;
++
++#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
++mod verbatim;
+
+ #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+ mod print;
+
+-mod thread;
+-
+ ////////////////////////////////////////////////////////////////////////////////
+
+ #[allow(dead_code, non_camel_case_types)]
+@@ -800,14 +840,14 @@ pub use crate::error::{Error, Result};
+ ///
+ /// [`syn::parse2`]: parse2
+ ///
+-/// *This function is available if Syn is built with both the `"parsing"` and
++/// *This function is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ ///
+ /// # Examples
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use quote::quote;
+ /// use syn::DeriveInput;
+@@ -847,7 +887,7 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
+ ///
+ /// [`syn::parse`]: parse()
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ parse::Parser::parse2(T::parse, tokens)
+@@ -855,7 +895,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+
+ /// Parse a string of Rust code into the chosen syntax tree node.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ ///
+ /// # Hygiene
+ ///
+@@ -874,9 +914,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(feature = "parsing")]
+ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+@@ -894,7 +932,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ ///
+ /// If present, either of these would be an error using `from_str`.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` and
++/// *This function is available only if Syn is built with the `"parsing"` and
+ /// `"full"` features.*
+ ///
+ /// # Examples
+@@ -918,9 +956,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub fn parse_file(mut content: &str) -> Result<File> {
+@@ -931,13 +967,16 @@ pub fn parse_file(mut content: &str) -> Result<File> {
+ }
+
+ let mut shebang = None;
+- if content.starts_with("#!") && !content.starts_with("#![") {
+- if let Some(idx) = content.find('\n') {
+- shebang = Some(content[..idx].to_string());
+- content = &content[idx..];
+- } else {
+- shebang = Some(content.to_string());
+- content = "";
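++ // Skip whitespace after `#!` so that an inner attribute written as
++ // `#! [feature(...)]` is not mistaken for a shebang line.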
++ if content.starts_with("#!") {
++ let rest = whitespace::skip(&content[2..]);
++ if !rest.starts_with('[') {
++ if let Some(idx) = content.find('\n') {
++ shebang = Some(content[..idx].to_string());
++ content = &content[idx..];
++ } else {
++ shebang = Some(content.to_string());
++ content = "";
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs
+index d51c48e827..959cc5f9c6 100644
+--- a/third_party/rust/syn/src/lifetime.rs
++++ b/third_party/rust/syn/src/lifetime.rs
+@@ -18,10 +18,8 @@ use crate::lookahead;
+ /// - All following characters must be Unicode code points with the XID_Continue
+ /// property.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+-#[cfg_attr(feature = "extra-traits", derive(Debug))]
+-#[derive(Clone)]
+ pub struct Lifetime {
+ pub apostrophe: Span,
+ pub ident: Ident,
+@@ -72,6 +70,15 @@ impl Display for Lifetime {
+ }
+ }
+
++impl Clone for Lifetime {
++ fn clone(&self) -> Self {
++ Lifetime {
++ apostrophe: self.apostrophe,
++ ident: self.ident.clone(),
++ }
++ }
++}
++
+ impl PartialEq for Lifetime {
+ fn eq(&self, other: &Lifetime) -> bool {
+ self.ident.eq(&other.ident)
+diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs
+index f2209a2980..ee77e75bec 100644
+--- a/third_party/rust/syn/src/lit.rs
++++ b/third_party/rust/syn/src/lit.rs
+@@ -22,9 +22,6 @@ use crate::{Error, Result};
+ ast_enum_of_structs! {
+ /// A Rust literal such as a string or integer or boolean.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+- ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+@@ -33,7 +30,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Lit #manual_extra_traits {
++ pub enum Lit {
+ /// A UTF-8 string literal: `"foo"`.
+ Str(LitStr),
+
+@@ -64,61 +61,44 @@ ast_enum_of_structs! {
+
+ ast_struct! {
+ /// A UTF-8 string literal: `"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitStr #manual_extra_traits_debug {
+- repr: Box<LitStrRepr>,
++ pub struct LitStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+-struct LitStrRepr {
+- token: Literal,
+- suffix: Box<str>,
+-}
+-
+ ast_struct! {
+ /// A byte string literal: `b"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByteStr #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByteStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A byte literal: `b'f'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByte #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByte {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A character literal: `'a'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitChar #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitChar {
++ repr: Box<LitRepr>,
+ }
+ }
+
++struct LitRepr {
++ token: Literal,
++ suffix: Box<str>,
++}
++
+ ast_struct! {
+ /// An integer literal: `1` or `1u16`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitInt #manual_extra_traits_debug {
++ pub struct LitInt {
+ repr: Box<LitIntRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitIntRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -129,15 +109,11 @@ ast_struct! {
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinite or NaN.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitFloat #manual_extra_traits_debug {
++ pub struct LitFloat {
+ repr: Box<LitFloatRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitFloatRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -146,92 +122,27 @@ struct LitFloatRepr {
+
+ ast_struct! {
+ /// A boolean literal: `true` or `false`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitBool #manual_extra_traits_debug {
++ pub struct LitBool {
+ pub value: bool,
+ pub span: Span,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Lit {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Lit {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Lit::Str(this), Lit::Str(other)) => this == other,
+- (Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
+- (Lit::Byte(this), Lit::Byte(other)) => this == other,
+- (Lit::Char(this), Lit::Char(other)) => this == other,
+- (Lit::Int(this), Lit::Int(other)) => this == other,
+- (Lit::Float(this), Lit::Float(other)) => this == other,
+- (Lit::Bool(this), Lit::Bool(other)) => this == other,
+- (Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Lit {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Lit::Str(lit) => {
+- hash.write_u8(0);
+- lit.hash(hash);
+- }
+- Lit::ByteStr(lit) => {
+- hash.write_u8(1);
+- lit.hash(hash);
+- }
+- Lit::Byte(lit) => {
+- hash.write_u8(2);
+- lit.hash(hash);
+- }
+- Lit::Char(lit) => {
+- hash.write_u8(3);
+- lit.hash(hash);
+- }
+- Lit::Int(lit) => {
+- hash.write_u8(4);
+- lit.hash(hash);
+- }
+- Lit::Float(lit) => {
+- hash.write_u8(5);
+- lit.hash(hash);
+- }
+- Lit::Bool(lit) => {
+- hash.write_u8(6);
+- lit.hash(hash);
+- }
+- Lit::Verbatim(lit) => {
+- hash.write_u8(7);
+- lit.to_string().hash(hash);
+- }
+- }
+- }
+-}
+-
+ impl LitStr {
+ pub fn new(value: &str, span: Span) -> Self {
+- let mut lit = Literal::string(value);
+- lit.set_span(span);
++ let mut token = Literal::string(value);
++ token.set_span(span);
+ LitStr {
+- repr: Box::new(LitStrRepr {
+- token: lit,
++ repr: Box::new(LitRepr {
++ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> String {
+- let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_str(&repr);
+ String::from(value)
+ }
+
+@@ -311,7 +222,7 @@ impl LitStr {
+ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+- let stream = respan_token_stream(g.stream().clone(), span);
++ let stream = respan_token_stream(g.stream(), span);
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+@@ -345,19 +256,30 @@ impl LitByteStr {
+ pub fn new(value: &[u8], span: Span) -> Self {
+ let mut token = Literal::byte_string(value);
+ token.set_span(span);
+- LitByteStr { token }
++ LitByteStr {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> Vec<u8> {
+- value::parse_lit_byte_str(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte_str(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -365,19 +287,30 @@ impl LitByte {
+ pub fn new(value: u8, span: Span) -> Self {
+ let mut token = Literal::u8_suffixed(value);
+ token.set_span(span);
+- LitByte { token }
++ LitByte {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> u8 {
+- value::parse_lit_byte(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -385,36 +318,52 @@ impl LitChar {
+ pub fn new(value: char, span: Span) -> Self {
+ let mut token = Literal::character(value);
+ token.set_span(span);
+- LitChar { token }
++ LitChar {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> char {
+- value::parse_lit_char(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_char(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitInt {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_int(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitInt {
+- repr: Box::new(LitIntRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not an integer literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_int(repr) {
++ Some(parse) => parse,
++ None => panic!("Not an integer literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported integer literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -492,18 +441,23 @@ impl Display for LitInt {
+
+ impl LitFloat {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_float(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitFloat {
+- repr: Box::new(LitFloatRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not a float literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_float(repr) {
++ Some(parse) => parse,
++ None => panic!("Not a float literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported float literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -575,7 +529,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByteStr")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -584,7 +538,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByte")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -593,7 +547,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitChar")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -626,15 +580,53 @@ mod debug_impls {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl Clone for LitRepr {
++ fn clone(&self) -> Self {
++ LitRepr {
++ token: self.token.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitIntRepr {
++ fn clone(&self) -> Self {
++ LitIntRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitFloatRepr {
++ fn clone(&self) -> Self {
++ LitFloatRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
+ macro_rules! lit_extra_traits {
+- ($ty:ident, $($field:ident).+) => {
+- #[cfg(feature = "extra-traits")]
+- impl Eq for $ty {}
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $ty {
++ fn clone(&self) -> Self {
++ $ty {
++ repr: self.repr.clone(),
++ }
++ }
++ }
+
+ #[cfg(feature = "extra-traits")]
+ impl PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+- self.$($field).+.to_string() == other.$($field).+.to_string()
++ self.repr.token.to_string() == other.repr.token.to_string()
+ }
+ }
+
+@@ -644,7 +636,7 @@ macro_rules! lit_extra_traits {
+ where
+ H: Hasher,
+ {
+- self.$($field).+.to_string().hash(state);
++ self.repr.token.to_string().hash(state);
+ }
+ }
+
+@@ -657,20 +649,23 @@ macro_rules! lit_extra_traits {
+ };
+ }
+
+-lit_extra_traits!(LitStr, repr.token);
+-lit_extra_traits!(LitByteStr, token);
+-lit_extra_traits!(LitByte, token);
+-lit_extra_traits!(LitChar, token);
+-lit_extra_traits!(LitInt, repr.token);
+-lit_extra_traits!(LitFloat, repr.token);
+-lit_extra_traits!(LitBool, value);
++lit_extra_traits!(LitStr);
++lit_extra_traits!(LitByteStr);
++lit_extra_traits!(LitByte);
++lit_extra_traits!(LitChar);
++lit_extra_traits!(LitInt);
++lit_extra_traits!(LitFloat);
++
++#[cfg(feature = "parsing")]
++#[doc(hidden)]
++#[allow(non_snake_case)]
++pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
++ match marker {}
++}
+
+ ast_enum! {
+ /// The style of a string literal, either plain quoted or a raw string like
+ /// `r##"data"##`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+ pub enum StrStyle #no_visit {
+ /// An ordinary string like `"data"`.
+ Cooked,
+@@ -691,7 +686,9 @@ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
++ use crate::buffer::Cursor;
+ use crate::parse::{Parse, ParseStream, Result};
++ use proc_macro2::Punct;
+
+ impl Parse for Lit {
+ fn parse(input: ParseStream) -> Result<Self> {
+@@ -699,25 +696,73 @@ pub mod parsing {
+ if let Some((lit, rest)) = cursor.literal() {
+ return Ok((Lit::new(lit), rest));
+ }
+- while let Some((ident, rest)) = cursor.ident() {
+- let value = if ident == "true" {
+- true
+- } else if ident == "false" {
+- false
+- } else {
+- break;
+- };
+- let lit_bool = LitBool {
+- value,
+- span: ident.span(),
+- };
+- return Ok((Lit::Bool(lit_bool), rest));
++
++ if let Some((ident, rest)) = cursor.ident() {
++ let value = ident == "true";
++ if value || ident == "false" {
++ let lit_bool = LitBool {
++ value,
++ span: ident.span(),
++ };
++ return Ok((Lit::Bool(lit_bool), rest));
++ }
+ }
++
++ if let Some((punct, rest)) = cursor.punct() {
++ if punct.as_char() == '-' {
++ if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
++ return Ok((lit, rest));
++ }
++ }
++ }
++
+ Err(cursor.error("expected literal"))
+ })
+ }
+ }
+
++ fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
++ let (lit, rest) = cursor.literal()?;
++
++ let mut span = neg.span();
++ span = span.join(lit.span()).unwrap_or(span);
++
++ let mut repr = lit.to_string();
++ repr.insert(0, '-');
++
++ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
++ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
++ if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
++ token.set_span(span);
++ return Some((
++ Lit::Int(LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ));
++ }
++ }
++ }
++
++ let (digits, suffix) = value::parse_lit_float(&repr)?;
++ let mut token = value::to_literal(&repr, &digits, &suffix)?;
++ token.set_span(span);
++ Some((
++ Lit::Float(LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ))
++ }
++
+ impl Parse for LitStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+@@ -803,19 +848,19 @@ mod printing {
+
+ impl ToTokens for LitByteStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitByte {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitChar {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+@@ -855,20 +900,29 @@ mod value {
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_str(&repr);
+ return Lit::Str(LitStr {
+- repr: Box::new(LitStrRepr { token, suffix }),
++ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'b' => match byte(&repr, 1) {
+ b'"' | b'r' => {
+- return Lit::ByteStr(LitByteStr { token });
++ let (_, suffix) = parse_lit_byte_str(&repr);
++ return Lit::ByteStr(LitByteStr {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'\'' => {
+- return Lit::Byte(LitByte { token });
++ let (_, suffix) = parse_lit_byte(&repr);
++ return Lit::Byte(LitByte {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ _ => {}
+ },
+ b'\'' => {
+- return Lit::Char(LitChar { token });
++ let (_, suffix) = parse_lit_char(&repr);
++ return Lit::Char(LitChar {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'0'..=b'9' | b'-' => {
+ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
+@@ -905,6 +959,44 @@ mod value {
+
+ panic!("Unrecognized literal: `{}`", repr);
+ }
++
++ pub fn suffix(&self) -> &str {
++ match self {
++ Lit::Str(lit) => lit.suffix(),
++ Lit::ByteStr(lit) => lit.suffix(),
++ Lit::Byte(lit) => lit.suffix(),
++ Lit::Char(lit) => lit.suffix(),
++ Lit::Int(lit) => lit.suffix(),
++ Lit::Float(lit) => lit.suffix(),
++ Lit::Bool(_) | Lit::Verbatim(_) => "",
++ }
++ }
++
++ pub fn span(&self) -> Span {
++ match self {
++ Lit::Str(lit) => lit.span(),
++ Lit::ByteStr(lit) => lit.span(),
++ Lit::Byte(lit) => lit.span(),
++ Lit::Char(lit) => lit.span(),
++ Lit::Int(lit) => lit.span(),
++ Lit::Float(lit) => lit.span(),
++ Lit::Bool(lit) => lit.span,
++ Lit::Verbatim(lit) => lit.span(),
++ }
++ }
++
++ pub fn set_span(&mut self, span: Span) {
++ match self {
++ Lit::Str(lit) => lit.set_span(span),
++ Lit::ByteStr(lit) => lit.set_span(span),
++ Lit::Byte(lit) => lit.set_span(span),
++ Lit::Char(lit) => lit.set_span(span),
++ Lit::Int(lit) => lit.set_span(span),
++ Lit::Float(lit) => lit.set_span(span),
++ Lit::Bool(lit) => lit.span = span,
++ Lit::Verbatim(lit) => lit.set_span(span),
++ }
++ }
+ }
+
+ /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
+@@ -1004,19 +1096,18 @@ mod value {
+ pounds += 1;
+ }
+ assert_eq!(byte(s, pounds), b'"');
+- assert_eq!(byte(s, s.len() - pounds - 1), b'"');
+- for end in s[s.len() - pounds..].bytes() {
++ let close = s.rfind('"').unwrap();
++ for end in s[close + 1..close + 1 + pounds].bytes() {
+ assert_eq!(end, b'#');
+ }
+
+- let content = s[pounds + 1..s.len() - pounds - 1]
+- .to_owned()
+- .into_boxed_str();
+- let suffix = Box::<str>::default(); // todo
++ let content = s[pounds + 1..close].to_owned().into_boxed_str();
++ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+- pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
++ // Returns (content, suffix).
++ pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ match byte(s, 1) {
+ b'"' => parse_lit_byte_str_cooked(s),
+@@ -1028,25 +1119,25 @@ mod value {
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+- fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s.as_bytes();
++ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+- let byte = match byte(s, 0) {
++ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1057,10 +1148,10 @@ mod value {
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+- let byte = byte(s, 0);
++ let byte = byte(v, 0);
+ let ch = char::from_u32(u32::from(byte)).unwrap();
+ if ch.is_whitespace() {
+- s = &s[1..];
++ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+@@ -1069,42 +1160,45 @@ mod value {
+ }
+ }
+ b'\r' => {
+- assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
+- s = &s[2..];
++ assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
++ v = &v[2..];
+ b'\n'
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+- assert_eq!(s, b"\"");
+- out
++ assert_eq!(byte(v, 0), b'"');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (out, suffix)
+ }
+
+- fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+- String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
++ let (value, suffix) = parse_lit_str_raw(&s[1..]);
++ (String::from(value).into_bytes(), suffix)
+ }
+
+- pub fn parse_lit_byte(s: &str) -> u8 {
++ // Returns (value, suffix).
++ pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'\'');
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s[2..].as_bytes();
++ let mut v = s[2..].as_bytes();
+
+- let b = match byte(s, 0) {
++ let b = match byte(v, 0) {
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1118,16 +1212,18 @@ mod value {
+ }
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+
+- assert_eq!(byte(s, 0), b'\'');
+- b
++ assert_eq!(byte(v, 0), b'\'');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (b, suffix)
+ }
+
+- pub fn parse_lit_char(mut s: &str) -> char {
++ // Returns (value, suffix).
++ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
+ assert_eq!(byte(s, 0), b'\'');
+ s = &s[1..];
+
+@@ -1163,8 +1259,9 @@ mod value {
+ ch
+ }
+ };
+- assert_eq!(s, "\'", "Expected end of char literal");
+- ch
++ assert_eq!(byte(s, 0), b'\'');
++ let suffix = s[1..].to_owned().into_boxed_str();
++ (ch, suffix)
+ }
+
+ fn backslash_x<S>(s: &S) -> (u8, &S)
+@@ -1334,7 +1431,11 @@ mod value {
+ }
+ b'e' | b'E' => {
+ if has_e {
+- return None;
++ if has_exponent {
++ break;
++ } else {
++ return None;
++ }
+ }
+ has_e = true;
+ bytes[write] = b'e';
+@@ -1372,11 +1473,33 @@ mod value {
+ }
+ }
+
+- pub fn to_literal(s: &str) -> Literal {
+- let stream = s.parse::<TokenStream>().unwrap();
+- match stream.into_iter().next().unwrap() {
+- TokenTree::Literal(l) => l,
+- _ => unreachable!(),
++ pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
++ if repr.starts_with('-') {
++ if suffix == "f64" {
++ digits.parse().ok().map(Literal::f64_suffixed)
++ } else if suffix == "f32" {
++ digits.parse().ok().map(Literal::f32_suffixed)
++ } else if suffix == "i64" {
++ digits.parse().ok().map(Literal::i64_suffixed)
++ } else if suffix == "i32" {
++ digits.parse().ok().map(Literal::i32_suffixed)
++ } else if suffix == "i16" {
++ digits.parse().ok().map(Literal::i16_suffixed)
++ } else if suffix == "i8" {
++ digits.parse().ok().map(Literal::i8_suffixed)
++ } else if !suffix.is_empty() {
++ None
++ } else if digits.contains('.') {
++ digits.parse().ok().map(Literal::f64_unsuffixed)
++ } else {
++ digits.parse().ok().map(Literal::i64_unsuffixed)
++ }
++ } else {
++ let stream = repr.parse::<TokenStream>().unwrap();
++ match stream.into_iter().next().unwrap() {
++ TokenTree::Literal(l) => Some(l),
++ _ => unreachable!(),
++ }
+ }
+ }
+ }
+diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs
+index 6c3dcae92a..de288a34e1 100644
+--- a/third_party/rust/syn/src/mac.rs
++++ b/third_party/rust/syn/src/mac.rs
+@@ -2,21 +2,17 @@ use super::*;
+ use crate::token::{Brace, Bracket, Paren};
+ use proc_macro2::TokenStream;
+ #[cfg(feature = "parsing")]
+-use proc_macro2::{Delimiter, Span, TokenTree};
++use proc_macro2::{Delimiter, Group, Span, TokenTree};
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseStream, Parser, Result};
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// A macro invocation: `println!("{}", mac)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Macro #manual_extra_traits {
++ pub struct Macro {
+ pub path: Path,
+ pub bang_token: Token![!],
+ pub delimiter: MacroDelimiter,
+@@ -27,7 +23,7 @@ ast_struct! {
+ ast_enum! {
+ /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum MacroDelimiter {
+ Paren(Paren),
+@@ -36,39 +32,20 @@ ast_enum! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Macro {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Macro {
+- fn eq(&self, other: &Self) -> bool {
+- self.path == other.path
+- && self.bang_token == other.bang_token
+- && self.delimiter == other.delimiter
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Macro {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.path.hash(state);
+- self.bang_token.hash(state);
+- self.delimiter.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
+- match delimiter {
++fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
++ let delimiter = match macro_delimiter {
++ MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
++ MacroDelimiter::Brace(_) => Delimiter::Brace,
++ MacroDelimiter::Bracket(_) => Delimiter::Bracket,
++ };
++ let mut group = Group::new(delimiter, TokenStream::new());
++ group.set_span(match macro_delimiter {
+ MacroDelimiter::Paren(token) => token.span,
+ MacroDelimiter::Brace(token) => token.span,
+ MacroDelimiter::Bracket(token) => token.span,
+- }
++ });
++ group.span_close()
+ }
+
+ impl Macro {
+@@ -163,9 +140,7 @@ impl Macro {
+ /// given parser.
+ #[cfg(feature = "parsing")]
+ pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+- // TODO: see if we can get a group.span_close() span in here as the
+- // scope, rather than the span of the whole group.
+- let scope = delimiter_span(&self.delimiter);
++ let scope = delimiter_span_close(&self.delimiter);
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+ }
+diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs
+index 9cac5c15df..8060224381 100644
+--- a/third_party/rust/syn/src/macros.rs
++++ b/third_party/rust/syn/src/macros.rs
+@@ -4,15 +4,11 @@ macro_rules! ast_struct {
+ struct $name:ident #full $($rest:tt)*
+ ) => {
+ #[cfg(feature = "full")]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+
+ #[cfg(not(feature = "full"))]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name {
+- _noconstruct: (),
++ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
+ }
+
+ #[cfg(all(not(feature = "full"), feature = "printing"))]
+@@ -23,29 +19,10 @@ macro_rules! ast_struct {
+ }
+ };
+
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits_debug $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+ (
+ [$($attrs_pub:tt)*]
+ struct $name:ident $($rest:tt)*
+ ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+ };
+
+@@ -63,21 +40,10 @@ macro_rules! ast_enum {
+ ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
+ );
+
+- (
+- [$($attrs_pub:tt)*]
+- enum $name:ident #manual_extra_traits $($rest:tt)*
+- ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* enum $name $($rest)*
+- );
+-
+ (
+ [$($attrs_pub:tt)*]
+ enum $name:ident $($rest:tt)*
+ ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* enum $name $($rest)*
+ );
+
+@@ -120,15 +86,9 @@ macro_rules! ast_enum_of_structs_impl {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+- $(
+- $(
+- impl From<$member> for $name {
+- fn from(e: $member) -> $name {
+- $name::$variant(e)
+- }
+- }
+- )*
+- )*
++ $($(
++ ast_enum_from_struct!($name::$variant, $member);
++ )*)*
+
+ #[cfg(feature = "printing")]
+ generate_to_tokens! {
+@@ -140,6 +100,19 @@ macro_rules! ast_enum_of_structs_impl {
+ };
+ }
+
++macro_rules! ast_enum_from_struct {
++ // No From<TokenStream> for verbatim variants.
++ ($name:ident::Verbatim, $member:ident) => {};
++
++ ($name:ident::$variant:ident, $member:ident) => {
++ impl From<$member> for $name {
++ fn from(e: $member) -> $name {
++ $name::$variant(e)
++ }
++ }
++ };
++}
++
+ #[cfg(feature = "printing")]
+ macro_rules! generate_to_tokens {
+ (do_not_generate_to_tokens $($foo:tt)*) => ();
+diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs
+index 49fb853c79..d254673b40 100644
+--- a/third_party/rust/syn/src/op.rs
++++ b/third_party/rust/syn/src/op.rs
+@@ -1,9 +1,8 @@
+ ast_enum! {
+ /// A binary operator: `+`, `+=`, `&`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum BinOp {
+ /// The `+` operator (addition)
+ Add(Token![+]),
+@@ -67,9 +66,8 @@ ast_enum! {
+ ast_enum! {
+ /// A unary operator: `*`, `!`, `-`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum UnOp {
+ /// The `*` operator for dereferencing
+ Deref(Token![*]),
+diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs
+index 7c7b194308..abb4c4c14f 100644
+--- a/third_party/rust/syn/src/parse.rs
++++ b/third_party/rust/syn/src/parse.rs
+@@ -26,8 +26,8 @@
+ //! [`parse_macro_input!`]: ../macro.parse_macro_input.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+ //! use syn::parse::{Parse, ParseStream};
+@@ -109,9 +109,7 @@
+ //! # Ok(())
+ //! # }
+ //! #
+-//! # fn main() {
+-//! # run_parser().unwrap();
+-//! # }
++//! # run_parser().unwrap();
+ //! ```
+ //!
+ //! The [`parse_quote!`] macro also uses this approach.
+@@ -155,8 +153,8 @@
+ //! [`Parser`]: trait.Parser.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::parse::Parser;
+ //! use syn::punctuated::Punctuated;
+@@ -186,7 +184,7 @@
+ //!
+ //! ---
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ #[path = "discouraged.rs"]
+ pub mod discouraged;
+@@ -217,6 +215,11 @@ pub use crate::lookahead::{Lookahead1, Peek};
+
+ /// Parsing interface implemented by all types that can be parsed in a default
+ /// way from a token stream.
++///
++/// Refer to the [module documentation] for details about implementing and using
++/// the `Parse` trait.
++///
++/// [module documentation]: self
+ pub trait Parse: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+ }
+@@ -263,13 +266,16 @@ pub struct ParseBuffer<'a> {
+ // the cell.
+ cell: Cell<Cursor<'static>>,
+ marker: PhantomData<Cursor<'a>>,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
+ }
+
+ impl<'a> Drop for ParseBuffer<'a> {
+ fn drop(&mut self) {
+- if !self.is_empty() && self.unexpected.get().is_none() {
+- self.unexpected.set(Some(self.cursor().span()));
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
++ let (inner, old_span) = inner_unexpected(self);
++ if old_span.is_none() {
++ inner.set(Unexpected::Some(unexpected_span));
++ }
+ }
+ }
+ }
+@@ -324,15 +330,12 @@ impl<'a> Debug for ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+-/// # fn main() {
+-/// # use syn::parse::Parser;
+-/// # let remainder = remainder_after_skipping_past_next_at
+-/// # .parse_str("a @ b c")
+-/// # .unwrap();
+-/// # assert_eq!(remainder.to_string(), "b c");
+-/// # }
++/// # use syn::parse::Parser;
++/// # let remainder = remainder_after_skipping_past_next_at
++/// # .parse_str("a @ b c")
++/// # .unwrap();
++/// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+-#[derive(Copy, Clone)]
+ pub struct StepCursor<'c, 'a> {
+ scope: Span,
+ // This field is covariant in 'c.
+@@ -356,6 +359,14 @@ impl<'c, 'a> Deref for StepCursor<'c, 'a> {
+ }
+ }
+
++impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
++
++impl<'c, 'a> Clone for StepCursor<'c, 'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
+ impl<'c, 'a> StepCursor<'c, 'a> {
+ /// Triggers an error at the current position of the parse stream.
+ ///
+@@ -375,36 +386,81 @@ pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<
+ unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
+ }
+
+-fn skip(input: ParseStream) -> bool {
+- input
+- .step(|cursor| {
+- if let Some((_lifetime, rest)) = cursor.lifetime() {
+- Ok((true, rest))
+- } else if let Some((_token, rest)) = cursor.token_tree() {
+- Ok((true, rest))
+- } else {
+- Ok((false, *cursor))
+- }
+- })
+- .unwrap()
+-}
+-
+ pub(crate) fn new_parse_buffer(
+ scope: Span,
+ cursor: Cursor,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Rc<Cell<Unexpected>>,
+ ) -> ParseBuffer {
+ ParseBuffer {
+ scope,
+ // See comment on `cell` in the struct definition.
+ cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
+ marker: PhantomData,
+- unexpected,
++ unexpected: Cell::new(Some(unexpected)),
++ }
++}
++
++pub(crate) enum Unexpected {
++ None,
++ Some(Span),
++ Chain(Rc<Cell<Unexpected>>),
++}
++
++impl Default for Unexpected {
++ fn default() -> Self {
++ Unexpected::None
+ }
+ }
+
+-pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
+- buffer.unexpected.clone()
++impl Clone for Unexpected {
++ fn clone(&self) -> Self {
++ match self {
++ Unexpected::None => Unexpected::None,
++ Unexpected::Some(span) => Unexpected::Some(*span),
++ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
++ }
++ }
++}
++
++// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
++// swapping in a None is cheap.
++fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
++ let prev = cell.take();
++ let ret = prev.clone();
++ cell.set(prev);
++ ret
++}
++
++fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
++ let mut unexpected = get_unexpected(buffer);
++ loop {
++ match cell_clone(&unexpected) {
++ Unexpected::None => return (unexpected, None),
++ Unexpected::Some(span) => return (unexpected, Some(span)),
++ Unexpected::Chain(next) => unexpected = next,
++ }
++ }
++}
++
++pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
++ cell_clone(&buffer.unexpected).unwrap()
++}
++
++fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
++ if cursor.eof() {
++ return None;
++ }
++ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
++ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
++ return Some(unexpected);
++ }
++ cursor = rest;
++ }
++ if cursor.eof() {
++ None
++ } else {
++ Some(cursor.span())
++ }
+ }
+
+ impl<'a> ParseBuffer<'a> {
+@@ -566,14 +622,17 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// ```
+ pub fn peek2<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor().skip().map_or(false, T::Token::peek)
+ }
+
+ /// Looks at the third-next token in the parse stream.
+ pub fn peek3<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor()
++ .skip()
++ .and_then(Cursor::skip)
++ .map_or(false, T::Token::peek)
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+@@ -615,12 +674,10 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// }
+ /// #
+- /// # fn main() {
+- /// # let input = quote! {
+- /// # struct S(A, B);
+- /// # };
+- /// # syn::parse2::<TupleStruct>(input).unwrap();
+- /// # }
++ /// # let input = quote! {
++ /// # struct S(A, B);
++ /// # };
++ /// # syn::parse2::<TupleStruct>(input).unwrap();
+ /// ```
+ pub fn parse_terminated<T, P: Parse>(
+ &self,
+@@ -847,8 +904,8 @@ impl<'a> ParseBuffer<'a> {
+ cell: self.cell.clone(),
+ marker: PhantomData,
+ // Not the parent's unexpected. Nothing cares whether the clone
+- // parses all the way.
+- unexpected: Rc::new(Cell::new(None)),
++ // parses all the way unless we `advance_to`.
++ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
+ }
+ }
+
+@@ -923,13 +980,11 @@ impl<'a> ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+- /// # fn main() {
+- /// # use syn::parse::Parser;
+- /// # let remainder = remainder_after_skipping_past_next_at
+- /// # .parse_str("a @ b c")
+- /// # .unwrap();
+- /// # assert_eq!(remainder.to_string(), "b c");
+- /// # }
++ /// # use syn::parse::Parser;
++ /// # let remainder = remainder_after_skipping_past_next_at
++ /// # .parse_str("a @ b c")
++ /// # .unwrap();
++ /// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+ pub fn step<F, R>(&self, function: F) -> Result<R>
+ where
+@@ -961,6 +1016,18 @@ impl<'a> ParseBuffer<'a> {
+ Ok(node)
+ }
+
++ /// Returns the `Span` of the next token in the parse stream, or
++ /// `Span::call_site()` if this parse stream has completely exhausted its
++ /// input `TokenStream`.
++ pub fn span(&self) -> Span {
++ let cursor = self.cursor();
++ if cursor.eof() {
++ self.scope
++ } else {
++ crate::buffer::open_span_of_group(cursor)
++ }
++ }
++
+ /// Provides low-level access to the token representation underlying this
+ /// parse stream.
+ ///
+@@ -971,7 +1038,7 @@ impl<'a> ParseBuffer<'a> {
+ }
+
+ fn check_unexpected(&self) -> Result<()> {
+- match self.unexpected.get() {
++ match inner_unexpected(self).1 {
+ Some(span) => Err(Error::new(span, "unexpected token")),
+ None => Ok(()),
+ }
+@@ -1048,7 +1115,7 @@ impl Parse for Literal {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait Parser: Sized {
+ type Output;
+
+@@ -1063,7 +1130,7 @@ pub trait Parser: Sized {
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -1088,6 +1155,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let _ = scope;
+ self.parse2(tokens)
+@@ -1095,6 +1163,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ input.parse().and_then(|tokens| self.parse2(tokens))
+ }
+@@ -1103,7 +1172,7 @@ pub trait Parser: Sized {
+ fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
+ let scope = Span::call_site();
+ let cursor = tokens.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ new_parse_buffer(scope, cursor, unexpected)
+ }
+
+@@ -1118,38 +1187,42 @@ where
+ let state = tokens_to_parse_buffer(&buf);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let buf = TokenBuffer::new2(tokens);
+ let cursor = buf.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let state = new_parse_buffer(scope, cursor, unexpected);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ self(input)
+ }
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
+ f.__parse_scoped(scope, tokens)
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
+ f.__parse_stream(input)
+ }
+@@ -1160,8 +1233,8 @@ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Out
+ /// provided any attribute args.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::parse_macro_input;
+ /// use syn::parse::Nothing;
+diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs
+index d6e0725c17..c8fc1cea37 100644
+--- a/third_party/rust/syn/src/parse_macro_input.rs
++++ b/third_party/rust/syn/src/parse_macro_input.rs
+@@ -16,8 +16,8 @@
+ /// #\[proc_macro_attribute\] attribute.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, Result};
+ /// use syn::parse::{Parse, ParseStream};
+@@ -43,7 +43,31 @@
+ /// # "".parse().unwrap()
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++///
++/// <br>
++///
++/// # Expansion
++///
++/// `parse_macro_input!($variable as $Type)` expands to something like:
++///
++/// ```no_run
++/// # extern crate proc_macro;
++/// #
++/// # macro_rules! doc_test {
++/// # ($variable:ident as $Type:ty) => {
++/// match syn::parse::<$Type>($variable) {
++/// Ok(syntax_tree) => syntax_tree,
++/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
++/// }
++/// # };
++/// # }
++/// #
++/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
++/// # let _ = doc_test!(input as syn::Ident);
++/// # proc_macro::TokenStream::new()
++/// # }
++/// ```
++#[macro_export]
+ macro_rules! parse_macro_input {
+ ($tokenstream:ident as $ty:ty) => {
+ match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
+@@ -54,7 +78,7 @@ macro_rules! parse_macro_input {
+ }
+ };
+ ($tokenstream:ident) => {
+- parse_macro_input!($tokenstream as _)
++ $crate::parse_macro_input!($tokenstream as _)
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs
+index 18a47b95c7..66aa818cd0 100644
+--- a/third_party/rust/syn/src/parse_quote.rs
++++ b/third_party/rust/syn/src/parse_quote.rs
+@@ -24,7 +24,7 @@
+ /// }
+ /// ```
+ ///
+-/// *This macro is available if Syn is built with the `"parsing"` feature,
++/// *This macro is available only if Syn is built with the `"parsing"` feature,
+ /// although interpolation of syntax tree nodes into the quoted tokens is only
+ /// supported if Syn is built with the `"printing"` feature as well.*
+ ///
+@@ -56,8 +56,10 @@
+ /// or inner like `#![...]`
+ /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
+ /// `P` with optional trailing punctuation
++/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
+ ///
+ /// [`Punctuated<T, P>`]: punctuated::Punctuated
++/// [`Vec<Stmt>`]: Block::parse_within
+ ///
+ /// # Panics
+ ///
+@@ -67,7 +69,7 @@
+ //
+ // TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
+ // https://github.com/rust-lang/rust/issues/62834
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! parse_quote {
+ ($($tt:tt)*) => {
+ $crate::parse_quote::parse(
+@@ -112,6 +114,8 @@ impl<T: Parse> ParseQuote for T {
+ use crate::punctuated::Punctuated;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::{attr, Attribute};
++#[cfg(feature = "full")]
++use crate::{Block, Stmt};
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl ParseQuote for Attribute {
+@@ -129,3 +133,10 @@ impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
+ Self::parse_terminated(input)
+ }
+ }
++
++#[cfg(feature = "full")]
++impl ParseQuote for Vec<Stmt> {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Block::parse_within(input)
++ }
++}
+diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs
+index 9371e05493..e9576a2361 100644
+--- a/third_party/rust/syn/src/pat.rs
++++ b/third_party/rust/syn/src/pat.rs
+@@ -1,16 +1,12 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// A pattern in a local binding, function signature, match expression, or
+ /// various other places.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Pat #manual_extra_traits {
++ pub enum Pat {
+ /// A box pattern: `box v`.
+ Box(PatBox),
+
+@@ -86,7 +82,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A box pattern: `box v`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatBox {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -97,7 +93,10 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// It may also be a unit struct or struct variant (e.g. `None`), or a
++ /// constant; these cannot be distinguished syntactically.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatIdent {
+ pub attrs: Vec<Attribute>,
+ pub by_ref: Option<Token![ref]>,
+@@ -113,7 +112,7 @@ ast_struct! {
+ /// This holds an `Expr` rather than a `Lit` because negative numbers
+ /// are represented as an `Expr::Unary`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatLit {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -123,7 +122,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in pattern position.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -133,7 +132,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any one of a set of cases.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatOr {
+ pub attrs: Vec<Attribute>,
+ pub leading_vert: Option<Token![|]>,
+@@ -150,7 +149,7 @@ ast_struct! {
+ /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
+ /// associated constants.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+@@ -161,7 +160,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range pattern: `1..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRange {
+ pub attrs: Vec<Attribute>,
+ pub lo: Box<Expr>,
+@@ -173,7 +172,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference pattern: `&mut var`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -185,7 +184,7 @@ ast_struct! {
+ ast_struct! {
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRest {
+ pub attrs: Vec<Attribute>,
+ pub dot2_token: Token![..],
+@@ -195,7 +194,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatSlice {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -206,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -219,7 +218,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple pattern: `(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTuple {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -230,7 +229,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTupleStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -241,7 +240,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription pattern: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatType {
+ pub attrs: Vec<Attribute>,
+ pub pat: Box<Pat>,
+@@ -253,7 +252,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any value: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatWild {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+@@ -266,7 +265,7 @@ ast_struct! {
+ /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
+ /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldPat {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+@@ -275,122 +274,17 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Pat {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Pat {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Pat::Box(this), Pat::Box(other)) => this == other,
+- (Pat::Ident(this), Pat::Ident(other)) => this == other,
+- (Pat::Lit(this), Pat::Lit(other)) => this == other,
+- (Pat::Macro(this), Pat::Macro(other)) => this == other,
+- (Pat::Or(this), Pat::Or(other)) => this == other,
+- (Pat::Path(this), Pat::Path(other)) => this == other,
+- (Pat::Range(this), Pat::Range(other)) => this == other,
+- (Pat::Reference(this), Pat::Reference(other)) => this == other,
+- (Pat::Rest(this), Pat::Rest(other)) => this == other,
+- (Pat::Slice(this), Pat::Slice(other)) => this == other,
+- (Pat::Struct(this), Pat::Struct(other)) => this == other,
+- (Pat::Tuple(this), Pat::Tuple(other)) => this == other,
+- (Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
+- (Pat::Type(this), Pat::Type(other)) => this == other,
+- (Pat::Verbatim(this), Pat::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Pat::Wild(this), Pat::Wild(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Pat {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Pat::Box(pat) => {
+- hash.write_u8(0);
+- pat.hash(hash);
+- }
+- Pat::Ident(pat) => {
+- hash.write_u8(1);
+- pat.hash(hash);
+- }
+- Pat::Lit(pat) => {
+- hash.write_u8(2);
+- pat.hash(hash);
+- }
+- Pat::Macro(pat) => {
+- hash.write_u8(3);
+- pat.hash(hash);
+- }
+- Pat::Or(pat) => {
+- hash.write_u8(4);
+- pat.hash(hash);
+- }
+- Pat::Path(pat) => {
+- hash.write_u8(5);
+- pat.hash(hash);
+- }
+- Pat::Range(pat) => {
+- hash.write_u8(6);
+- pat.hash(hash);
+- }
+- Pat::Reference(pat) => {
+- hash.write_u8(7);
+- pat.hash(hash);
+- }
+- Pat::Rest(pat) => {
+- hash.write_u8(8);
+- pat.hash(hash);
+- }
+- Pat::Slice(pat) => {
+- hash.write_u8(9);
+- pat.hash(hash);
+- }
+- Pat::Struct(pat) => {
+- hash.write_u8(10);
+- pat.hash(hash);
+- }
+- Pat::Tuple(pat) => {
+- hash.write_u8(11);
+- pat.hash(hash);
+- }
+- Pat::TupleStruct(pat) => {
+- hash.write_u8(12);
+- pat.hash(hash);
+- }
+- Pat::Type(pat) => {
+- hash.write_u8(13);
+- pat.hash(hash);
+- }
+- Pat::Verbatim(pat) => {
+- hash.write_u8(14);
+- TokenStreamHelper(pat).hash(hash);
+- }
+- Pat::Wild(pat) => {
+- hash.write_u8(15);
+- pat.hash(hash);
+- }
+- Pat::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-mod parsing {
++pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
+ use crate::path;
+
+ impl Parse for Pat {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ && ({
+@@ -411,7 +305,6 @@ mod parsing {
+ || lookahead.peek(Token![<])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ pat_path_or_macro_or_struct_or_range(input)
+@@ -434,7 +327,7 @@ mod parsing {
+ } else if lookahead.peek(token::Bracket) {
+ input.call(pat_slice).map(Pat::Slice)
+ } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
+- input.call(pat_rest).map(Pat::Rest)
++ pat_range_half_open(input, begin)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -442,10 +335,11 @@ mod parsing {
+ }
+
+ fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
++ let begin = input.fork();
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ if input.peek(Token![..]) {
+- return pat_range(input, qself, path).map(Pat::Range);
++ return pat_range(input, begin, qself, path);
+ }
+
+ if qself.is_some() {
+@@ -487,7 +381,7 @@ mod parsing {
+ } else if input.peek(token::Paren) {
+ pat_tuple_struct(input, path).map(Pat::TupleStruct)
+ } else if input.peek(Token![..]) {
+- pat_range(input, qself, path).map(Pat::Range)
++ pat_range(input, begin, qself, path)
+ } else {
+ Ok(Pat::Path(PatPath {
+ attrs: Vec::new(),
+@@ -546,7 +440,7 @@ mod parsing {
+ while !content.is_empty() && !content.peek(Token![..]) {
+ let value = content.call(field_pat)?;
+ fields.push_value(value);
+- if !content.peek(Token![,]) {
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+@@ -578,6 +472,7 @@ mod parsing {
+ }
+
+ fn field_pat(input: ParseStream) -> Result<FieldPat> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let boxed: Option<Token![box]> = input.parse()?;
+ let by_ref: Option<Token![ref]> = input.parse()?;
+ let mutability: Option<Token![mut]> = input.parse()?;
+@@ -587,10 +482,10 @@ mod parsing {
+ || member.is_unnamed()
+ {
+ return Ok(FieldPat {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token: input.parse()?,
+- pat: input.parse()?,
++ pat: Box::new(multi_pat(input)?),
+ });
+ }
+
+@@ -610,30 +505,57 @@ mod parsing {
+ if let Some(boxed) = boxed {
+ pat = Pat::Box(PatBox {
+ attrs: Vec::new(),
+- pat: Box::new(pat),
+ box_token: boxed,
++ pat: Box::new(pat),
+ });
+ }
+
+ Ok(FieldPat {
++ attrs,
+ member: Member::Named(ident),
+- pat: Box::new(pat),
+- attrs: Vec::new(),
+ colon_token: None,
++ pat: Box::new(pat),
+ })
+ }
+
+- fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
+- Ok(PatRange {
+- attrs: Vec::new(),
+- lo: Box::new(Expr::Path(ExprPath {
++ fn pat_range(
++ input: ParseStream,
++ begin: ParseBuffer,
++ qself: Option<QSelf>,
++ path: Path,
++ ) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
+ attrs: Vec::new(),
+- qself,
+- path,
+- })),
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- })
++ lo: Box::new(Expr::Path(ExprPath {
++ attrs: Vec::new(),
++ qself,
++ path,
++ })),
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
++ }
++
++ fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if hi.is_some() {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ } else {
++ match limits {
++ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
++ attrs: Vec::new(),
++ dot2_token,
++ })),
++ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
++ }
++ }
+ }
+
+ fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
+@@ -642,7 +564,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -668,14 +590,21 @@ mod parsing {
+ }
+
+ fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
+- let lo = input.call(pat_lit_expr)?;
++ let begin = input.fork();
++ let lo = input.call(pat_lit_expr)?.unwrap();
+ if input.peek(Token![..]) {
+- Ok(Pat::Range(PatRange {
+- attrs: Vec::new(),
+- lo,
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- }))
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
++ attrs: Vec::new(),
++ lo,
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Ok(Pat::Lit(PatLit {
+ attrs: Vec::new(),
+@@ -684,7 +613,17 @@ mod parsing {
+ }
+ }
+
+- fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
++ fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
++ if input.is_empty()
++ || input.peek(Token![|])
++ || input.peek(Token![=>])
++ || input.peek(Token![:]) && !input.peek(Token![::])
++ || input.peek(Token![,])
++ || input.peek(Token![;])
++ {
++ return Ok(None);
++ }
++
+ let neg: Option<Token![-]> = input.parse()?;
+
+ let lookahead = input.lookahead1();
+@@ -696,7 +635,6 @@ mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![Self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ {
+ Expr::Path(input.parse()?)
+@@ -704,7 +642,7 @@ mod parsing {
+ return Err(lookahead.error());
+ };
+
+- Ok(Box::new(if let Some(neg) = neg {
++ Ok(Some(Box::new(if let Some(neg) = neg {
+ Expr::Unary(ExprUnary {
+ attrs: Vec::new(),
+ op: UnOp::Neg(neg),
+@@ -712,7 +650,7 @@ mod parsing {
+ })
+ } else {
+ expr
+- }))
++ })))
+ }
+
+ fn pat_slice(input: ParseStream) -> Result<PatSlice> {
+@@ -721,7 +659,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -737,11 +675,35 @@ mod parsing {
+ })
+ }
+
+- fn pat_rest(input: ParseStream) -> Result<PatRest> {
+- Ok(PatRest {
+- attrs: Vec::new(),
+- dot2_token: input.parse()?,
+- })
++ pub fn multi_pat(input: ParseStream) -> Result<Pat> {
++ multi_pat_impl(input, None)
++ }
++
++ pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
++ let leading_vert: Option<Token![|]> = input.parse()?;
++ multi_pat_impl(input, leading_vert)
++ }
++
++ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
++ let mut pat: Pat = input.parse()?;
++ if leading_vert.is_some()
++ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
++ {
++ let mut cases = Punctuated::new();
++ cases.push_value(pat);
++ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
++ let punct = input.parse()?;
++ cases.push_punct(punct);
++ let pat: Pat = input.parse()?;
++ cases.push_value(pat);
++ }
++ pat = Pat::Or(PatOr {
++ attrs: Vec::new(),
++ leading_vert,
++ cases,
++ });
++ }
++ Ok(pat)
+ }
+ }
+
+@@ -756,12 +718,14 @@ mod printing {
+
+ impl ToTokens for PatWild {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatIdent {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.by_ref.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+@@ -774,6 +738,7 @@ mod printing {
+
+ impl ToTokens for PatStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+@@ -788,6 +753,7 @@ mod printing {
+
+ impl ToTokens for PatTupleStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -804,12 +770,14 @@ mod printing {
+
+ impl ToTokens for PatPath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ private::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ impl ToTokens for PatTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -818,6 +786,7 @@ mod printing {
+
+ impl ToTokens for PatBox {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.box_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -825,6 +794,7 @@ mod printing {
+
+ impl ToTokens for PatReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.and_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+@@ -833,18 +803,21 @@ mod printing {
+
+ impl ToTokens for PatRest {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.dot2_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatLit {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.expr.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatRange {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.lo.to_tokens(tokens);
+ match &self.limits {
+ RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+@@ -856,6 +829,7 @@ mod printing {
+
+ impl ToTokens for PatSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -864,12 +838,14 @@ mod printing {
+
+ impl ToTokens for PatMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatOr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.leading_vert.to_tokens(tokens);
+ self.cases.to_tokens(tokens);
+ }
+@@ -877,6 +853,7 @@ mod printing {
+
+ impl ToTokens for FieldPat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ if let Some(colon_token) = &self.colon_token {
+ self.member.to_tokens(tokens);
+ colon_token.to_tokens(tokens);
+diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs
+index 8dda43ee67..15c0fcc664 100644
+--- a/third_party/rust/syn/src/path.rs
++++ b/third_party/rust/syn/src/path.rs
+@@ -2,9 +2,9 @@ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+- /// A path at which a named item is exported: `std::collections::HashMap`.
++ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Path {
+ pub leading_colon: Option<Token![::]>,
+@@ -29,7 +29,7 @@ where
+ ast_struct! {
+ /// A segment of a path together with any path arguments on that segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct PathSegment {
+ pub ident: Ident,
+@@ -52,7 +52,7 @@ where
+ ast_enum! {
+ /// Angle bracketed or parenthesized arguments of a path segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Angle bracketed
+@@ -98,7 +98,7 @@ impl PathArguments {
+ ast_enum! {
+ /// An individual generic argument, like `'a`, `T`, or `Item = T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum GenericArgument {
+ /// A lifetime argument.
+@@ -122,7 +122,7 @@ ast_struct! {
+ /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
+ /// V>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct AngleBracketedGenericArguments {
+ pub colon2_token: Option<Token![::]>,
+@@ -135,7 +135,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binding (equality constraint) on an associated type: `Item = u8`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Binding {
+ pub ident: Ident,
+@@ -147,7 +147,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type bound: `Iterator<Item: Display>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Constraint {
+ pub ident: Ident,
+@@ -160,7 +160,7 @@ ast_struct! {
+ /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
+ /// C`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct ParenthesizedGenericArguments {
+ pub paren_token: token::Paren,
+@@ -189,7 +189,7 @@ ast_struct! {
+ /// ty position = 0
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct QSelf {
+ pub lt_token: Token![<],
+@@ -291,11 +291,7 @@ pub mod parsing {
+
+ impl PathSegment {
+ fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+- if input.peek(Token![super])
+- || input.peek(Token![self])
+- || input.peek(Token![crate])
+- || input.peek(Token![extern])
+- {
++ if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
+ let ident = input.call(Ident::parse_any)?;
+ return Ok(PathSegment::from(ident));
+ }
+@@ -358,7 +354,7 @@ pub mod parsing {
+ impl Path {
+ /// Parse a `Path` containing no path arguments on any of its segments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -400,7 +396,6 @@ pub mod parsing {
+ && !input.peek(Token![self])
+ && !input.peek(Token![Self])
+ && !input.peek(Token![crate])
+- && !input.peek(Token![extern])
+ {
+ break;
+ }
+@@ -433,7 +428,7 @@ pub mod parsing {
+ /// path arguments, and
+ /// - the ident of the first path segment is equal to the given one.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -472,7 +467,7 @@ pub mod parsing {
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ pub fn get_ident(&self) -> Option<&Ident> {
+ if self.leading_colon.is_none()
+diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs
+index 38c7bf4e82..46c82a65b1 100644
+--- a/third_party/rust/syn/src/punctuated.rs
++++ b/third_party/rust/syn/src/punctuated.rs
+@@ -22,6 +22,8 @@
+
+ #[cfg(feature = "extra-traits")]
+ use std::fmt::{self, Debug};
++#[cfg(feature = "extra-traits")]
++use std::hash::{Hash, Hasher};
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use std::iter;
+ use std::iter::FromIterator;
+@@ -41,8 +43,6 @@ use crate::token::Token;
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Punctuated<T, P> {
+ inner: Vec<(T, P)>,
+ last: Option<Box<T>>,
+@@ -76,22 +76,19 @@ impl<T, P> Punctuated<T, P> {
+ self.iter().next()
+ }
+
++ /// Mutably borrows the first element in this sequence.
++ pub fn first_mut(&mut self) -> Option<&mut T> {
++ self.iter_mut().next()
++ }
++
+ /// Borrows the last element in this sequence.
+ pub fn last(&self) -> Option<&T> {
+- if self.last.is_some() {
+- self.last.as_ref().map(Box::as_ref)
+- } else {
+- self.inner.last().map(|pair| &pair.0)
+- }
++ self.iter().next_back()
+ }
+
+ /// Mutably borrows the last element in this sequence.
+ pub fn last_mut(&mut self) -> Option<&mut T> {
+- if self.last.is_some() {
+- self.last.as_mut().map(Box::as_mut)
+- } else {
+- self.inner.last_mut().map(|pair| &mut pair.0)
+- }
++ self.iter_mut().next_back()
+ }
+
+ /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
+@@ -230,13 +227,19 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++ /// Clears the sequence of all values and punctuation, making it empty.
++ pub fn clear(&mut self) {
++ self.inner.clear();
++ self.last = None;
++ }
++
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated(input: ParseStream) -> Result<Self>
+@@ -256,7 +259,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated_with(
+@@ -292,7 +295,7 @@ impl<T, P> Punctuated<T, P> {
+ /// is not followed by a `P`, even if there are remaining tokens in the
+ /// stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
+@@ -312,7 +315,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty_with(
+@@ -338,6 +341,53 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Punctuated<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ Punctuated {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Eq for Punctuated<T, P>
++where
++ T: Eq,
++ P: Eq,
++{
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> PartialEq for Punctuated<T, P>
++where
++ T: PartialEq,
++ P: PartialEq,
++{
++ fn eq(&self, other: &Self) -> bool {
++ let Punctuated { inner, last } = self;
++ *inner == other.inner && *last == other.last
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Hash for Punctuated<T, P>
++where
++ T: Hash,
++ P: Hash,
++{
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ let Punctuated { inner, last } = self;
++ inner.hash(state);
++ last.hash(state);
++ }
++}
++
+ #[cfg(feature = "extra-traits")]
+ impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -536,7 +586,6 @@ impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoPairs<T, P> {
+ inner: vec::IntoIter<(T, P)>,
+ last: option::IntoIter<T>,
+@@ -572,12 +621,24 @@ impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+ }
+ }
+
++impl<T, P> Clone for IntoPairs<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoPairs {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
+ /// An iterator over owned values of type `T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoIter<T> {
+ inner: vec::IntoIter<T>,
+ }
+@@ -606,6 +667,17 @@ impl<T> ExactSizeIterator for IntoIter<T> {
+ }
+ }
+
++impl<T> Clone for IntoIter<T>
++where
++ T: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoIter {
++ inner: self.inner.clone(),
++ }
++ }
++}
++
+ /// An iterator over borrowed values of type `&T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+@@ -799,7 +871,6 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a, T> for I where
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub enum Pair<T, P> {
+ Punctuated(T, P),
+ End(T),
+@@ -856,6 +927,20 @@ impl<T, P> Pair<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Pair<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ match self {
++ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
++ Pair::End(t) => Pair::End(t.clone()),
++ }
++ }
++}
++
+ impl<T, P> Index<usize> for Punctuated<T, P> {
+ type Output = T;
+
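A minimal usage sketch of the Punctuated API touched above (parse_terminated plus the newly added clear). This is illustrative only, not part of the patch, and assumes syn 1.x with its default "parsing" and "proc-macro" features:

    use syn::parse::Parser;
    use syn::punctuated::Punctuated;
    use syn::{Ident, Token};

    fn main() -> syn::Result<()> {
        // parse_terminated: zero or more `T` separated by `P`, with
        // optional trailing punctuation, consuming the entire input.
        let parser = Punctuated::<Ident, Token![,]>::parse_terminated;
        let mut idents = parser.parse_str("a, b, c,")?;
        assert_eq!(idents.len(), 3);

        // clear() (introduced in the hunk above) drops the values and
        // their punctuation in one call.
        idents.clear();
        assert!(idents.is_empty());
        Ok(())
    }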
+diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs
+new file mode 100644
+index 0000000000..ccfb8b5ad0
+--- /dev/null
++++ b/third_party/rust/syn/src/reserved.rs
+@@ -0,0 +1,42 @@
++// Type for a syntax tree node that is reserved for future use.
++//
++// For example ExprReference contains a field `raw` of type Reserved. If `&raw
++// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
++// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
++// the possibility of breaking any code.
++
++use proc_macro2::Span;
++use std::marker::PhantomData;
++
++#[cfg(feature = "extra-traits")]
++use std::fmt::{self, Debug};
++
++ast_struct! {
++ pub struct Reserved {
++ _private: PhantomData<Span>,
++ }
++}
++
++impl Default for Reserved {
++ fn default() -> Self {
++ Reserved {
++ _private: PhantomData,
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for Reserved {
++ fn clone(&self) -> Self {
++ Reserved {
++ _private: self._private,
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl Debug for Reserved {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter.debug_struct("Reserved").finish()
++ }
++}
+diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs
+index 71ffe26b81..01591cedcb 100644
+--- a/third_party/rust/syn/src/spanned.rs
++++ b/third_party/rust/syn/src/spanned.rs
+@@ -1,7 +1,7 @@
+ //! A trait that can provide the `Span` of the complete contents of a syntax
+ //! tree node.
+ //!
+-//! *This module is available if Syn is built with both the `"parsing"` and
++//! *This module is available only if Syn is built with both the `"parsing"` and
+ //! `"printing"` features.*
+ //!
+ //! <br>
+@@ -97,7 +97,7 @@ use quote::spanned::Spanned as ToTokens;
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with both the `"parsing"` and
++/// *This trait is available only if Syn is built with both the `"parsing"` and
+ /// `"printing"` features.*
+ pub trait Spanned {
+ /// Returns a `Span` covering the complete contents of this syntax tree
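A hedged usage sketch of the Spanned trait documented above (not part of the patch; assumes syn 1.x with the default "derive", "parsing", and "printing" features). Because span() covers the complete node, an error built from it points at the whole type rather than its first token:

    use syn::spanned::Spanned;

    fn main() -> syn::Result<()> {
        let ty: syn::Type = syn::parse_str("Vec<Option<u8>>")?;
        // The span covers the complete contents of the syntax tree node.
        let err = syn::Error::new(ty.span(), "this type is not supported here");
        println!("{}", err);
        Ok(())
    }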
+diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs
+index e4277fdbaa..b06e843d75 100644
+--- a/third_party/rust/syn/src/stmt.rs
++++ b/third_party/rust/syn/src/stmt.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A braced block containing Rust statements.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Block {
+ pub brace_token: token::Brace,
+ /// Statements in a block
+@@ -14,7 +14,7 @@ ast_struct! {
+ ast_enum! {
+ /// A statement, usually ending in a semicolon.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum Stmt {
+ /// A local (let) binding.
+ Local(Local),
+@@ -33,7 +33,7 @@ ast_enum! {
+ ast_struct! {
+ /// A local `let` binding: `let x: u64 = s.parse()?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Local {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -47,14 +47,15 @@ ast_struct! {
+ pub mod parsing {
+ use super::*;
+
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+- use crate::punctuated::Punctuated;
++ use proc_macro2::TokenStream;
+
+ impl Block {
+ /// Parse the body of a block as zero or more statements, possibly
+ /// including one trailing expression.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -106,8 +107,8 @@ pub mod parsing {
+ pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
+ let mut stmts = Vec::new();
+ loop {
+- while input.peek(Token![;]) {
+- input.parse::<Token![;]>()?;
++ while let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
+ }
+ if input.is_empty() {
+ break;
+@@ -146,55 +147,55 @@ pub mod parsing {
+ }
+
+ fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- // TODO: optimize using advance_to
++ let mut attrs = input.call(Attribute::parse_outer)?;
++
++ // brace-style macros; paren and bracket macros get parsed as
++ // expression statements.
+ let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
++ if let Ok(path) = ahead.call(Path::parse_mod_style) {
++ if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
++ input.advance_to(&ahead);
++ return stmt_mac(input, attrs, path);
++ }
++ }
+
+- if {
+- let ahead = ahead.fork();
+- // Only parse braces here; paren and bracket will get parsed as
+- // expression statements
+- ahead.call(Path::parse_mod_style).is_ok()
+- && ahead.parse::<Token![!]>().is_ok()
+- && (ahead.peek(token::Brace) || ahead.peek(Ident))
+- } {
+- stmt_mac(input)
+- } else if ahead.peek(Token![let]) {
+- stmt_local(input).map(Stmt::Local)
+- } else if ahead.peek(Token![pub])
+- || ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![use])
+- || ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
+- || ahead.peek(Token![const])
+- || ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
+- || ahead.peek(Token![async])
+- && (ahead.peek2(Token![unsafe])
+- || ahead.peek2(Token![extern])
+- || ahead.peek2(Token![fn]))
+- || ahead.peek(Token![fn])
+- || ahead.peek(Token![mod])
+- || ahead.peek(Token![type])
+- || ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
+- || ahead.peek(Token![struct])
+- || ahead.peek(Token![enum])
+- || ahead.peek(Token![union]) && ahead.peek2(Ident)
+- || ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+- || ahead.peek(Token![trait])
+- || ahead.peek(Token![default])
+- && (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
+- || ahead.peek(Token![impl])
+- || ahead.peek(Token![macro])
++ if input.peek(Token![let]) {
++ stmt_local(input, attrs).map(Stmt::Local)
++ } else if input.peek(Token![pub])
++ || input.peek(Token![crate]) && !input.peek2(Token![::])
++ || input.peek(Token![extern])
++ || input.peek(Token![use])
++ || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
++ || input.peek(Token![const])
++ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
++ || input.peek(Token![async])
++ && (input.peek2(Token![unsafe])
++ || input.peek2(Token![extern])
++ || input.peek2(Token![fn]))
++ || input.peek(Token![fn])
++ || input.peek(Token![mod])
++ || input.peek(Token![type])
++ || input.peek(item::parsing::existential) && input.peek2(Token![type])
++ || input.peek(Token![struct])
++ || input.peek(Token![enum])
++ || input.peek(Token![union]) && input.peek2(Ident)
++ || input.peek(Token![auto]) && input.peek2(Token![trait])
++ || input.peek(Token![trait])
++ || input.peek(Token![default])
++ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
++ || input.peek(Token![impl])
++ || input.peek(Token![macro])
+ {
+- input.parse().map(Stmt::Item)
++ let mut item: Item = input.parse()?;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(Stmt::Item(item))
+ } else {
+- stmt_expr(input, allow_nosemi)
++ stmt_expr(input, allow_nosemi, attrs)
+ }
+ }
+
+- fn stmt_mac(input: ParseStream) -> Result<Stmt> {
+- let attrs = input.call(Attribute::parse_outer)?;
+- let path = input.call(Path::parse_mod_style)?;
++ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
+ let bang_token: Token![!] = input.parse()?;
+ let ident: Option<Ident> = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+@@ -213,33 +214,12 @@ pub mod parsing {
+ })))
+ }
+
+- fn stmt_local(input: ParseStream) -> Result<Local> {
++ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
+ Ok(Local {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ let_token: input.parse()?,
+ pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+ if input.peek(Token![:]) {
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+@@ -265,12 +245,19 @@ pub mod parsing {
+ })
+ }
+
+- fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ fn stmt_expr(
++ input: ParseStream,
++ allow_nosemi: bool,
++ mut attrs: Vec<Attribute>,
++ ) -> Result<Stmt> {
+ let mut e = expr::parsing::expr_early(input)?;
+
+- attrs.extend(e.replace_attrs(Vec::new()));
+- e.replace_attrs(attrs);
++ let mut attr_target = &mut e;
++ while let Expr::Binary(e) = attr_target {
++ attr_target = &mut e.left;
++ }
++ attrs.extend(attr_target.replace_attrs(Vec::new()));
++ attr_target.replace_attrs(attrs);
+
+ if input.peek(Token![;]) {
+ return Ok(Stmt::Semi(e, input.parse()?));
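For context, a sketch of how the statement parser above is driven from user code (illustrative only; assumes syn 1.x with the "full" feature). Block::parse_within accepts a statement list that may end in a trailing expression without a semicolon:

    use syn::parse::Parser;
    use syn::{Block, Stmt};

    fn main() -> syn::Result<()> {
        // parse_within: zero or more statements, possibly ending in an
        // expression with no trailing semicolon.
        let stmts: Vec<Stmt> = Block::parse_within.parse_str("let x = 1; x + 1")?;
        assert_eq!(stmts.len(), 2);
        assert!(matches!(stmts[1], Stmt::Expr(_)));
        Ok(())
    }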
+diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs
+index 0b8c18192f..8539378c5e 100644
+--- a/third_party/rust/syn/src/token.rs
++++ b/third_party/rust/syn/src/token.rs
+@@ -88,7 +88,6 @@
+ //! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+ //! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+
+-use std;
+ #[cfg(feature = "extra-traits")]
+ use std::cmp;
+ #[cfg(feature = "extra-traits")]
+@@ -97,13 +96,13 @@ use std::fmt::{self, Debug};
+ use std::hash::{Hash, Hasher};
+ use std::ops::{Deref, DerefMut};
+
+-#[cfg(feature = "parsing")]
+-use proc_macro2::Delimiter;
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ use proc_macro2::Ident;
+ use proc_macro2::Span;
+ #[cfg(feature = "printing")]
+ use proc_macro2::TokenStream;
++#[cfg(feature = "parsing")]
++use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
+ #[cfg(feature = "printing")]
+ use quote::{ToTokens, TokenStreamExt};
+
+@@ -112,10 +111,8 @@ use self::private::WithSpan;
+ use crate::buffer::Cursor;
+ #[cfg(feature = "parsing")]
+ use crate::error::Result;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lifetime::Lifetime;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
+ #[cfg(feature = "parsing")]
+@@ -155,21 +152,20 @@ mod private {
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for Ident {}
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
++ use crate::parse::Unexpected;
+ use std::cell::Cell;
+ use std::rc::Rc;
+
+ let scope = Span::call_site();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
+ peek(&buffer)
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! impl_token {
+- ($name:ident $display:expr) => {
++ ($display:tt $name:ty) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+@@ -189,24 +185,38 @@ macro_rules! impl_token {
+ };
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lifetime "lifetime");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lit "literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitStr "string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByteStr "byte string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByte "byte literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitChar "character literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitInt "integer literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitFloat "floating point literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitBool "boolean literal");
++impl_token!("lifetime" Lifetime);
++impl_token!("literal" Lit);
++impl_token!("string literal" LitStr);
++impl_token!("byte string literal" LitByteStr);
++impl_token!("byte literal" LitByte);
++impl_token!("character literal" LitChar);
++impl_token!("integer literal" LitInt);
++impl_token!("floating point literal" LitFloat);
++impl_token!("boolean literal" LitBool);
++impl_token!("group token" proc_macro2::Group);
++
++macro_rules! impl_low_level_token {
++ ($display:tt $ty:ident $get:ident) => {
++ #[cfg(feature = "parsing")]
++ impl Token for $ty {
++ fn peek(cursor: Cursor) -> bool {
++ cursor.$get().is_some()
++ }
++
++ fn display() -> &'static str {
++ $display
++ }
++ }
++
++ #[cfg(feature = "parsing")]
++ impl private::Sealed for $ty {}
++ };
++}
++
++impl_low_level_token!("punctuation token" Punct punct);
++impl_low_level_token!("literal" Literal literal);
++impl_low_level_token!("token" TokenTree token_tree);
+
+ // Not public API.
+ #[doc(hidden)]
+@@ -233,7 +243,6 @@ impl<T: CustomToken> Token for T {
+ macro_rules! define_keywords {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ ///
+ /// Don't try to remember the name of this type &mdash; use the
+@@ -260,6 +269,16 @@ macro_rules! define_keywords {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -338,7 +357,6 @@ macro_rules! impl_deref_if_len_is_1 {
+ macro_rules! define_punctuation_structs {
+ ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[repr(C)]
+ #[$doc]
+ ///
+@@ -366,6 +384,16 @@ macro_rules! define_punctuation_structs {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -436,7 +464,6 @@ macro_rules! define_punctuation {
+ macro_rules! define_delimiters {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ pub struct $name {
+ pub span: Span,
+@@ -458,6 +485,16 @@ macro_rules! define_delimiters {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -855,7 +892,7 @@ pub mod parsing {
+ }
+
+ pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
+- let mut spans = [input.cursor().span(); 3];
++ let mut spans = [input.span(); 3];
+ punct_helper(input, token, &mut spans)?;
+ Ok(S::from_spans(&spans))
+ }
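The Token machinery above is what backs peek() and Token![...] parsing in downstream code. A small hypothetical grammar as a sketch (CountArg is an invented name, not from the patch; assumes syn 1.x with default features):

    use syn::parse::{Parse, ParseStream};
    use syn::{LitInt, Token};

    // Toy grammar: an optional `const` keyword followed by an integer
    // literal, e.g. `const 16` or `16`.
    struct CountArg {
        constness: Option<Token![const]>,
        value: LitInt,
    }

    impl Parse for CountArg {
        fn parse(input: ParseStream) -> syn::Result<Self> {
            // peek() looks ahead without consuming any tokens.
            let constness = if input.peek(Token![const]) {
                Some(input.parse::<Token![const]>()?)
            } else {
                None
            };
            Ok(CountArg {
                constness,
                value: input.parse()?,
            })
        }
    }

    fn main() -> syn::Result<()> {
        let arg: CountArg = syn::parse_str("const 16")?;
        assert!(arg.constness.is_some());
        assert_eq!(arg.value.base10_parse::<u32>()?, 16);
        Ok(())
    }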
+diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs
+index f860eebb4f..8dba0627cd 100644
+--- a/third_party/rust/syn/src/tt.rs
++++ b/third_party/rust/syn/src/tt.rs
+@@ -18,8 +18,8 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
+ _ => return false,
+ }
+
+- let s1 = g1.stream().clone().into_iter();
+- let mut s2 = g2.stream().clone().into_iter();
++ let s1 = g1.stream().into_iter();
++ let mut s2 = g2.stream().into_iter();
+
+ for item1 in s1 {
+ let item2 = match s2.next() {
+@@ -60,7 +60,7 @@ impl<'a> Hash for TokenTreeHelper<'a> {
+ Delimiter::None => 3u8.hash(h),
+ }
+
+- for item in g.stream().clone() {
++ for item in g.stream() {
+ TokenTreeHelper(&item).hash(h);
+ }
+ 0xffu8.hash(h); // terminator w/ a variant we don't normally hash
+diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs
+index 4ee59bda2a..fd7c97eab7 100644
+--- a/third_party/rust/syn/src/ty.rs
++++ b/third_party/rust/syn/src/ty.rs
+@@ -1,15 +1,11 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// The possible types that a Rust value could have.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Type #manual_extra_traits {
++ pub enum Type {
+ /// A fixed size array type: `[T; n]`.
+ Array(TypeArray),
+
+@@ -77,7 +73,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A fixed size array type: `[T; n]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeArray {
+ pub bracket_token: token::Bracket,
+@@ -90,7 +86,7 @@ ast_struct! {
+ ast_struct! {
+ /// A bare function type: `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeBareFn {
+ pub lifetimes: Option<BoundLifetimes>,
+@@ -107,7 +103,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type contained within invisible delimiters.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeGroup {
+ pub group_token: token::Group,
+@@ -119,7 +115,7 @@ ast_struct! {
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeImplTrait {
+ pub impl_token: Token![impl],
+@@ -130,7 +126,7 @@ ast_struct! {
+ ast_struct! {
+ /// Indication that a type should be inferred by the compiler: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeInfer {
+ pub underscore_token: Token![_],
+@@ -140,7 +136,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in the type position.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeMacro {
+ pub mac: Macro,
+@@ -150,7 +146,7 @@ ast_struct! {
+ ast_struct! {
+ /// The never type: `!`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeNever {
+ pub bang_token: Token![!],
+@@ -160,7 +156,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized type equivalent to the inner type.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParen {
+ pub paren_token: token::Paren,
+@@ -172,7 +168,7 @@ ast_struct! {
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePath {
+ pub qself: Option<QSelf>,
+@@ -183,7 +179,7 @@ ast_struct! {
+ ast_struct! {
+ /// A raw pointer type: `*const T` or `*mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePtr {
+ pub star_token: Token![*],
+@@ -196,7 +192,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference type: `&'a T` or `&'a mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeReference {
+ pub and_token: Token![&],
+@@ -209,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice type: `[T]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeSlice {
+ pub bracket_token: token::Bracket,
+@@ -221,7 +217,7 @@ ast_struct! {
+ /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTraitObject {
+ pub dyn_token: Option<Token![dyn]>,
+@@ -232,7 +228,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple type: `(A, B, C, String)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTuple {
+ pub paren_token: token::Paren,
+@@ -240,111 +236,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Type {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Type {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Type::Array(this), Type::Array(other)) => this == other,
+- (Type::BareFn(this), Type::BareFn(other)) => this == other,
+- (Type::Group(this), Type::Group(other)) => this == other,
+- (Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
+- (Type::Infer(this), Type::Infer(other)) => this == other,
+- (Type::Macro(this), Type::Macro(other)) => this == other,
+- (Type::Never(this), Type::Never(other)) => this == other,
+- (Type::Paren(this), Type::Paren(other)) => this == other,
+- (Type::Path(this), Type::Path(other)) => this == other,
+- (Type::Ptr(this), Type::Ptr(other)) => this == other,
+- (Type::Reference(this), Type::Reference(other)) => this == other,
+- (Type::Slice(this), Type::Slice(other)) => this == other,
+- (Type::TraitObject(this), Type::TraitObject(other)) => this == other,
+- (Type::Tuple(this), Type::Tuple(other)) => this == other,
+- (Type::Verbatim(this), Type::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Type {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Type::Array(ty) => {
+- hash.write_u8(0);
+- ty.hash(hash);
+- }
+- Type::BareFn(ty) => {
+- hash.write_u8(1);
+- ty.hash(hash);
+- }
+- Type::Group(ty) => {
+- hash.write_u8(2);
+- ty.hash(hash);
+- }
+- Type::ImplTrait(ty) => {
+- hash.write_u8(3);
+- ty.hash(hash);
+- }
+- Type::Infer(ty) => {
+- hash.write_u8(4);
+- ty.hash(hash);
+- }
+- Type::Macro(ty) => {
+- hash.write_u8(5);
+- ty.hash(hash);
+- }
+- Type::Never(ty) => {
+- hash.write_u8(6);
+- ty.hash(hash);
+- }
+- Type::Paren(ty) => {
+- hash.write_u8(7);
+- ty.hash(hash);
+- }
+- Type::Path(ty) => {
+- hash.write_u8(8);
+- ty.hash(hash);
+- }
+- Type::Ptr(ty) => {
+- hash.write_u8(9);
+- ty.hash(hash);
+- }
+- Type::Reference(ty) => {
+- hash.write_u8(10);
+- ty.hash(hash);
+- }
+- Type::Slice(ty) => {
+- hash.write_u8(11);
+- ty.hash(hash);
+- }
+- Type::TraitObject(ty) => {
+- hash.write_u8(12);
+- ty.hash(hash);
+- }
+- Type::Tuple(ty) => {
+- hash.write_u8(13);
+- ty.hash(hash);
+- }
+- Type::Verbatim(ty) => {
+- hash.write_u8(14);
+- TokenStreamHelper(ty).hash(hash);
+- }
+- Type::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// The binary interface of a function: `extern "C"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Abi {
+ pub extern_token: Token![extern],
+@@ -355,7 +250,7 @@ ast_struct! {
+ ast_struct! {
+ /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct BareFnArg {
+ pub attrs: Vec<Attribute>,
+@@ -377,7 +272,7 @@ ast_struct! {
+ /// }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variadic {
+ pub attrs: Vec<Attribute>,
+@@ -388,7 +283,7 @@ ast_struct! {
+ ast_enum! {
+ /// Return type of a function signature.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum ReturnType {
+ /// Return type is not specified.
+@@ -407,10 +302,13 @@ pub mod parsing {
+ use crate::ext::IdentExt;
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use proc_macro2::{Punct, Spacing, TokenTree};
++ use std::iter::FromIterator;
+
+ impl Parse for Type {
+ fn parse(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, true)
++ let allow_plus = true;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+@@ -421,15 +319,17 @@ pub mod parsing {
+ ///
+ /// This parser does not allow a `+`, while the default parser does.
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, false)
++ let allow_plus = false;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+ fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
+ return input.parse().map(Type::Group);
+ }
+
++ let begin = input.fork();
+ let mut lifetimes = None::<BoundLifetimes>;
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(Token![for]) {
+@@ -524,7 +424,7 @@ pub mod parsing {
+ ..trait_bound
+ })
+ }
+- other => other,
++ other @ TypeParamBound::Lifetime(_) => other,
+ }
+ }
+ _ => break,
+@@ -549,17 +449,20 @@ pub mod parsing {
+ }))
+ } else if lookahead.peek(Token![fn])
+ || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
++ || lookahead.peek(Token![extern])
+ {
+- let mut bare_fn: TypeBareFn = input.parse()?;
+- bare_fn.lifetimes = lifetimes;
+- Ok(Type::BareFn(bare_fn))
++ let allow_mut_self = true;
++ if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
++ bare_fn.lifetimes = lifetimes;
++ Ok(Type::BareFn(bare_fn))
++ } else {
++ Ok(Type::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Ident)
+ || input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![crate])
+- || input.peek(Token![extern])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ {
+@@ -722,38 +625,58 @@ pub mod parsing {
+
+ impl Parse for TypeBareFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let args;
+- let allow_variadic;
+- Ok(TypeBareFn {
+- lifetimes: input.parse()?,
+- unsafety: input.parse()?,
+- abi: input.parse()?,
+- fn_token: input.parse()?,
+- paren_token: parenthesized!(args in input),
+- inputs: {
+- let mut inputs = Punctuated::new();
+- while !args.is_empty() && !args.peek(Token![...]) {
+- inputs.push_value(args.parse()?);
+- if args.is_empty() {
+- break;
+- }
+- inputs.push_punct(args.parse()?);
+- }
+- allow_variadic = inputs.empty_or_trailing();
+- inputs
+- },
+- variadic: {
+- if allow_variadic && args.peek(Token![...]) {
+- Some(Variadic {
+- attrs: Vec::new(),
++ let allow_mut_self = false;
++ parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
++ let args;
++ let mut variadic = None;
++ let mut has_mut_self = false;
++
++ let bare_fn = TypeBareFn {
++ lifetimes: input.parse()?,
++ unsafety: input.parse()?,
++ abi: input.parse()?,
++ fn_token: input.parse()?,
++ paren_token: parenthesized!(args in input),
++ inputs: {
++ let mut inputs = Punctuated::new();
++
++ while !args.is_empty() {
++ let attrs = args.call(Attribute::parse_outer)?;
++
++ if inputs.empty_or_trailing() && args.peek(Token![...]) {
++ variadic = Some(Variadic {
++ attrs,
+ dots: args.parse()?,
+- })
++ });
++ break;
++ }
++
++ if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
++ inputs.push_value(BareFnArg { attrs, ..arg });
+ } else {
+- None
++ has_mut_self = true;
+ }
+- },
+- output: input.call(ReturnType::without_plus)?,
+- })
++ if args.is_empty() {
++ break;
++ }
++
++ inputs.push_punct(args.parse()?);
++ }
++
++ inputs
++ },
++ variadic,
++ output: input.call(ReturnType::without_plus)?,
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(bare_fn))
+ }
+ }
+
+@@ -776,9 +699,27 @@ pub mod parsing {
+ impl Parse for TypeTuple {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
++ let paren_token = parenthesized!(content in input);
++
++ if content.is_empty() {
++ return Ok(TypeTuple {
++ paren_token,
++ elems: Punctuated::new(),
++ });
++ }
++
++ let first: Type = content.parse()?;
+ Ok(TypeTuple {
+- paren_token: parenthesized!(content in input),
+- elems: content.parse_terminated(Type::parse)?,
++ paren_token,
++ elems: {
++ let mut elems = Punctuated::new();
++ elems.push_value(first);
++ elems.push_punct(content.parse()?);
++ let rest: Punctuated<Type, Token![,]> =
++ content.parse_terminated(Parse::parse)?;
++ elems.extend(rest);
++ elems
++ },
+ })
+ }
+ }
+@@ -807,9 +748,11 @@ pub mod parsing {
+
+ impl ReturnType {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ if input.peek(Token![->]) {
+ let arrow = input.parse()?;
+@@ -844,10 +787,12 @@ pub mod parsing {
+
+ impl TypeTraitObject {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
+ // Only allow multiple trait references if allow_plus is true.
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ Ok(TypeTraitObject {
+ dyn_token: input.parse()?,
+@@ -910,7 +855,8 @@ pub mod parsing {
+
+ impl Parse for TypeParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+ }
+
+@@ -926,22 +872,72 @@ pub mod parsing {
+
+ impl Parse for BareFnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Ok(BareFnArg {
+- attrs: input.call(Attribute::parse_outer)?,
+- name: {
+- if (input.peek(Ident) || input.peek(Token![_]))
+- && input.peek2(Token![:])
+- && !input.peek2(Token![::])
+- {
+- let name = input.call(Ident::parse_any)?;
+- let colon: Token![:] = input.parse()?;
+- Some((name, colon))
+- } else {
+- None
+- }
+- },
+- ty: input.parse()?,
+- })
++ let allow_mut_self = false;
++ parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn_arg(
++ input: ParseStream,
++ mut allow_mut_self: bool,
++ ) -> Result<Option<BareFnArg>> {
++ let mut has_mut_self = false;
++ let arg = BareFnArg {
++ attrs: input.call(Attribute::parse_outer)?,
++ name: {
++ if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
++ && input.peek2(Token![:])
++ && !input.peek2(Token![::])
++ {
++ let name = input.call(Ident::parse_any)?;
++ let colon: Token![:] = input.parse()?;
++ Some((name, colon))
++ } else if allow_mut_self
++ && input.peek(Token![mut])
++ && input.peek2(Token![self])
++ && input.peek3(Token![:])
++ && !input.peek3(Token![::])
++ {
++ has_mut_self = true;
++ allow_mut_self = false;
++ input.parse::<Token![mut]>()?;
++ input.parse::<Token![self]>()?;
++ input.parse::<Token![:]>()?;
++ None
++ } else {
++ None
++ }
++ },
++ ty: if !has_mut_self && input.peek(Token![...]) {
++ let dot3 = input.parse::<Token![...]>()?;
++ let args = vec![
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Alone)),
++ ];
++ let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
++ |(mut arg, span)| {
++ arg.set_span(*span);
++ arg
++ },
++ ));
++ Type::Verbatim(tokens)
++ } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
++ has_mut_self = true;
++ input.parse::<Token![mut]>()?;
++ Type::Path(TypePath {
++ qself: None,
++ path: input.parse::<Token![self]>()?.into(),
++ })
++ } else {
++ input.parse()?
++ },
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(arg))
+ }
+ }
+
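A last illustrative check against the reworked Type parser above (sketch only, not part of the patch; assumes syn 1.x with the default "derive" and "parsing" features). A bare function type with an explicit ABI and a variadic tail parses into Type::BareFn:

    fn main() -> syn::Result<()> {
        let ty: syn::Type = syn::parse_str(r#"unsafe extern "C" fn(usize, ...) -> bool"#)?;
        match ty {
            syn::Type::BareFn(bare) => {
                assert!(bare.unsafety.is_some());
                assert!(bare.abi.is_some());
                assert!(bare.variadic.is_some());
                assert_eq!(bare.inputs.len(), 1); // only the `usize` argument
            }
            _ => panic!("expected Type::BareFn"),
        }
        Ok(())
    }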
+diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs
+new file mode 100644
+index 0000000000..0686352f7a
+--- /dev/null
++++ b/third_party/rust/syn/src/verbatim.rs
+@@ -0,0 +1,15 @@
++use crate::parse::{ParseBuffer, ParseStream};
++use proc_macro2::TokenStream;
++use std::iter;
++
++pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
++ let end = end.cursor();
++ let mut cursor = begin.cursor();
++ let mut tokens = TokenStream::new();
++ while cursor != end {
++ let (tt, next) = cursor.token_tree().unwrap();
++ tokens.extend(iter::once(tt));
++ cursor = next;
++ }
++ tokens
++}
+diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs
+new file mode 100644
+index 0000000000..7be082e1a2
+--- /dev/null
++++ b/third_party/rust/syn/src/whitespace.rs
+@@ -0,0 +1,65 @@
++pub fn skip(mut s: &str) -> &str {
++ 'skip: while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ if let Some(i) = s.find('\n') {
++ s = &s[i + 1..];
++ continue;
++ } else {
++ return "";
++ }
++ } else if s.starts_with("/**/") {
++ s = &s[4..];
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ let mut depth = 0;
++ let bytes = s.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ s = &s[i + 2..];
++ continue 'skip;
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++ return s;
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = &s[1..];
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = &s[ch.len_utf8()..];
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
+diff --git a/third_party/rust/syn/tests/clone.sh b/third_party/rust/syn/tests/clone.sh
+deleted file mode 100755
+index 8e0863cba6..0000000000
+--- a/third_party/rust/syn/tests/clone.sh
++++ /dev/null
+@@ -1,16 +0,0 @@
+-#!/bin/bash
+-
+-REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
+-
+-set -euo pipefail
+-cd "$(dirname "${BASH_SOURCE[0]}")"
+-mkdir -p rust
+-touch rust/COMMIT
+-
+-if [ "$(cat rust/COMMIT)" != "$REV" ]; then
+- rm -rf rust
+- mkdir rust
+- curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
+- | tar xz --directory rust --strip-components 1
+- echo "$REV" > rust/COMMIT
+-fi
+diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs
+index 13a6c36ae5..7589a07573 100644
+--- a/third_party/rust/syn/tests/common/eq.rs
++++ b/third_party/rust/syn/tests/common/eq.rs
+@@ -1,36 +1,35 @@
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
++extern crate rustc_span;
+ extern crate rustc_target;
+-extern crate syntax;
+-extern crate syntax_pos;
+
+ use std::mem;
+
+-use self::rustc_data_structures::sync::Lrc;
+-use self::rustc_data_structures::thin_vec::ThinVec;
+-use self::rustc_target::abi::FloatTy;
+-use self::rustc_target::spec::abi::Abi;
+-use self::syntax::ast::{
+- AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
+- AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
+- CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
+- FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
+- GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
+- ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
+- ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
+- MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
+- PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
+- StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
+- TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
+- UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
+- WhereEqPredicate, WherePredicate, WhereRegionPredicate,
++use rustc_ast::ast::{
++ AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
++ AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
++ BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
++ Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
++ FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
++ GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
++ InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
++ ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
++ LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
++ MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
++ Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
++ StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
++ TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
++ VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
++ WhereRegionPredicate,
+ };
+-use self::syntax::parse::lexer::comments;
+-use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::Spanned;
+-use self::syntax::symbol::{sym, Symbol};
+-use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
+-use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
++use rustc_ast::ptr::P;
++use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
++use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
++use rustc_data_structures::sync::Lrc;
++use rustc_data_structures::thin_vec::ThinVec;
++use rustc_span::source_map::Spanned;
++use rustc_span::symbol::Ident;
++use rustc_span::{Span, Symbol, SyntaxContext};
+
+ pub trait SpanlessEq {
+ fn eq(&self, other: &Self) -> bool;
+@@ -86,14 +85,6 @@ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
+ }
+ }
+
+-impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
+- fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&self.0, &other.0)
+- && SpanlessEq::eq(&self.1, &other.1)
+- && SpanlessEq::eq(&self.2, &other.2)
+- }
+-}
+-
+ macro_rules! spanless_eq_true {
+ ($name:ident) => {
+ impl SpanlessEq for $name {
+@@ -126,17 +117,19 @@ spanless_eq_partial_eq!(u16);
+ spanless_eq_partial_eq!(u128);
+ spanless_eq_partial_eq!(usize);
+ spanless_eq_partial_eq!(char);
++spanless_eq_partial_eq!(String);
+ spanless_eq_partial_eq!(Symbol);
+-spanless_eq_partial_eq!(Abi);
++spanless_eq_partial_eq!(CommentKind);
+ spanless_eq_partial_eq!(DelimToken);
++spanless_eq_partial_eq!(InlineAsmOptions);
+
+ macro_rules! spanless_eq_struct {
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ } => {
+- impl SpanlessEq for $name {
++ impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
+ fn eq(&self, other: &Self) -> bool {
+ let $name { $($field,)* $($ignore: _,)* } = self;
+ let $name { $($field: $other,)* $($ignore: _,)* } = other;
+@@ -146,14 +139,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $next:ident
+ $($rest:ident)*
+ $(!$ignore:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ [$next other]
+ $($rest)*
+@@ -162,14 +155,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ !$next:ident
+ $(!$rest:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ $(![$ignore])*
+ ![$next]
+@@ -263,119 +256,131 @@ macro_rules! spanless_eq_enum {
+ };
+ }
+
+-spanless_eq_struct!(AngleBracketedArgs; span args constraints);
++spanless_eq_struct!(AngleBracketedArgs; span args);
+ spanless_eq_struct!(AnonConst; id value);
+-spanless_eq_struct!(Arg; attrs ty pat id span);
+-spanless_eq_struct!(Arm; attrs pats guard body span id);
++spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
+ spanless_eq_struct!(AssocTyConstraint; id ident kind span);
+-spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
+-spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
++spanless_eq_struct!(AttrItem; path args);
++spanless_eq_struct!(Attribute; kind id style span);
++spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
+ spanless_eq_struct!(Block; stmts id rules span);
+-spanless_eq_struct!(Crate; module attrs span);
++spanless_eq_struct!(Crate; module attrs span proc_macros);
+ spanless_eq_struct!(EnumDef; variants);
+-spanless_eq_struct!(Expr; id node span attrs);
+-spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
+-spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
+-spanless_eq_struct!(FnDecl; inputs output c_variadic);
+-spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
+-spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
++spanless_eq_struct!(Expr; id kind span attrs !tokens);
++spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
++spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
++spanless_eq_struct!(FnDecl; inputs output);
++spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
++spanless_eq_struct!(FnSig; header decl span);
+ spanless_eq_struct!(ForeignMod; abi items);
+-spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
++spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
+ spanless_eq_struct!(Generics; params where_clause span);
+ spanless_eq_struct!(GlobalAsm; asm);
+-spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
+-spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
+-spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
+-spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
++spanless_eq_struct!(InlineAsm; template operands options line_spans);
++spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
+ spanless_eq_struct!(Label; ident);
+ spanless_eq_struct!(Lifetime; id ident);
+-spanless_eq_struct!(Lit; token node span);
++spanless_eq_struct!(Lit; token kind span);
++spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
++spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
+ spanless_eq_struct!(Local; pat ty init id span attrs);
+-spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
+-spanless_eq_struct!(MacroDef; tokens legacy);
+-spanless_eq_struct!(MethodSig; header decl);
++spanless_eq_struct!(MacCall; path args prior_type_ascription);
++spanless_eq_struct!(MacCallStmt; mac style attrs);
++spanless_eq_struct!(MacroDef; body macro_rules);
+ spanless_eq_struct!(Mod; inner items inline);
+ spanless_eq_struct!(MutTy; ty mutbl);
++spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
+ spanless_eq_struct!(ParenthesizedArgs; span inputs output);
+-spanless_eq_struct!(Pat; id node span);
++spanless_eq_struct!(Pat; id kind span tokens);
+ spanless_eq_struct!(Path; span segments);
+ spanless_eq_struct!(PathSegment; ident id args);
+ spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
+ spanless_eq_struct!(QSelf; ty path_span position);
+-spanless_eq_struct!(Stmt; id node span);
+-spanless_eq_struct!(StructField; span ident vis id ty attrs);
++spanless_eq_struct!(Stmt; id kind span);
++spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
++spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
+ spanless_eq_struct!(Token; kind span);
+-spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
+ spanless_eq_struct!(TraitRef; path ref_id);
+-spanless_eq_struct!(Ty; id node span);
++spanless_eq_struct!(Ty; id kind span);
+ spanless_eq_struct!(UseTree; prefix kind span);
+-spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
++spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
+ spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
+-spanless_eq_struct!(WhereClause; predicates span);
++spanless_eq_struct!(WhereClause; has_where_token predicates span);
+ spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+ spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
+-spanless_eq_enum!(AsmDialect; Att Intel);
++spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
++spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
++spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
++spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
+ spanless_eq_enum!(AttrStyle; Outer Inner);
+ spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
+ spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
+ spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
++spanless_eq_enum!(BorrowKind; Ref Raw);
+ spanless_eq_enum!(CaptureBy; Value Ref);
+-spanless_eq_enum!(Constness; Const NotConst);
++spanless_eq_enum!(Const; Yes(0) No);
+ spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
+-spanless_eq_enum!(Defaultness; Default Final);
++spanless_eq_enum!(Defaultness; Default(0) Final);
++spanless_eq_enum!(Extern; None Implicit Explicit(0));
+ spanless_eq_enum!(FloatTy; F32 F64);
+-spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
+-spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
++spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
++spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
+ spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
+ spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+-spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
+-spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
+-spanless_eq_enum!(ImplPolarity; Positive Negative);
++spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
++spanless_eq_enum!(ImplPolarity; Positive Negative(0));
++spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
++spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
+ spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
+-spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
+ spanless_eq_enum!(IsAuto; Yes No);
++spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
+ spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
++spanless_eq_enum!(LlvmAsmDialect; Att Intel);
++spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
+ spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
+ spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
+ spanless_eq_enum!(Movability; Static Movable);
+-spanless_eq_enum!(Mutability; Mutable Immutable);
++spanless_eq_enum!(Mutability; Mut Not);
+ spanless_eq_enum!(RangeEnd; Included(0) Excluded);
+ spanless_eq_enum!(RangeLimits; HalfOpen Closed);
+-spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
++spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
+ spanless_eq_enum!(StrStyle; Cooked Raw(0));
+ spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+-spanless_eq_enum!(TraitBoundModifier; None Maybe);
+-spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
++spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
+ spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+ spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
+ spanless_eq_enum!(UnOp; Deref Not Neg);
++spanless_eq_enum!(Unsafe; Yes(0) No);
+ spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
+-spanless_eq_enum!(Unsafety; Unsafe Normal);
+ spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+ spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
+ spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
+ spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+-spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
++spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
+ Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
+ While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
+- Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
+- Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
+- Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
+- Try(0) Yield(0) Err);
+-spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
+- Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
+- Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
+- Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
++ Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
++ Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
++ Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
++ Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
++spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
++ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
++ Sym(expr));
++spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
++ Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
++ Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
++ Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
++ MacCall(0) MacroDef(0));
+ spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
+- Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
++ Float(0 1) Bool(0) Err(0));
+ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
+ Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
+- Paren(0) Mac(0));
++ Paren(0) MacCall(0));
+ spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
+ Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
+- ImplicitSelf Mac(0) Err CVarArgs);
++ ImplicitSelf MacCall(0) Err CVarArgs);
+
+ impl SpanlessEq for Ident {
+ fn eq(&self, other: &Self) -> bool {
+@@ -414,44 +419,20 @@ impl SpanlessEq for TokenKind {
+
+ impl SpanlessEq for TokenStream {
+ fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
+- }
+-}
+-
+-fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
+- let mut tokens = Vec::new();
+- for tt in tts.clone().into_trees() {
+- let c = match tt {
+- TokenTree::Token(Token {
+- kind: TokenKind::DocComment(c),
+- ..
+- }) => c,
+- _ => {
+- tokens.push(tt);
+- continue;
++ let mut this = self.clone().into_trees();
++ let mut other = other.clone().into_trees();
++ loop {
++ let this = match this.next() {
++ None => return other.next().is_none(),
++ Some(val) => val,
++ };
++ let other = match other.next() {
++ None => return false,
++ Some(val) => val,
++ };
++ if !SpanlessEq::eq(&this, &other) {
++ return false;
+ }
+- };
+- let contents = comments::strip_doc_comment_decoration(&c.as_str());
+- let style = comments::doc_comment_style(&c.as_str());
+- tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
+- if style == AttrStyle::Inner {
+- tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
+ }
+- let lit = token::Lit {
+- kind: token::LitKind::Str,
+- symbol: Symbol::intern(&contents),
+- suffix: None,
+- };
+- let tts = vec![
+- TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
+- TokenTree::token(TokenKind::Eq, DUMMY_SP),
+- TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
+- ];
+- tokens.push(TokenTree::Delimited(
+- DelimSpan::dummy(),
+- DelimToken::Bracket,
+- tts.into_iter().collect::<TokenStream>().into(),
+- ));
+ }
+- tokens
+ }
+diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs
+index 8b784beed7..a1cc80a16f 100644
+--- a/third_party/rust/syn/tests/common/mod.rs
++++ b/third_party/rust/syn/tests/common/mod.rs
+@@ -1,5 +1,6 @@
+ #![allow(dead_code)]
+
++use rayon::ThreadPoolBuilder;
+ use std::env;
+
+ pub mod eq;
+@@ -12,3 +13,15 @@ pub fn abort_after() -> usize {
+ Err(_) => usize::max_value(),
+ }
+ }
++
++/// Configure Rayon threadpool.
++pub fn rayon_init() {
++ let stack_size = match env::var("RUST_MIN_STACK") {
++ Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
++ Err(_) => 20 * 1024 * 1024,
++ };
++ ThreadPoolBuilder::new()
++ .stack_size(stack_size)
++ .build_global()
++ .unwrap();
++}
+diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs
+index 41d192f6fb..192828fedd 100644
+--- a/third_party/rust/syn/tests/common/parse.rs
++++ b/third_party/rust/syn/tests/common/parse.rs
+@@ -1,20 +1,20 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
++extern crate rustc_ast;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+-use self::syntax::ast;
+-use self::syntax::parse::{self, ParseSess};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::FilePathMapping;
+-use self::syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+
+ use std::panic;
+
+-pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
++pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
+ match panic::catch_unwind(|| {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- sess.span_diagnostic.set_continue_after_error(false);
+ let e = parse::new_parser_from_source_str(
+ &sess,
+ FileName::Custom("test_precedence".to_string()),
+@@ -32,7 +32,7 @@ pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
+ Ok(Some(e)) => Some(e),
+ Ok(None) => None,
+ Err(_) => {
+- errorf!("libsyntax panicked\n");
++ errorf!("librustc panicked\n");
+ None
+ }
+ }
+diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs
+index 8450c09ecf..85a1a39079 100644
+--- a/third_party/rust/syn/tests/debug/gen.rs
++++ b/third_party/rust/syn/tests/debug/gen.rs
+@@ -2,7 +2,7 @@
+ // It is not intended for manual editing.
+
+ use super::{Lite, RefCast};
+-use std::fmt::{self, Debug};
++use std::fmt::{self, Debug, Display};
+ impl Debug for Lite<syn::Abi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+@@ -1039,9 +1039,9 @@ impl Debug for Lite<syn::Expr> {
+ }
+ syn::Expr::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Expr::While(_val) => {
+@@ -2116,9 +2116,9 @@ impl Debug for Lite<syn::ForeignItem> {
+ }
+ syn::ForeignItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2432,9 +2432,9 @@ impl Debug for Lite<syn::ImplItem> {
+ }
+ syn::ImplItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2940,9 +2940,9 @@ impl Debug for Lite<syn::Item> {
+ }
+ syn::Item::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -3437,9 +3437,9 @@ impl Debug for Lite<syn::Lit> {
+ }
+ syn::Lit::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ }
+@@ -3878,9 +3878,9 @@ impl Debug for Lite<syn::Pat> {
+ }
+ syn::Pat::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Pat::Wild(_val) => {
+@@ -4674,9 +4674,9 @@ impl Debug for Lite<syn::TraitItem> {
+ }
+ syn::TraitItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -5040,9 +5040,9 @@ impl Debug for Lite<syn::Type> {
+ }
+ syn::Type::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs
+index c1180532ec..cefebacef7 100644
+--- a/third_party/rust/syn/tests/debug/mod.rs
++++ b/third_party/rust/syn/tests/debug/mod.rs
+@@ -1,10 +1,7 @@
+-extern crate proc_macro2;
+-extern crate ref_cast;
+-
+ mod gen;
+
+-use self::proc_macro2::{Ident, Literal, TokenStream};
+-use self::ref_cast::RefCast;
++use proc_macro2::{Ident, Literal, TokenStream};
++use ref_cast::RefCast;
+ use std::fmt::{self, Debug};
+ use std::ops::Deref;
+ use syn::punctuated::Punctuated;
+@@ -66,7 +63,15 @@ impl Debug for Lite<Literal> {
+
+ impl Debug for Lite<TokenStream> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- write!(formatter, "`{}`", self.value)
++ let string = self.value.to_string();
++ if string.len() <= 80 {
++ write!(formatter, "TokenStream(`{}`)", self.value)
++ } else {
++ formatter
++ .debug_tuple("TokenStream")
++ .field(&format_args!("`{}`", string))
++ .finish()
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/tests/features/error.rs b/third_party/rust/syn/tests/features/error.rs
+deleted file mode 100644
+index 10ac88965d..0000000000
+--- a/third_party/rust/syn/tests/features/error.rs
++++ /dev/null
+@@ -1 +0,0 @@
+-"Hello! You want: cargo test --release --all-features"
+diff --git a/third_party/rust/syn/tests/features/mod.rs b/third_party/rust/syn/tests/features/mod.rs
+deleted file mode 100644
+index 83fbe13e7e..0000000000
+--- a/third_party/rust/syn/tests/features/mod.rs
++++ /dev/null
+@@ -1,22 +0,0 @@
+-#[allow(unused_macros)]
+-macro_rules! hide_from_rustfmt {
+- ($mod:item) => {
+- $mod
+- };
+-}
+-
+-#[cfg(not(all(
+- feature = "derive",
+- feature = "full",
+- feature = "parsing",
+- feature = "printing",
+- feature = "visit",
+- feature = "visit-mut",
+- feature = "fold",
+- feature = "clone-impls",
+- feature = "extra-traits",
+- feature = "proc-macro",
+-)))]
+-hide_from_rustfmt! {
+- mod error;
+-}
+diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs
+index c72fd01058..3994615fc4 100644
+--- a/third_party/rust/syn/tests/macros/mod.rs
++++ b/third_party/rust/syn/tests/macros/mod.rs
+@@ -1,5 +1,3 @@
+-extern crate proc_macro2;
+-
+ #[path = "../debug/mod.rs"]
+ pub mod debug;
+
+@@ -42,18 +40,18 @@ macro_rules! snapshot_impl {
+ (($expr:ident) as $t:ty, @$snapshot:literal) => {
+ let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
+ let debug = crate::macros::debug::Lite(&$expr);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ };
+ (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
+ let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) , @$snapshot:literal) => {{
+ let syntax_tree = $($expr)*;
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) $next:tt $($rest:tt)*) => {
+diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs
+index c22cb03758..1d3e1f0e74 100644
+--- a/third_party/rust/syn/tests/repo/mod.rs
++++ b/third_party/rust/syn/tests/repo/mod.rs
+@@ -1,8 +1,37 @@
+-extern crate walkdir;
++mod progress;
+
+-use std::process::Command;
++use self::progress::Progress;
++use anyhow::Result;
++use flate2::read::GzDecoder;
++use std::fs;
++use std::path::Path;
++use tar::Archive;
++use walkdir::DirEntry;
+
+-use self::walkdir::DirEntry;
++const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
++
++#[rustfmt::skip]
++static EXCLUDE: &[&str] = &[
++ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
++ "test/ui/const-generics/const-expression-parameter.rs",
++
++ // Deprecated anonymous parameter syntax in traits
++ "test/ui/issues/issue-13105.rs",
++ "test/ui/issues/issue-13775.rs",
++ "test/ui/issues/issue-34074.rs",
++ "test/ui/proc-macro/trait-fn-args-2015.rs",
++
++ // Not actually test cases
++ "test/rustdoc-ui/test-compile-fail2.rs",
++ "test/rustdoc-ui/test-compile-fail3.rs",
++ "test/ui/include-single-expr-helper.rs",
++ "test/ui/include-single-expr-helper-1.rs",
++ "test/ui/issues/auxiliary/issue-21146-inc.rs",
++ "test/ui/json-bom-plus-crlf-multifile-aux.rs",
++ "test/ui/lint/expansion-time-include.rs",
++ "test/ui/macros/auxiliary/macro-comma-support.rs",
++ "test/ui/macros/auxiliary/macro-include-items-expr.rs",
++];
+
+ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ let path = entry.path();
+@@ -12,49 +41,95 @@ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ if path.extension().map(|e| e != "rs").unwrap_or(true) {
+ return false;
+ }
+- let path_string = path.to_string_lossy();
+- let path_string = if cfg!(windows) {
+- path_string.replace('\\', "/").into()
++
++ let mut path_string = path.to_string_lossy();
++ if cfg!(windows) {
++ path_string = path_string.replace('\\', "/").into();
++ }
++ let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
++ path
++ } else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
++ path
+ } else {
+- path_string
++ panic!("unexpected path in Rust dist: {}", path_string);
+ };
++
+ // TODO assert that parsing fails on the parse-fail cases
+- if path_string.starts_with("tests/rust/src/test/parse-fail")
+- || path_string.starts_with("tests/rust/src/test/compile-fail")
+- || path_string.starts_with("tests/rust/src/test/rustfix")
++ if path.starts_with("test/parse-fail")
++ || path.starts_with("test/compile-fail")
++ || path.starts_with("test/rustfix")
+ {
+ return false;
+ }
+
+- if path_string.starts_with("tests/rust/src/test/ui") {
+- let stderr_path = path.with_extension("stderr");
++ if path.starts_with("test/ui") {
++ let stderr_path = entry.path().with_extension("stderr");
+ if stderr_path.exists() {
+ // Expected to fail in some way
+ return false;
+ }
+ }
+
+- match path_string.as_ref() {
+- // Deprecated placement syntax
+- "tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
+- // Deprecated anonymous parameter syntax in traits
+- "tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
+- "tests/rust/src/test/ui/issues/issue-13105.rs" |
+- "tests/rust/src/test/ui/issues/issue-13775.rs" |
+- "tests/rust/src/test/ui/issues/issue-34074.rs" |
+- // Deprecated await macro syntax
+- "tests/rust/src/test/ui/async-await/await-macro.rs" |
+- // 2015-style dyn that libsyntax rejects
+- "tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
+- // not actually test cases
+- "tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
+- "tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
+- "tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
+- _ => true,
++ !EXCLUDE.contains(&path)
++}
++
++#[allow(dead_code)]
++pub fn edition(path: &Path) -> &'static str {
++ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
++ "2015"
++ } else {
++ "2018"
+ }
+ }
+
+ pub fn clone_rust() {
+- let result = Command::new("tests/clone.sh").status().unwrap();
+- assert!(result.success());
++ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
++ Err(_) => true,
++ Ok(contents) => contents.trim() != REVISION,
++ };
++ if needs_clone {
++ download_and_unpack().unwrap();
++ }
++ let mut missing = String::new();
++ let test_src = Path::new("tests/rust/src");
++ for exclude in EXCLUDE {
++ if !test_src.join(exclude).exists() {
++ missing += "\ntests/rust/src/";
++ missing += exclude;
++ }
++ }
++ if !missing.is_empty() {
++ panic!("excluded test file does not exist:{}\n", missing);
++ }
++}
++
++fn download_and_unpack() -> Result<()> {
++ let url = format!(
++ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
++ REVISION
++ );
++ let response = reqwest::blocking::get(&url)?.error_for_status()?;
++ let progress = Progress::new(response);
++ let decoder = GzDecoder::new(progress);
++ let mut archive = Archive::new(decoder);
++ let prefix = format!("rust-{}", REVISION);
++
++ let tests_rust = Path::new("tests/rust");
++ if tests_rust.exists() {
++ fs::remove_dir_all(tests_rust)?;
++ }
++
++ for entry in archive.entries()? {
++ let mut entry = entry?;
++ let path = entry.path()?;
++ if path == Path::new("pax_global_header") {
++ continue;
++ }
++ let relative = path.strip_prefix(&prefix)?;
++ let out = tests_rust.join(relative);
++ entry.unpack(&out)?;
++ }
++
++ fs::write("tests/rust/COMMIT", REVISION)?;
++ Ok(())
+ }
+diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs
+new file mode 100644
+index 0000000000..28c8a44b12
+--- /dev/null
++++ b/third_party/rust/syn/tests/repo/progress.rs
+@@ -0,0 +1,37 @@
++use std::io::{Read, Result};
++use std::time::{Duration, Instant};
++
++pub struct Progress<R> {
++ bytes: usize,
++ tick: Instant,
++ stream: R,
++}
++
++impl<R> Progress<R> {
++ pub fn new(stream: R) -> Self {
++ Progress {
++ bytes: 0,
++ tick: Instant::now() + Duration::from_millis(2000),
++ stream,
++ }
++ }
++}
++
++impl<R: Read> Read for Progress<R> {
++ fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
++ let num = self.stream.read(buf)?;
++ self.bytes += num;
++ let now = Instant::now();
++ if now > self.tick {
++ self.tick = now + Duration::from_millis(500);
++ errorf!("downloading... {} bytes\n", self.bytes);
++ }
++ Ok(num)
++ }
++}
++
++impl<R> Drop for Progress<R> {
++ fn drop(&mut self) {
++ errorf!("done ({} bytes)\n", self.bytes);
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs
+index f868fbcc20..0efef5976f 100644
+--- a/third_party/rust/syn/tests/test_asyncness.rs
++++ b/third_party/rust/syn/tests/test_asyncness.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,16 +8,16 @@ fn test_async_fn() {
+ let input = "async fn process() {}";
+
+ snapshot!(input as Item, @r###"
+- ⋮Item::Fn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ asyncness: Some,
+- ⋮ ident: "process",
+- ⋮ generics: Generics,
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ asyncness: Some,
++ ident: "process",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+ }
+
+@@ -30,12 +26,12 @@ fn test_async_closure() {
+ let input = "async || {}";
+
+ snapshot!(input as Expr, @r###"
+- ⋮Expr::Closure {
+- ⋮ asyncness: Some,
+- ⋮ output: Default,
+- ⋮ body: Expr::Block {
+- ⋮ block: Block,
+- ⋮ },
+- ⋮}
++ Expr::Closure {
++ asyncness: Some,
++ output: Default,
++ body: Expr::Block {
++ block: Block,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs
+index aff6294fc3..c26bd090ec 100644
+--- a/third_party/rust/syn/tests/test_attribute.rs
++++ b/third_party/rust/syn/tests/test_attribute.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -13,14 +9,14 @@ fn test_meta_item_word() {
+ let meta = test("#[foo]");
+
+ snapshot!(meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,17 +25,17 @@ fn test_meta_item_name_value() {
+ let meta = test("#[foo = 5]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+ }
+
+@@ -48,37 +44,37 @@ fn test_meta_item_bool_value() {
+ let meta = test("#[foo = true]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }
+ "###);
+
+ let meta = test("#[foo = false]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: false,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: false,
++ },
++ }
+ "###);
+ }
+
+@@ -87,19 +83,19 @@ fn test_meta_item_list_lit() {
+ let meta = test("#[foo(5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+ }
+
+@@ -108,26 +104,26 @@ fn test_meta_item_list_word() {
+ let meta = test("#[foo(bar)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -136,29 +132,29 @@ fn test_meta_item_list_name_value() {
+ let meta = test("#[foo(bar = 5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -167,31 +163,31 @@ fn test_meta_item_list_bool_value() {
+ let meta = test("#[foo(bar = true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -200,68 +196,68 @@ fn test_meta_item_multiple() {
+ let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -270,21 +266,63 @@ fn test_bool_lit() {
+ let meta = test("#[foo(true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(Lit::Bool {
+- ⋮ value: true,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(Lit::Bool {
++ value: true,
++ }),
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_negative_lit() {
++ let meta = test("#[form(min = -1, max = 200)]");
++
++ snapshot!(meta, @r###"
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "form",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "min",
++ arguments: None,
++ },
++ ],
++ },
++ lit: -1,
++ }),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "max",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 200,
++ }),
++ ],
++ }
+ "###);
+ }
+
+diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs
+index de68240166..bf1ebdb67d 100644
+--- a/third_party/rust/syn/tests/test_derive_input.rs
++++ b/third_party/rust/syn/tests/test_derive_input.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,15 +11,15 @@ fn test_unit() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "Unit",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "Unit",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -39,105 +34,105 @@ fn test_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `( Debug , Clone )`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Item",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("ident"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Ident",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("attrs"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Vec",
+- ⋮ arguments: PathArguments::AngleBracketed {
+- ⋮ args: [
+- ⋮ Type(Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Attribute",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(Debug , Clone)`),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Item",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Visibility::Public,
++ ident: Some("ident"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Ident",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("attrs"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Vec",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Attribute",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Clone",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Clone",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -151,46 +146,46 @@ fn test_union() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "MaybeUninit",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Union {
+- ⋮ fields: FieldsNamed {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("uninit"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Tuple,
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("value"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "MaybeUninit",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Union {
++ fields: FieldsNamed {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("uninit"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ Field {
++ vis: Inherited,
++ ident: Some("value"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+ }
+
+@@ -212,118 +207,118 @@ fn test_enum() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `= r" See the std::result module documentation for details."`,
+- ⋮ },
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Result",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ ident: "E",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Enum {
+- ⋮ variants: [
+- ⋮ Variant {
+- ⋮ ident: "Ok",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Err",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "E",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Surprise",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Lit {
+- ⋮ lit: 0isize,
+- ⋮ }),
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "ProcMacroHack",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Field {
+- ⋮ base: Expr::Tuple {
+- ⋮ elems: [
+- ⋮ Expr::Lit {
+- ⋮ lit: 0,
+- ⋮ },
+- ⋮ Expr::Lit {
+- ⋮ lit: "data",
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ member: Unnamed(Index {
+- ⋮ index: 0,
+- ⋮ }),
+- ⋮ }),
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`= r" See the std::result module documentation for details."`),
++ },
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Result",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ Type(TypeParam {
++ ident: "E",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Enum {
++ variants: [
++ Variant {
++ ident: "Ok",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Err",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "E",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Surprise",
++ fields: Unit,
++ discriminant: Some(Expr::Lit {
++ lit: 0isize,
++ }),
++ },
++ Variant {
++ ident: "ProcMacroHack",
++ fields: Unit,
++ discriminant: Some(Expr::Field {
++ base: Expr::Tuple {
++ elems: [
++ Expr::Lit {
++ lit: 0,
++ },
++ Expr::Lit {
++ lit: "data",
++ },
++ ],
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }),
++ },
++ ],
++ },
++ }
+ "###);
+
+ let meta_items: Vec<_> = input
+@@ -333,27 +328,27 @@ fn test_enum() {
+ .collect();
+
+ snapshot!(meta_items, @r###"
+- ⋮[
+- ⋮ Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: " See the std::result module documentation for details.",
+- ⋮ },
+- ⋮ Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮]
++ [
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ lit: " See the std::result module documentation for details.",
++ },
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ }),
++ ]
+ "###);
+ }
+
+@@ -366,34 +361,34 @@ fn test_attr_with_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ leading_colon: Some,
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "attr_args",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "identity",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `fn main ( ) { assert_eq ! ( foo ( ) , "Hello, world!" ) ; }`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "Dummy",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ leading_colon: Some,
++ segments: [
++ PathSegment {
++ ident: "attr_args",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "identity",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
++ },
++ ],
++ vis: Inherited,
++ ident: "Dummy",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -407,29 +402,29 @@ fn test_attr_with_non_mod_style_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "inert",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `< T >`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inert",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`< T >`),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -443,48 +438,48 @@ fn test_attr_with_mod_style_path_with_self() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -496,55 +491,55 @@ fn test_pub_restricted() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "Z",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "n",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "u8",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "Z",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "n",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "u8",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -555,15 +550,15 @@ fn test_vis_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Crate,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Crate,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -574,24 +569,24 @@ fn test_pub_restricted_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -602,24 +597,24 @@ fn test_pub_restricted_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -630,25 +625,25 @@ fn test_pub_restricted_in_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -659,15 +654,15 @@ fn test_fields_on_unit_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -688,47 +683,47 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -737,38 +732,38 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -779,44 +774,44 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -825,34 +820,34 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -864,34 +859,34 @@ fn test_ambiguous_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "X",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "X",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs
+index c8a11cec2c..b2b65a254f 100644
+--- a/third_party/rust/syn/tests/test_expr.rs
++++ b/third_party/rust/syn/tests/test_expr.rs
+@@ -1,40 +1,302 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+ #[macro_use]
+ mod macros;
+
+-use std::str::FromStr;
+-
+-use proc_macro2::TokenStream;
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
+ use syn::{Expr, ExprRange};
+
+ #[test]
+ fn test_expr_parse() {
+- let code = "..100u32";
+- let tt = TokenStream::from_str(code).unwrap();
+- let expr: Expr = syn::parse2(tt.clone()).unwrap();
+- let expr_range: ExprRange = syn::parse2(tt).unwrap();
+- assert_eq!(expr, Expr::Range(expr_range));
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as Expr, @r###"
++ Expr::Range {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
++
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as ExprRange, @r###"
++ ExprRange {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
+ }
+
+ #[test]
+ fn test_await() {
+ // Must not parse as Expr::Field.
+- let expr = syn::parse_str::<Expr>("fut.await").unwrap();
+-
+- snapshot!(expr, @r###"
+- ⋮Expr::Await {
+- ⋮ base: Expr::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "fut",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ let tokens = quote!(fut.await);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Await {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "fut",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
++
++#[rustfmt::skip]
++#[test]
++fn test_tuple_multi_index() {
++ for &input in &[
++ "tuple.0.0",
++ "tuple .0.0",
++ "tuple. 0.0",
++ "tuple.0 .0",
++ "tuple.0. 0",
++ "tuple . 0 . 0",
++ ] {
++ snapshot!(input as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++
++ for tokens in vec![
++ quote!(tuple.0.0),
++ quote!(tuple .0.0),
++ quote!(tuple. 0.0),
++ quote!(tuple.0 .0),
++ quote!(tuple.0. 0),
++ quote!(tuple . 0 . 0),
++ ] {
++ snapshot!(tokens as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++}
++
++#[test]
++fn test_macro_variable_func() {
++ // mimics the token stream corresponding to `$fn()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ func: Expr::Group {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('#', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "outside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ func: Expr::Group {
++ expr: Expr::Path {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_macro() {
++ // mimics the token stream corresponding to `$macro!()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
++ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Macro {
++ mac: Macro {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ delimiter: Paren,
++ tokens: TokenStream(``),
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_struct() {
++ // mimics the token stream corresponding to `$struct {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Struct {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "S",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_match_arm() {
++ // mimics the token stream corresponding to `match v { _ => $expr }`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("match", Span::call_site())),
++ TokenTree::Ident(Ident::new("v", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('_', Spacing::Alone)),
++ TokenTree::Punct(Punct::new('=', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Match {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "v",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ arms: [
++ Arm {
++ pat: Pat::Wild,
++ body: Expr::Group {
++ expr: Expr::Tuple {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "a",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ },
++ },
++ },
++ ],
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs
+index 55c79e066b..b29434a147 100644
+--- a/third_party/rust/syn/tests/test_generics.rs
++++ b/third_party/rust/syn/tests/test_generics.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,90 +11,90 @@ fn test_split_for_impl() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ }),
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "b",
+- ⋮ },
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "may_dangle",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ ident: "T",
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮ }),
+- ⋮ ],
+- ⋮ eq_token: Some,
+- ⋮ default: Some(Type::Tuple),
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "a",
++ },
++ }),
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "b",
++ },
++ colon_token: Some,
++ bounds: [
++ Lifetime {
++ ident: "a",
++ },
++ ],
++ }),
++ Type(TypeParam {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "may_dangle",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ ident: "T",
++ colon_token: Some,
++ bounds: [
++ Lifetime(Lifetime {
++ ident: "a",
++ }),
++ ],
++ eq_token: Some,
++ default: Some(Type::Tuple),
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let generics = input.generics;
+@@ -131,46 +126,46 @@ fn test_split_for_impl() {
+ fn test_ty_param_bound() {
+ let tokens = quote!('a);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "a",
++ })
+ "###);
+
+ let tokens = quote!('_);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "_",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "_",
++ })
+ "###);
+
+ let tokens = quote!(Debug);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+
+ let tokens = quote!(?Sized);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: Maybe,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Sized",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: Maybe,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Sized",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+ }
+
+@@ -187,76 +182,76 @@ fn test_fn_precedence_in_where_clause() {
+ };
+
+ snapshot!(input as ItemFn, @r###"
+- ⋮ItemFn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ ident: "f",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "G",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "G",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "FnOnce",
+- ⋮ arguments: PathArguments::Parenthesized {
+- ⋮ output: Type(
+- ⋮ Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ),
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Send",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ ItemFn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "G",
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "G",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "FnOnce",
++ arguments: PathArguments::Parenthesized {
++ output: Type(
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ),
++ },
++ },
++ ],
++ },
++ }),
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Send",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+
+ let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
+@@ -270,7 +265,7 @@ fn test_fn_precedence_in_where_clause() {
+ assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
+
+ let first_bound = &predicate.bounds[0];
+- assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
++ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
+
+ let second_bound = &predicate.bounds[1];
+ assert_eq!(quote!(#second_bound).to_string(), "Send");
+diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs
+index 1558a47b4b..a0fe716390 100644
+--- a/third_party/rust/syn/tests/test_grouping.rs
++++ b/third_party/rust/syn/tests/test_grouping.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -28,31 +23,31 @@ fn test_grouping() {
+ TokenTree::Literal(Literal::i32_suffixed(4)),
+ ]);
+
+- assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
++ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
+
+ snapshot!(tokens as Expr, @r###"
+- ⋮Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 1i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Binary {
+- ⋮ left: Expr::Group {
+- ⋮ expr: Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 2i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 3i32,
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ op: Mul,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 4i32,
+- ⋮ },
+- ⋮ },
+- ⋮}
++ Expr::Binary {
++ left: Expr::Lit {
++ lit: 1i32,
++ },
++ op: Add,
++ right: Expr::Binary {
++ left: Expr::Group {
++ expr: Expr::Binary {
++ left: Expr::Lit {
++ lit: 2i32,
++ },
++ op: Add,
++ right: Expr::Lit {
++ lit: 3i32,
++ },
++ },
++ },
++ op: Mul,
++ right: Expr::Lit {
++ lit: 4i32,
++ },
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs
+index bec00a70c9..ee01bfcc9f 100644
+--- a/third_party/rust/syn/tests/test_ident.rs
++++ b/third_party/rust/syn/tests/test_ident.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ use proc_macro2::{Ident, Span, TokenStream};
+ use std::str::FromStr;
+ use syn::Result;
+diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs
+new file mode 100644
+index 0000000000..74ac4baec6
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_item.rs
+@@ -0,0 +1,45 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Item;
++
++#[test]
++fn test_macro_variable_attr() {
++ // mimics the token stream corresponding to `$attr fn f() {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
++ TokenTree::Ident(Ident::new("fn", Span::call_site())),
++ TokenTree::Ident(Ident::new("f", Span::call_site())),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Item, @r###"
++ Item::Fn {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "test",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs
+index 1cf7157e6f..2c8359c157 100644
+--- a/third_party/rust/syn/tests/test_iterators.rs
++++ b/third_party/rust/syn/tests/test_iterators.rs
+@@ -1,10 +1,5 @@
+ use syn::punctuated::{Pair, Punctuated};
+-
+-extern crate quote;
+-#[macro_use]
+-extern crate syn;
+-
+-mod features;
++use syn::Token;
+
+ #[macro_use]
+ mod macros;
+diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs
+index 1e8f49d19b..e995f2287f 100644
+--- a/third_party/rust/syn/tests/test_lit.rs
++++ b/third_party/rust/syn/tests/test_lit.rs
+@@ -1,13 +1,11 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
++#[macro_use]
++mod macros;
+
+-mod features;
+-
+-use proc_macro2::{TokenStream, TokenTree};
++use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
+ use quote::ToTokens;
++use std::iter::FromIterator;
+ use std::str::FromStr;
+-use syn::Lit;
++use syn::{Lit, LitFloat, LitInt};
+
+ fn lit(s: &str) -> Lit {
+ match TokenStream::from_str(s)
+@@ -50,6 +48,9 @@ fn strings() {
+ "contains\nnewlinesescaped newlines",
+ );
+ test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
++ test_string("\"...\"q", "...");
++ test_string("r\"...\"q", "...");
++ test_string("r##\"...\"##q", "...");
+ }
+
+ #[test]
+@@ -79,6 +80,9 @@ fn byte_strings() {
+ b"contains\nnewlinesescaped newlines",
+ );
+ test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
++ test_byte_string("b\"...\"q", b"...");
++ test_byte_string("br\"...\"q", b"...");
++ test_byte_string("br##\"...\"##q", b"...");
+ }
+
+ #[test]
+@@ -100,6 +104,7 @@ fn bytes() {
+ test_byte("b'\\t'", b'\t');
+ test_byte("b'\\''", b'\'');
+ test_byte("b'\"'", b'"');
++ test_byte("b'a'q", b'a');
+ }
+
+ #[test]
+@@ -125,6 +130,7 @@ fn chars() {
+ test_char("'\\''", '\'');
+ test_char("'\"'", '"');
+ test_char("'\\u{1F415}'", '\u{1F415}');
++ test_char("'a'q", 'a');
+ }
+
+ #[test]
+@@ -185,4 +191,59 @@ fn floats() {
+ test_float("5.5e12", 5.5e12, "");
+ test_float("1.0__3e-12", 1.03e-12, "");
+ test_float("1.03e+12", 1.03e12, "");
++ test_float("9e99e99", 9e99, "e99");
++}
++
++#[test]
++fn negative() {
++ let span = Span::call_site();
++ assert_eq!("-1", LitInt::new("-1", span).to_string());
++ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
++ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
++ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
++ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
++ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
++ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
++ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
++}
++
++#[test]
++fn suffix() {
++ fn get_suffix(token: &str) -> String {
++ let lit = syn::parse_str::<Lit>(token).unwrap();
++ match lit {
++ Lit::Str(lit) => lit.suffix().to_owned(),
++ Lit::ByteStr(lit) => lit.suffix().to_owned(),
++ Lit::Byte(lit) => lit.suffix().to_owned(),
++ Lit::Char(lit) => lit.suffix().to_owned(),
++ Lit::Int(lit) => lit.suffix().to_owned(),
++ Lit::Float(lit) => lit.suffix().to_owned(),
++ _ => unimplemented!(),
++ }
++ }
++
++ assert_eq!(get_suffix("\"\"s"), "s");
++ assert_eq!(get_suffix("r\"\"r"), "r");
++ assert_eq!(get_suffix("b\"\"b"), "b");
++ assert_eq!(get_suffix("br\"\"br"), "br");
++ assert_eq!(get_suffix("r#\"\"#r"), "r");
++ assert_eq!(get_suffix("'c'c"), "c");
++ assert_eq!(get_suffix("b'b'b"), "b");
++ assert_eq!(get_suffix("1i32"), "i32");
++ assert_eq!(get_suffix("1_i32"), "i32");
++ assert_eq!(get_suffix("1.0f32"), "f32");
++ assert_eq!(get_suffix("1.0_f32"), "f32");
++}
++
++#[test]
++fn test_deep_group_empty() {
++ let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
++ ))]),
++ ))]);
++
++ snapshot!(tokens as Lit, @r#""hi""# );
+ }
+diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs
+index 547472d6f4..d37dda948a 100644
+--- a/third_party/rust/syn/tests/test_meta.rs
++++ b/third_party/rust/syn/tests/test_meta.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,14 +8,14 @@ fn test_parse_meta_item_word() {
+ let input = "hello";
+
+ snapshot!(input as Meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "hello",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "hello",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,31 +25,31 @@ fn test_parse_meta_name_value() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -65,31 +61,31 @@ fn test_parse_meta_name_value_with_keyword() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -101,31 +97,31 @@ fn test_parse_meta_name_value_with_bool() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -137,35 +133,35 @@ fn test_parse_meta_item_list_lit() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -177,133 +173,133 @@ fn test_parse_meta_item_multiple() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -316,28 +312,28 @@ fn test_parse_nested_meta() {
+
+ let input = "list(name2 = 6)";
+ snapshot!(input as NestedMeta, @r###"
+- ⋮Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮})
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ })
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs
+index f09495187f..57a3c7c38c 100644
+--- a/third_party/rust/syn/tests/test_parse_buffer.rs
++++ b/third_party/rust/syn/tests/test_parse_buffer.rs
+@@ -1,7 +1,7 @@
+-#[macro_use]
+-extern crate syn;
+-
++use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
++use std::iter::FromIterator;
+ use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
++use syn::{parenthesized, Token};
+
+ #[test]
+ #[should_panic(expected = "Fork was not derived from the advancing parse stream")]
+@@ -53,3 +53,38 @@ fn smuggled_speculative_cursor_into_brackets() {
+
+ syn::parse_str::<BreakRules>("()").unwrap();
+ }
++
++#[test]
++fn trailing_empty_none_group() {
++ fn parse(input: ParseStream) -> Result<()> {
++ input.parse::<Token![+]>()?;
++
++ let content;
++ parenthesized!(content in input);
++ content.parse::<Token![+]>()?;
++
++ Ok(())
++ }
++
++ // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(
++ Delimiter::Parenthesis,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ ]),
++ )),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::new(),
++ ))]),
++ )),
++ ]);
++
++ parse.parse2(tokens).unwrap();
++}
+diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs
+new file mode 100644
+index 0000000000..76bd065777
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_parse_stream.rs
+@@ -0,0 +1,12 @@
++use syn::ext::IdentExt;
++use syn::parse::ParseStream;
++use syn::{Ident, Token};
++
++#[test]
++fn test_peek() {
++ let _ = |input: ParseStream| {
++ let _ = input.peek(Ident);
++ let _ = input.peek(Ident::peek_any);
++ let _ = input.peek(Token![::]);
++ };
++}
+diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs
+index 1343aa646f..73388dd79d 100644
+--- a/third_party/rust/syn/tests/test_pat.rs
++++ b/third_party/rust/syn/tests/test_pat.rs
+@@ -1,10 +1,5 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ use quote::quote;
+-use syn::Pat;
++use syn::{Item, Pat, Stmt};
+
+ #[test]
+ fn test_pat_ident() {
+@@ -21,3 +16,23 @@ fn test_pat_path() {
+ value => panic!("expected PatPath, got {:?}", value),
+ }
+ }
++
++#[test]
++fn test_leading_vert() {
++ // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
++
++ syn::parse_str::<Item>("fn f() {}").unwrap();
++ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
++ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
++
++ syn::parse_str::<Stmt>("let | () = ();").unwrap();
++ syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
++ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
++ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
++ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
++}
+diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs
+new file mode 100644
+index 0000000000..2ce12066f5
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_path.rs
+@@ -0,0 +1,52 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::{Expr, Type};
++
++#[test]
++fn parse_interpolated_leading_component() {
++ // mimics the token stream corresponding to `$mod::rest`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
++ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("rest", Span::call_site())),
++ ]);
++
++ snapshot!(tokens.clone() as Expr, @r###"
++ Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs
+index 53ee66e372..a586b3fe48 100644
+--- a/third_party/rust/syn/tests/test_precedence.rs
++++ b/third_party/rust/syn/tests/test_precedence.rs
+@@ -4,35 +4,26 @@
+
+ //! The tests in this module do the following:
+ //!
+-//! 1. Parse a given expression in both `syn` and `libsyntax`.
++//! 1. Parse a given expression in both `syn` and `librustc`.
+ //! 2. Fold over the expression adding brackets around each subexpression (with
+-//! some complications - see the `syn_brackets` and `libsyntax_brackets`
++//! some complications - see the `syn_brackets` and `librustc_brackets`
+ //! methods).
+ //! 3. Serialize the `syn` expression back into a string, and re-parse it with
+-//! `libsyntax`.
++//! `librustc`.
+ //! 4. Respan all of the expressions, replacing the spans with the default
+ //! spans.
+ //! 5. Compare the expressions with one another, if they are not equal fail.
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate regex;
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
+-extern crate smallvec;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+ use regex::Regex;
+-use smallvec::smallvec;
+-use syntax::ast;
+-use syntax::ptr::P;
+-use syntax_pos::edition::Edition;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_span::edition::Edition;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -73,7 +64,7 @@ fn test_simple_precedence() {
+ continue;
+ };
+
+- let pf = match test_expressions(vec![expr]) {
++ let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
+ (1, 0) => "passed",
+ (0, 1) => {
+ failed += 1;
+@@ -91,8 +82,8 @@ fn test_simple_precedence() {
+
+ /// Test expressions from rustc, like in `test_round_trip`.
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_rustc_precedence() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -118,15 +109,6 @@ fn test_rustc_precedence() {
+ return;
+ }
+
+- // Our version of `libsyntax` can't parse this tests
+- if path
+- .to_str()
+- .unwrap()
+- .ends_with("optional_comma_in_match_arm.rs")
+- {
+- return;
+- }
+-
+ let mut file = File::open(path).unwrap();
+ let mut content = String::new();
+ file.read_to_string(&mut content).unwrap();
+@@ -134,8 +116,9 @@ fn test_rustc_precedence() {
+
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
++ let edition = repo::edition(path).parse().unwrap();
+ let exprs = collect_exprs(file);
+- test_expressions(exprs)
++ test_expressions(edition, exprs)
+ }
+ Err(msg) => {
+ errorf!("syn failed to parse\n{:?}\n", msg);
+@@ -169,36 +152,36 @@ fn test_rustc_precedence() {
+ }
+ }
+
+-fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
++fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
+ let mut passed = 0;
+ let mut failed = 0;
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ for expr in exprs {
+ let raw = quote!(#expr).to_string();
+
+- let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
++ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse raw\n");
++ errorf!("\nFAIL - librustc failed to parse raw\n");
+ continue;
+ };
+
+ let syn_expr = syn_brackets(expr);
+- let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
++ let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
++ errorf!("\nFAIL - librustc failed to parse bracketed\n");
+ continue;
+ };
+
+- if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
++ if SpanlessEq::eq(&syn_ast, &librustc_ast) {
+ passed += 1;
+ } else {
+ failed += 1;
+- errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
++ errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
+ }
+ }
+ });
+@@ -206,54 +189,106 @@ fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
+ (passed, failed)
+ }
+
+-fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+- parse::libsyntax_expr(input).and_then(libsyntax_brackets)
++fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
++ parse::librustc_expr(input).and_then(librustc_brackets)
+ }
+
+ /// Wrap every expression which is not already wrapped in parens with parens, to
+ /// reveal the precidence of the parsed expressions, and produce a stringified
+ /// form of the resulting expression.
+ ///
+-/// This method operates on libsyntax objects.
+-fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++/// This method operates on librustc objects.
++fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++ use rustc_ast::ast::{
++ Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
++ };
++ use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
++ use rustc_data_structures::map_in_place::MapInPlace;
+ use rustc_data_structures::thin_vec::ThinVec;
+- use smallvec::SmallVec;
++ use rustc_span::DUMMY_SP;
+ use std::mem;
+- use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
+- use syntax::mut_visit::{noop_visit_expr, MutVisitor};
+- use syntax_pos::DUMMY_SP;
+
+ struct BracketsVisitor {
+ failed: bool,
+ };
+
++ fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
++ if f.is_shorthand {
++ noop_visit_expr(&mut f.expr, vis);
++ } else {
++ vis.visit_expr(&mut f.expr);
++ }
++ vec![f]
++ }
++
++ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
++ let kind = match stmt.kind {
++ // Don't wrap toplevel expressions in statements.
++ StmtKind::Expr(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Expr(e)
++ }
++ StmtKind::Semi(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Semi(e)
++ }
++ s => s,
++ };
++
++ vec![Stmt { kind, ..stmt }]
++ }
++
++ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
++ use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
++ match &mut e.kind {
++ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
++ ExprKind::Struct(path, fields, expr) => {
++ vis.visit_path(path);
++ fields.flat_map_in_place(|field| flat_map_field(field, vis));
++ visit_opt(expr, |expr| vis.visit_expr(expr));
++ vis.visit_id(&mut e.id);
++ vis.visit_span(&mut e.span);
++ visit_thin_attrs(&mut e.attrs, vis);
++ }
++ _ => noop_visit_expr(e, vis),
++ }
++ }
++
+ impl MutVisitor for BracketsVisitor {
+ fn visit_expr(&mut self, e: &mut P<Expr>) {
+ noop_visit_expr(e, self);
+- match e.node {
++ match e.kind {
+ ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
+ _ => {
+ let inner = mem::replace(
+ e,
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+- node: ExprKind::Err,
++ kind: ExprKind::Err,
+ span: DUMMY_SP,
+ attrs: ThinVec::new(),
++ tokens: None,
+ }),
+ );
+- e.node = ExprKind::Paren(inner);
++ e.kind = ExprKind::Paren(inner);
+ }
+ }
+ }
+
+- fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
+- if f.is_shorthand {
+- noop_visit_expr(&mut f.expr, self);
+- } else {
+- self.visit_expr(&mut f.expr);
++ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
++ _ => noop_visit_generic_arg(arg, self),
+ }
+- SmallVec::from([f])
++ }
++
++ fn visit_block(&mut self, block: &mut P<Block>) {
++ self.visit_id(&mut block.id);
++ block
++ .stmts
++ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
++ self.visit_span(&mut block.span);
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+@@ -267,25 +302,8 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ let _ = ty;
+ }
+
+- fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
+- let node = match stmt.node {
+- // Don't wrap toplevel expressions in statements.
+- StmtKind::Expr(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Expr(e)
+- }
+- StmtKind::Semi(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Semi(e)
+- }
+- s => s,
+- };
+-
+- smallvec![Stmt { node, ..stmt }]
+- }
+-
+- fn visit_mac(&mut self, mac: &mut Mac) {
+- // By default when folding over macros, libsyntax panics. This is
++ fn visit_mac(&mut self, mac: &mut MacCall) {
++ // By default when folding over macros, librustc panics. This is
+ // because it's usually not what you want, you want to run after
+ // macro expansion. We do want to do that (syn doesn't do macro
+ // expansion), so we implement visit_mac to just return the macro
+@@ -295,11 +313,11 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ }
+
+ let mut folder = BracketsVisitor { failed: false };
+- folder.visit_expr(&mut libsyntax_expr);
++ folder.visit_expr(&mut librustc_expr);
+ if folder.failed {
+ None
+ } else {
+- Some(libsyntax_expr)
++ Some(librustc_expr)
+ }
+ }
+
+@@ -318,14 +336,33 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
+ Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
+ fold_expr(self, expr)
+ }
+- node => Expr::Paren(ExprParen {
++ _ => Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+- expr: Box::new(fold_expr(self, node)),
++ expr: Box::new(fold_expr(self, expr)),
+ paren_token: token::Paren::default(),
+ }),
+ }
+ }
+
++ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_argument(self, arg),
++ }
++ }
++
++ fn fold_generic_method_argument(
++ &mut self,
++ arg: GenericMethodArgument,
++ ) -> GenericMethodArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_method_argument(self, arg),
++ }
++ }
++
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ match stmt {
+ // Don't wrap toplevel expressions in statements.
+@@ -360,7 +397,10 @@ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
+ struct CollectExprs(Vec<Expr>);
+ impl Fold for CollectExprs {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+- self.0.push(expr);
++ match expr {
++ Expr::Verbatim(tokens) if tokens.is_empty() => {}
++ _ => self.0.push(expr),
++ }
+
+ Expr::Tuple(ExprTuple {
+ attrs: vec![],
+diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs
+new file mode 100644
+index 0000000000..923df96ba9
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_receiver.rs
+@@ -0,0 +1,127 @@
++use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
++
++#[test]
++fn test_by_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_value(self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_mut_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_mut(mut self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_ref() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_ref(self: &Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_box() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_box(self: Box<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_pin() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_pin(self: Pin<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_explicit_type() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn explicit_type(self: Pin<MyType>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn value_shorthand(self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_mut_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn mut_value_shorthand(mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_shorthand(&self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_mut_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_mut_shorthand(&mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs
+index 2fc9cecd86..260dd0c3d9 100644
+--- a/third_party/rust/syn/tests/test_round_trip.rs
++++ b/third_party/rust/syn/tests/test_round_trip.rs
+@@ -2,22 +2,20 @@
+ #![recursion_limit = "1024"]
+ #![feature(rustc_private)]
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_ast;
++extern crate rustc_errors;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+-use syntax::ast;
+-use syntax::parse::{self, PResult, ParseSess};
+-use syntax::source_map::FilePathMapping;
+-use syntax_pos::edition::Edition;
+-use syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_errors::PResult;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -38,8 +36,8 @@ mod repo;
+ use common::eq::SpanlessEq;
+
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_round_trip() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -78,11 +76,12 @@ fn test_round_trip() {
+ }
+ };
+ let back = quote!(#krate).to_string();
++ let edition = repo::edition(path).parse().unwrap();
+
+ let equal = panic::catch_unwind(|| {
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- let before = match libsyntax_parse(content, &sess) {
++ let before = match librustc_parse(content, &sess) {
+ Ok(before) => before,
+ Err(mut diagnostic) => {
+ diagnostic.cancel();
+@@ -93,7 +92,7 @@ fn test_round_trip() {
+ errorf!("=== {}: ignore\n", path.display());
+ } else {
+ errorf!(
+- "=== {}: ignore - libsyntax failed to parse original content: {}\n",
++ "=== {}: ignore - librustc failed to parse original content: {}\n",
+ path.display(),
+ diagnostic.message()
+ );
+@@ -101,10 +100,10 @@ fn test_round_trip() {
+ return true;
+ }
+ };
+- let after = match libsyntax_parse(back, &sess) {
++ let after = match librustc_parse(back, &sess) {
+ Ok(after) => after,
+ Err(mut diagnostic) => {
+- errorf!("=== {}: libsyntax failed to parse", path.display());
++ errorf!("=== {}: librustc failed to parse", path.display());
+ diagnostic.emit();
+ return false;
+ }
+@@ -130,7 +129,7 @@ fn test_round_trip() {
+ })
+ });
+ match equal {
+- Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
++ Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
+ Ok(true) => {}
+ Ok(false) => {
+ let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
+@@ -147,7 +146,7 @@ fn test_round_trip() {
+ }
+ }
+
+-fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
++fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
+ let name = FileName::Custom("test_round_trip".to_string());
+ parse::parse_crate_from_source_str(name, content, sess)
+ }
+diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs
+new file mode 100644
+index 0000000000..dc26b9aab3
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_shebang.rs
+@@ -0,0 +1,59 @@
++#[macro_use]
++mod macros;
++
++#[test]
++fn test_basic() {
++ let content = "#!/usr/bin/env rustx\nfn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ shebang: Some("#!/usr/bin/env rustx"),
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_comment() {
++ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ attrs: [
++ Attribute {
++ style: Inner,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "allow",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(dead_code)`),
++ },
++ ],
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs
+index aadf42e3af..180d859916 100644
+--- a/third_party/rust/syn/tests/test_should_parse.rs
++++ b/third_party/rust/syn/tests/test_should_parse.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ macro_rules! should_parse {
+ ($name:ident, { $($in:tt)* }) => {
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs
+index 386d4df889..01e8401158 100644
+--- a/third_party/rust/syn/tests/test_size.rs
++++ b/third_party/rust/syn/tests/test_size.rs
+@@ -1,7 +1,5 @@
+ #![cfg(target_pointer_width = "64")]
+
+-mod features;
+-
+ use std::mem;
+ use syn::*;
+
+diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs
+new file mode 100644
+index 0000000000..d68b47fd2f
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_stmt.rs
+@@ -0,0 +1,44 @@
++#[macro_use]
++mod macros;
++
++use syn::Stmt;
++
++#[test]
++fn test_raw_operator() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Verbatim(`& raw const x`)),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_variable() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Expr::Reference {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "raw",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_invalid() {
++ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
++}
+diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs
+index 70a9a72aab..5b00448af8 100644
+--- a/third_party/rust/syn/tests/test_token_trees.rs
++++ b/third_party/rust/syn/tests/test_token_trees.rs
+@@ -1,9 +1,3 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -21,7 +15,11 @@ fn test_struct() {
+ }
+ ";
+
+- snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
++ snapshot!(input as TokenStream, @r###"
++ TokenStream(
++ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
++ )
++ "###);
+ }
+
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs
+new file mode 100644
+index 0000000000..9cbdcd6b99
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_ty.rs
+@@ -0,0 +1,53 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Type;
++
++#[test]
++fn test_mut_self() {
++ syn::parse_str::<Type>("fn(mut self)").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
++}
++
++#[test]
++fn test_macro_variable_type() {
++ // mimics the token stream corresponding to `$ty<T>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
++ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("T", Span::call_site())),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ ]);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "ty",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs
+new file mode 100644
+index 0000000000..c3d0ac7a5b
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_visibility.rs
+@@ -0,0 +1,145 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use std::iter::FromIterator;
++use syn::parse::{Parse, ParseStream};
++use syn::{DeriveInput, Result, Visibility};
++
++#[derive(Debug)]
++struct VisRest {
++ vis: Visibility,
++ rest: TokenStream,
++}
++
++impl Parse for VisRest {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Ok(VisRest {
++ vis: input.parse()?,
++ rest: input.parse()?,
++ })
++ }
++}
++
++macro_rules! assert_vis_parse {
++ ($input:expr, Ok($p:pat)) => {
++ assert_vis_parse!($input, Ok($p) + "");
++ };
++
++ ($input:expr, Ok($p:pat) + $rest:expr) => {
++ let expected = $rest.parse::<TokenStream>().unwrap();
++ let parse: VisRest = syn::parse_str($input).unwrap();
++
++ match parse.vis {
++ $p => {}
++ _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
++ }
++
++ // NOTE: Round-trips through `to_string` to avoid potential whitespace
++ // diffs.
++ assert_eq!(parse.rest.to_string(), expected.to_string());
++ };
++
++ ($input:expr, Err) => {
++ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
++ };
++}
++
++#[test]
++fn test_pub() {
++ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
++}
++
++#[test]
++fn test_crate() {
++ assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
++}
++
++#[test]
++fn test_inherited() {
++ assert_vis_parse!("", Ok(Visibility::Inherited));
++}
++
++#[test]
++fn test_in() {
++ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_crate() {
++ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_self() {
++ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_super() {
++ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_missing_in() {
++ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
++}
++
++#[test]
++fn test_missing_in_path() {
++ assert_vis_parse!("pub(in)", Err);
++}
++
++#[test]
++fn test_crate_path() {
++ assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
++}
++
++#[test]
++fn test_junk_after_in() {
++ assert_vis_parse!("pub(in some::path @@garbage)", Err);
++}
++
++#[test]
++fn test_empty_group_vis() {
++ // mimics `struct S { $vis $field: () }` where $vis is empty
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("struct", Span::call_site())),
++ TokenTree::Ident(Ident::new("S", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
++ "f",
++ Span::call_site(),
++ ))]),
++ )),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as DeriveInput, @r###"
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("f"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs
+index a81b3df4d0..a1a670d9ed 100644
+--- a/third_party/rust/syn/tests/zzz_stable.rs
++++ b/third_party/rust/syn/tests/zzz_stable.rs
+@@ -1,7 +1,5 @@
+ #![cfg(syn_disable_nightly_tests)]
+
+-extern crate termcolor;
+-
+ use std::io::{self, Write};
+ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+@@ -10,7 +8,7 @@ const MSG: &str = "\
+ ‖ WARNING:
+ ‖ This is not a nightly compiler so not all tests were able to
+ ‖ run. Syn includes tests that compare Syn's parser against the
+-‖ compiler's parser, which requires access to unstable libsyntax
++‖ compiler's parser, which requires access to unstable librustc
+ ‖ data structures and a nightly compiler.
+ ‖
+ ";
+--
+2.28.0
+
diff --git a/source/xap/mozilla-thunderbird/mozilla-thunderbird.SlackBuild b/source/xap/mozilla-thunderbird/mozilla-thunderbird.SlackBuild
index 48476c96..4804e26d 100755
--- a/source/xap/mozilla-thunderbird/mozilla-thunderbird.SlackBuild
+++ b/source/xap/mozilla-thunderbird/mozilla-thunderbird.SlackBuild
@@ -182,6 +182,9 @@ zcat $CWD/unbreakdocs.diff.gz | patch -p1 --verbose || exit 1
# Bypass a test that fails the build:
zcat $CWD/gkrust.a.no.networking.check.diff.gz | patch -p1 --verbose || exit 1
+# Fix building with rust-1.47.0:
+zcat $CWD/rust_1.47.0.patch.gz | patch -p1 --verbose || exit 1
+
# Fetch localization, if requested:
if [ ! -z $MOZLOCALIZE ]; then
LOC_TAG="THUNDERBIRD_$( echo $VERSION | tr \. _ )_RELEASE"
diff --git a/source/xap/mozilla-thunderbird/rust_1.47.0.patch b/source/xap/mozilla-thunderbird/rust_1.47.0.patch
new file mode 100644
index 00000000..9471b7bd
--- /dev/null
+++ b/source/xap/mozilla-thunderbird/rust_1.47.0.patch
@@ -0,0 +1,30905 @@
+From 83fc2e3616ef15056be74f056a15e892038809b9 Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:10:20 +0200
+Subject: [PATCH 38/38] bmo#1663715: Update syn and proc-macro2 so that Firefox
+ can build on Rust nightly again
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ Cargo.lock | 8 +-
+ .../rust/lucet-wasi/.cargo-checksum.json | 2 +-
+ .../rust/packed_simd/.cargo-checksum.json | 2 +-
+ .../rust/proc-macro2/.cargo-checksum.json | 2 +-
+ third_party/rust/proc-macro2/Cargo.toml | 15 +-
+ third_party/rust/proc-macro2/README.md | 2 +-
+ third_party/rust/proc-macro2/build.rs | 20 +
+ third_party/rust/proc-macro2/src/detection.rs | 67 +
+ third_party/rust/proc-macro2/src/fallback.rs | 1010 ++----
+ third_party/rust/proc-macro2/src/lib.rs | 225 +-
+ third_party/rust/proc-macro2/src/marker.rs | 18 +
+ third_party/rust/proc-macro2/src/parse.rs | 849 +++++
+ third_party/rust/proc-macro2/src/strnom.rs | 391 ---
+ third_party/rust/proc-macro2/src/wrapper.rs | 258 +-
+ .../rust/proc-macro2/tests/comments.rs | 103 +
+ third_party/rust/proc-macro2/tests/marker.rs | 33 +
+ third_party/rust/proc-macro2/tests/test.rs | 240 +-
+ .../rust/proc-macro2/tests/test_fmt.rs | 26 +
+ .../spirv-cross-internal/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/Cargo.toml | 35 +-
+ third_party/rust/syn/README.md | 16 +-
+ third_party/rust/syn/benches/file.rs | 7 +
+ third_party/rust/syn/benches/rust.rs | 45 +-
+ third_party/rust/syn/build.rs | 38 +-
+ third_party/rust/syn/src/attr.rs | 126 +-
+ third_party/rust/syn/src/buffer.rs | 56 +-
+ third_party/rust/syn/src/custom_keyword.rs | 12 +-
+ .../rust/syn/src/custom_punctuation.rs | 50 +-
+ third_party/rust/syn/src/data.rs | 96 +-
+ third_party/rust/syn/src/derive.rs | 10 +-
+ third_party/rust/syn/src/discouraged.rs | 27 +-
+ third_party/rust/syn/src/error.rs | 33 +-
+ third_party/rust/syn/src/expr.rs | 826 +++--
+ third_party/rust/syn/src/ext.rs | 12 +-
+ third_party/rust/syn/src/file.rs | 4 +-
+ third_party/rust/syn/src/gen/clone.rs | 2051 ++++++++++++
+ third_party/rust/syn/src/gen/debug.rs | 2857 +++++++++++++++++
+ third_party/rust/syn/src/gen/eq.rs | 1930 +++++++++++
+ third_party/rust/syn/src/gen/fold.rs | 287 +-
+ third_party/rust/syn/src/gen/hash.rs | 2691 ++++++++++++++++
+ third_party/rust/syn/src/gen/visit.rs | 19 +-
+ third_party/rust/syn/src/gen/visit_mut.rs | 19 +-
+ third_party/rust/syn/src/generics.rs | 255 +-
+ third_party/rust/syn/src/item.rs | 1515 +++++----
+ third_party/rust/syn/src/keyword.rs | 0
+ third_party/rust/syn/src/lib.rs | 109 +-
+ third_party/rust/syn/src/lifetime.rs | 13 +-
+ third_party/rust/syn/src/lit.rs | 581 ++--
+ third_party/rust/syn/src/mac.rs | 55 +-
+ third_party/rust/syn/src/macros.rs | 61 +-
+ third_party/rust/syn/src/op.rs | 6 +-
+ third_party/rust/syn/src/parse.rs | 211 +-
+ third_party/rust/syn/src/parse_macro_input.rs | 32 +-
+ third_party/rust/syn/src/parse_quote.rs | 15 +-
+ third_party/rust/syn/src/pat.rs | 313 +-
+ third_party/rust/syn/src/path.rs | 33 +-
+ third_party/rust/syn/src/punctuated.rs | 123 +-
+ third_party/rust/syn/src/reserved.rs | 42 +
+ third_party/rust/syn/src/spanned.rs | 4 +-
+ third_party/rust/syn/src/stmt.rs | 141 +-
+ third_party/rust/syn/src/token.rs | 99 +-
+ third_party/rust/syn/src/tt.rs | 6 +-
+ third_party/rust/syn/src/ty.rs | 364 ++-
+ third_party/rust/syn/src/verbatim.rs | 15 +
+ third_party/rust/syn/src/whitespace.rs | 65 +
+ third_party/rust/syn/tests/clone.sh | 16 -
+ third_party/rust/syn/tests/common/eq.rs | 247 +-
+ third_party/rust/syn/tests/common/mod.rs | 13 +
+ third_party/rust/syn/tests/common/parse.rs | 24 +-
+ third_party/rust/syn/tests/debug/gen.rs | 50 +-
+ third_party/rust/syn/tests/debug/mod.rs | 17 +-
+ third_party/rust/syn/tests/features/error.rs | 1 -
+ third_party/rust/syn/tests/features/mod.rs | 22 -
+ third_party/rust/syn/tests/macros/mod.rs | 8 +-
+ third_party/rust/syn/tests/repo/mod.rs | 137 +-
+ third_party/rust/syn/tests/repo/progress.rs | 37 +
+ third_party/rust/syn/tests/test_asyncness.rs | 38 +-
+ third_party/rust/syn/tests/test_attribute.rs | 452 +--
+ .../rust/syn/tests/test_derive_input.rs | 1321 ++++----
+ third_party/rust/syn/tests/test_expr.rs | 314 +-
+ third_party/rust/syn/tests/test_generics.rs | 371 ++-
+ third_party/rust/syn/tests/test_grouping.rs | 53 +-
+ third_party/rust/syn/tests/test_ident.rs | 5 -
+ third_party/rust/syn/tests/test_item.rs | 45 +
+ third_party/rust/syn/tests/test_iterators.rs | 7 +-
+ third_party/rust/syn/tests/test_lit.rs | 75 +-
+ third_party/rust/syn/tests/test_meta.rs | 498 ++-
+ .../rust/syn/tests/test_parse_buffer.rs | 41 +-
+ .../rust/syn/tests/test_parse_stream.rs | 12 +
+ third_party/rust/syn/tests/test_pat.rs | 27 +-
+ third_party/rust/syn/tests/test_path.rs | 52 +
+ third_party/rust/syn/tests/test_precedence.rs | 196 +-
+ third_party/rust/syn/tests/test_receiver.rs | 127 +
+ third_party/rust/syn/tests/test_round_trip.rs | 41 +-
+ third_party/rust/syn/tests/test_shebang.rs | 59 +
+ .../rust/syn/tests/test_should_parse.rs | 4 -
+ third_party/rust/syn/tests/test_size.rs | 2 -
+ third_party/rust/syn/tests/test_stmt.rs | 44 +
+ .../rust/syn/tests/test_token_trees.rs | 12 +-
+ third_party/rust/syn/tests/test_ty.rs | 53 +
+ third_party/rust/syn/tests/test_visibility.rs | 145 +
+ third_party/rust/syn/tests/zzz_stable.rs | 4 +-
+ 103 files changed, 17319 insertions(+), 5831 deletions(-)
+ create mode 100644 third_party/rust/proc-macro2/src/detection.rs
+ create mode 100644 third_party/rust/proc-macro2/src/marker.rs
+ create mode 100644 third_party/rust/proc-macro2/src/parse.rs
+ delete mode 100644 third_party/rust/proc-macro2/src/strnom.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/comments.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/test_fmt.rs
+ create mode 100644 third_party/rust/syn/src/gen/clone.rs
+ create mode 100644 third_party/rust/syn/src/gen/debug.rs
+ create mode 100644 third_party/rust/syn/src/gen/eq.rs
+ create mode 100644 third_party/rust/syn/src/gen/hash.rs
+ delete mode 100644 third_party/rust/syn/src/keyword.rs
+ create mode 100644 third_party/rust/syn/src/reserved.rs
+ create mode 100644 third_party/rust/syn/src/verbatim.rs
+ create mode 100644 third_party/rust/syn/src/whitespace.rs
+ delete mode 100755 third_party/rust/syn/tests/clone.sh
+ delete mode 100644 third_party/rust/syn/tests/features/error.rs
+ delete mode 100644 third_party/rust/syn/tests/features/mod.rs
+ create mode 100644 third_party/rust/syn/tests/repo/progress.rs
+ create mode 100644 third_party/rust/syn/tests/test_item.rs
+ create mode 100644 third_party/rust/syn/tests/test_parse_stream.rs
+ create mode 100644 third_party/rust/syn/tests/test_path.rs
+ create mode 100644 third_party/rust/syn/tests/test_receiver.rs
+ create mode 100644 third_party/rust/syn/tests/test_shebang.rs
+ create mode 100644 third_party/rust/syn/tests/test_stmt.rs
+ create mode 100644 third_party/rust/syn/tests/test_ty.rs
+ create mode 100644 third_party/rust/syn/tests/test_visibility.rs
+
+diff --git a/Cargo.lock b/Cargo.lock
+index 19117e8368..d5fe0f6457 100644
+--- a/Cargo.lock
++++ b/Cargo.lock
+@@ -3717,9 +3717,9 @@ dependencies = [
+
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
++version = "1.0.24"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+ dependencies = [
+ "unicode-xid",
+ ]
+@@ -4647,9 +4647,9 @@ dependencies = [
+
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+ "proc-macro2",
+ "quote",
+diff --git a/third_party/rust/lucet-wasi/.cargo-checksum.json b/third_party/rust/lucet-wasi/.cargo-checksum.json
+index 229fc9978c..2c8c0a3c22 100644
+--- a/third_party/rust/lucet-wasi/.cargo-checksum.json
++++ b/third_party/rust/lucet-wasi/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/.gitignore":"44575cf5b28512d75644bf54a517dcef304ff809fd511747621b4d64f19aac66","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429e
c799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429ec799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d
0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/packed_simd/.cargo-checksum.json b/third_party/rust/packed_simd/.cargo-checksum.json
+index 01afcc1efd..c727a10006 100644
+--- a/third_party/rust/packed_simd/.cargo-checksum.json
++++ b/third_party/rust/packed_simd/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/.gitignore":"fe82c7da551079d832cf74200b0b359b4df9828cb4a0416fa7384f07a2ae6a13","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e
62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cbfe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b
6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef189314f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/a
pi/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/shuffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e",
"src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
++{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cb
fe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef18931
4f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/api/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/s
huffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e","src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e
9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+index eeef4120af..e7849f2896 100644
+--- a/third_party/rust/proc-macro2/.cargo-checksum.json
++++ b/third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"302d447d62c8d091d6241cf62bdad607c0d4ed8ff9f43d9b254c9d99c253ee8e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"a71283fbc495095eebbbf46753df3fe2c19505c745b508dea157f65796b64dd7","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"b114e013695260f6066395c8712cea112ec2a386010397a80f15a60f8b986444","src/lib.rs":"7f528764a958587f007f0c2a330a6a414bae2c8e73d5ed9fb64ff1b42b1805b1","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"1d2253eacbd40eb3a2a933be2adcee356af922bdb48cc89ff266252a41fd98a1","src/wrapper.rs":"f52646ce1705c1f6265516f30d4c43297b5f529dd31fb91f4c806be89d5a4122","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"652db9f25c69ffc65baa60cdca8f195aa2e254d4de0a9ddc85de4dc2470544b6","tests/test.rs":"5f30a704eeb2b9198b57f416d622da72d25cb9bf8d8b12e6d0e90aa2cb0e43fc","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+index 95d653633d..22150c516a 100644
+--- a/third_party/rust/proc-macro2/Cargo.toml
++++ b/third_party/rust/proc-macro2/Cargo.toml
+@@ -13,21 +13,22 @@
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.24"
++authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+@@ -39,5 +40,3 @@ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+index 19b0c3b5f8..3d05e871a7 100644
+--- a/third_party/rust/proc-macro2/README.md
++++ b/third_party/rust/proc-macro2/README.md
+@@ -1,6 +1,6 @@
+ # proc-macro2
+
+-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
++[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
+ [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
+ [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
+
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+index deb9b92719..b247d874f6 100644
+--- a/third_party/rust/proc-macro2/build.rs
++++ b/third_party/rust/proc-macro2/build.rs
+@@ -14,6 +14,10 @@
+ // procmacro2_semver_exempt surface area is implemented by using the
+ // nightly-only proc_macro API.
+ //
++// "hygiene"
++// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++// and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ // Enable non-dummy behavior of Span::start and Span::end methods which
+ // requires an unstable compiler feature. Enabled when building with
+@@ -57,6 +61,22 @@ fn main() {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
++ if version.minor < 32 {
++ println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
++ }
++
++ if version.minor < 39 {
++ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++ }
++
++ if version.minor >= 44 {
++ println!("cargo:rustc-cfg=lexerror_display");
++ }
++
++ if version.minor >= 45 {
++ println!("cargo:rustc-cfg=hygiene");
++ }
++
+ let target = env::var("TARGET").unwrap();
+ if !enable_use_proc_macro(&target) {
+ return;
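[Note on the build.rs hunk above: it probes the rustc version and prints cargo:rustc-cfg flags such as "hygiene" on 1.45+, which the crate then tests with #[cfg(...)]. A minimal standalone sketch of that pattern follows; the rustc_minor_version helper and the has_hygiene flag name are illustrative, not taken from the patch.

use std::env;
use std::process::Command;
use std::str;

fn main() {
    if let Some(minor) = rustc_minor_version() {
        if minor >= 45 {
            // Downstream code can then be gated with #[cfg(has_hygiene)].
            println!("cargo:rustc-cfg=has_hygiene");
        }
    }
}

fn rustc_minor_version() -> Option<u32> {
    let rustc = env::var_os("RUSTC")?;
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = str::from_utf8(&output.stdout).ok()?;
    // Expected shape: "rustc 1.47.0 (...)"; split on '.' and read the middle piece.
    let mut pieces = version.split('.');
    if pieces.next() != Some("rustc 1") {
        return None;
    }
    pieces.next()?.parse().ok()
}
]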
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+index 0000000000..c597bc99c6
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++ match WORKS.load(Ordering::SeqCst) {
++ 1 => return false,
++ 2 => return true,
++ _ => {}
++ }
++
++ INIT.call_once(initialize);
++ inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++ WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++ initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++// thread 1 calls take_hook, gets the user's original hook
++// thread 1 calls set_hook with the null hook
++// thread 2 calls take_hook, thinks null hook is the original hook
++// thread 2 calls set_hook with the null hook
++// thread 1 calls set_hook with the actual original hook
++// thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++ let sanity_check = &*null_hook as *const PanicHook;
++ let original_hook = panic::take_hook();
++ panic::set_hook(null_hook);
++
++ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++ WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++ let hopefully_null_hook = panic::take_hook();
++ panic::set_hook(original_hook);
++ if sanity_check != &*hopefully_null_hook {
++ panic!("observed race condition in proc_macro2::inside_proc_macro");
++ }
++}
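[Note on the new detection.rs above: it decides whether the compiler's proc_macro API is usable by installing a silent panic hook, calling proc_macro::Span::call_site under catch_unwind, and caching the result behind a Once. The self-contained sketch below shows the same quiet-probe pattern on an arbitrary closure; it illustrates the technique and is not the crate's code.

use std::panic;

// Probe whether `f` runs to completion, suppressing the default
// "thread panicked" output while the probe is in flight.
fn probe_quietly<T>(f: impl FnOnce() -> T + panic::UnwindSafe) -> bool {
    let original = panic::take_hook();
    panic::set_hook(Box::new(|_| { /* stay quiet */ }));
    let ok = panic::catch_unwind(f).is_ok();
    let _ = panic::take_hook(); // discard the silent hook
    panic::set_hook(original);
    ok
}

fn main() {
    // Illustrative probes; detection.rs probes proc_macro::Span::call_site()
    // and caches the answer in an AtomicUsize guarded by a Once.
    assert!(probe_quietly(|| 1 + 1));
    assert!(!probe_quietly(|| panic!("proc_macro not available here")));
}
]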
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+index fe582b3b5f..8900c5ff0f 100644
+--- a/third_party/rust/proc-macro2/src/fallback.rs
++++ b/third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,27 +1,41 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+- inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++ pub(crate) inner: Vec<TokenTree>,
+ }
+
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+@@ -31,6 +45,72 @@ impl TokenStream {
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
++
++ fn take_inner(&mut self) -> Vec<TokenTree> {
++ mem::replace(&mut self.inner, Vec::new())
++ }
++
++ fn push_token(&mut self, token: TokenTree) {
++ // https://github.com/alexcrichton/proc-macro2/issues/235
++ match token {
++ #[cfg(not(no_bind_by_move_pattern_guard))]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) if literal.text.starts_with('-') => {
++ push_negative_literal(self, literal);
++ }
++ #[cfg(no_bind_by_move_pattern_guard)]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) => {
++ if literal.text.starts_with('-') {
++ push_negative_literal(self, literal);
++ } else {
++ self.inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++ _ => self.inner.push(token),
++ }
++
++ #[cold]
++ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++ literal.text.remove(0);
++ let mut punct = crate::Punct::new('-', Spacing::Alone);
++ punct.set_span(crate::Span::_new_stable(literal.span));
++ stream.inner.push(TokenTree::Punct(punct));
++ stream
++ .inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++ fn drop(&mut self) {
++ while let Some(token) = self.inner.pop() {
++ let group = match token {
++ TokenTree::Group(group) => group.inner,
++ _ => continue,
++ };
++ #[cfg(wrap_proc_macro)]
++ let group = match group {
++ crate::imp::Group::Fallback(group) => group,
++ _ => continue,
++ };
++ let mut group = group;
++ self.inner.extend(group.stream.take_inner());
++ }
++ }
+ }
+
+ #[cfg(span_locations)]
+@@ -59,20 +139,22 @@ impl FromStr for TokenStream {
+ // Create a dummy file & add it to the source map
+ let cursor = get_cursor(src);
+
+- match token_stream(cursor) {
+- Ok((input, output)) => {
+- if skip_whitespace(input).len() != 0 {
+- Err(LexError)
+- } else {
+- Ok(output)
+- }
+- }
+- Err(LexError) => Err(LexError),
++ let (rest, tokens) = token_stream(cursor)?;
++ if rest.is_empty() {
++ Ok(tokens)
++ } else {
++ Err(LexError)
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++impl Display for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ f.write_str("cannot parse string into token stream")
++ }
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+@@ -80,37 +162,22 @@ impl fmt::Display for TokenStream {
+ write!(f, " ")?;
+ }
+ joint = false;
+- match *tt {
+- TokenTree::Group(ref tt) => {
+- let (start, end) = match tt.delimiter() {
+- Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
+- Delimiter::Bracket => ("[", "]"),
+- Delimiter::None => ("", ""),
+- };
+- if tt.stream().into_iter().next().is_none() {
+- write!(f, "{} {}", start, end)?
+- } else {
+- write!(f, "{} {} {}", start, tt.stream(), end)?
+- }
+- }
+- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+- TokenTree::Punct(ref tt) => {
+- write!(f, "{}", tt.as_char())?;
+- match tt.spacing() {
+- Spacing::Alone => {}
+- Spacing::Joint => joint = true,
+- }
++ match tt {
++ TokenTree::Group(tt) => Display::fmt(tt, f),
++ TokenTree::Ident(tt) => Display::fmt(tt, f),
++ TokenTree::Punct(tt) => {
++ joint = tt.spacing() == Spacing::Joint;
++ Display::fmt(tt, f)
+ }
+- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+- }
++ TokenTree::Literal(tt) => Display::fmt(tt, f),
++ }?
+ }
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+@@ -139,28 +206,26 @@ impl From<TokenStream> for proc_macro::TokenStream {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+- TokenStream { inner: vec![tree] }
++ let mut stream = TokenStream::new();
++ stream.push_token(tree);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
+- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+- let mut v = Vec::new();
+-
+- for token in streams.into_iter() {
+- v.push(token);
+- }
+-
+- TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++ let mut stream = TokenStream::new();
++ stream.extend(tokens);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+- for stream in streams.into_iter() {
+- v.extend(stream.inner);
++ for mut stream in streams {
++ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v }
+@@ -168,31 +233,30 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+- self.inner.extend(streams);
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++ tokens.into_iter().for_each(|token| self.push_token(token));
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+- self.inner
+- .extend(streams.into_iter().flat_map(|stream| stream));
++ self.inner.extend(streams.into_iter().flatten());
+ }
+ }
+
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+- fn into_iter(self) -> TokenTreeIter {
+- self.inner.into_iter()
++ fn into_iter(mut self) -> TokenTreeIter {
++ self.take_inner().into_iter()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+ path: PathBuf,
+ }
+
+@@ -208,7 +272,7 @@ impl SourceFile {
+ }
+ }
+
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+@@ -218,7 +282,7 @@ impl fmt::Debug for SourceFile {
+ }
+
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+@@ -228,23 +292,11 @@ thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+- files: vec![{
++ files: vec![FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+- {
+- FileInfo {
+- name: "<unspecified>".to_owned(),
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
+-
+- #[cfg(not(procmacro2_semver_exempt))]
+- {
+- FileInfo {
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
++ name: "<unspecified>".to_owned(),
++ span: Span { lo: 0, hi: 0 },
++ lines: vec![0],
+ }],
+ });
+ }
+@@ -282,16 +334,21 @@ impl FileInfo {
+ }
+ }
+
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+- let mut prev = 0;
+- while let Some(len) = s[prev..].find('\n') {
+- prev += len + 1;
+- lines.push(prev);
++ let mut total = 0;
++
++ for ch in s.chars() {
++ total += 1;
++ if ch == '\n' {
++ lines.push(total);
++ }
+ }
+- lines
++
++ (total, lines)
+ }
+
+ #[cfg(span_locations)]
+@@ -310,23 +367,22 @@ impl SourceMap {
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+- let lines = lines_offsets(src);
++ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo,
+- hi: lo + (src.len() as u32),
++ hi: lo + (len as u32),
+ };
+
+- #[cfg(procmacro2_semver_exempt)]
+ self.files.push(FileInfo {
++ #[cfg(procmacro2_semver_exempt)]
+ name: name.to_owned(),
+ span,
+ lines,
+ });
+
+ #[cfg(not(procmacro2_semver_exempt))]
+- self.files.push(FileInfo { span, lines });
+ let _ = name;
+
+ span
+@@ -343,11 +399,11 @@ impl SourceMap {
+ }
+
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+ #[cfg(span_locations)]
+- lo: u32,
++ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+- hi: u32,
++ pub(crate) hi: u32,
+ }
+
+ impl Span {
+@@ -361,12 +417,16 @@ impl Span {
+ Span { lo: 0, hi: 0 }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::call_site()
++ }
++
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+@@ -374,7 +434,6 @@ impl Span {
+ *self
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+@@ -427,26 +486,59 @@ impl Span {
+ })
+ })
+ }
++
++ #[cfg(not(span_locations))]
++ fn first_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn first_byte(self) -> Self {
++ Span {
++ lo: self.lo,
++ hi: cmp::min(self.lo.saturating_add(1), self.hi),
++ }
++ }
++
++ #[cfg(not(span_locations))]
++ fn last_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn last_byte(self) -> Self {
++ Span {
++ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++ hi: self.hi,
++ }
++ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+- if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++ #[cfg(span_locations)]
++ {
++ if span.lo == 0 && span.hi == 0 {
++ return;
++ }
++ }
++
++ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+@@ -474,11 +566,11 @@ impl Group {
+ }
+
+ pub fn span_open(&self) -> Span {
+- self.span
++ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+- self.span
++ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+@@ -486,36 +578,45 @@ impl Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
++ // We attempt to match libproc_macro's formatting.
++ // Empty parens: ()
++ // Nonempty parens: (...)
++ // Empty brackets: []
++ // Nonempty brackets: [...]
++ // Empty braces: { }
++ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- let (left, right) = match self.delimiter {
++ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
++ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+- f.write_str(left)?;
+- self.stream.fmt(f)?;
+- f.write_str(right)?;
++ f.write_str(open)?;
++ Display::fmt(&self.stream, f)?;
++ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++ f.write_str(" ")?;
++ }
++ f.write_str(close)?;
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+ sym: String,
+ span: Span,
+ raw: bool,
+@@ -549,16 +650,14 @@ impl Ident {
+ }
+ }
+
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+@@ -615,18 +714,18 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+- "r#".fmt(f)?;
++ f.write_str("r#")?;
+ }
+- self.sym.fmt(f)
++ Display::fmt(&self.sym, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+@@ -637,17 +736,17 @@ impl fmt::Debug for Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+ text: String,
+ span: Span,
+ }
+@@ -669,7 +768,7 @@ macro_rules! unsuffixed_numbers {
+ }
+
+ impl Literal {
+- fn _new(text: String) -> Literal {
++ pub(crate) fn _new(text: String) -> Literal {
+ Literal {
+ text,
+ span: Span::call_site(),
+@@ -711,7 +810,7 @@ impl Literal {
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -719,7 +818,7 @@ impl Literal {
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -730,10 +829,10 @@ impl Literal {
+ text.push('"');
+ for c in t.chars() {
+ if c == '\'' {
+- // escape_default turns this into "\'" which is unnecessary.
++ // escape_debug turns this into "\'" which is unnecessary.
+ text.push(c);
+ } else {
+- text.extend(c.escape_default());
++ text.extend(c.escape_debug());
+ }
+ }
+ text.push('"');
+@@ -744,10 +843,10 @@ impl Literal {
+ let mut text = String::new();
+ text.push('\'');
+ if t == '"' {
+- // escape_default turns this into '\"' which is unnecessary.
++ // escape_debug turns this into '\"' which is unnecessary.
+ text.push(t);
+ } else {
+- text.extend(t.escape_default());
++ text.extend(t.escape_debug());
+ }
+ text.push('\'');
+ Literal::_new(text)
+@@ -756,6 +855,7 @@ impl Literal {
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
++ #[allow(clippy::match_overlapping_arm)]
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+@@ -784,651 +884,17 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.text.fmt(f)
++ Display::fmt(&self.text, f)
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Literal");
+ debug.field("lit", &format_args!("{}", self.text));
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+- let mut trees = Vec::new();
+- loop {
+- let input_no_ws = skip_whitespace(input);
+- if input_no_ws.rest.len() == 0 {
+- break;
+- }
+- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+- input = a;
+- trees.extend(tokens);
+- continue;
+- }
+-
+- let (a, tt) = match token_tree(input_no_ws) {
+- Ok(p) => p,
+- Err(_) => break,
+- };
+- trees.push(tt);
+- input = a;
+- }
+- Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let (a, b) = f(skip_whitespace(input))?;
+- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let input = skip_whitespace(input);
+- let lo = input.off;
+- let (a, b) = f(input)?;
+- let hi = a.off;
+- let span = crate::Span::_new_stable(Span { lo, hi });
+- Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+- tt.set_span(span);
+- Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+- |
+- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+- |
+- map!(op, TokenTree::Punct)
+- |
+- symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+- delimited!(
+- punct!("("),
+- token_stream,
+- punct!(")")
+- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+- |
+- delimited!(
+- punct!("["),
+- token_stream,
+- punct!("]")
+- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+- |
+- delimited!(
+- punct!("{"),
+- token_stream,
+- punct!("}")
+- ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+- symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+- let raw = input.starts_with("r#");
+- let rest = input.advance((raw as usize) << 1);
+-
+- let (rest, sym) = symbol_not_raw(rest)?;
+-
+- if !raw {
+- let ident = crate::Ident::new(sym, crate::Span::call_site());
+- return Ok((rest, ident.into()));
+- }
+-
+- if sym == "_" {
+- return Err(LexError);
+- }
+-
+- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+- Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+- let mut chars = input.char_indices();
+-
+- match chars.next() {
+- Some((_, ch)) if is_ident_start(ch) => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut end = input.len();
+- for (i, ch) in chars {
+- if !is_ident_continue(ch) {
+- end = i;
+- break;
+- }
+- }
+-
+- Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+- let input_no_ws = skip_whitespace(input);
+-
+- match literal_nocapture(input_no_ws) {
+- Ok((a, ())) => {
+- let start = input.len() - input_no_ws.len();
+- let len = input_no_ws.len() - a.len();
+- let end = start + len;
+- Ok((a, Literal::_new(input.rest[start..end].to_string())))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-named!(literal_nocapture -> (), alt!(
+- string
+- |
+- byte_string
+- |
+- byte
+- |
+- character
+- |
+- float
+- |
+- int
+-));
+-
+-named!(string -> (), alt!(
+- quoted_string
+- |
+- preceded!(
+- punct!("r"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+- punct!("\"") >>
+- cooked_string >>
+- tag!("\"") >>
+- option!(symbol_not_raw) >>
+- (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices().peekable();
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- return Ok((input.advance(byte_offset), ()));
+- }
+- '\r' => {
+- if let Some((_, '\n')) = chars.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- '\\' => match chars.next() {
+- Some((_, 'x')) => {
+- if !backslash_x_char(&mut chars) {
+- break;
+- }
+- }
+- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+- Some((_, 'u')) => {
+- if !backslash_u(&mut chars) {
+- break;
+- }
+- }
+- Some((_, '\n')) | Some((_, '\r')) => {
+- while let Some(&(_, ch)) = chars.peek() {
+- if ch.is_whitespace() {
+- chars.next();
+- } else {
+- break;
+- }
+- }
+- }
+- _ => break,
+- },
+- _ch => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+- delimited!(
+- punct!("b\""),
+- cooked_byte_string,
+- tag!("\"")
+- ) => { |_| () }
+- |
+- preceded!(
+- punct!("br"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- 'outer: while let Some((offset, b)) = bytes.next() {
+- match b {
+- b'"' => {
+- return Ok((input.advance(offset), ()));
+- }
+- b'\r' => {
+- if let Some((_, b'\n')) = bytes.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- b'\\' => match bytes.next() {
+- Some((_, b'x')) => {
+- if !backslash_x_byte(&mut bytes) {
+- break;
+- }
+- }
+- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+- Some((newline, b'\n')) | Some((newline, b'\r')) => {
+- let rest = input.advance(newline + 1);
+- for (offset, ch) in rest.char_indices() {
+- if !ch.is_whitespace() {
+- input = rest.advance(offset);
+- bytes = input.bytes().enumerate();
+- continue 'outer;
+- }
+- }
+- break;
+- }
+- _ => break,
+- },
+- b if b < 0x80 => {}
+- _ => break,
+- }
+- }
+- Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let mut n = 0;
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- n = byte_offset;
+- break;
+- }
+- '#' => {}
+- _ => return Err(LexError),
+- }
+- }
+- for (byte_offset, ch) in chars {
+- match ch {
+- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+- let rest = input.advance(byte_offset + 1 + n);
+- return Ok((rest, ()));
+- }
+- '\r' => {}
+- _ => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+- punct!("b") >>
+- tag!("'") >>
+- cooked_byte >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- let ok = match bytes.next().map(|(_, b)| b) {
+- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+- Some(b'x') => backslash_x_byte(&mut bytes),
+- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+- | Some(b'"') => true,
+- _ => false,
+- },
+- b => b.is_some(),
+- };
+- if ok {
+- match bytes.next() {
+- Some((offset, _)) => {
+- if input.chars().as_str().is_char_boundary(offset) {
+- Ok((input.advance(offset), ()))
+- } else {
+- Err(LexError)
+- }
+- }
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-named!(character -> (), do_parse!(
+- punct!("'") >>
+- cooked_char >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let ok = match chars.next().map(|(_, ch)| ch) {
+- Some('\\') => match chars.next().map(|(_, ch)| ch) {
+- Some('x') => backslash_x_char(&mut chars),
+- Some('u') => backslash_u(&mut chars),
+- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+- true
+- }
+- _ => false,
+- },
+- ch => ch.is_some(),
+- };
+- if ok {
+- match chars.next() {
+- Some((idx, _)) => Ok((input.advance(idx), ())),
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! next_ch {
+- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+- match $chars.next() {
+- Some((_, ch)) => match ch {
+- $pat $(| $rest)* => ch,
+- _ => return false,
+- },
+- None => return false
+- }
+- };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '0'..='7');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, u8)>,
+-{
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '{');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- loop {
+- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+- if c == '}' {
+- return true;
+- }
+- }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = float_digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+- let mut chars = input.chars().peekable();
+- match chars.next() {
+- Some(ch) if ch >= '0' && ch <= '9' => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut len = 1;
+- let mut has_dot = false;
+- let mut has_exp = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '0'..='9' | '_' => {
+- chars.next();
+- len += 1;
+- }
+- '.' => {
+- if has_dot {
+- break;
+- }
+- chars.next();
+- if chars
+- .peek()
+- .map(|&ch| ch == '.' || is_ident_start(ch))
+- .unwrap_or(false)
+- {
+- return Err(LexError);
+- }
+- len += 1;
+- has_dot = true;
+- }
+- 'e' | 'E' => {
+- chars.next();
+- len += 1;
+- has_exp = true;
+- break;
+- }
+- _ => break,
+- }
+- }
+-
+- let rest = input.advance(len);
+- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+- return Err(LexError);
+- }
+-
+- if has_exp {
+- let mut has_exp_value = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '+' | '-' => {
+- if has_exp_value {
+- break;
+- }
+- chars.next();
+- len += 1;
+- }
+- '0'..='9' => {
+- chars.next();
+- len += 1;
+- has_exp_value = true;
+- }
+- '_' => {
+- chars.next();
+- len += 1;
+- }
+- _ => break,
+- }
+- }
+- if !has_exp_value {
+- return Err(LexError);
+- }
+- }
+-
+- Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+- let base = if input.starts_with("0x") {
+- input = input.advance(2);
+- 16
+- } else if input.starts_with("0o") {
+- input = input.advance(2);
+- 8
+- } else if input.starts_with("0b") {
+- input = input.advance(2);
+- 2
+- } else {
+- 10
+- };
+-
+- let mut len = 0;
+- let mut empty = true;
+- for b in input.bytes() {
+- let digit = match b {
+- b'0'..=b'9' => (b - b'0') as u64,
+- b'a'..=b'f' => 10 + (b - b'a') as u64,
+- b'A'..=b'F' => 10 + (b - b'A') as u64,
+- b'_' => {
+- if empty && base == 10 {
+- return Err(LexError);
+- }
+- len += 1;
+- continue;
+- }
+- _ => break,
+- };
+- if digit >= base {
+- return Err(LexError);
+- }
+- len += 1;
+- empty = false;
+- }
+- if empty {
+- Err(LexError)
+- } else {
+- Ok((input.advance(len), ()))
+- }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+- let input = skip_whitespace(input);
+- match op_char(input) {
+- Ok((rest, '\'')) => {
+- symbol(rest)?;
+- Ok((rest, Punct::new('\'', Spacing::Joint)))
+- }
+- Ok((rest, ch)) => {
+- let kind = match op_char(rest) {
+- Ok(_) => Spacing::Joint,
+- Err(LexError) => Spacing::Alone,
+- };
+- Ok((rest, Punct::new(ch, kind)))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+- if input.starts_with("//") || input.starts_with("/*") {
+- // Do not accept `/` of a comment as an op.
+- return Err(LexError);
+- }
+-
+- let mut chars = input.chars();
+- let first = match chars.next() {
+- Some(ch) => ch,
+- None => {
+- return Err(LexError);
+- }
+- };
+- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+- if recognized.contains(first) {
+- Ok((input.advance(first.len_utf8()), first))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+- let mut trees = Vec::new();
+- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+- if inner {
+- trees.push(Punct::new('!', Spacing::Alone).into());
+- }
+- let mut stream = vec![
+- TokenTree::Ident(crate::Ident::new("doc", span)),
+- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+- TokenTree::Literal(crate::Literal::string(comment)),
+- ];
+- for tt in stream.iter_mut() {
+- tt.set_span(span);
+- }
+- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+- trees.push(crate::Group::_new_stable(group).into());
+- for tt in trees.iter_mut() {
+- tt.set_span(span);
+- }
+- Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+- do_parse!(
+- punct!("//!") >>
+- s: take_until_newline_or_eof!() >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tag!("/*!")) >>
+- s: block_comment >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- punct!("///") >>
+- not!(tag!("/")) >>
+- s: take_until_newline_or_eof!() >>
+- ((s, false))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+- s: block_comment >>
+- ((s, false))
+- )
+-));
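[Note on the fallback.rs changes above: among other things, lines_offsets is reworked to return the character offsets of each line start plus the total character count, so char-based spans can be mapped back to line/column positions. A rough sketch of that idea follows; lines_offsets is copied from the patched code, while the line_column lookup helper is illustrative and not part of the patch.

// Character-offset line table, as in the patched lines_offsets above:
// offsets of each line start in chars, plus the total char count.
fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
    let mut lines = vec![0];
    let mut total = 0;
    for ch in s.chars() {
        total += 1;
        if ch == '\n' {
            lines.push(total);
        }
    }
    (total, lines)
}

// Illustrative lookup (not from the patch): map a char offset back to a
// 1-based line and 0-based column using the table above.
fn line_column(lines: &[usize], offset: usize) -> (usize, usize) {
    let line = match lines.binary_search(&offset) {
        Ok(found) => found,
        Err(insert) => insert - 1,
    };
    (line + 1, offset - lines[line])
}

fn main() {
    let (total, lines) = lines_offsets("ab\ncd\n");
    assert_eq!(total, 6);
    assert_eq!(lines, vec![0, 3, 6]);
    assert_eq!(line_column(&lines, 4), (2, 1)); // 'd' sits on line 2, column 1
}
]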
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+index a08be3e815..c20fb50d4a 100644
+--- a/third_party/rust/proc-macro2/src/lib.rs
++++ b/third_party/rust/proc-macro2/src/lib.rs
+@@ -78,27 +78,24 @@
+ //! a different thread.
+
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.24")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+
+-use std::cmp::Ordering;
+-use std::fmt;
+-use std::hash::{Hash, Hasher};
+-use std::iter::FromIterator;
+-use std::marker;
+-use std::ops::RangeBounds;
+-#[cfg(procmacro2_semver_exempt)]
+-use std::path::PathBuf;
+-use std::rc::Rc;
+-use std::str::FromStr;
++mod marker;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
+
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+@@ -106,6 +103,17 @@ use crate::fallback as imp;
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+
++use crate::marker::Marker;
++use std::cmp::Ordering;
++use std::error::Error;
++use std::fmt::{self, Debug, Display};
++use std::hash::{Hash, Hasher};
++use std::iter::FromIterator;
++use std::ops::RangeBounds;
++#[cfg(procmacro2_semver_exempt)]
++use std::path::PathBuf;
++use std::str::FromStr;
++
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+ ///
+ /// This type provides interfaces for iterating over token trees and for
+@@ -116,27 +124,27 @@ mod imp;
+ #[derive(Clone)]
+ pub struct TokenStream {
+ inner: imp::TokenStream,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ /// Error returned from `TokenStream::from_str`.
+ pub struct LexError {
+ inner: imp::LexError,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl TokenStream {
+ fn _new(inner: imp::TokenStream) -> TokenStream {
+ TokenStream {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
+ TokenStream {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -173,7 +181,7 @@ impl FromStr for TokenStream {
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let e = src.parse().map_err(|e| LexError {
+ inner: e,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ })?;
+ Ok(TokenStream::_new(e))
+ }
+@@ -228,25 +236,33 @@ impl FromIterator<TokenStream> for TokenStream {
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ Debug::fmt(&self.inner, f)
++ }
++}
++
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
++impl Error for LexError {}
++
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+@@ -254,7 +270,7 @@ impl fmt::Debug for LexError {
+ #[derive(Clone, PartialEq, Eq)]
+ pub struct SourceFile {
+ inner: imp::SourceFile,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+@@ -262,7 +278,7 @@ impl SourceFile {
+ fn _new(inner: imp::SourceFile) -> Self {
+ SourceFile {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -291,9 +307,9 @@ impl SourceFile {
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -311,25 +327,41 @@ pub struct LineColumn {
+ pub column: usize,
+ }
+
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++ fn cmp(&self, other: &Self) -> Ordering {
++ self.line
++ .cmp(&other.line)
++ .then(self.column.cmp(&other.column))
++ }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ Some(self.cmp(other))
++ }
++}
++
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+ inner: imp::Span,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Span {
+ fn _new(inner: imp::Span) -> Span {
+ Span {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Span) -> Span {
+ Span {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -342,6 +374,16 @@ impl Span {
+ Span::_new(imp::Span::call_site())
+ }
+
++ /// The span located at the invocation of the procedural macro, but with
++ /// local variables, labels, and `$crate` resolved at the definition site
++ /// of the macro. This is the same hygiene behavior as `macro_rules`.
++ ///
++ /// This function requires Rust 1.45 or later.
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::_new(imp::Span::mixed_site())
++ }
++
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+@@ -352,18 +394,12 @@ impl Span {
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+@@ -439,9 +475,9 @@ impl Span {
+ }
+
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -462,11 +498,11 @@ impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+- match *self {
+- TokenTree::Group(ref t) => t.span(),
+- TokenTree::Ident(ref t) => t.span(),
+- TokenTree::Punct(ref t) => t.span(),
+- TokenTree::Literal(ref t) => t.span(),
++ match self {
++ TokenTree::Group(t) => t.span(),
++ TokenTree::Ident(t) => t.span(),
++ TokenTree::Punct(t) => t.span(),
++ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+@@ -476,11 +512,11 @@ impl TokenTree {
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+- match *self {
+- TokenTree::Group(ref mut t) => t.set_span(span),
+- TokenTree::Ident(ref mut t) => t.set_span(span),
+- TokenTree::Punct(ref mut t) => t.set_span(span),
+- TokenTree::Literal(ref mut t) => t.set_span(span),
++ match self {
++ TokenTree::Group(t) => t.set_span(span),
++ TokenTree::Ident(t) => t.set_span(span),
++ TokenTree::Punct(t) => t.set_span(span),
++ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+ }
+@@ -513,32 +549,32 @@ impl From<Literal> for TokenTree {
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => t.fmt(f),
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ match self {
++ TokenTree::Group(t) => Display::fmt(t, f),
++ TokenTree::Ident(t) => Display::fmt(t, f),
++ TokenTree::Punct(t) => Display::fmt(t, f),
++ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => {
++ match self {
++ TokenTree::Group(t) => Debug::fmt(t, f),
++ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ TokenTree::Punct(t) => Debug::fmt(t, f),
++ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+@@ -651,15 +687,15 @@ impl Group {
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Display::fmt(&self.inner, formatter)
++ Display::fmt(&self.inner, formatter)
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Debug::fmt(&self.inner, formatter)
++ Debug::fmt(&self.inner, formatter)
+ }
+ }
+
+@@ -669,7 +705,7 @@ impl fmt::Debug for Group {
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+ pub struct Punct {
+- op: char,
++ ch: char,
+ spacing: Spacing,
+ span: Span,
+ }
+@@ -695,9 +731,9 @@ impl Punct {
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+- pub fn new(op: char, spacing: Spacing) -> Punct {
++ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct {
+- op,
++ ch,
+ spacing,
+ span: Span::call_site(),
+ }
+@@ -705,7 +741,7 @@ impl Punct {
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+- self.op
++ self.ch
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether
+@@ -730,16 +766,16 @@ impl Punct {
+
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.op.fmt(f)
++ Display::fmt(&self.ch, f)
+ }
+ }
+
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+- debug.field("op", &self.op);
++ debug.field("char", &self.ch);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+@@ -813,14 +849,14 @@ impl fmt::Debug for Punct {
+ #[derive(Clone)]
+ pub struct Ident {
+ inner: imp::Ident,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Ident {
+ fn _new(inner: imp::Ident) -> Ident {
+ Ident {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -920,15 +956,15 @@ impl Hash for Ident {
+
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -941,7 +977,7 @@ impl fmt::Debug for Ident {
+ #[derive(Clone)]
+ pub struct Literal {
+ inner: imp::Literal,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ macro_rules! suffixed_int_literals {
+@@ -988,14 +1024,14 @@ impl Literal {
+ fn _new(inner: imp::Literal) -> Literal {
+ Literal {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Literal) -> Literal {
+ Literal {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -1140,26 +1176,25 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+- use std::fmt;
+- use std::marker;
+- use std::rc::Rc;
++ use crate::marker::Marker;
++ use crate::{imp, TokenTree};
++ use std::fmt::{self, Debug};
+
+ pub use crate::TokenStream;
+- use crate::{imp, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+@@ -1168,7 +1203,7 @@ pub mod token_stream {
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Iterator for IntoIter {
+@@ -1179,9 +1214,9 @@ pub mod token_stream {
+ }
+ }
+
+- impl fmt::Debug for IntoIter {
++ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -1192,7 +1227,7 @@ pub mod token_stream {
+ fn into_iter(self) -> IntoIter {
+ IntoIter {
+ inner: self.inner.into_iter(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+ }
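[Note on the lib.rs hunk above: it also adds Ord/PartialOrd for LineColumn under cfg(span_locations), comparing by line and breaking ties by column. A small sketch of the same lexicographic ordering on an illustrative stand-in type:

use std::cmp::Ordering;

// Stand-in for the patch's LineColumn ordering: compare by line first,
// then break ties by column (the Pos name is illustrative).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Pos {
    line: usize,
    column: usize,
}

impl Ord for Pos {
    fn cmp(&self, other: &Self) -> Ordering {
        self.line
            .cmp(&other.line)
            .then(self.column.cmp(&other.column))
    }
}

impl PartialOrd for Pos {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let a = Pos { line: 3, column: 7 };
    let b = Pos { line: 3, column: 9 };
    let c = Pos { line: 4, column: 0 };
    assert!(a < b && b < c);
}
]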
+diff --git a/third_party/rust/proc-macro2/src/marker.rs b/third_party/rust/proc-macro2/src/marker.rs
+new file mode 100644
+index 0000000000..58729baf4a
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/marker.rs
+@@ -0,0 +1,18 @@
++use std::marker::PhantomData;
++use std::panic::{RefUnwindSafe, UnwindSafe};
++use std::rc::Rc;
++
++// Zero sized marker with the correct set of autotrait impls we want all proc
++// macro types to have.
++pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
++
++pub(crate) use self::value::*;
++
++mod value {
++ pub(crate) use std::marker::PhantomData as Marker;
++}
++
++pub(crate) struct ProcMacroAutoTraits(Rc<()>);
++
++impl UnwindSafe for ProcMacroAutoTraits {}
++impl RefUnwindSafe for ProcMacroAutoTraits {}
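[Note on the new marker.rs above: it replaces the per-type PhantomData<Rc<()>> fields with a shared Marker = PhantomData<ProcMacroAutoTraits>, where the wrapped Rc keeps the proc-macro types !Send and !Sync while the manual impls restore unwind safety. The sketch below shows the effect on a hypothetical wrapper type; all names are illustrative, not the crate's.

use std::marker::PhantomData;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::rc::Rc;

// Zero-sized marker in the spirit of ProcMacroAutoTraits: the Rc keeps any
// containing type !Send and !Sync, while the manual impls keep it unwind safe.
struct AutoTraits(Rc<()>);
impl UnwindSafe for AutoTraits {}
impl RefUnwindSafe for AutoTraits {}

struct Token {
    text: String,
    _marker: PhantomData<AutoTraits>,
}

fn assert_unwind_safe<T: UnwindSafe>() {}
// fn assert_send<T: Send>() {} // asserting Send for Token would not compile

fn main() {
    assert_unwind_safe::<Token>();
    let t = Token {
        text: "ident".to_owned(),
        _marker: PhantomData,
    };
    assert_eq!(t.text, "ident");
}
]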
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+index 0000000000..365fe0484d
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,849 @@
++use crate::fallback::{
++ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::char;
++use std::str::{Bytes, CharIndices, Chars};
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++ pub rest: &'a str,
++ #[cfg(span_locations)]
++ pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++ fn advance(&self, bytes: usize) -> Cursor<'a> {
++ let (_front, rest) = self.rest.split_at(bytes);
++ Cursor {
++ rest,
++ #[cfg(span_locations)]
++ off: self.off + _front.chars().count() as u32,
++ }
++ }
++
++ fn starts_with(&self, s: &str) -> bool {
++ self.rest.starts_with(s)
++ }
++
++ pub(crate) fn is_empty(&self) -> bool {
++ self.rest.is_empty()
++ }
++
++ fn len(&self) -> usize {
++ self.rest.len()
++ }
++
++ fn as_bytes(&self) -> &'a [u8] {
++ self.rest.as_bytes()
++ }
++
++ fn bytes(&self) -> Bytes<'a> {
++ self.rest.bytes()
++ }
++
++ fn chars(&self) -> Chars<'a> {
++ self.rest.chars()
++ }
++
++ fn char_indices(&self) -> CharIndices<'a> {
++ self.rest.char_indices()
++ }
++
++ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++ if self.starts_with(tag) {
++ Ok(self.advance(tag.len()))
++ } else {
++ Err(LexError)
++ }
++ }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++ let mut s = input;
++
++ while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ let (cursor, _) = take_until_newline_or_eof(s);
++ s = cursor;
++ continue;
++ } else if s.starts_with("/**/") {
++ s = s.advance(4);
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ match block_comment(s) {
++ Ok((rest, _)) => {
++ s = rest;
++ continue;
++ }
++ Err(LexError) => return s,
++ }
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = s.advance(1);
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = s.advance(ch.len_utf8());
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++ if !input.starts_with("/*") {
++ return Err(LexError);
++ }
++
++ let mut depth = 0;
++ let bytes = input.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++
++ Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++ match input.chars().next() {
++ Some(ch) if is_ident_continue(ch) => Err(LexError),
++ Some(_) | None => Ok(input),
++ }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++ let mut trees = Vec::new();
++ let mut stack = Vec::new();
++
++ loop {
++ input = skip_whitespace(input);
++
++ if let Ok((rest, tt)) = doc_comment(input) {
++ trees.extend(tt);
++ input = rest;
++ continue;
++ }
++
++ #[cfg(span_locations)]
++ let lo = input.off;
++
++ let first = match input.bytes().next() {
++ Some(first) => first,
++ None => break,
++ };
++
++ if let Some(open_delimiter) = match first {
++ b'(' => Some(Delimiter::Parenthesis),
++ b'[' => Some(Delimiter::Bracket),
++ b'{' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = (open_delimiter, trees);
++ #[cfg(span_locations)]
++ let frame = (lo, frame);
++ stack.push(frame);
++ trees = Vec::new();
++ } else if let Some(close_delimiter) = match first {
++ b')' => Some(Delimiter::Parenthesis),
++ b']' => Some(Delimiter::Bracket),
++ b'}' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = stack.pop().ok_or(LexError)?;
++ #[cfg(span_locations)]
++ let (lo, frame) = frame;
++ let (open_delimiter, outer) = frame;
++ if open_delimiter != close_delimiter {
++ return Err(LexError);
++ }
++ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++ g.set_span(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: input.off,
++ });
++ trees = outer;
++ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++ } else {
++ let (rest, mut tt) = leaf_token(input)?;
++ tt.set_span(crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ }));
++ trees.push(tt);
++ input = rest;
++ }
++ }
++
++ if stack.is_empty() {
++ Ok((input, TokenStream { inner: trees }))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++ if let Ok((input, l)) = literal(input) {
++ // must be parsed before ident
++ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++ } else if let Ok((input, p)) = punct(input) {
++ Ok((input, TokenTree::Punct(p)))
++ } else if let Ok((input, i)) = ident(input) {
++ Ok((input, TokenTree::Ident(i)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++ if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
++ .iter()
++ .any(|prefix| input.starts_with(prefix))
++ {
++ Err(LexError)
++ } else {
++ ident_any(input)
++ }
++}
++
++fn ident_any(input: Cursor) -> PResult<crate::Ident> {
++ let raw = input.starts_with("r#");
++ let rest = input.advance((raw as usize) << 1);
++
++ let (rest, sym) = ident_not_raw(rest)?;
++
++ if !raw {
++ let ident = crate::Ident::new(sym, crate::Span::call_site());
++ return Ok((rest, ident));
++ }
++
++ if sym == "_" {
++ return Err(LexError);
++ }
++
++ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++ Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++ let mut chars = input.char_indices();
++
++ match chars.next() {
++ Some((_, ch)) if is_ident_start(ch) => {}
++ _ => return Err(LexError),
++ }
++
++ let mut end = input.len();
++ for (i, ch) in chars {
++ if !is_ident_continue(ch) {
++ end = i;
++ break;
++ }
++ }
++
++ Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++ match literal_nocapture(input) {
++ Ok(a) => {
++ let end = input.len() - a.len();
++ Ok((a, Literal::_new(input.rest[..end].to_string())))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(ok) = string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte_string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte(input) {
++ Ok(ok)
++ } else if let Ok(ok) = character(input) {
++ Ok(ok)
++ } else if let Ok(ok) = float(input) {
++ Ok(ok)
++ } else if let Ok(ok) = int(input) {
++ Ok(ok)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++ match ident_not_raw(input) {
++ Ok((input, _)) => input,
++ Err(LexError) => input,
++ }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("\"") {
++ cooked_string(input)
++ } else if let Ok(input) = input.parse("r") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices().peekable();
++
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ let input = input.advance(i + 1);
++ return Ok(literal_suffix(input));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ '\\' => match chars.next() {
++ Some((_, 'x')) => {
++ if !backslash_x_char(&mut chars) {
++ break;
++ }
++ }
++ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++ Some((_, 'u')) => {
++ if !backslash_u(&mut chars) {
++ break;
++ }
++ }
++ Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
++ let mut last = ch;
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.peek() {
++ Some((_, ch)) if ch.is_whitespace() => {
++ last = *ch;
++ chars.next();
++ }
++ _ => break,
++ }
++ }
++ }
++ _ => break,
++ },
++ _ch => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("b\"") {
++ cooked_byte_string(input)
++ } else if let Ok(input) = input.parse("br") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++ let mut bytes = input.bytes().enumerate();
++ while let Some((offset, b)) = bytes.next() {
++ match b {
++ b'"' => {
++ let input = input.advance(offset + 1);
++ return Ok(literal_suffix(input));
++ }
++ b'\r' => match bytes.next() {
++ Some((_, b'\n')) => {}
++ _ => break,
++ },
++ b'\\' => match bytes.next() {
++ Some((_, b'x')) => {
++ if !backslash_x_byte(&mut bytes) {
++ break;
++ }
++ }
++ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++ Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
++ let mut last = b as char;
++ let rest = input.advance(newline + 1);
++ let mut chars = rest.char_indices();
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.next() {
++ Some((_, ch)) if ch.is_whitespace() => last = ch,
++ Some((offset, _)) => {
++ input = rest.advance(offset);
++ bytes = input.bytes().enumerate();
++ break;
++ }
++ None => return Err(LexError),
++ }
++ }
++ }
++ _ => break,
++ },
++ b if b < 0x80 => {}
++ _ => break,
++ }
++ }
++ Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices();
++ let mut n = 0;
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ n = i;
++ break;
++ }
++ '#' => {}
++ _ => return Err(LexError),
++ }
++ }
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++ let rest = input.advance(i + 1 + n);
++ return Ok(literal_suffix(rest));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ _ => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("b'")?;
++ let mut bytes = input.bytes().enumerate();
++ let ok = match bytes.next().map(|(_, b)| b) {
++ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++ Some(b'x') => backslash_x_byte(&mut bytes),
++ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++ | Some(b'"') => true,
++ _ => false,
++ },
++ b => b.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (offset, _) = bytes.next().ok_or(LexError)?;
++ if !input.chars().as_str().is_char_boundary(offset) {
++ return Err(LexError);
++ }
++ let input = input.advance(offset).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("'")?;
++ let mut chars = input.char_indices();
++ let ok = match chars.next().map(|(_, ch)| ch) {
++ Some('\\') => match chars.next().map(|(_, ch)| ch) {
++ Some('x') => backslash_x_char(&mut chars),
++ Some('u') => backslash_u(&mut chars),
++ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++ true
++ }
++ _ => false,
++ },
++ ch => ch.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (idx, _) = chars.next().ok_or(LexError)?;
++ let input = input.advance(idx).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++ match $chars.next() {
++ Some((_, ch)) => match ch {
++ $pat $(| $rest)* => ch,
++ _ => return false,
++ },
++ None => return false,
++ }
++ };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '0'..='7');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, u8)>,
++{
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '{');
++ let mut value = 0;
++ let mut len = 0;
++ while let Some((_, ch)) = chars.next() {
++ let digit = match ch {
++ '0'..='9' => ch as u8 - b'0',
++ 'a'..='f' => 10 + ch as u8 - b'a',
++ 'A'..='F' => 10 + ch as u8 - b'A',
++ '_' if len > 0 => continue,
++ '}' if len > 0 => return char::from_u32(value).is_some(),
++ _ => return false,
++ };
++ if len == 6 {
++ return false;
++ }
++ value *= 0x10;
++ value += u32::from(digit);
++ len += 1;
++ }
++ false
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = float_digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.chars().peekable();
++ match chars.next() {
++ Some(ch) if ch >= '0' && ch <= '9' => {}
++ _ => return Err(LexError),
++ }
++
++ let mut len = 1;
++ let mut has_dot = false;
++ let mut has_exp = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '0'..='9' | '_' => {
++ chars.next();
++ len += 1;
++ }
++ '.' => {
++ if has_dot {
++ break;
++ }
++ chars.next();
++ if chars
++ .peek()
++ .map(|&ch| ch == '.' || is_ident_start(ch))
++ .unwrap_or(false)
++ {
++ return Err(LexError);
++ }
++ len += 1;
++ has_dot = true;
++ }
++ 'e' | 'E' => {
++ chars.next();
++ len += 1;
++ has_exp = true;
++ break;
++ }
++ _ => break,
++ }
++ }
++
++ if !(has_dot || has_exp) {
++ return Err(LexError);
++ }
++
++ if has_exp {
++ let token_before_exp = if has_dot {
++ Ok(input.advance(len - 1))
++ } else {
++ Err(LexError)
++ };
++ let mut has_sign = false;
++ let mut has_exp_value = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '+' | '-' => {
++ if has_exp_value {
++ break;
++ }
++ if has_sign {
++ return token_before_exp;
++ }
++ chars.next();
++ len += 1;
++ has_sign = true;
++ }
++ '0'..='9' => {
++ chars.next();
++ len += 1;
++ has_exp_value = true;
++ }
++ '_' => {
++ chars.next();
++ len += 1;
++ }
++ _ => break,
++ }
++ }
++ if !has_exp_value {
++ return token_before_exp;
++ }
++ }
++
++ Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++ let base = if input.starts_with("0x") {
++ input = input.advance(2);
++ 16
++ } else if input.starts_with("0o") {
++ input = input.advance(2);
++ 8
++ } else if input.starts_with("0b") {
++ input = input.advance(2);
++ 2
++ } else {
++ 10
++ };
++
++ let mut len = 0;
++ let mut empty = true;
++ for b in input.bytes() {
++ match b {
++ b'0'..=b'9' => {
++ let digit = (b - b'0') as u64;
++ if digit >= base {
++ return Err(LexError);
++ }
++ }
++ b'a'..=b'f' => {
++ let digit = 10 + (b - b'a') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'A'..=b'F' => {
++ let digit = 10 + (b - b'A') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'_' => {
++ if empty && base == 10 {
++ return Err(LexError);
++ }
++ len += 1;
++ continue;
++ }
++ _ => break,
++ };
++ len += 1;
++ empty = false;
++ }
++ if empty {
++ Err(LexError)
++ } else {
++ Ok(input.advance(len))
++ }
++}
++
++fn punct(input: Cursor) -> PResult<Punct> {
++ match punct_char(input) {
++ Ok((rest, '\'')) => {
++ if ident_any(rest)?.0.starts_with("'") {
++ Err(LexError)
++ } else {
++ Ok((rest, Punct::new('\'', Spacing::Joint)))
++ }
++ }
++ Ok((rest, ch)) => {
++ let kind = match punct_char(rest) {
++ Ok(_) => Spacing::Joint,
++ Err(LexError) => Spacing::Alone,
++ };
++ Ok((rest, Punct::new(ch, kind)))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn punct_char(input: Cursor) -> PResult<char> {
++ if input.starts_with("//") || input.starts_with("/*") {
++ // Do not accept `/` of a comment as a punct.
++ return Err(LexError);
++ }
++
++ let mut chars = input.chars();
++ let first = match chars.next() {
++ Some(ch) => ch,
++ None => {
++ return Err(LexError);
++ }
++ };
++ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++ if recognized.contains(first) {
++ Ok((input.advance(first.len_utf8()), first))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++ #[cfg(span_locations)]
++ let lo = input.off;
++ let (rest, (comment, inner)) = doc_comment_contents(input)?;
++ let span = crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ });
++
++ let mut scan_for_bare_cr = comment;
++ while let Some(cr) = scan_for_bare_cr.find('\r') {
++ let rest = &scan_for_bare_cr[cr + 1..];
++ if !rest.starts_with('\n') {
++ return Err(LexError);
++ }
++ scan_for_bare_cr = rest;
++ }
++
++ let mut trees = Vec::new();
++ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++ if inner {
++ trees.push(Punct::new('!', Spacing::Alone).into());
++ }
++ let mut stream = vec![
++ TokenTree::Ident(crate::Ident::new("doc", span)),
++ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++ TokenTree::Literal(crate::Literal::string(comment)),
++ ];
++ for tt in stream.iter_mut() {
++ tt.set_span(span);
++ }
++ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++ trees.push(crate::Group::_new_stable(group).into());
++ for tt in trees.iter_mut() {
++ tt.set_span(span);
++ }
++ Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++ if input.starts_with("//!") {
++ let input = input.advance(3);
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, true)))
++ } else if input.starts_with("/*!") {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], true)))
++ } else if input.starts_with("///") {
++ let input = input.advance(3);
++ if input.starts_with("/") {
++ return Err(LexError);
++ }
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, false)))
++ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], false)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++ let chars = input.char_indices();
++
++ for (i, ch) in chars {
++ if ch == '\n' {
++ return (input.advance(i), &input.rest[..i]);
++ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++ return (input.advance(i + 1), &input.rest[..i]);
++ }
++ }
++
++ (input.advance(input.len()), input.rest)
++}
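// A minimal standalone sketch of the cursor-based lexing style that the new
// parse.rs above uses in place of the nom-style macros being removed from
// strnom.rs: an immutable string slice advanced by byte offsets, with
// Result-based alternation via plain if/else chains. Names below are
// illustrative only and are not part of the crate.

#[derive(Copy, Clone)]
struct Cursor<'a> {
    rest: &'a str,
}

#[derive(Debug)]
struct LexError;

impl<'a> Cursor<'a> {
    // Drop `bytes` bytes from the front, like parse.rs's advance()
    // (span-offset tracking omitted here).
    fn advance(&self, bytes: usize) -> Cursor<'a> {
        Cursor {
            rest: &self.rest[bytes..],
        }
    }

    // Consume a fixed prefix or fail, like parse.rs's parse().
    fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
        if self.rest.starts_with(tag) {
            Ok(self.advance(tag.len()))
        } else {
            Err(LexError)
        }
    }
}

// Alternation is an ordinary if/else chain over Results, the same shape as
// literal_nocapture() above.
fn boolean(input: Cursor) -> Result<(Cursor, bool), LexError> {
    if let Ok(rest) = input.parse("true") {
        Ok((rest, true))
    } else if let Ok(rest) = input.parse("false") {
        Ok((rest, false))
    } else {
        Err(LexError)
    }
}

fn main() {
    let input = Cursor { rest: "false)" };
    let (rest, value) = boolean(input).unwrap();
    assert!(!value);
    assert_eq!(rest.rest, ")");
}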
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+index eb7d0b8a8e..0000000000
+--- a/third_party/rust/proc-macro2/src/strnom.rs
++++ /dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+- pub rest: &'a str,
+- #[cfg(span_locations)]
+- pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+- #[cfg(not(span_locations))]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- }
+- }
+- #[cfg(span_locations)]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- off: self.off + (amt as u32),
+- }
+- }
+-
+- pub fn find(&self, p: char) -> Option<usize> {
+- self.rest.find(p)
+- }
+-
+- pub fn starts_with(&self, s: &str) -> bool {
+- self.rest.starts_with(s)
+- }
+-
+- pub fn is_empty(&self) -> bool {
+- self.rest.is_empty()
+- }
+-
+- pub fn len(&self) -> usize {
+- self.rest.len()
+- }
+-
+- pub fn as_bytes(&self) -> &'a [u8] {
+- self.rest.as_bytes()
+- }
+-
+- pub fn bytes(&self) -> Bytes<'a> {
+- self.rest.bytes()
+- }
+-
+- pub fn chars(&self) -> Chars<'a> {
+- self.rest.chars()
+- }
+-
+- pub fn char_indices(&self) -> CharIndices<'a> {
+- self.rest.char_indices()
+- }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+- if input.is_empty() {
+- return Err(LexError);
+- }
+-
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- while i < bytes.len() {
+- let s = input.advance(i);
+- if bytes[i] == b'/' {
+- if s.starts_with("//")
+- && (!s.starts_with("///") || s.starts_with("////"))
+- && !s.starts_with("//!")
+- {
+- if let Some(len) = s.find('\n') {
+- i += len + 1;
+- continue;
+- }
+- break;
+- } else if s.starts_with("/**/") {
+- i += 4;
+- continue;
+- } else if s.starts_with("/*")
+- && (!s.starts_with("/**") || s.starts_with("/***"))
+- && !s.starts_with("/*!")
+- {
+- let (_, com) = block_comment(s)?;
+- i += com.len();
+- continue;
+- }
+- }
+- match bytes[i] {
+- b' ' | 0x09..=0x0d => {
+- i += 1;
+- continue;
+- }
+- b if b <= 0x7f => {}
+- _ => {
+- let ch = s.chars().next().unwrap();
+- if is_whitespace(ch) {
+- i += ch.len_utf8();
+- continue;
+- }
+- }
+- }
+- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+- }
+- Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+- if !input.starts_with("/*") {
+- return Err(LexError);
+- }
+-
+- let mut depth = 0;
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- let upper = bytes.len() - 1;
+- while i < upper {
+- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+- depth += 1;
+- i += 1; // eat '*'
+- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+- depth -= 1;
+- if depth == 0 {
+- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+- }
+- i += 1; // eat '/'
+- }
+- i += 1;
+- }
+- Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+- match whitespace(input) {
+- Ok((rest, _)) => rest,
+- Err(LexError) => input,
+- }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+- // Rust treats left-to-right mark and right-to-left mark as whitespace
+- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+- match input.chars().next() {
+- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+- Some(_) | None => Ok((input, ())),
+- }
+-}
+-
+-macro_rules! named {
+- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+- $submac!(i, $($args)*)
+- }
+- };
+-}
+-
+-macro_rules! alt {
+- ($i:expr, $e:ident | $($rest:tt)*) => {
+- alt!($i, call!($e) | $($rest)*)
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+- match $subrule!($i, $($args)*) {
+- res @ Ok(_) => res,
+- _ => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+- alt!($i, call!($e) => { $gen } | $($rest)*)
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr }) => {
+- alt!($i, call!($e) => { $gen })
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $e:ident) => {
+- alt!($i, call!($e))
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+- $subrule!($i, $($args)*)
+- };
+-}
+-
+-macro_rules! do_parse {
+- ($i:expr, ( $($rest:expr),* )) => {
+- Ok(($i, ( $($rest),* )))
+- };
+-
+- ($i:expr, $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, _)) => do_parse!(i, $($rest)*),
+- }
+- };
+-
+- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, $field: call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => {
+- let $field = o;
+- do_parse!(i, $($rest)*)
+- },
+- }
+- };
+-}
+-
+-macro_rules! peek {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, o)) => Ok(($i, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-}
+-
+-macro_rules! call {
+- ($i:expr, $fun:expr $(, $args:expr)*) => {
+- $fun($i $(, $args)*)
+- };
+-}
+-
+-macro_rules! option {
+- ($i:expr, $f:expr) => {
+- match $f($i) {
+- Ok((i, o)) => Ok((i, Some(o))),
+- Err(LexError) => Ok(($i, None)),
+- }
+- };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+- ($i:expr,) => {{
+- if $i.len() == 0 {
+- Ok(($i, ""))
+- } else {
+- match $i.find('\n') {
+- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+- }
+- }
+- }};
+-}
+-
+-macro_rules! tuple {
+- ($i:expr, $($rest:tt)*) => {
+- tuple_parser!($i, (), $($rest)*)
+- };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt),*), $e:ident) => {
+- tuple_parser!($i, ($($parsed),*), call!($e))
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+- $submac!($i, $($args)*)
+- };
+-
+- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+- }
+- };
+-
+- ($i:expr, ($($parsed:expr),*)) => {
+- Ok(($i, ($($parsed),*)))
+- };
+-}
+-
+-macro_rules! not {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, _)) => Err(LexError),
+- Err(LexError) => Ok(($i, ())),
+- }
+- };
+-}
+-
+-macro_rules! tag {
+- ($i:expr, $tag:expr) => {
+- if $i.starts_with($tag) {
+- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+- } else {
+- Err(LexError)
+- }
+- };
+-}
+-
+-macro_rules! punct {
+- ($i:expr, $punct:expr) => {
+- $crate::strnom::punct($i, $punct)
+- };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+- let input = skip_whitespace(input);
+- if input.starts_with(token) {
+- Ok((input.advance(token.len()), token))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! preceded {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+- Ok((remaining, (_, o))) => Ok((remaining, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- preceded!($i, $submac!($($args)*), call!($g))
+- };
+-}
+-
+-macro_rules! delimited {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i1, (_, o, _))) => Ok((i1, o))
+- }
+- };
+-}
+-
+-macro_rules! map {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, call!(o, $g)))
+- }
+- };
+-
+- ($i:expr, $f:expr, $g:expr) => {
+- map!($i, call!($f), $g)
+- };
+-}
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+index 552b9381cf..3df044af17 100644
+--- a/third_party/rust/proc-macro2/src/wrapper.rs
++++ b/third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,15 +1,15 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+ }
+@@ -19,73 +19,16 @@ pub enum TokenStream {
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+ }
+
+-pub enum LexError {
++pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+ }
+
+-fn nightly_works() -> bool {
+- use std::sync::atomic::*;
+- use std::sync::Once;
+-
+- static WORKS: AtomicUsize = AtomicUsize::new(0);
+- static INIT: Once = Once::new();
+-
+- match WORKS.load(Ordering::SeqCst) {
+- 1 => return false,
+- 2 => return true,
+- _ => {}
+- }
+-
+- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+- // then use catch_unwind to determine whether the compiler's proc_macro is
+- // working. When proc-macro2 is used from outside of a procedural macro all
+- // of the proc_macro crate's APIs currently panic.
+- //
+- // The Once is to prevent the possibility of this ordering:
+- //
+- // thread 1 calls take_hook, gets the user's original hook
+- // thread 1 calls set_hook with the null hook
+- // thread 2 calls take_hook, thinks null hook is the original hook
+- // thread 2 calls set_hook with the null hook
+- // thread 1 calls set_hook with the actual original hook
+- // thread 2 calls set_hook with what it thinks is the original hook
+- //
+- // in which the user's hook has been lost.
+- //
+- // There is still a race condition where a panic in a different thread can
+- // happen during the interval that the user's original panic hook is
+- // unregistered such that their hook is incorrectly not called. This is
+- // sufficiently unlikely and less bad than printing panic messages to stderr
+- // on correct use of this crate. Maybe there is a libstd feature request
+- // here. For now, if a user needs to guarantee that this failure mode does
+- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+- // the main thread before launching any other threads.
+- INIT.call_once(|| {
+- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+- let sanity_check = &*null_hook as *const PanicHook;
+- let original_hook = panic::take_hook();
+- panic::set_hook(null_hook);
+-
+- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+- WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+- let hopefully_null_hook = panic::take_hook();
+- panic::set_hook(original_hook);
+- if sanity_check != &*hopefully_null_hook {
+- panic!("observed race condition in proc_macro2::nightly_works");
+- }
+- });
+- nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+ panic!("stable/nightly mismatch")
+ }
+@@ -103,7 +46,12 @@ impl DeferredTokenStream {
+ }
+
+ fn evaluate_now(&mut self) {
+- self.stream.extend(self.extra.drain(..));
++ // If-check provides a fast short circuit for the common case of `extra`
++ // being empty, which saves a round trip over the proc macro bridge.
++ // Improves macro expansion time in winrt by 6% in debug mode.
++ if !self.extra.is_empty() {
++ self.stream.extend(self.extra.drain(..));
++ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+@@ -114,7 +62,7 @@ impl DeferredTokenStream {
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+@@ -147,9 +95,9 @@ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+- src.parse()?,
++ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+@@ -157,11 +105,17 @@ impl FromStr for TokenStream {
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+ }
+@@ -187,7 +141,7 @@ impl From<fallback::TokenStream> for TokenStream {
+ }
+ }
+
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+@@ -196,9 +150,9 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+- let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
+- op.set_span(tt.span().inner.unwrap_nightly());
+- op.into()
++ let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
++ punct.set_span(tt.span().inner.unwrap_nightly());
++ punct.into()
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+@@ -207,7 +161,7 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+@@ -215,9 +169,9 @@ impl From<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+@@ -227,7 +181,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+@@ -252,14 +206,15 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+- tts.extra
+- .extend(streams.into_iter().map(into_compiler_token));
++ for token in stream {
++ tts.extra.push(into_compiler_token(token));
++ }
+ }
+- TokenStream::Fallback(tts) => tts.extend(streams),
++ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+ }
+@@ -270,20 +225,20 @@ impl Extend<TokenStream> for TokenStream {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+ }
+@@ -300,17 +255,29 @@ impl From<fallback::LexError> for LexError {
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ LexError::Compiler(e) => Debug::fmt(e, f),
++ LexError::Fallback(e) => Debug::fmt(e, f),
++ }
++ }
++}
++
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- LexError::Compiler(e) => e.fmt(f),
+- LexError::Fallback(e) => e.fmt(f),
++ #[cfg(lexerror_display)]
++ LexError::Compiler(e) => Display::fmt(e, f),
++ #[cfg(not(lexerror_display))]
++ LexError::Compiler(_e) => Display::fmt(&fallback::LexError, f),
++ LexError::Fallback(e) => Display::fmt(e, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+ }
+@@ -361,7 +328,7 @@ impl Iterator for TokenTreeIter {
+ }
+ }
+
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TokenTreeIter").finish()
+ }
+@@ -369,7 +336,7 @@ impl fmt::Debug for TokenTreeIter {
+
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+ }
+@@ -397,58 +364,77 @@ impl SourceFile {
+ }
+
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- SourceFile::Compiler(a) => a.fmt(f),
+- SourceFile::Fallback(a) => a.fmt(f),
++ SourceFile::Compiler(a) => Debug::fmt(a, f),
++ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+ }
+
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+ }
+
+ impl Span {
+ pub fn call_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ if inside_proc_macro() {
++ Span::Compiler(proc_macro::Span::mixed_site())
++ } else {
++ Span::Fallback(fallback::Span::mixed_site())
++ }
++ }
++
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => other,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ _ => mismatch(),
+ }
+@@ -542,16 +528,16 @@ impl From<fallback::Span> for Span {
+ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Span::Compiler(s) => s.fmt(f),
+- Span::Fallback(s) => s.fmt(f),
++ Span::Compiler(s) => Debug::fmt(s, f),
++ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+@@ -561,7 +547,7 @@ pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span)
+ }
+
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+ }
+@@ -652,26 +638,26 @@ impl From<fallback::Group> for Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Display::fmt(group, formatter),
++ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Debug::fmt(group, formatter),
++ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Ident {
++pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+ }
+@@ -747,26 +733,26 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Display::fmt(t, f),
++ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Debug::fmt(t, f),
++ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Literal {
++pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+ }
+@@ -774,7 +760,7 @@ pub enum Literal {
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -786,7 +772,7 @@ macro_rules! suffixed_numbers {
+ macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -830,7 +816,7 @@ impl Literal {
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+@@ -838,7 +824,7 @@ impl Literal {
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+@@ -846,7 +832,7 @@ impl Literal {
+ }
+
+ pub fn string(t: &str) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(t))
+ } else {
+ Literal::Fallback(fallback::Literal::string(t))
+@@ -854,7 +840,7 @@ impl Literal {
+ }
+
+ pub fn character(t: char) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(t))
+ } else {
+ Literal::Fallback(fallback::Literal::character(t))
+@@ -862,7 +848,7 @@ impl Literal {
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+@@ -908,20 +894,20 @@ impl From<fallback::Literal> for Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Display::fmt(t, f),
++ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Debug::fmt(t, f),
++ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
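// The FromStr hunk above routes compiler-side parsing through
// proc_macro_parse(), which wraps the call in panic::catch_unwind so that a
// panicking parse (see the referenced rust-lang/rust#58736 workaround)
// surfaces as a LexError instead of aborting expansion. A standalone sketch
// of that pattern; parse_or_panic() is a made-up stand-in for illustration.

use std::panic;

#[derive(Debug)]
struct ParseError;

// Stand-in for a parser that may panic on bad input rather than return Err.
fn parse_or_panic(src: &str) -> Result<u32, ParseError> {
    if src.contains("boom") {
        panic!("parser panicked instead of returning an error");
    }
    src.parse::<u32>().map_err(|_| ParseError)
}

// Same shape as proc_macro_parse() above: a panic in the inner parser is
// converted into an ordinary error value. (The panic message may still be
// printed by the default panic hook; only the Result matters here.)
fn parse_no_panic(src: &str) -> Result<u32, ParseError> {
    panic::catch_unwind(|| parse_or_panic(src)).unwrap_or(Err(ParseError))
}

fn main() {
    assert!(parse_no_panic("42").is_ok());
    assert!(parse_no_panic("boom").is_err());
}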
+diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
+new file mode 100644
+index 0000000000..708cccb880
+--- /dev/null
++++ b/third_party/rust/proc-macro2/tests/comments.rs
+@@ -0,0 +1,103 @@
++use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
++
++// #[doc = "..."] -> "..."
++fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, false)
++}
++
++// #![doc = "..."] -> "..."
++fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, true)
++}
++
++fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
++ let mut iter = tokens.clone().into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '#');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ if inner {
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '!');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ }
++ iter = match iter.next().unwrap() {
++ TokenTree::Group(group) => {
++ assert_eq!(group.delimiter(), Delimiter::Bracket);
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ group.stream().into_iter()
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ };
++ match iter.next().unwrap() {
++ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '=');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ literal
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++}
++
++#[test]
++fn closed_immediately() {
++ let stream = "/**/".parse::<TokenStream>().unwrap();
++ let tokens = stream.into_iter().collect::<Vec<_>>();
++ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
++}
++
++#[test]
++fn incomplete() {
++ assert!("/*/".parse::<TokenStream>().is_err());
++}
++
++#[test]
++fn lit() {
++ let stream = "/// doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "//! doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "/** doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++
++ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++}
++
++#[test]
++fn carriage_return() {
++ let stream = "///\r\n".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\"");
++
++ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\\r\\n\"");
++
++ "///\r".parse::<TokenStream>().unwrap_err();
++ "///\r \n".parse::<TokenStream>().unwrap_err();
++ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
++}
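// A compact check, against proc-macro2's public API (requires the crate as a
// dependency), of the doc-comment lowering exercised by the tests above: per
// doc_comment() in parse.rs, an outer doc comment lexes to a '#' punct
// followed by a single bracketed `doc = "..."` group.

use proc_macro2::{Delimiter, TokenStream, TokenTree};

fn main() {
    let tokens: Vec<TokenTree> = "/// hello"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect();
    // '#' followed by one bracketed group, exactly as doc_comment() builds it.
    assert_eq!(tokens.len(), 2);
    match &tokens[0] {
        TokenTree::Punct(punct) => assert_eq!(punct.as_char(), '#'),
        other => panic!("expected '#', got {:?}", other),
    }
    match &tokens[1] {
        TokenTree::Group(group) => assert_eq!(group.delimiter(), Delimiter::Bracket),
        other => panic!("expected bracket group, got {:?}", other),
    }
}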
+diff --git a/third_party/rust/proc-macro2/tests/marker.rs b/third_party/rust/proc-macro2/tests/marker.rs
+index 7af2539c1a..70e57677cd 100644
+--- a/third_party/rust/proc-macro2/tests/marker.rs
++++ b/third_party/rust/proc-macro2/tests/marker.rs
+@@ -57,3 +57,36 @@ mod semver_exempt {
+
+ assert_impl!(SourceFile is not Send or Sync);
+ }
++
++#[cfg(not(no_libprocmacro_unwind_safe))]
++mod unwind_safe {
++ use super::*;
++ use std::panic::{RefUnwindSafe, UnwindSafe};
++
++ macro_rules! assert_unwind_safe {
++ ($($types:ident)*) => {
++ $(
++ assert_impl!($types is UnwindSafe and RefUnwindSafe);
++ )*
++ };
++ }
++
++ assert_unwind_safe! {
++ Delimiter
++ Group
++ Ident
++ LexError
++ Literal
++ Punct
++ Spacing
++ Span
++ TokenStream
++ TokenTree
++ }
++
++ #[cfg(procmacro2_semver_exempt)]
++ assert_unwind_safe! {
++ LineColumn
++ SourceFile
++ }
++}
+diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
+index 7528388138..1e9f633944 100644
+--- a/third_party/rust/proc-macro2/tests/test.rs
++++ b/third_party/rust/proc-macro2/tests/test.rs
+@@ -1,7 +1,6 @@
++use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+ use std::str::{self, FromStr};
+
+-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+-
+ #[test]
+ fn idents() {
+ assert_eq!(
+@@ -84,6 +83,11 @@ fn literal_string() {
+ assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
+ }
+
++#[test]
++fn literal_raw_string() {
++ "r\"\r\n\"".parse::<TokenStream>().unwrap();
++}
++
+ #[test]
+ fn literal_character() {
+ assert_eq!(Literal::character('x').to_string(), "'x'");
+@@ -110,6 +114,37 @@ fn literal_suffix() {
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
++ assert_eq!(token_count("r\"\"r"), 1);
++ assert_eq!(token_count("b\"\"b"), 1);
++ assert_eq!(token_count("br\"\"br"), 1);
++ assert_eq!(token_count("r#\"\"#r"), 1);
++ assert_eq!(token_count("'c'c"), 1);
++ assert_eq!(token_count("b'b'b"), 1);
++ assert_eq!(token_count("0E"), 1);
++ assert_eq!(token_count("0o0A"), 1);
++ assert_eq!(token_count("0E--0"), 4);
++ assert_eq!(token_count("0.0ECMA"), 1);
++}
++
++#[test]
++fn literal_iter_negative() {
++ let negative_literal = Literal::i32_suffixed(-3);
++ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
++ let mut iter = tokens.into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '-');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert_eq!(literal.to_string(), "3i32");
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ assert!(iter.next().is_none());
+ }
+
+ #[test]
+@@ -161,41 +196,21 @@ fn fail() {
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
++ fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
++ fail("\"\\u{999999}\""); // outside of valid range of char
++ fail("\"\\u{_0}\""); // leading underscore
++ fail("\"\\u{}\""); // empty
++ fail("b\"\r\""); // bare carriage return in byte string
++ fail("r\"\r\""); // bare carriage return in raw string
++ fail("\"\\\r \""); // backslash carriage return
++ fail("'aa'aa");
++ fail("br##\"\"#");
++ fail("\"\\\n\u{85}\r\"");
+ }
+
+ #[cfg(span_locations)]
+ #[test]
+ fn span_test() {
+- use proc_macro2::TokenTree;
+-
+- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+- let ts = p.parse::<TokenStream>().unwrap();
+- check_spans_internal(ts, &mut lines);
+- }
+-
+- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+- for i in ts {
+- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+- *lines = rest;
+-
+- let start = i.span().start();
+- assert_eq!(start.line, sline, "sline did not match for {}", i);
+- assert_eq!(start.column, scol, "scol did not match for {}", i);
+-
+- let end = i.span().end();
+- assert_eq!(end.line, eline, "eline did not match for {}", i);
+- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+-
+- match i {
+- TokenTree::Group(ref g) => {
+- check_spans_internal(g.stream().clone(), lines);
+- }
+- _ => {}
+- }
+- }
+- }
+- }
+-
+ check_spans(
+ "\
+ /// This is a document comment
+@@ -274,53 +289,11 @@ fn span_join() {
+ #[test]
+ fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
++ assert!(s.parse::<TokenStream>().is_err());
+ }
+
+ #[test]
+-fn tricky_doc_comment() {
+- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+-
+- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+- match tokens[0] {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
+- _ => panic!("wrong token {:?}", tokens[0]),
+- }
+- let mut tokens = match tokens[1] {
+- proc_macro2::TokenTree::Group(ref tt) => {
+- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+- tt.stream().into_iter()
+- }
+- _ => panic!("wrong token {:?}", tokens[0]),
+- };
+-
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Literal(ref tt) => {
+- assert_eq!(tt.to_string(), "\" doc\"");
+- }
+- t => panic!("wrong token {:?}", t),
+- }
+- assert!(tokens.next().is_none());
+-
+- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
+-}
+-
+-#[test]
+-fn op_before_comment() {
++fn punct_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+@@ -331,6 +304,22 @@ fn op_before_comment() {
+ }
+ }
+
++#[test]
++fn joint_last_token() {
++ // This test verifies that we match the behavior of libproc_macro *not* in
++ // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
++ // behavior was temporarily broken.
++ // See https://github.com/rust-lang/rust/issues/76399
++
++ let joint_punct = Punct::new(':', Spacing::Joint);
++ let stream = TokenStream::from(TokenTree::Punct(joint_punct));
++ let punct = match stream.into_iter().next().unwrap() {
++ TokenTree::Punct(punct) => punct,
++ _ => unreachable!(),
++ };
++ assert_eq!(punct.spacing(), Spacing::Joint);
++}
++
+ #[test]
+ fn raw_identifier() {
+ let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
+@@ -345,11 +334,11 @@ fn raw_identifier() {
+ fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+- #[cfg(procmacro2_semver_exempt)]
+- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
++ #[cfg(span_locations)]
++ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+ }
+@@ -358,7 +347,7 @@ fn test_debug_ident() {
+ fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -368,7 +357,7 @@ TokenStream [
+ sym: a,
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ },
+ Literal {
+@@ -379,7 +368,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -389,7 +378,7 @@ TokenStream [
+ sym: a
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone
+ },
+ Literal {
+@@ -400,7 +389,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -411,7 +400,7 @@ TokenStream [
+ span: bytes(2..3),
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5),
+ },
+@@ -425,7 +414,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -436,7 +425,7 @@ TokenStream [
+ span: bytes(2..3)
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5)
+ },
+@@ -464,3 +453,80 @@ fn default_tokenstream_is_empty() {
+
+ assert!(default_token_stream.is_empty());
+ }
++
++#[test]
++fn tuple_indexing() {
++ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
++ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
++ assert_eq!("tuple", tokens.next().unwrap().to_string());
++ assert_eq!(".", tokens.next().unwrap().to_string());
++ assert_eq!("0.0", tokens.next().unwrap().to_string());
++ assert!(tokens.next().is_none());
++}
++
++#[cfg(span_locations)]
++#[test]
++fn non_ascii_tokens() {
++ check_spans("// abc", &[]);
++ check_spans("// ábc", &[]);
++ check_spans("// abc x", &[]);
++ check_spans("// ábc x", &[]);
++ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
++ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
++ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
++ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
++ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
++ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
++ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("'a'", &[(1, 0, 1, 3)]);
++ check_spans("'á'", &[(1, 0, 1, 3)]);
++ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("abc", &[(1, 0, 1, 3)]);
++ check_spans("ábc", &[(1, 0, 1, 3)]);
++ check_spans("ábć", &[(1, 0, 1, 3)]);
++ check_spans("abc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
++ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
++ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
++}
++
++#[cfg(span_locations)]
++fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
++ let ts = p.parse::<TokenStream>().unwrap();
++ check_spans_internal(ts, &mut lines);
++ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
++}
++
++#[cfg(span_locations)]
++fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
++ for i in ts {
++ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
++ *lines = rest;
++
++ let start = i.span().start();
++ assert_eq!(start.line, sline, "sline did not match for {}", i);
++ assert_eq!(start.column, scol, "scol did not match for {}", i);
++
++ let end = i.span().end();
++ assert_eq!(end.line, eline, "eline did not match for {}", i);
++ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
++
++ if let TokenTree::Group(g) = i {
++ check_spans_internal(g.stream().clone(), lines);
++ }
++ }
++ }
++}
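The span assertions added above only make sense when proc-macro2 exposes line/column data. A minimal sketch of what check_spans verifies, assuming the crate's optional "span-locations" feature (which is what turns on the span_locations cfg used in these tests) is enabled:

use proc_macro2::TokenStream;

fn main() {
    // Mirrors check_spans("ábc// foo", &[(1, 0, 1, 3)]) above: lines are
    // 1-based, columns are 0-based and counted in characters rather than
    // bytes, and the trailing line comment produces no token of its own.
    let ts: TokenStream = "ábc// foo".parse().unwrap();
    for tt in ts {
        let start = tt.span().start();
        let end = tt.span().end();
        // Prints: `ábc` spans 1:0..1:3
        println!(
            "`{}` spans {}:{}..{}:{}",
            tt, start.line, start.column, end.line, end.column
        );
    }
}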
+diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
+new file mode 100644
+index 0000000000..99a0aee5c8
+--- /dev/null
++++ b/third_party/rust/proc-macro2/tests/test_fmt.rs
+@@ -0,0 +1,26 @@
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use std::iter::{self, FromIterator};
++
++#[test]
++fn test_fmt_group() {
++ let ident = Ident::new("x", Span::call_site());
++ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
++ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
++ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
++ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
++ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
++ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
++ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
++ let none_empty = Group::new(Delimiter::None, TokenStream::new());
++ let none_nonempty = Group::new(Delimiter::None, inner.clone());
++
++ // Matches libproc_macro.
++ assert_eq!("()", parens_empty.to_string());
++ assert_eq!("(x)", parens_nonempty.to_string());
++ assert_eq!("[]", brackets_empty.to_string());
++ assert_eq!("[x]", brackets_nonempty.to_string());
++ assert_eq!("{ }", braces_empty.to_string());
++ assert_eq!("{ x }", braces_nonempty.to_string());
++ assert_eq!("", none_empty.to_string());
++ assert_eq!("x", none_nonempty.to_string());
++}
+diff --git a/third_party/rust/spirv-cross-internal/.cargo-checksum.json b/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+index 3c732d6d0e..014aa640e1 100644
+--- a/third_party/rust/spirv-cross-internal/.cargo-checksum.json
++++ b/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.gitignore":"7f23cc92ddb5e1f584447e98d3e8ab6543fc182f1543f0f6ec29856f9250cdd6","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148c
fc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b196567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44e
df18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148cfc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b1
96567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44edf18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5
a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
+index 77939d8fc6..704f2ed200 100644
+--- a/third_party/rust/syn/.cargo-checksum.json
++++ b/third_party/rust/syn/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffab5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835
f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97da41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f23da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5
fa54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465bea613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae3068309998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d
01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
+index 7a5c962f06..20277fc461 100644
+--- a/third_party/rust/syn/Cargo.toml
++++ b/third_party/rust/syn/Cargo.toml
+@@ -13,7 +13,7 @@
+ [package]
+ edition = "2018"
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ authors = ["David Tolnay <dtolnay@gmail.com>"]
+ include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+ description = "Parser for Rust source code"
+@@ -24,25 +24,21 @@ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/dtolnay/syn"
+ [package.metadata.docs.rs]
+ all-features = true
++targets = ["x86_64-unknown-linux-gnu"]
+
+ [package.metadata.playground]
+-all-features = true
+-
+-[lib]
+-name = "syn"
++features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+
+ [[bench]]
+ name = "rust"
+ harness = false
+ required-features = ["full", "parsing"]
+-edition = "2018"
+
+ [[bench]]
+ name = "file"
+ required-features = ["full", "parsing"]
+-edition = "2018"
+ [dependencies.proc-macro2]
+-version = "1.0"
++version = "1.0.13"
+ default-features = false
+
+ [dependencies.quote]
+@@ -52,18 +48,34 @@ default-features = false
+
+ [dependencies.unicode-xid]
+ version = "0.2"
++[dev-dependencies.anyhow]
++version = "1.0"
++
++[dev-dependencies.flate2]
++version = "1.0"
++
+ [dev-dependencies.insta]
+-version = "0.9"
++version = "0.16"
+
+ [dev-dependencies.rayon]
+ version = "1.0"
+
+ [dev-dependencies.ref-cast]
+-version = "0.2"
++version = "1.0"
+
+ [dev-dependencies.regex]
+ version = "1.0"
+
++[dev-dependencies.reqwest]
++version = "0.10"
++features = ["blocking"]
++
++[dev-dependencies.syn-test-suite]
++version = "0"
++
++[dev-dependencies.tar]
++version = "0.4"
++
+ [dev-dependencies.termcolor]
+ version = "1.0"
+
+@@ -80,7 +92,6 @@ full = []
+ parsing = []
+ printing = ["quote"]
+ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
++test = ["syn-test-suite/all-features"]
+ visit = []
+ visit-mut = []
+-[badges.travis-ci]
+-repository = "dtolnay/syn"
+diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
+index 29a7f32a46..12b5f45b3d 100644
+--- a/third_party/rust/syn/README.md
++++ b/third_party/rust/syn/README.md
+@@ -1,10 +1,10 @@
+ Parser for Rust source code
+ ===========================
+
+-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
+-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
+-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
+-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
++[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
++[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
++[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
++[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+ Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+ of Rust source code.
+@@ -46,10 +46,6 @@ contains some APIs that may be useful more generally.
+ [`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+ [parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+
+-If you get stuck with anything involving procedural macros in Rust I am happy to
+-provide help even if the issue is not related to Syn. Please file a ticket in
+-this repo.
+-
+ *Version requirement: Syn supports rustc 1.31 and up.*
+
+ [*Release notes*](https://github.com/dtolnay/syn/releases)
+@@ -88,8 +84,6 @@ proc-macro = true
+ ```
+
+ ```rust
+-extern crate proc_macro;
+-
+ use proc_macro::TokenStream;
+ use quote::quote;
+ use syn::{parse_macro_input, DeriveInput};
+@@ -271,7 +265,7 @@ points, which are required by the language to use `proc_macro::TokenStream`.
+ The proc-macro2 crate will automatically detect and use the compiler's data
+ structures when a procedural macro is active.
+
+-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
++[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+ <br>
+
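For context on the README hunk above, here is a minimal sketch of the derive-macro pattern it documents, written in the post-patch style without `extern crate proc_macro;` (so it assumes a reasonably recent rustc). The macro name `Describe` and the generated method are illustrative placeholders, and the code belongs in a crate built with proc-macro = true as the README shows:

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};

#[proc_macro_derive(Describe)]
pub fn derive_describe(input: TokenStream) -> TokenStream {
    // Parse the compiler's token stream into syn's typed syntax tree.
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    // Build the generated impl with quote (generics omitted for brevity)
    // and hand the tokens back to the compiler.
    let expanded = quote! {
        impl #name {
            pub fn describe() -> &'static str {
                stringify!(#name)
            }
        }
    };
    TokenStream::from(expanded)
}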
+diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
+index 08ecd90960..58ab8df297 100644
+--- a/third_party/rust/syn/benches/file.rs
++++ b/third_party/rust/syn/benches/file.rs
+@@ -1,9 +1,16 @@
+ // $ cargo bench --features full --bench file
+
+ #![feature(rustc_private, test)]
++#![recursion_limit = "1024"]
+
+ extern crate test;
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ pub mod repo;
+
+diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
+index e3d9cd29ba..50e1a7f601 100644
+--- a/third_party/rust/syn/benches/rust.rs
++++ b/third_party/rust/syn/benches/rust.rs
+@@ -4,7 +4,14 @@
+ // $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+
+ #![cfg_attr(not(syn_only), feature(rustc_private))]
++#![recursion_limit = "1024"]
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ mod repo;
+
+@@ -28,31 +35,35 @@ mod syn_parse {
+ }
+
+ #[cfg(not(syn_only))]
+-mod libsyntax_parse {
++mod librustc_parse {
+ extern crate rustc_data_structures;
+- extern crate syntax;
+- extern crate syntax_pos;
++ extern crate rustc_errors;
++ extern crate rustc_parse;
++ extern crate rustc_session;
++ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+- use syntax::edition::Edition;
+- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
+- use syntax::parse::ParseSess;
+- use syntax::source_map::{FilePathMapping, SourceMap};
+- use syntax_pos::FileName;
++ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
++ use rustc_session::parse::ParseSess;
++ use rustc_span::source_map::{FilePathMapping, SourceMap};
++ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
++ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
++ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
++ None
++ }
+ }
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(Edition::Edition2018, || {
+ let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = Handler::with_emitter(false, None, emitter);
+ let sess = ParseSess::with_span_handler(handler, cm);
+- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
++ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+@@ -104,11 +115,11 @@ fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+- ($($(#[$cfg:meta])* $name:path,)*) => {
++ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ vec![
+ $(
+ $(#[$cfg])*
+- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
++ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+@@ -128,12 +139,12 @@ fn main() {
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+- read_from_disk::bench,
++ read_from_disk,
+ #[cfg(not(syn_only))]
+- tokenstream_parse::bench,
+- syn_parse::bench,
++ tokenstream_parse,
++ syn_parse,
+ #[cfg(not(syn_only))]
+- libsyntax_parse::bench,
++ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
+diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
+index c0f9ed3406..cf7681c3f9 100644
+--- a/third_party/rust/syn/build.rs
++++ b/third_party/rust/syn/build.rs
+@@ -1,6 +1,6 @@
+ use std::env;
+ use std::process::Command;
+-use std::str::{self, FromStr};
++use std::str;
+
+ // The rustc-cfg strings below are *not* public API. Please let us know by
+ // opening a GitHub issue if your build environment requires some way to enable
+@@ -26,38 +26,14 @@ struct Compiler {
+ }
+
+ fn rustc_version() -> Option<Compiler> {
+- let rustc = match env::var_os("RUSTC") {
+- Some(rustc) => rustc,
+- None => return None,
+- };
+-
+- let output = match Command::new(rustc).arg("--version").output() {
+- Ok(output) => output,
+- Err(_) => return None,
+- };
+-
+- let version = match str::from_utf8(&output.stdout) {
+- Ok(version) => version,
+- Err(_) => return None,
+- };
+-
++ let rustc = env::var_os("RUSTC")?;
++ let output = Command::new(rustc).arg("--version").output().ok()?;
++ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+-
+- let next = match pieces.next() {
+- Some(next) => next,
+- None => return None,
+- };
+-
+- let minor = match u32::from_str(next) {
+- Ok(minor) => minor,
+- Err(_) => return None,
+- };
+-
+- Some(Compiler {
+- minor: minor,
+- nightly: version.contains("nightly"),
+- })
++ let minor = pieces.next()?.parse().ok()?;
++ let nightly = version.contains("nightly");
++ Some(Compiler { minor, nightly })
+ }
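Because the hunk above interleaves the old match ladders with their replacement, the refactored version probe reads more easily assembled in one piece. A sketch under the assumption that Compiler carries minor: u32 and nightly: bool fields (the struct body sits outside the hunk):

use std::env;
use std::process::Command;
use std::str;

struct Compiler {
    minor: u32,
    nightly: bool,
}

fn rustc_version() -> Option<Compiler> {
    // `?` on Option replaces the removed `match ... None => return None` blocks.
    let rustc = env::var_os("RUSTC")?;
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = str::from_utf8(&output.stdout).ok()?;
    let mut pieces = version.split('.');
    if pieces.next() != Some("rustc 1") {
        return None;
    }
    let minor = pieces.next()?.parse().ok()?;
    let nightly = version.contains("nightly");
    Some(Compiler { minor, nightly })
}

fn main() {
    if let Some(compiler) = rustc_version() {
        println!("rustc 1.{} (nightly: {})", compiler.minor, compiler.nightly);
    }
}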
+diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
+index 34009deabc..fa4f1cb2a3 100644
+--- a/third_party/rust/syn/src/attr.rs
++++ b/third_party/rust/syn/src/attr.rs
+@@ -9,15 +9,11 @@ use proc_macro2::TokenStream;
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
+ #[cfg(feature = "parsing")]
+ use crate::punctuated::Pair;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// An attribute like `#[repr(transparent)]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -111,7 +107,46 @@ ast_struct! {
+ ///
+ /// [`parse_meta()`]: Attribute::parse_meta
+ /// [`parse_args()`]: Attribute::parse_args
+- pub struct Attribute #manual_extra_traits {
++ ///
++ /// <p><br></p>
++ ///
++ /// # Doc comments
++ ///
++ /// The compiler transforms doc comments, such as `/// comment` and `/*!
++ /// comment */`, into attributes before macros are expanded. Each comment is
++ /// expanded into an attribute of the form `#[doc = r"comment"]`.
++ ///
++ /// As an example, the following `mod` items are expanded identically:
++ ///
++ /// ```
++ /// # use syn::{ItemMod, parse_quote};
++ /// let doc: ItemMod = parse_quote! {
++ /// /// Single line doc comments
++ /// /// We write so many!
++ /// /**
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// */
++ /// mod example {
++ /// //! Of course, they can be inner too
++ /// /*! And fit in a single line */
++ /// }
++ /// };
++ /// let attr: ItemMod = parse_quote! {
++ /// #[doc = r" Single line doc comments"]
++ /// #[doc = r" We write so many!"]
++ /// #[doc = r"
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// "]
++ /// mod example {
++ /// #![doc = r" Of course, they can be inner too"]
++ /// #![doc = r" And fit in a single line "]
++ /// }
++ /// };
++ /// assert_eq!(doc, attr);
++ /// ```
++ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+@@ -120,39 +155,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Attribute {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Attribute {
+- fn eq(&self, other: &Self) -> bool {
+- self.style == other.style
+- && self.pound_token == other.pound_token
+- && self.bracket_token == other.bracket_token
+- && self.path == other.path
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Attribute {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.style.hash(state);
+- self.pound_token.hash(state);
+- self.bracket_token.hash(state);
+- self.path.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ impl Attribute {
+ /// Parses the content of the attribute, consisting of the path and tokens,
+ /// as a [`Meta`] if possible.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+@@ -199,7 +206,7 @@ impl Attribute {
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+@@ -208,7 +215,7 @@ impl Attribute {
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+@@ -221,7 +228,7 @@ impl Attribute {
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+@@ -234,7 +241,7 @@ impl Attribute {
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+@@ -247,7 +254,7 @@ impl Attribute {
+ }
+
+ #[cfg(feature = "parsing")]
+-fn error_expected_args(attr: &Attribute) -> Error {
++fn expected_parentheses(attr: &Attribute) -> String {
+ let style = match attr.style {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+@@ -261,19 +268,23 @@ fn error_expected_args(attr: &Attribute) -> Error {
+ path += &segment.ident.to_string();
+ }
+
+- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
+-
+- #[cfg(feature = "printing")]
+- return Error::new_spanned(attr, msg);
+-
+- #[cfg(not(feature = "printing"))]
+- return Error::new(attr.bracket_token.span, msg);
++ format!("{}[{}(...)]", style, path)
+ }
+
+ #[cfg(feature = "parsing")]
+ fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
+ if input.is_empty() {
+- return Err(error_expected_args(attr));
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected attribute arguments in parentheses: {}", expected);
++ return Err(crate::error::new2(
++ attr.pound_token.span,
++ attr.bracket_token.span,
++ msg,
++ ));
++ } else if input.peek(Token![=]) {
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected parentheses: {}", expected);
++ return Err(input.error(msg));
+ };
+
+ let content;
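The new peek(Token![=]) branch above turns a bare parse failure into a targeted suggestion. A small sketch of the resulting behavior, using a hypothetical attribute name `myattr` and syn's default features:

use syn::{DeriveInput, LitStr};

fn main() {
    // Hypothetical attribute written with name-value syntax instead of
    // parenthesized arguments.
    let input: DeriveInput = syn::parse_str(r#"#[myattr = "x"] struct Demo;"#).unwrap();
    let err = match input.attrs[0].parse_args::<LitStr>() {
        Err(err) => err,
        Ok(_) => unreachable!("name-value syntax should be rejected"),
    };
    // The message is built by expected_parentheses() as patched above.
    assert_eq!(err.to_string(), "expected parentheses: #[myattr(...)]");
}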
+@@ -298,7 +309,7 @@ ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Outer attributes
+@@ -312,7 +323,6 @@ ast_enum! {
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+@@ -322,7 +332,7 @@ ast_enum! {
+ ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Path
+@@ -360,7 +370,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaList {
+ pub path: Path,
+@@ -372,7 +382,7 @@ ast_struct! {
+ ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaNameValue {
+ pub path: Path,
+@@ -398,7 +408,7 @@ impl Meta {
+ ast_enum_of_structs! {
+ /// Element of a compile-time attribute list.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum NestedMeta {
+ /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
+@@ -429,8 +439,8 @@ ast_enum_of_structs! {
+ /// as type `AttributeArgs`.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -464,7 +474,7 @@ where
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+- _ => false,
++ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.into_iter().filter(is_outer)
+@@ -474,7 +484,7 @@ where
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+- _ => false,
++ AttrStyle::Outer => false,
+ }
+ }
+ self.into_iter().filter(is_inner)
+diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
+index 551a5ac816..a461cc49ea 100644
+--- a/third_party/rust/syn/src/buffer.rs
++++ b/third_party/rust/syn/src/buffer.rs
+@@ -1,7 +1,7 @@
+ //! A stably addressed token buffer supporting efficient traversal based on a
+ //! cheaply copyable cursor.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ // This module is heavily commented as it contains most of the unsafe code in
+ // Syn, and caution should be used when editing it. The public-facing interface
+@@ -36,7 +36,7 @@ enum Entry {
+ /// `TokenStream` which requires a deep copy in order to traverse more than
+ /// once.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct TokenBuffer {
+ // NOTE: Do not derive clone on this - there are raw pointers inside which
+ // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
+@@ -98,7 +98,7 @@ impl TokenBuffer {
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `TokenStream`.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -133,8 +133,7 @@ impl TokenBuffer {
+ /// Two cursors are equal if they have the same location in the same input
+ /// stream, and have the same scope.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
+-#[derive(Copy, Clone, Eq, PartialEq)]
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+@@ -201,13 +200,13 @@ impl<'a> Cursor<'a> {
+ Cursor::create(self.ptr.offset(1), self.scope)
+ }
+
+- /// If the cursor is looking at a `None`-delimited group, move it to look at
+- /// the first token inside instead. If the group is empty, this will move
++ /// While the cursor is looking at a `None`-delimited group, move it to look
++ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+- if let Entry::Group(group, buf) = self.entry() {
++ while let Entry::Group(group, buf) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ // NOTE: We call `Cursor::create` here to make sure that
+ // situations where we should immediately exit the span after
+@@ -215,13 +214,14 @@ impl<'a> Cursor<'a> {
+ unsafe {
+ *self = Cursor::create(&buf.data[0], self.scope);
+ }
++ } else {
++ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+- #[inline]
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+@@ -342,6 +342,44 @@ impl<'a> Cursor<'a> {
+ Entry::End(..) => Span::call_site(),
+ }
+ }
++
++ /// Skip over the next token without cloning it. Returns `None` if this
++ /// cursor points to eof.
++ ///
++ /// This method treats `'lifetimes` as a single token.
++ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
++ match self.entry() {
++ Entry::End(..) => None,
++
++ // Treat lifetimes as a single tt for the purposes of 'skip'.
++ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
++ let next = unsafe { self.bump() };
++ match next.entry() {
++ Entry::Ident(_) => Some(unsafe { next.bump() }),
++ _ => Some(next),
++ }
++ }
++ _ => Some(unsafe { self.bump() }),
++ }
++ }
++}
++
++impl<'a> Copy for Cursor<'a> {}
++
++impl<'a> Clone for Cursor<'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
++impl<'a> Eq for Cursor<'a> {}
++
++impl<'a> PartialEq for Cursor<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ let Cursor { ptr, scope, marker } = self;
++ let _ = marker;
++ *ptr == other.ptr && *scope == other.scope
++ }
+ }
+
+ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
+diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
+index 200e8478ef..a33044a564 100644
+--- a/third_party/rust/syn/src/custom_keyword.rs
++++ b/third_party/rust/syn/src/custom_keyword.rs
+@@ -86,7 +86,7 @@
+ /// }
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+@@ -95,7 +95,7 @@ macro_rules! custom_keyword {
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
++ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
+ span: __S,
+ ) -> $ident {
+@@ -112,10 +112,10 @@ macro_rules! custom_keyword {
+ }
+ }
+
+- impl_parse_for_custom_keyword!($ident);
+- impl_to_tokens_for_custom_keyword!($ident);
+- impl_clone_for_custom_keyword!($ident);
+- impl_extra_traits_for_custom_keyword!($ident);
++ $crate::impl_parse_for_custom_keyword!($ident);
++ $crate::impl_to_tokens_for_custom_keyword!($ident);
++ $crate::impl_clone_for_custom_keyword!($ident);
++ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
+index 29fa448bd8..70dff42851 100644
+--- a/third_party/rust/syn/src/custom_punctuation.rs
++++ b/third_party/rust/syn/src/custom_punctuation.rs
+@@ -74,19 +74,19 @@
+ /// let _: PathSegments = syn::parse_str(input).unwrap();
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+- pub spans: custom_punctuation_repr!($($tt)+),
++ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
+- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
++ #[allow(dead_code, non_snake_case)]
++ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
++ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::export::IntoSpans::into_spans(spans)
+ }
+@@ -98,33 +98,33 @@ macro_rules! custom_punctuation {
+ }
+ }
+
+- impl_parse_for_custom_punctuation!($ident, $($tt)+);
+- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+- impl_clone_for_custom_punctuation!($ident, $($tt)+);
+- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::token::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> bool {
+- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
++ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::export::str {
+- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
++ concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+- let spans: custom_punctuation_repr!($($tt)+) =
+- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
++ let spans: $crate::custom_punctuation_repr!($($tt)+) =
++ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+@@ -142,12 +142,12 @@ macro_rules! impl_parse_for_custom_punctuation {
+ // Not public API.
+ #[cfg(feature = "printing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::export::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
+- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
++ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+@@ -221,16 +221,16 @@ macro_rules! impl_extra_traits_for_custom_punctuation {
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
++ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ #[rustfmt::skip]
+ macro_rules! custom_punctuation_len {
+ ($mode:ident, +) => { 1 };
+@@ -279,7 +279,7 @@ macro_rules! custom_punctuation_len {
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
++ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+ }
+
+ // Not public API.
+@@ -297,13 +297,3 @@ macro_rules! stringify_punct {
+ concat!($(stringify!($tt)),+)
+ };
+ }
+-
+-// Not public API.
+-// Without this, local_inner_macros breaks when looking for concat!
+-#[doc(hidden)]
+-#[macro_export]
+-macro_rules! custom_punctuation_concat {
+- ($($tt:tt)*) => {
+- concat!($($tt)*)
+- };
+-}
+diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
+index be43679874..b217b8ca6f 100644
+--- a/third_party/rust/syn/src/data.rs
++++ b/third_party/rust/syn/src/data.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// An enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variant {
+ /// Attributes tagged on the variant.
+@@ -24,7 +24,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -52,7 +52,7 @@ ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+@@ -63,7 +63,7 @@ ast_struct! {
+ ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+@@ -93,6 +93,24 @@ impl Fields {
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
++
++ /// Returns the number of fields.
++ pub fn len(&self) -> usize {
++ match self {
++ Fields::Unit => 0,
++ Fields::Named(f) => f.named.len(),
++ Fields::Unnamed(f) => f.unnamed.len(),
++ }
++ }
++
++ /// Returns `true` if there are zero fields.
++ pub fn is_empty(&self) -> bool {
++ match self {
++ Fields::Unit => true,
++ Fields::Named(f) => f.named.is_empty(),
++ Fields::Unnamed(f) => f.unnamed.is_empty(),
++ }
++ }
+ }
+
+ impl IntoIterator for Fields {
+@@ -129,7 +147,7 @@ impl<'a> IntoIterator for &'a mut Fields {
+ ast_struct! {
+ /// A field of a struct or enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Field {
+ /// Attributes tagged on the field.
+@@ -154,7 +172,7 @@ ast_enum_of_structs! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -184,7 +202,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A public visibility level: `pub`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisPublic {
+ pub pub_token: Token![pub],
+@@ -194,7 +212,7 @@ ast_struct! {
+ ast_struct! {
+ /// A crate-level visibility: `crate`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisCrate {
+ pub crate_token: Token![crate],
+@@ -205,7 +223,7 @@ ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+@@ -220,12 +238,15 @@ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
++ let _visibility: Visibility = input.parse()?;
+ Ok(Variant {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ ident: input.parse()?,
+ fields: {
+ if input.peek(token::Brace) {
+@@ -295,6 +316,17 @@ pub mod parsing {
+
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
++ // Recognize an empty None-delimited group, as produced by a $:vis
++ // matcher that matched no tokens.
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if group.content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Inherited);
++ }
++ }
++
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else if input.peek(Token![crate]) {
+@@ -310,27 +342,39 @@ pub mod parsing {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+- // TODO: optimize using advance_to
+ let ahead = input.fork();
+- let mut content;
+- parenthesized!(content in ahead);
+
++ let content;
++ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
+- return Ok(Visibility::Restricted(VisRestricted {
+- pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: None,
+- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
+- }));
++ let path = content.call(Ident::parse_any)?;
++
++ // Ensure there are no additional tokens within `content`.
++ // Without explicitly checking, we may misinterpret a tuple
++ // field as a restricted visibility, causing a parse error.
++ // e.g. `pub (crate::A, crate::B)` (Issue #720).
++ if content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Restricted(VisRestricted {
++ pub_token,
++ paren_token,
++ in_token: None,
++ path: Box::new(Path::from(path)),
++ }));
++ }
+ } else if content.peek(Token![in]) {
++ let in_token: Token![in] = content.parse()?;
++ let path = content.call(Path::parse_mod_style)?;
++
++ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: Some(content.parse()?),
+- path: Box::new(content.call(Path::parse_mod_style)?),
++ paren_token,
++ in_token: Some(in_token),
++ path: Box::new(path),
+ }));
+ }
+ }
+@@ -347,6 +391,14 @@ pub mod parsing {
+ }))
+ }
+ }
++
++ #[cfg(feature = "full")]
++ pub(crate) fn is_some(&self) -> bool {
++ match self {
++ Visibility::Inherited => false,
++ _ => true,
++ }
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
+index 8cb9cf7b6d..3fa9d89a93 100644
+--- a/third_party/rust/syn/src/derive.rs
++++ b/third_party/rust/syn/src/derive.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ pub struct DeriveInput {
+ /// Attributes tagged on the whole struct or enum.
+ pub attrs: Vec<Attribute>,
+@@ -26,7 +26,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -53,7 +53,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+@@ -65,7 +65,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+@@ -77,7 +77,7 @@ ast_struct! {
+ ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataUnion {
+ pub union_token: Token![union],
+diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
+index 4d9ff93728..76c9fce6f8 100644
+--- a/third_party/rust/syn/src/discouraged.rs
++++ b/third_party/rust/syn/src/discouraged.rs
+@@ -16,7 +16,7 @@ pub trait Speculative {
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+- /// an `A`, or that the `A`s are finished and its time to start parsing
++ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+@@ -72,7 +72,6 @@ pub trait Speculative {
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+- /// || input.peek(Token![extern])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+@@ -164,6 +163,30 @@ impl<'a> Speculative for ParseBuffer<'a> {
+ panic!("Fork was not derived from the advancing parse stream");
+ }
+
++ let (self_unexp, self_sp) = inner_unexpected(self);
++ let (fork_unexp, fork_sp) = inner_unexpected(fork);
++ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
++ match (fork_sp, self_sp) {
++ // Unexpected set on the fork, but not on `self`, copy it over.
++ (Some(span), None) => {
++ self_unexp.set(Unexpected::Some(span));
++ }
++ // Unexpected unset. Use chain to propagate errors from fork.
++ (None, None) => {
++ fork_unexp.set(Unexpected::Chain(self_unexp));
++
++ // Ensure toplevel 'unexpected' tokens from the fork don't
++ // bubble up the chain by replacing the root `unexpected`
++ // pointer, only 'unexpected' tokens from existing group
++ // parsers should bubble.
++ fork.unexpected
++ .set(Some(Rc::new(Cell::new(Unexpected::None))));
++ }
++ // Unexpected has been set on `self`. No changes needed.
++ (_, Some(_)) => {}
++ }
++ }
++
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
+diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
+index 146d652299..dba34f9254 100644
+--- a/third_party/rust/syn/src/error.rs
++++ b/third_party/rust/syn/src/error.rs
+@@ -1,4 +1,3 @@
+-use std;
+ use std::fmt::{self, Debug, Display};
+ use std::iter::FromIterator;
+ use std::slice;
+@@ -32,8 +31,8 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// conversion to `compile_error!` automatically.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -82,7 +81,6 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// # }
+ /// # }
+ /// ```
+-#[derive(Clone)]
+ pub struct Error {
+ messages: Vec<ErrorMessage>,
+ }
+@@ -250,6 +248,17 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
+ }
+ }
+
++#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
++pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
++ Error {
++ messages: vec![ErrorMessage {
++ start_span: ThreadBound::new(start),
++ end_span: ThreadBound::new(end),
++ message: message.to_string(),
++ }],
++ }
++}
++
+ impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+@@ -278,6 +287,14 @@ impl Display for Error {
+ }
+ }
+
++impl Clone for Error {
++ fn clone(&self) -> Self {
++ Error {
++ messages: self.messages.clone(),
++ }
++ }
++}
++
+ impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ let start = self
+@@ -355,3 +372,11 @@ impl<'a> Iterator for Iter<'a> {
+ })
+ }
+ }
++
++impl Extend<Error> for Error {
++ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
++ for err in iter {
++ self.combine(err);
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
+index 2874a463aa..2fe0e0b5d8 100644
+--- a/third_party/rust/syn/src/expr.rs
++++ b/third_party/rust/syn/src/expr.rs
+@@ -1,18 +1,21 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
++#[cfg(feature = "full")]
++use crate::reserved::Reserved;
+ use proc_macro2::{Span, TokenStream};
+-#[cfg(feature = "extra-traits")]
++#[cfg(feature = "printing")]
++use quote::IdentFragment;
++#[cfg(feature = "printing")]
++use std::fmt::{self, Display};
+ use std::hash::{Hash, Hasher};
+-#[cfg(all(feature = "parsing", feature = "full"))]
++#[cfg(feature = "parsing")]
+ use std::mem;
+
+ ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
++ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+@@ -83,7 +86,7 @@ ast_enum_of_structs! {
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+- pub enum Expr #manual_extra_traits {
++ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+@@ -228,7 +231,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -239,7 +242,7 @@ ast_struct! {
+ ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -251,7 +254,7 @@ ast_struct! {
+ ast_struct! {
+ /// A compound assignment expression: `counter += 1`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssignOp #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -263,7 +266,7 @@ ast_struct! {
+ ast_struct! {
+ /// An async block: `async { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+@@ -275,7 +278,7 @@ ast_struct! {
+ ast_struct! {
+ /// An await expression: `fut.await`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -287,7 +290,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binary operation: `a + b`, `a * b`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+@@ -300,7 +303,7 @@ ast_struct! {
+ ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -311,7 +314,7 @@ ast_struct! {
+ ast_struct! {
+ /// A box expression: `box f`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBox #full {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -323,7 +326,7 @@ ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+@@ -335,7 +338,7 @@ ast_struct! {
+ ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+@@ -348,7 +351,7 @@ ast_struct! {
+ ast_struct! {
+ /// A cast expression: `foo as f64`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+@@ -361,7 +364,7 @@ ast_struct! {
+ ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub asyncness: Option<Token![async]>,
+@@ -378,7 +381,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `continue`, with an optional label.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+@@ -390,7 +393,7 @@ ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -402,7 +405,7 @@ ast_struct! {
+ ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -421,7 +424,7 @@ ast_struct! {
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprGroup #full {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+@@ -436,7 +439,7 @@ ast_struct! {
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+@@ -449,7 +452,7 @@ ast_struct! {
+ ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+@@ -462,7 +465,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -475,7 +478,7 @@ ast_struct! {
+ ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+@@ -486,7 +489,7 @@ ast_struct! {
+ ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -498,7 +501,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMacro #full {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -508,7 +511,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+@@ -521,7 +524,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMethodCall #full {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+@@ -536,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -550,7 +553,7 @@ ast_struct! {
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+@@ -562,7 +565,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub from: Option<Box<Expr>>,
+@@ -574,7 +577,7 @@ ast_struct! {
+ ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReference #full {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -587,7 +590,7 @@ ast_struct! {
+ ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -600,7 +603,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+@@ -614,7 +617,7 @@ ast_struct! {
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprStruct #full {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -628,7 +631,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try-expression: `expr?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -639,7 +642,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try block: `try { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+@@ -650,7 +653,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -661,7 +664,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription expression: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprType #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -673,7 +676,7 @@ ast_struct! {
+ ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+@@ -685,7 +688,7 @@ ast_struct! {
+ ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+@@ -696,7 +699,7 @@ ast_struct! {
+ ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -709,7 +712,7 @@ ast_struct! {
+ ast_struct! {
+ /// A yield expression: `yield expr`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+@@ -717,232 +720,6 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Expr {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Expr {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Expr::Array(this), Expr::Array(other)) => this == other,
+- (Expr::Assign(this), Expr::Assign(other)) => this == other,
+- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
+- (Expr::Async(this), Expr::Async(other)) => this == other,
+- (Expr::Await(this), Expr::Await(other)) => this == other,
+- (Expr::Binary(this), Expr::Binary(other)) => this == other,
+- (Expr::Block(this), Expr::Block(other)) => this == other,
+- (Expr::Box(this), Expr::Box(other)) => this == other,
+- (Expr::Break(this), Expr::Break(other)) => this == other,
+- (Expr::Call(this), Expr::Call(other)) => this == other,
+- (Expr::Cast(this), Expr::Cast(other)) => this == other,
+- (Expr::Closure(this), Expr::Closure(other)) => this == other,
+- (Expr::Continue(this), Expr::Continue(other)) => this == other,
+- (Expr::Field(this), Expr::Field(other)) => this == other,
+- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
+- (Expr::Group(this), Expr::Group(other)) => this == other,
+- (Expr::If(this), Expr::If(other)) => this == other,
+- (Expr::Index(this), Expr::Index(other)) => this == other,
+- (Expr::Let(this), Expr::Let(other)) => this == other,
+- (Expr::Lit(this), Expr::Lit(other)) => this == other,
+- (Expr::Loop(this), Expr::Loop(other)) => this == other,
+- (Expr::Macro(this), Expr::Macro(other)) => this == other,
+- (Expr::Match(this), Expr::Match(other)) => this == other,
+- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
+- (Expr::Paren(this), Expr::Paren(other)) => this == other,
+- (Expr::Path(this), Expr::Path(other)) => this == other,
+- (Expr::Range(this), Expr::Range(other)) => this == other,
+- (Expr::Reference(this), Expr::Reference(other)) => this == other,
+- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
+- (Expr::Return(this), Expr::Return(other)) => this == other,
+- (Expr::Struct(this), Expr::Struct(other)) => this == other,
+- (Expr::Try(this), Expr::Try(other)) => this == other,
+- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
+- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
+- (Expr::Type(this), Expr::Type(other)) => this == other,
+- (Expr::Unary(this), Expr::Unary(other)) => this == other,
+- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
+- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Expr::While(this), Expr::While(other)) => this == other,
+- (Expr::Yield(this), Expr::Yield(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Expr {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Expr::Array(expr) => {
+- hash.write_u8(0);
+- expr.hash(hash);
+- }
+- Expr::Assign(expr) => {
+- hash.write_u8(1);
+- expr.hash(hash);
+- }
+- Expr::AssignOp(expr) => {
+- hash.write_u8(2);
+- expr.hash(hash);
+- }
+- Expr::Async(expr) => {
+- hash.write_u8(3);
+- expr.hash(hash);
+- }
+- Expr::Await(expr) => {
+- hash.write_u8(4);
+- expr.hash(hash);
+- }
+- Expr::Binary(expr) => {
+- hash.write_u8(5);
+- expr.hash(hash);
+- }
+- Expr::Block(expr) => {
+- hash.write_u8(6);
+- expr.hash(hash);
+- }
+- Expr::Box(expr) => {
+- hash.write_u8(7);
+- expr.hash(hash);
+- }
+- Expr::Break(expr) => {
+- hash.write_u8(8);
+- expr.hash(hash);
+- }
+- Expr::Call(expr) => {
+- hash.write_u8(9);
+- expr.hash(hash);
+- }
+- Expr::Cast(expr) => {
+- hash.write_u8(10);
+- expr.hash(hash);
+- }
+- Expr::Closure(expr) => {
+- hash.write_u8(11);
+- expr.hash(hash);
+- }
+- Expr::Continue(expr) => {
+- hash.write_u8(12);
+- expr.hash(hash);
+- }
+- Expr::Field(expr) => {
+- hash.write_u8(13);
+- expr.hash(hash);
+- }
+- Expr::ForLoop(expr) => {
+- hash.write_u8(14);
+- expr.hash(hash);
+- }
+- Expr::Group(expr) => {
+- hash.write_u8(15);
+- expr.hash(hash);
+- }
+- Expr::If(expr) => {
+- hash.write_u8(16);
+- expr.hash(hash);
+- }
+- Expr::Index(expr) => {
+- hash.write_u8(17);
+- expr.hash(hash);
+- }
+- Expr::Let(expr) => {
+- hash.write_u8(18);
+- expr.hash(hash);
+- }
+- Expr::Lit(expr) => {
+- hash.write_u8(19);
+- expr.hash(hash);
+- }
+- Expr::Loop(expr) => {
+- hash.write_u8(20);
+- expr.hash(hash);
+- }
+- Expr::Macro(expr) => {
+- hash.write_u8(21);
+- expr.hash(hash);
+- }
+- Expr::Match(expr) => {
+- hash.write_u8(22);
+- expr.hash(hash);
+- }
+- Expr::MethodCall(expr) => {
+- hash.write_u8(23);
+- expr.hash(hash);
+- }
+- Expr::Paren(expr) => {
+- hash.write_u8(24);
+- expr.hash(hash);
+- }
+- Expr::Path(expr) => {
+- hash.write_u8(25);
+- expr.hash(hash);
+- }
+- Expr::Range(expr) => {
+- hash.write_u8(26);
+- expr.hash(hash);
+- }
+- Expr::Reference(expr) => {
+- hash.write_u8(27);
+- expr.hash(hash);
+- }
+- Expr::Repeat(expr) => {
+- hash.write_u8(28);
+- expr.hash(hash);
+- }
+- Expr::Return(expr) => {
+- hash.write_u8(29);
+- expr.hash(hash);
+- }
+- Expr::Struct(expr) => {
+- hash.write_u8(30);
+- expr.hash(hash);
+- }
+- Expr::Try(expr) => {
+- hash.write_u8(31);
+- expr.hash(hash);
+- }
+- Expr::TryBlock(expr) => {
+- hash.write_u8(32);
+- expr.hash(hash);
+- }
+- Expr::Tuple(expr) => {
+- hash.write_u8(33);
+- expr.hash(hash);
+- }
+- Expr::Type(expr) => {
+- hash.write_u8(34);
+- expr.hash(hash);
+- }
+- Expr::Unary(expr) => {
+- hash.write_u8(35);
+- expr.hash(hash);
+- }
+- Expr::Unsafe(expr) => {
+- hash.write_u8(36);
+- expr.hash(hash);
+- }
+- Expr::Verbatim(expr) => {
+- hash.write_u8(37);
+- TokenStreamHelper(expr).hash(hash);
+- }
+- Expr::While(expr) => {
+- hash.write_u8(38);
+- expr.hash(hash);
+- }
+- Expr::Yield(expr) => {
+- hash.write_u8(39);
+- expr.hash(hash);
+- }
+- Expr::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ impl Expr {
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+@@ -996,7 +773,7 @@ ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum Member {
+ /// A named field like `self.x`.
+@@ -1006,12 +783,50 @@ ast_enum! {
+ }
+ }
+
++impl Eq for Member {}
++
++impl PartialEq for Member {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Member::Named(this), Member::Named(other)) => this == other,
++ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
++ _ => false,
++ }
++ }
++}
++
++impl Hash for Member {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ match self {
++ Member::Named(m) => m.hash(state),
++ Member::Unnamed(m) => m.hash(state),
++ }
++ }
++}
++
++#[cfg(feature = "printing")]
++impl IdentFragment for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(m) => Display::fmt(m, formatter),
++ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
++ }
++ }
++
++ fn span(&self) -> Option<Span> {
++ match self {
++ Member::Named(m) => Some(m.span()),
++ Member::Unnamed(m) => Some(m.span),
++ }
++ }
++}
++
+ ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Index #manual_extra_traits {
++ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
+@@ -1027,28 +842,28 @@ impl From<usize> for Index {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Eq for Index {}
+
+-#[cfg(feature = "extra-traits")]
+ impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+ }
+
+-#[cfg(feature = "full")]
+-ast_struct! {
+- #[derive(Default)]
+- pub struct Reserved {
+- private: (),
++#[cfg(feature = "printing")]
++impl IdentFragment for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ Display::fmt(&self.index, formatter)
++ }
++
++ fn span(&self) -> Option<Span> {
++ Some(self.span)
+ }
+ }
+
+@@ -1057,7 +872,7 @@ ast_struct! {
+ /// The `::<>` explicit type parameters passed to a method call:
+ /// `parse::<u64>()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct MethodTurbofish {
+ pub colon2_token: Token![::],
+ pub lt_token: Token![<],
+@@ -1070,7 +885,7 @@ ast_struct! {
+ ast_enum! {
+ /// An individual generic argument to a method, like `T`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum GenericMethodArgument {
+ /// A type argument.
+ Type(Type),
+@@ -1086,7 +901,7 @@ ast_enum! {
+ ast_struct! {
+ /// A field-value pair in a struct literal.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldValue {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+@@ -1107,7 +922,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+@@ -1134,7 +949,7 @@ ast_struct! {
+ /// # }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+@@ -1149,8 +964,7 @@ ast_struct! {
+ ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+@@ -1162,7 +976,7 @@ ast_enum! {
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ #[cfg(feature = "full")]
+ pub(crate) fn requires_terminator(expr: &Expr) -> bool {
+- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
++ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
+ match *expr {
+ Expr::Unsafe(..)
+ | Expr::Block(..)
+@@ -1183,16 +997,17 @@ pub(crate) mod parsing {
+
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use std::cmp::Ordering;
++
++ crate::custom_keyword!(raw);
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+- #[derive(Copy, Clone)]
+ pub struct AllowStruct(bool);
+
+- #[derive(Copy, Clone, PartialEq, PartialOrd)]
+ enum Precedence {
+ Any,
+ Assign,
+@@ -1246,9 +1061,121 @@ pub(crate) mod parsing {
+ }
+ }
+
+- #[cfg(feature = "full")]
+- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
+- ambiguous_expr(input, AllowStruct(false))
++ impl Expr {
++ /// An alternative to the primary `Expr::parse` parser (from the
++ /// [`Parse`] trait) for ambiguous syntactic positions in which a
++ /// trailing brace should not be taken as part of the expression.
++ ///
++ /// Rust grammar has an ambiguity where braces sometimes turn a path
++ /// expression into a struct initialization and sometimes do not. In the
++ /// following code, the expression `S {}` is one expression. Presumably
++ /// there is an empty struct `struct S {}` defined somewhere which it is
++ /// instantiating.
++ ///
++ /// ```
++ /// # struct S;
++ /// # impl std::ops::Deref for S {
++ /// # type Target = bool;
++ /// # fn deref(&self) -> &Self::Target {
++ /// # &true
++ /// # }
++ /// # }
++ /// let _ = *S {};
++ ///
++ /// // parsed by rustc as: `*(S {})`
++ /// ```
++ ///
++ /// We would want to parse the above using `Expr::parse` after the `=`
++ /// token.
++ ///
++ /// But in the following, `S {}` is *not* a struct init expression.
++ ///
++ /// ```
++ /// # const S: &bool = &true;
++ /// if *S {} {}
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (*S) {
++ /// // /* empty block */
++ /// // }
++ /// // {
++ /// // /* another empty block */
++ /// // }
++ /// ```
++ ///
++ /// For that reason we would want to parse if-conditions using
++ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
++ /// similar syntactic positions such as the condition expr after a
++ /// `while` token or the expr at the top of a `match`.
++ ///
++ /// The Rust grammar's choices around which way this ambiguity is
++ /// resolved at various syntactic positions is fairly arbitrary. Really
++ /// either parse behavior could work in most positions, and language
++ /// designers just decide each case based on which is more likely to be
++ /// what the programmer had in mind most of the time.
++ ///
++ /// ```
++ /// # struct S;
++ /// # fn doc() -> S {
++ /// if return S {} {}
++ /// # unreachable!()
++ /// # }
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (return (S {})) {
++ /// // }
++ /// //
++ /// // but could equally well have been this other arbitrary choice:
++ /// //
++ /// // if (return S) {
++ /// // }
++ /// // {}
++ /// ```
++ ///
++ /// Note the grammar ambiguity on trailing braces is distinct from
++ /// precedence and is not captured by assigning a precedence level to
++ /// the braced struct init expr in relation to other operators. This can
++ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
++ /// parses as `return (0..(S {}))` implying tighter precedence for
++ /// struct init than `..`, while the latter parses as `match (0..S) {}`
++ /// implying tighter precedence for `..` than struct init, a
++ /// contradiction.
++ #[cfg(feature = "full")]
++ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
++ ambiguous_expr(input, AllowStruct(false))
++ }
++ }
++
++ impl Copy for AllowStruct {}
++
++ impl Clone for AllowStruct {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl Copy for Precedence {}
++
++ impl Clone for Precedence {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl PartialEq for Precedence {
++ fn eq(&self, other: &Self) -> bool {
++ *self as u8 == *other as u8
++ }
++ }
++
++ impl PartialOrd for Precedence {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ let this = *self as u8;
++ let other = *other as u8;
++ Some(this.cmp(&other))
++ }
+ }
+
+ #[cfg(feature = "full")]
+@@ -1430,56 +1357,84 @@ pub(crate) mod parsing {
+ parse_expr(input, lhs, allow_struct, Precedence::Any)
+ }
+
++ #[cfg(feature = "full")]
++ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
++ let mut attrs = Vec::new();
++ loop {
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
++ break;
++ }
++ let attr = group.content.call(attr::parsing::single_parse_outer)?;
++ if !group.content.is_empty() {
++ break;
++ }
++ attrs.push(attr);
++ } else if input.peek(Token![#]) {
++ attrs.push(input.call(attr::parsing::single_parse_outer)?);
++ } else {
++ break;
++ }
++ }
++ Ok(attrs)
++ }
++
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![&])
+- || ahead.peek(Token![box])
+- || ahead.peek(Token![*])
+- || ahead.peek(Token![!])
+- || ahead.peek(Token![-])
+- {
+- let attrs = input.call(Attribute::parse_outer)?;
+- if input.peek(Token![&]) {
++ let begin = input.fork();
++ let attrs = input.call(expr_attrs)?;
++ if input.peek(Token![&]) {
++ let and_token: Token![&] = input.parse()?;
++ let raw: Option<raw> =
++ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
++ Some(input.parse()?)
++ } else {
++ None
++ };
++ let mutability: Option<Token![mut]> = input.parse()?;
++ if raw.is_some() && mutability.is_none() {
++ input.parse::<Token![const]>()?;
++ }
++ let expr = Box::new(unary_expr(input, allow_struct)?);
++ if raw.is_some() {
++ Ok(Expr::Verbatim(verbatim::between(begin, input)))
++ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+- and_token: input.parse()?,
++ and_token,
+ raw: Reserved::default(),
+- mutability: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else if input.peek(Token![box]) {
+- Ok(Expr::Box(ExprBox {
+- attrs,
+- box_token: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else {
+- Ok(Expr::Unary(ExprUnary {
+- attrs,
+- op: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
++ mutability,
++ expr,
+ }))
+ }
++ } else if input.peek(Token![box]) {
++ Ok(Expr::Box(ExprBox {
++ attrs,
++ box_token: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
++ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
++ Ok(Expr::Unary(ExprUnary {
++ attrs,
++ op: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
+ } else {
+- trailer_expr(input, allow_struct)
++ trailer_expr(attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
++ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ }))
+@@ -1495,13 +1450,11 @@ pub(crate) mod parsing {
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
+- return input.call(expr_group).map(Expr::Group);
+- }
+-
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+-
++ fn trailer_expr(
++ outer_attrs: Vec<Attribute>,
++ input: ParseStream,
++ allow_struct: AllowStruct,
++ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+@@ -1523,18 +1476,26 @@ pub(crate) mod parsing {
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) {
+- let dot_token: Token![.] = input.parse()?;
++ let mut dot_token: Token![.] = input.parse()?;
+
+- if input.peek(token::Await) {
++ let await_token: Option<token::Await> = input.parse()?;
++ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+- await_token: input.parse()?,
++ await_token,
+ });
+ continue;
+ }
+
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
++
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(MethodTurbofish {
+@@ -1620,10 +1581,17 @@ pub(crate) mod parsing {
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
+ {
++ let mut dot_token: Token![.] = input.parse()?;
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+- dot_token: input.parse()?,
++ dot_token,
+ member: input.parse()?,
+ });
+ } else if input.peek(token::Bracket) {
+@@ -1646,7 +1614,11 @@ pub(crate) mod parsing {
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group)
++ && !input.peek2(Token![::])
++ && !input.peek2(Token![!])
++ && !input.peek2(token::Brace)
++ {
+ input.call(expr_group).map(Expr::Group)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+@@ -1668,7 +1640,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input, allow_struct)
+@@ -1740,7 +1711,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ input.parse().map(Expr::Path)
+@@ -1878,7 +1848,7 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+@@ -1905,7 +1875,7 @@ pub(crate) mod parsing {
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ };
+
+- if input.peek(Token![.]) || input.peek(Token![?]) {
++ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+@@ -1951,7 +1921,16 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
+- // TODO parse const generics as well
++ if input.peek(Lit) {
++ let lit = input.parse()?;
++ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
++ }
++
++ if input.peek(token::Brace) {
++ let block = input.call(expr::parsing::expr_block)?;
++ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
++ }
++
+ input.parse().map(GenericMethodArgument::Type)
+ }
+
+@@ -1960,44 +1939,20 @@ pub(crate) mod parsing {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ eq_token: input.parse()?,
+- expr: Box::new(input.call(expr_no_struct)?),
++ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ Ok(ExprIf {
+- attrs: Vec::new(),
++ attrs,
+ if_token: input.parse()?,
+- cond: Box::new(input.call(expr_no_struct)?),
++ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ then_branch: input.parse()?,
+ else_branch: {
+ if input.peek(Token![else]) {
+@@ -2033,29 +1988,14 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+- let expr: Expr = input.call(expr_no_struct)?;
++ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2063,7 +2003,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ for_token,
+ pat,
+@@ -2077,6 +2017,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+@@ -2086,7 +2027,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+@@ -2097,8 +2038,9 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+- let expr = expr_no_struct(input)?;
++ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2110,7 +2052,7 @@ pub(crate) mod parsing {
+ }
+
+ Ok(ExprMatch {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+@@ -2305,9 +2247,10 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+- let cond = expr_no_struct(input)?;
++ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2315,7 +2258,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ while_token,
+ cond: Box::new(cond),
+@@ -2399,6 +2342,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+@@ -2416,7 +2360,7 @@ pub(crate) mod parsing {
+ };
+
+ Ok(FieldValue {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token,
+ expr: value,
+@@ -2433,46 +2377,36 @@ pub(crate) mod parsing {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let attrs = private::attrs(outer_attrs, inner_attrs);
+
+ let mut fields = Punctuated::new();
+- loop {
+- let attrs = content.call(Attribute::parse_outer)?;
+- // TODO: optimize using advance_to
+- if content.fork().parse::<Member>().is_err() {
+- if attrs.is_empty() {
+- break;
+- } else {
+- return Err(content.error("expected struct field"));
+- }
++ while !content.is_empty() {
++ if content.peek(Token![..]) {
++ return Ok(ExprStruct {
++ attrs,
++ brace_token,
++ path,
++ fields,
++ dot2_token: Some(content.parse()?),
++ rest: Some(Box::new(content.parse()?)),
++ });
+ }
+
+- fields.push(FieldValue {
+- attrs,
+- ..content.parse()?
+- });
+-
+- if !content.peek(Token![,]) {
++ fields.push(content.parse()?);
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
+- let dot2_token: Token![..] = content.parse()?;
+- let rest: Expr = content.parse()?;
+- (Some(dot2_token), Some(Box::new(rest)))
+- } else {
+- (None, None)
+- };
+-
+ Ok(ExprStruct {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ brace_token,
+ path,
+ fields,
+- dot2_token,
+- rest,
++ dot2_token: None,
++ rest: None,
+ })
+ }
+
+@@ -2577,27 +2511,7 @@ pub(crate) mod parsing {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+@@ -2641,6 +2555,26 @@ pub(crate) mod parsing {
+ }
+ }
+
++ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
++ let mut float_repr = float.to_string();
++ let trailing_dot = float_repr.ends_with('.');
++ if trailing_dot {
++ float_repr.truncate(float_repr.len() - 1);
++ }
++ for part in float_repr.split('.') {
++ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
++ let base = mem::replace(e, Expr::__Nonexhaustive);
++ *e = Expr::Field(ExprField {
++ attrs: Vec::new(),
++ base: Box::new(base),
++ dot_token: Token![.](dot_token.span),
++ member: Member::Unnamed(index),
++ });
++ *dot_token = Token![.](float.span());
++ }
++ Ok(!trailing_dot)
++ }
++
+ #[cfg(feature = "full")]
+ impl Member {
+ fn is_named(&self) -> bool {
+diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
+index d09577a27a..4f9bc145d9 100644
+--- a/third_party/rust/syn/src/ext.rs
++++ b/third_party/rust/syn/src/ext.rs
+@@ -1,6 +1,6 @@
+ //! Extension traits to provide parsing methods on foreign types.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ use proc_macro2::Ident;
+
+@@ -16,7 +16,7 @@ use crate::token::CustomToken;
+ /// This trait is sealed and cannot be implemented for types outside of Syn. It
+ /// is implemented only for `proc_macro2::Ident`.
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+@@ -129,7 +129,13 @@ mod private {
+
+ impl Sealed for Ident {}
+
+- #[derive(Copy, Clone)]
+ pub struct PeekFn;
+ pub struct IdentAny;
++
++ impl Copy for PeekFn {}
++ impl Clone for PeekFn {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
+ }
+diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
+index 88c02fe832..c8fab63cd9 100644
+--- a/third_party/rust/syn/src/file.rs
++++ b/third_party/rust/syn/src/file.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Example
+ ///
+@@ -37,6 +37,8 @@ ast_struct! {
+ /// file.read_to_string(&mut src).expect("Unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("Unable to parse file");
++ ///
++ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
+diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
+new file mode 100644
+index 0000000000..bea3887013
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/clone.rs
+@@ -0,0 +1,2051 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Abi {
++ fn clone(&self) -> Self {
++ Abi {
++ extern_token: self.extern_token.clone(),
++ name: self.name.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AngleBracketedGenericArguments {
++ fn clone(&self) -> Self {
++ AngleBracketedGenericArguments {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Arm {
++ fn clone(&self) -> Self {
++ Arm {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ guard: self.guard.clone(),
++ fat_arrow_token: self.fat_arrow_token.clone(),
++ body: self.body.clone(),
++ comma: self.comma.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AttrStyle {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Attribute {
++ fn clone(&self) -> Self {
++ Attribute {
++ pound_token: self.pound_token.clone(),
++ style: self.style.clone(),
++ bracket_token: self.bracket_token.clone(),
++ path: self.path.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BareFnArg {
++ fn clone(&self) -> Self {
++ BareFnArg {
++ attrs: self.attrs.clone(),
++ name: self.name.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BinOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Binding {
++ fn clone(&self) -> Self {
++ Binding {
++ ident: self.ident.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Block {
++ fn clone(&self) -> Self {
++ Block {
++ brace_token: self.brace_token.clone(),
++ stmts: self.stmts.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BoundLifetimes {
++ fn clone(&self) -> Self {
++ BoundLifetimes {
++ for_token: self.for_token.clone(),
++ lt_token: self.lt_token.clone(),
++ lifetimes: self.lifetimes.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ConstParam {
++ fn clone(&self) -> Self {
++ ConstParam {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Constraint {
++ fn clone(&self) -> Self {
++ Constraint {
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for Data {
++ fn clone(&self) -> Self {
++ match self {
++ Data::Struct(v0) => Data::Struct(v0.clone()),
++ Data::Enum(v0) => Data::Enum(v0.clone()),
++ Data::Union(v0) => Data::Union(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataEnum {
++ fn clone(&self) -> Self {
++ DataEnum {
++ enum_token: self.enum_token.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataStruct {
++ fn clone(&self) -> Self {
++ DataStruct {
++ struct_token: self.struct_token.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataUnion {
++ fn clone(&self) -> Self {
++ DataUnion {
++ union_token: self.union_token.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DeriveInput {
++ fn clone(&self) -> Self {
++ DeriveInput {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ data: self.data.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Expr {
++ fn clone(&self) -> Self {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => Expr::Array(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => Expr::Assign(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => Expr::Async(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => Expr::Await(v0.clone()),
++ Expr::Binary(v0) => Expr::Binary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => Expr::Block(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => Expr::Box(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => Expr::Break(v0.clone()),
++ Expr::Call(v0) => Expr::Call(v0.clone()),
++ Expr::Cast(v0) => Expr::Cast(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => Expr::Closure(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => Expr::Continue(v0.clone()),
++ Expr::Field(v0) => Expr::Field(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => Expr::Group(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::If(v0) => Expr::If(v0.clone()),
++ Expr::Index(v0) => Expr::Index(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => Expr::Let(v0.clone()),
++ Expr::Lit(v0) => Expr::Lit(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => Expr::Loop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => Expr::Macro(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => Expr::Match(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
++ Expr::Paren(v0) => Expr::Paren(v0.clone()),
++ Expr::Path(v0) => Expr::Path(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => Expr::Range(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => Expr::Reference(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => Expr::Return(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => Expr::Struct(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => Expr::Try(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => Expr::Type(v0.clone()),
++ Expr::Unary(v0) => Expr::Unary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
++ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::While(v0) => Expr::While(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => Expr::Yield(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprArray {
++ fn clone(&self) -> Self {
++ ExprArray {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssign {
++ fn clone(&self) -> Self {
++ ExprAssign {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ eq_token: self.eq_token.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssignOp {
++ fn clone(&self) -> Self {
++ ExprAssignOp {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAsync {
++ fn clone(&self) -> Self {
++ ExprAsync {
++ attrs: self.attrs.clone(),
++ async_token: self.async_token.clone(),
++ capture: self.capture.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAwait {
++ fn clone(&self) -> Self {
++ ExprAwait {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ await_token: self.await_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprBinary {
++ fn clone(&self) -> Self {
++ ExprBinary {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBlock {
++ fn clone(&self) -> Self {
++ ExprBlock {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBox {
++ fn clone(&self) -> Self {
++ ExprBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBreak {
++ fn clone(&self) -> Self {
++ ExprBreak {
++ attrs: self.attrs.clone(),
++ break_token: self.break_token.clone(),
++ label: self.label.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCall {
++ fn clone(&self) -> Self {
++ ExprCall {
++ attrs: self.attrs.clone(),
++ func: self.func.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCast {
++ fn clone(&self) -> Self {
++ ExprCast {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ as_token: self.as_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprClosure {
++ fn clone(&self) -> Self {
++ ExprClosure {
++ attrs: self.attrs.clone(),
++ asyncness: self.asyncness.clone(),
++ movability: self.movability.clone(),
++ capture: self.capture.clone(),
++ or1_token: self.or1_token.clone(),
++ inputs: self.inputs.clone(),
++ or2_token: self.or2_token.clone(),
++ output: self.output.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprContinue {
++ fn clone(&self) -> Self {
++ ExprContinue {
++ attrs: self.attrs.clone(),
++ continue_token: self.continue_token.clone(),
++ label: self.label.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprField {
++ fn clone(&self) -> Self {
++ ExprField {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ member: self.member.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprForLoop {
++ fn clone(&self) -> Self {
++ ExprForLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ for_token: self.for_token.clone(),
++ pat: self.pat.clone(),
++ in_token: self.in_token.clone(),
++ expr: self.expr.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprGroup {
++ fn clone(&self) -> Self {
++ ExprGroup {
++ attrs: self.attrs.clone(),
++ group_token: self.group_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprIf {
++ fn clone(&self) -> Self {
++ ExprIf {
++ attrs: self.attrs.clone(),
++ if_token: self.if_token.clone(),
++ cond: self.cond.clone(),
++ then_branch: self.then_branch.clone(),
++ else_branch: self.else_branch.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprIndex {
++ fn clone(&self) -> Self {
++ ExprIndex {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ bracket_token: self.bracket_token.clone(),
++ index: self.index.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLet {
++ fn clone(&self) -> Self {
++ ExprLet {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprLit {
++ fn clone(&self) -> Self {
++ ExprLit {
++ attrs: self.attrs.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLoop {
++ fn clone(&self) -> Self {
++ ExprLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ loop_token: self.loop_token.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMacro {
++ fn clone(&self) -> Self {
++ ExprMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMatch {
++ fn clone(&self) -> Self {
++ ExprMatch {
++ attrs: self.attrs.clone(),
++ match_token: self.match_token.clone(),
++ expr: self.expr.clone(),
++ brace_token: self.brace_token.clone(),
++ arms: self.arms.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMethodCall {
++ fn clone(&self) -> Self {
++ ExprMethodCall {
++ attrs: self.attrs.clone(),
++ receiver: self.receiver.clone(),
++ dot_token: self.dot_token.clone(),
++ method: self.method.clone(),
++ turbofish: self.turbofish.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprParen {
++ fn clone(&self) -> Self {
++ ExprParen {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprPath {
++ fn clone(&self) -> Self {
++ ExprPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRange {
++ fn clone(&self) -> Self {
++ ExprRange {
++ attrs: self.attrs.clone(),
++ from: self.from.clone(),
++ limits: self.limits.clone(),
++ to: self.to.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReference {
++ fn clone(&self) -> Self {
++ ExprReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ raw: self.raw.clone(),
++ mutability: self.mutability.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRepeat {
++ fn clone(&self) -> Self {
++ ExprRepeat {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReturn {
++ fn clone(&self) -> Self {
++ ExprReturn {
++ attrs: self.attrs.clone(),
++ return_token: self.return_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprStruct {
++ fn clone(&self) -> Self {
++ ExprStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ rest: self.rest.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTry {
++ fn clone(&self) -> Self {
++ ExprTry {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ question_token: self.question_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTryBlock {
++ fn clone(&self) -> Self {
++ ExprTryBlock {
++ attrs: self.attrs.clone(),
++ try_token: self.try_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTuple {
++ fn clone(&self) -> Self {
++ ExprTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprType {
++ fn clone(&self) -> Self {
++ ExprType {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprUnary {
++ fn clone(&self) -> Self {
++ ExprUnary {
++ attrs: self.attrs.clone(),
++ op: self.op.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprUnsafe {
++ fn clone(&self) -> Self {
++ ExprUnsafe {
++ attrs: self.attrs.clone(),
++ unsafe_token: self.unsafe_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprWhile {
++ fn clone(&self) -> Self {
++ ExprWhile {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ while_token: self.while_token.clone(),
++ cond: self.cond.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprYield {
++ fn clone(&self) -> Self {
++ ExprYield {
++ attrs: self.attrs.clone(),
++ yield_token: self.yield_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Field {
++ fn clone(&self) -> Self {
++ Field {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldPat {
++ fn clone(&self) -> Self {
++ FieldPat {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldValue {
++ fn clone(&self) -> Self {
++ FieldValue {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Fields {
++ fn clone(&self) -> Self {
++ match self {
++ Fields::Named(v0) => Fields::Named(v0.clone()),
++ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
++ Fields::Unit => Fields::Unit,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsNamed {
++ fn clone(&self) -> Self {
++ FieldsNamed {
++ brace_token: self.brace_token.clone(),
++ named: self.named.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsUnnamed {
++ fn clone(&self) -> Self {
++ FieldsUnnamed {
++ paren_token: self.paren_token.clone(),
++ unnamed: self.unnamed.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for File {
++ fn clone(&self) -> Self {
++ File {
++ shebang: self.shebang.clone(),
++ attrs: self.attrs.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FnArg {
++ fn clone(&self) -> Self {
++ match self {
++ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
++ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItem {
++ fn clone(&self) -> Self {
++ match self {
++ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
++ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
++ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
++ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
++ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemFn {
++ fn clone(&self) -> Self {
++ ForeignItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemMacro {
++ fn clone(&self) -> Self {
++ ForeignItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemStatic {
++ fn clone(&self) -> Self {
++ ForeignItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemType {
++ fn clone(&self) -> Self {
++ ForeignItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
++ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
++ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
++ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
++ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for GenericMethodArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
++ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericParam {
++ fn clone(&self) -> Self {
++ match self {
++ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
++ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
++ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Generics {
++ fn clone(&self) -> Self {
++ Generics {
++ lt_token: self.lt_token.clone(),
++ params: self.params.clone(),
++ gt_token: self.gt_token.clone(),
++ where_clause: self.where_clause.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItem {
++ fn clone(&self) -> Self {
++ match self {
++ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
++ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
++ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
++ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
++ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemConst {
++ fn clone(&self) -> Self {
++ ImplItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMacro {
++ fn clone(&self) -> Self {
++ ImplItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMethod {
++ fn clone(&self) -> Self {
++ ImplItemMethod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemType {
++ fn clone(&self) -> Self {
++ ImplItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Index {
++ fn clone(&self) -> Self {
++ Index {
++ index: self.index.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Item {
++ fn clone(&self) -> Self {
++ match self {
++ Item::Const(v0) => Item::Const(v0.clone()),
++ Item::Enum(v0) => Item::Enum(v0.clone()),
++ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
++ Item::Fn(v0) => Item::Fn(v0.clone()),
++ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
++ Item::Impl(v0) => Item::Impl(v0.clone()),
++ Item::Macro(v0) => Item::Macro(v0.clone()),
++ Item::Macro2(v0) => Item::Macro2(v0.clone()),
++ Item::Mod(v0) => Item::Mod(v0.clone()),
++ Item::Static(v0) => Item::Static(v0.clone()),
++ Item::Struct(v0) => Item::Struct(v0.clone()),
++ Item::Trait(v0) => Item::Trait(v0.clone()),
++ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
++ Item::Type(v0) => Item::Type(v0.clone()),
++ Item::Union(v0) => Item::Union(v0.clone()),
++ Item::Use(v0) => Item::Use(v0.clone()),
++ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemConst {
++ fn clone(&self) -> Self {
++ ItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemEnum {
++ fn clone(&self) -> Self {
++ ItemEnum {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ enum_token: self.enum_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemExternCrate {
++ fn clone(&self) -> Self {
++ ItemExternCrate {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ extern_token: self.extern_token.clone(),
++ crate_token: self.crate_token.clone(),
++ ident: self.ident.clone(),
++ rename: self.rename.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemFn {
++ fn clone(&self) -> Self {
++ ItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemForeignMod {
++ fn clone(&self) -> Self {
++ ItemForeignMod {
++ attrs: self.attrs.clone(),
++ abi: self.abi.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemImpl {
++ fn clone(&self) -> Self {
++ ItemImpl {
++ attrs: self.attrs.clone(),
++ defaultness: self.defaultness.clone(),
++ unsafety: self.unsafety.clone(),
++ impl_token: self.impl_token.clone(),
++ generics: self.generics.clone(),
++ trait_: self.trait_.clone(),
++ self_ty: self.self_ty.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro {
++ fn clone(&self) -> Self {
++ ItemMacro {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro2 {
++ fn clone(&self) -> Self {
++ ItemMacro2 {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ macro_token: self.macro_token.clone(),
++ ident: self.ident.clone(),
++ rules: self.rules.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMod {
++ fn clone(&self) -> Self {
++ ItemMod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ mod_token: self.mod_token.clone(),
++ ident: self.ident.clone(),
++ content: self.content.clone(),
++ semi: self.semi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStatic {
++ fn clone(&self) -> Self {
++ ItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStruct {
++ fn clone(&self) -> Self {
++ ItemStruct {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ struct_token: self.struct_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTrait {
++ fn clone(&self) -> Self {
++ ItemTrait {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ unsafety: self.unsafety.clone(),
++ auto_token: self.auto_token.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ supertraits: self.supertraits.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTraitAlias {
++ fn clone(&self) -> Self {
++ ItemTraitAlias {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ bounds: self.bounds.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemType {
++ fn clone(&self) -> Self {
++ ItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUnion {
++ fn clone(&self) -> Self {
++ ItemUnion {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ union_token: self.union_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUse {
++ fn clone(&self) -> Self {
++ ItemUse {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ use_token: self.use_token.clone(),
++ leading_colon: self.leading_colon.clone(),
++ tree: self.tree.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Label {
++ fn clone(&self) -> Self {
++ Label {
++ name: self.name.clone(),
++ colon_token: self.colon_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for LifetimeDef {
++ fn clone(&self) -> Self {
++ LifetimeDef {
++ attrs: self.attrs.clone(),
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++impl Clone for Lit {
++ fn clone(&self) -> Self {
++ match self {
++ Lit::Str(v0) => Lit::Str(v0.clone()),
++ Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()),
++ Lit::Byte(v0) => Lit::Byte(v0.clone()),
++ Lit::Char(v0) => Lit::Char(v0.clone()),
++ Lit::Int(v0) => Lit::Int(v0.clone()),
++ Lit::Float(v0) => Lit::Float(v0.clone()),
++ Lit::Bool(v0) => Lit::Bool(v0.clone()),
++ Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()),
++ }
++ }
++}
++impl Clone for LitBool {
++ fn clone(&self) -> Self {
++ LitBool {
++ value: self.value.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Local {
++ fn clone(&self) -> Self {
++ Local {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ init: self.init.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Macro {
++ fn clone(&self) -> Self {
++ Macro {
++ path: self.path.clone(),
++ bang_token: self.bang_token.clone(),
++ delimiter: self.delimiter.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MacroDelimiter {
++ fn clone(&self) -> Self {
++ match self {
++ MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()),
++ MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()),
++ MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Member {
++ fn clone(&self) -> Self {
++ match self {
++ Member::Named(v0) => Member::Named(v0.clone()),
++ Member::Unnamed(v0) => Member::Unnamed(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Meta {
++ fn clone(&self) -> Self {
++ match self {
++ Meta::Path(v0) => Meta::Path(v0.clone()),
++ Meta::List(v0) => Meta::List(v0.clone()),
++ Meta::NameValue(v0) => Meta::NameValue(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaList {
++ fn clone(&self) -> Self {
++ MetaList {
++ path: self.path.clone(),
++ paren_token: self.paren_token.clone(),
++ nested: self.nested.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaNameValue {
++ fn clone(&self) -> Self {
++ MetaNameValue {
++ path: self.path.clone(),
++ eq_token: self.eq_token.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for MethodTurbofish {
++ fn clone(&self) -> Self {
++ MethodTurbofish {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for NestedMeta {
++ fn clone(&self) -> Self {
++ match self {
++ NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
++ NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ParenthesizedGenericArguments {
++ fn clone(&self) -> Self {
++ ParenthesizedGenericArguments {
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Pat {
++ fn clone(&self) -> Self {
++ match self {
++ Pat::Box(v0) => Pat::Box(v0.clone()),
++ Pat::Ident(v0) => Pat::Ident(v0.clone()),
++ Pat::Lit(v0) => Pat::Lit(v0.clone()),
++ Pat::Macro(v0) => Pat::Macro(v0.clone()),
++ Pat::Or(v0) => Pat::Or(v0.clone()),
++ Pat::Path(v0) => Pat::Path(v0.clone()),
++ Pat::Range(v0) => Pat::Range(v0.clone()),
++ Pat::Reference(v0) => Pat::Reference(v0.clone()),
++ Pat::Rest(v0) => Pat::Rest(v0.clone()),
++ Pat::Slice(v0) => Pat::Slice(v0.clone()),
++ Pat::Struct(v0) => Pat::Struct(v0.clone()),
++ Pat::Tuple(v0) => Pat::Tuple(v0.clone()),
++ Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()),
++ Pat::Type(v0) => Pat::Type(v0.clone()),
++ Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
++ Pat::Wild(v0) => Pat::Wild(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatBox {
++ fn clone(&self) -> Self {
++ PatBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatIdent {
++ fn clone(&self) -> Self {
++ PatIdent {
++ attrs: self.attrs.clone(),
++ by_ref: self.by_ref.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ subpat: self.subpat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatLit {
++ fn clone(&self) -> Self {
++ PatLit {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatMacro {
++ fn clone(&self) -> Self {
++ PatMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatOr {
++ fn clone(&self) -> Self {
++ PatOr {
++ attrs: self.attrs.clone(),
++ leading_vert: self.leading_vert.clone(),
++ cases: self.cases.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatPath {
++ fn clone(&self) -> Self {
++ PatPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRange {
++ fn clone(&self) -> Self {
++ PatRange {
++ attrs: self.attrs.clone(),
++ lo: self.lo.clone(),
++ limits: self.limits.clone(),
++ hi: self.hi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatReference {
++ fn clone(&self) -> Self {
++ PatReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ mutability: self.mutability.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRest {
++ fn clone(&self) -> Self {
++ PatRest {
++ attrs: self.attrs.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatSlice {
++ fn clone(&self) -> Self {
++ PatSlice {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatStruct {
++ fn clone(&self) -> Self {
++ PatStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTuple {
++ fn clone(&self) -> Self {
++ PatTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTupleStruct {
++ fn clone(&self) -> Self {
++ PatTupleStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatType {
++ fn clone(&self) -> Self {
++ PatType {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatWild {
++ fn clone(&self) -> Self {
++ PatWild {
++ attrs: self.attrs.clone(),
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Path {
++ fn clone(&self) -> Self {
++ Path {
++ leading_colon: self.leading_colon.clone(),
++ segments: self.segments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathArguments {
++ fn clone(&self) -> Self {
++ match self {
++ PathArguments::None => PathArguments::None,
++ PathArguments::AngleBracketed(v0) => PathArguments::AngleBracketed(v0.clone()),
++ PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathSegment {
++ fn clone(&self) -> Self {
++ PathSegment {
++ ident: self.ident.clone(),
++ arguments: self.arguments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateEq {
++ fn clone(&self) -> Self {
++ PredicateEq {
++ lhs_ty: self.lhs_ty.clone(),
++ eq_token: self.eq_token.clone(),
++ rhs_ty: self.rhs_ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateLifetime {
++ fn clone(&self) -> Self {
++ PredicateLifetime {
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateType {
++ fn clone(&self) -> Self {
++ PredicateType {
++ lifetimes: self.lifetimes.clone(),
++ bounded_ty: self.bounded_ty.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for QSelf {
++ fn clone(&self) -> Self {
++ QSelf {
++ lt_token: self.lt_token.clone(),
++ ty: self.ty.clone(),
++ position: self.position.clone(),
++ as_token: self.as_token.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Copy for RangeLimits {}
++#[cfg(feature = "full")]
++impl Clone for RangeLimits {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Receiver {
++ fn clone(&self) -> Self {
++ Receiver {
++ attrs: self.attrs.clone(),
++ reference: self.reference.clone(),
++ mutability: self.mutability.clone(),
++ self_token: self.self_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ReturnType {
++ fn clone(&self) -> Self {
++ match self {
++ ReturnType::Default => ReturnType::Default,
++ ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Signature {
++ fn clone(&self) -> Self {
++ Signature {
++ constness: self.constness.clone(),
++ asyncness: self.asyncness.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Stmt {
++ fn clone(&self) -> Self {
++ match self {
++ Stmt::Local(v0) => Stmt::Local(v0.clone()),
++ Stmt::Item(v0) => Stmt::Item(v0.clone()),
++ Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
++ Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBound {
++ fn clone(&self) -> Self {
++ TraitBound {
++ paren_token: self.paren_token.clone(),
++ modifier: self.modifier.clone(),
++ lifetimes: self.lifetimes.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBoundModifier {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItem {
++ fn clone(&self) -> Self {
++ match self {
++ TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
++ TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
++ TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
++ TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
++ TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemConst {
++ fn clone(&self) -> Self {
++ TraitItemConst {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMacro {
++ fn clone(&self) -> Self {
++ TraitItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMethod {
++ fn clone(&self) -> Self {
++ TraitItemMethod {
++ attrs: self.attrs.clone(),
++ sig: self.sig.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemType {
++ fn clone(&self) -> Self {
++ TraitItemType {
++ attrs: self.attrs.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Type {
++ fn clone(&self) -> Self {
++ match self {
++ Type::Array(v0) => Type::Array(v0.clone()),
++ Type::BareFn(v0) => Type::BareFn(v0.clone()),
++ Type::Group(v0) => Type::Group(v0.clone()),
++ Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()),
++ Type::Infer(v0) => Type::Infer(v0.clone()),
++ Type::Macro(v0) => Type::Macro(v0.clone()),
++ Type::Never(v0) => Type::Never(v0.clone()),
++ Type::Paren(v0) => Type::Paren(v0.clone()),
++ Type::Path(v0) => Type::Path(v0.clone()),
++ Type::Ptr(v0) => Type::Ptr(v0.clone()),
++ Type::Reference(v0) => Type::Reference(v0.clone()),
++ Type::Slice(v0) => Type::Slice(v0.clone()),
++ Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
++ Type::Tuple(v0) => Type::Tuple(v0.clone()),
++ Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeArray {
++ fn clone(&self) -> Self {
++ TypeArray {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeBareFn {
++ fn clone(&self) -> Self {
++ TypeBareFn {
++ lifetimes: self.lifetimes.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeGroup {
++ fn clone(&self) -> Self {
++ TypeGroup {
++ group_token: self.group_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeImplTrait {
++ fn clone(&self) -> Self {
++ TypeImplTrait {
++ impl_token: self.impl_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeInfer {
++ fn clone(&self) -> Self {
++ TypeInfer {
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeMacro {
++ fn clone(&self) -> Self {
++ TypeMacro {
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeNever {
++ fn clone(&self) -> Self {
++ TypeNever {
++ bang_token: self.bang_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParam {
++ fn clone(&self) -> Self {
++ TypeParam {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParamBound {
++ fn clone(&self) -> Self {
++ match self {
++ TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
++ TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParen {
++ fn clone(&self) -> Self {
++ TypeParen {
++ paren_token: self.paren_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePath {
++ fn clone(&self) -> Self {
++ TypePath {
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePtr {
++ fn clone(&self) -> Self {
++ TypePtr {
++ star_token: self.star_token.clone(),
++ const_token: self.const_token.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeReference {
++ fn clone(&self) -> Self {
++ TypeReference {
++ and_token: self.and_token.clone(),
++ lifetime: self.lifetime.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeSlice {
++ fn clone(&self) -> Self {
++ TypeSlice {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTraitObject {
++ fn clone(&self) -> Self {
++ TypeTraitObject {
++ dyn_token: self.dyn_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTuple {
++ fn clone(&self) -> Self {
++ TypeTuple {
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for UnOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGlob {
++ fn clone(&self) -> Self {
++ UseGlob {
++ star_token: self.star_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGroup {
++ fn clone(&self) -> Self {
++ UseGroup {
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseName {
++ fn clone(&self) -> Self {
++ UseName {
++ ident: self.ident.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UsePath {
++ fn clone(&self) -> Self {
++ UsePath {
++ ident: self.ident.clone(),
++ colon2_token: self.colon2_token.clone(),
++ tree: self.tree.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseRename {
++ fn clone(&self) -> Self {
++ UseRename {
++ ident: self.ident.clone(),
++ as_token: self.as_token.clone(),
++ rename: self.rename.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseTree {
++ fn clone(&self) -> Self {
++ match self {
++ UseTree::Path(v0) => UseTree::Path(v0.clone()),
++ UseTree::Name(v0) => UseTree::Name(v0.clone()),
++ UseTree::Rename(v0) => UseTree::Rename(v0.clone()),
++ UseTree::Glob(v0) => UseTree::Glob(v0.clone()),
++ UseTree::Group(v0) => UseTree::Group(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variadic {
++ fn clone(&self) -> Self {
++ Variadic {
++ attrs: self.attrs.clone(),
++ dots: self.dots.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variant {
++ fn clone(&self) -> Self {
++ Variant {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ fields: self.fields.clone(),
++ discriminant: self.discriminant.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisCrate {
++ fn clone(&self) -> Self {
++ VisCrate {
++ crate_token: self.crate_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisPublic {
++ fn clone(&self) -> Self {
++ VisPublic {
++ pub_token: self.pub_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisRestricted {
++ fn clone(&self) -> Self {
++ VisRestricted {
++ pub_token: self.pub_token.clone(),
++ paren_token: self.paren_token.clone(),
++ in_token: self.in_token.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Visibility {
++ fn clone(&self) -> Self {
++ match self {
++ Visibility::Public(v0) => Visibility::Public(v0.clone()),
++ Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
++ Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
++ Visibility::Inherited => Visibility::Inherited,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WhereClause {
++ fn clone(&self) -> Self {
++ WhereClause {
++ where_token: self.where_token.clone(),
++ predicates: self.predicates.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WherePredicate {
++ fn clone(&self) -> Self {
++ match self {
++ WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
++ WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
++ WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs
+new file mode 100644
+index 0000000000..72baab05f4
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/debug.rs
+@@ -0,0 +1,2857 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++use crate::*;
++use std::fmt::{self, Debug};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Abi {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Abi");
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("name", &self.name);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AngleBracketedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Arm {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Arm");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("guard", &self.guard);
++ formatter.field("fat_arrow_token", &self.fat_arrow_token);
++ formatter.field("body", &self.body);
++ formatter.field("comma", &self.comma);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AttrStyle {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ AttrStyle::Outer => formatter.write_str("Outer"),
++ AttrStyle::Inner(v0) => {
++ let mut formatter = formatter.debug_tuple("Inner");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Attribute {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Attribute");
++ formatter.field("pound_token", &self.pound_token);
++ formatter.field("style", &self.style);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("path", &self.path);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BareFnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BareFnArg");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("name", &self.name);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BinOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ BinOp::Add(v0) => {
++ let mut formatter = formatter.debug_tuple("Add");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Sub(v0) => {
++ let mut formatter = formatter.debug_tuple("Sub");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Mul(v0) => {
++ let mut formatter = formatter.debug_tuple("Mul");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Div(v0) => {
++ let mut formatter = formatter.debug_tuple("Div");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Rem(v0) => {
++ let mut formatter = formatter.debug_tuple("Rem");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::And(v0) => {
++ let mut formatter = formatter.debug_tuple("And");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXor(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXor");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAnd(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAnd");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOr(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shl(v0) => {
++ let mut formatter = formatter.debug_tuple("Shl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shr(v0) => {
++ let mut formatter = formatter.debug_tuple("Shr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Lt(v0) => {
++ let mut formatter = formatter.debug_tuple("Lt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Le(v0) => {
++ let mut formatter = formatter.debug_tuple("Le");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ne(v0) => {
++ let mut formatter = formatter.debug_tuple("Ne");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ge(v0) => {
++ let mut formatter = formatter.debug_tuple("Ge");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Gt(v0) => {
++ let mut formatter = formatter.debug_tuple("Gt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::AddEq(v0) => {
++ let mut formatter = formatter.debug_tuple("AddEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::SubEq(v0) => {
++ let mut formatter = formatter.debug_tuple("SubEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::MulEq(v0) => {
++ let mut formatter = formatter.debug_tuple("MulEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::DivEq(v0) => {
++ let mut formatter = formatter.debug_tuple("DivEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::RemEq(v0) => {
++ let mut formatter = formatter.debug_tuple("RemEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXorEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXorEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAndEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAndEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShlEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShlEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Binding {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Binding");
++ formatter.field("ident", &self.ident);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Block {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Block");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("stmts", &self.stmts);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BoundLifetimes {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BoundLifetimes");
++ formatter.field("for_token", &self.for_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ConstParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ConstParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Constraint {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Constraint");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for Data {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Data::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataEnum");
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataStruct");
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataUnion");
++ formatter.field("union_token", &self.union_token);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DeriveInput {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DeriveInput");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("data", &self.data);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Expr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ let mut formatter = formatter.debug_tuple("Assign");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ let mut formatter = formatter.debug_tuple("AssignOp");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ let mut formatter = formatter.debug_tuple("Async");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ let mut formatter = formatter.debug_tuple("Await");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Binary(v0) => {
++ let mut formatter = formatter.debug_tuple("Binary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ let mut formatter = formatter.debug_tuple("Block");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ let mut formatter = formatter.debug_tuple("Break");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Call(v0) => {
++ let mut formatter = formatter.debug_tuple("Call");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Cast(v0) => {
++ let mut formatter = formatter.debug_tuple("Cast");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ let mut formatter = formatter.debug_tuple("Closure");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ let mut formatter = formatter.debug_tuple("Continue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Field(v0) => {
++ let mut formatter = formatter.debug_tuple("Field");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ let mut formatter = formatter.debug_tuple("ForLoop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ let mut formatter = formatter.debug_tuple("If");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Index(v0) => {
++ let mut formatter = formatter.debug_tuple("Index");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ let mut formatter = formatter.debug_tuple("Let");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ let mut formatter = formatter.debug_tuple("Loop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ let mut formatter = formatter.debug_tuple("Match");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ let mut formatter = formatter.debug_tuple("MethodCall");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ let mut formatter = formatter.debug_tuple("Repeat");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ let mut formatter = formatter.debug_tuple("Return");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ let mut formatter = formatter.debug_tuple("Try");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ let mut formatter = formatter.debug_tuple("TryBlock");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Unary(v0) => {
++ let mut formatter = formatter.debug_tuple("Unary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ let mut formatter = formatter.debug_tuple("Unsafe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ let mut formatter = formatter.debug_tuple("While");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ let mut formatter = formatter.debug_tuple("Yield");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprArray");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssign {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssign");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssignOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssignOp");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAsync {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAsync");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("async_token", &self.async_token);
++ formatter.field("capture", &self.capture);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAwait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAwait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("await_token", &self.await_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprBinary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBinary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBreak {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBreak");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("break_token", &self.break_token);
++ formatter.field("label", &self.label);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("func", &self.func);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCast {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCast");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprClosure {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprClosure");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("movability", &self.movability);
++ formatter.field("capture", &self.capture);
++ formatter.field("or1_token", &self.or1_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("or2_token", &self.or2_token);
++ formatter.field("output", &self.output);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprContinue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprContinue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("continue_token", &self.continue_token);
++ formatter.field("label", &self.label);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprField {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprField");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("member", &self.member);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprForLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprForLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("for_token", &self.for_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprGroup");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("group_token", &self.group_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprIf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIf");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("if_token", &self.if_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("then_branch", &self.then_branch);
++ formatter.field("else_branch", &self.else_branch);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprIndex {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIndex");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("index", &self.index);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLet {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLet");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("loop_token", &self.loop_token);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMatch {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMatch");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("match_token", &self.match_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("arms", &self.arms);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMethodCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMethodCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("receiver", &self.receiver);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("method", &self.method);
++ formatter.field("turbofish", &self.turbofish);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprParen");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("from", &self.from);
++ formatter.field("limits", &self.limits);
++ formatter.field("to", &self.to);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("raw", &self.raw);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRepeat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRepeat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReturn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReturn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("return_token", &self.return_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.field("rest", &self.rest);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTry {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTry");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("question_token", &self.question_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTryBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTryBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("try_token", &self.try_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprUnary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("op", &self.op);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprUnsafe {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnsafe");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("unsafe_token", &self.unsafe_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprWhile {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprWhile");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("while_token", &self.while_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprYield {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprYield");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("yield_token", &self.yield_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Field {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Field");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldPat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldPat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldValue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Fields {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Fields::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unit => formatter.write_str("Unit"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsNamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsNamed");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("named", &self.named);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsUnnamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsUnnamed");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("unnamed", &self.unnamed);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for File {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("File");
++ formatter.field("shebang", &self.shebang);
++ formatter.field("attrs", &self.attrs);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ FnArg::Receiver(v0) => {
++ let mut formatter = formatter.debug_tuple("Receiver");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ FnArg::Typed(v0) => {
++ let mut formatter = formatter.debug_tuple("Typed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ForeignItem::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Binding(v0) => {
++ let mut formatter = formatter.debug_tuple("Binding");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Constraint(v0) => {
++ let mut formatter = formatter.debug_tuple("Constraint");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for GenericMethodArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericMethodArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericParam::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Generics {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Generics");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("params", &self.params);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.field("where_clause", &self.where_clause);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ImplItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Index");
++ formatter.field("index", &self.index);
++ formatter.field("span", &self.span);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Item {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Item::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ExternCrate(v0) => {
++ let mut formatter = formatter.debug_tuple("ExternCrate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ForeignMod(v0) => {
++ let mut formatter = formatter.debug_tuple("ForeignMod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Impl(v0) => {
++ let mut formatter = formatter.debug_tuple("Impl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro2(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro2");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Mod(v0) => {
++ let mut formatter = formatter.debug_tuple("Mod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::TraitAlias(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitAlias");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Use(v0) => {
++ let mut formatter = formatter.debug_tuple("Use");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemEnum");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemExternCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemExternCrate");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("crate_token", &self.crate_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rename", &self.rename);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemForeignMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemForeignMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("abi", &self.abi);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemImpl {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemImpl");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("generics", &self.generics);
++ formatter.field("trait_", &self.trait_);
++ formatter.field("self_ty", &self.self_ty);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro2 {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro2");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("macro_token", &self.macro_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rules", &self.rules);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("mod_token", &self.mod_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("content", &self.content);
++ formatter.field("semi", &self.semi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTrait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("auto_token", &self.auto_token);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("supertraits", &self.supertraits);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTraitAlias {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTraitAlias");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUnion");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("union_token", &self.union_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUse {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUse");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("use_token", &self.use_token);
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("tree", &self.tree);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Label {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Label");
++ formatter.field("name", &self.name);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.finish()
++ }
++}
++impl Debug for Lifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Lifetime");
++ formatter.field("apostrophe", &self.apostrophe);
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for LifetimeDef {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("LifetimeDef");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++impl Debug for Lit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Lit::Str(v0) => {
++ let mut formatter = formatter.debug_tuple("Str");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::ByteStr(v0) => {
++ let mut formatter = formatter.debug_tuple("ByteStr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Byte(v0) => {
++ let mut formatter = formatter.debug_tuple("Byte");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Char(v0) => {
++ let mut formatter = formatter.debug_tuple("Char");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Int(v0) => {
++ let mut formatter = formatter.debug_tuple("Int");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Float(v0) => {
++ let mut formatter = formatter.debug_tuple("Float");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Bool(v0) => {
++ let mut formatter = formatter.debug_tuple("Bool");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Local {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Local");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("init", &self.init);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Macro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Macro");
++ formatter.field("path", &self.path);
++ formatter.field("bang_token", &self.bang_token);
++ formatter.field("delimiter", &self.delimiter);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MacroDelimiter {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ MacroDelimiter::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Brace(v0) => {
++ let mut formatter = formatter.debug_tuple("Brace");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Bracket(v0) => {
++ let mut formatter = formatter.debug_tuple("Bracket");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Member::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Meta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Meta::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::List(v0) => {
++ let mut formatter = formatter.debug_tuple("List");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::NameValue(v0) => {
++ let mut formatter = formatter.debug_tuple("NameValue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaList {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaList");
++ formatter.field("path", &self.path);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("nested", &self.nested);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaNameValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaNameValue");
++ formatter.field("path", &self.path);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for MethodTurbofish {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MethodTurbofish");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for NestedMeta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ NestedMeta::Meta(v0) => {
++ let mut formatter = formatter.debug_tuple("Meta");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ NestedMeta::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ParenthesizedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Pat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Pat::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Ident(v0) => {
++ let mut formatter = formatter.debug_tuple("Ident");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Rest(v0) => {
++ let mut formatter = formatter.debug_tuple("Rest");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::TupleStruct(v0) => {
++ let mut formatter = formatter.debug_tuple("TupleStruct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Wild(v0) => {
++ let mut formatter = formatter.debug_tuple("Wild");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatIdent {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatIdent");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("by_ref", &self.by_ref);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("subpat", &self.subpat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatOr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatOr");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("leading_vert", &self.leading_vert);
++ formatter.field("cases", &self.cases);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lo", &self.lo);
++ formatter.field("limits", &self.limits);
++ formatter.field("hi", &self.hi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRest {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRest");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatSlice");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTupleStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTupleStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatWild {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatWild");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Path {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Path");
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("segments", &self.segments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ PathArguments::None => formatter.write_str("None"),
++ PathArguments::AngleBracketed(v0) => {
++ let mut formatter = formatter.debug_tuple("AngleBracketed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ PathArguments::Parenthesized(v0) => {
++ let mut formatter = formatter.debug_tuple("Parenthesized");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathSegment {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PathSegment");
++ formatter.field("ident", &self.ident);
++ formatter.field("arguments", &self.arguments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateEq {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateEq");
++ formatter.field("lhs_ty", &self.lhs_ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("rhs_ty", &self.rhs_ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateLifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateLifetime");
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateType");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("bounded_ty", &self.bounded_ty);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for QSelf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("QSelf");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("position", &self.position);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for RangeLimits {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ RangeLimits::HalfOpen(v0) => {
++ let mut formatter = formatter.debug_tuple("HalfOpen");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ RangeLimits::Closed(v0) => {
++ let mut formatter = formatter.debug_tuple("Closed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Receiver {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Receiver");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("reference", &self.reference);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("self_token", &self.self_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ReturnType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ReturnType::Default => formatter.write_str("Default"),
++ ReturnType::Type(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Signature {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Signature");
++ formatter.field("constness", &self.constness);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Stmt {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Stmt::Local(v0) => {
++ let mut formatter = formatter.debug_tuple("Local");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Item(v0) => {
++ let mut formatter = formatter.debug_tuple("Item");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Expr(v0) => {
++ let mut formatter = formatter.debug_tuple("Expr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Semi(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Semi");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitBound");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("modifier", &self.modifier);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBoundModifier {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitBoundModifier::None => formatter.write_str("None"),
++ TraitBoundModifier::Maybe(v0) => {
++ let mut formatter = formatter.debug_tuple("Maybe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("sig", &self.sig);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Type {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Type::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::BareFn(v0) => {
++ let mut formatter = formatter.debug_tuple("BareFn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::ImplTrait(v0) => {
++ let mut formatter = formatter.debug_tuple("ImplTrait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Infer(v0) => {
++ let mut formatter = formatter.debug_tuple("Infer");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Never(v0) => {
++ let mut formatter = formatter.debug_tuple("Never");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Ptr(v0) => {
++ let mut formatter = formatter.debug_tuple("Ptr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::TraitObject(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitObject");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeArray");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeBareFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeBareFn");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeGroup");
++ formatter.field("group_token", &self.group_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeImplTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeImplTrait");
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeInfer {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeInfer");
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeMacro");
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeNever {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeNever");
++ formatter.field("bang_token", &self.bang_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParamBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TypeParamBound::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParen");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePath");
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePtr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePtr");
++ formatter.field("star_token", &self.star_token);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeReference");
++ formatter.field("and_token", &self.and_token);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeSlice");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTraitObject {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTraitObject");
++ formatter.field("dyn_token", &self.dyn_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTuple");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for UnOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UnOp::Deref(v0) => {
++ let mut formatter = formatter.debug_tuple("Deref");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Not(v0) => {
++ let mut formatter = formatter.debug_tuple("Not");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Neg(v0) => {
++ let mut formatter = formatter.debug_tuple("Neg");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGlob {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGlob");
++ formatter.field("star_token", &self.star_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGroup");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseName {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseName");
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UsePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UsePath");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("tree", &self.tree);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseRename {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseRename");
++ formatter.field("ident", &self.ident);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("rename", &self.rename);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseTree {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UseTree::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Name(v0) => {
++ let mut formatter = formatter.debug_tuple("Name");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Rename(v0) => {
++ let mut formatter = formatter.debug_tuple("Rename");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Glob(v0) => {
++ let mut formatter = formatter.debug_tuple("Glob");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variadic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variadic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dots", &self.dots);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variant {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variant");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("fields", &self.fields);
++ formatter.field("discriminant", &self.discriminant);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisCrate");
++ formatter.field("crate_token", &self.crate_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisPublic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisPublic");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisRestricted {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisRestricted");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Visibility {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Visibility::Public(v0) => {
++ let mut formatter = formatter.debug_tuple("Public");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Crate(v0) => {
++ let mut formatter = formatter.debug_tuple("Crate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Restricted(v0) => {
++ let mut formatter = formatter.debug_tuple("Restricted");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Inherited => formatter.write_str("Inherited"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WhereClause {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("WhereClause");
++ formatter.field("where_token", &self.where_token);
++ formatter.field("predicates", &self.predicates);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WherePredicate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ WherePredicate::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs
+new file mode 100644
+index 0000000000..15b2bcbbde
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/eq.rs
+@@ -0,0 +1,1930 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Abi {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Abi {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AngleBracketedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AngleBracketedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.colon2_token == other.colon2_token && self.args == other.args
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Arm {}
++#[cfg(feature = "full")]
++impl PartialEq for Arm {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.pat == other.pat
++ && self.guard == other.guard
++ && self.body == other.body
++ && self.comma == other.comma
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AttrStyle {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (AttrStyle::Outer, AttrStyle::Outer) => true,
++ (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Attribute {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Attribute {
++ fn eq(&self, other: &Self) -> bool {
++ self.style == other.style
++ && self.path == other.path
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BareFnArg {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BareFnArg {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BinOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (BinOp::Add(_), BinOp::Add(_)) => true,
++ (BinOp::Sub(_), BinOp::Sub(_)) => true,
++ (BinOp::Mul(_), BinOp::Mul(_)) => true,
++ (BinOp::Div(_), BinOp::Div(_)) => true,
++ (BinOp::Rem(_), BinOp::Rem(_)) => true,
++ (BinOp::And(_), BinOp::And(_)) => true,
++ (BinOp::Or(_), BinOp::Or(_)) => true,
++ (BinOp::BitXor(_), BinOp::BitXor(_)) => true,
++ (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true,
++ (BinOp::BitOr(_), BinOp::BitOr(_)) => true,
++ (BinOp::Shl(_), BinOp::Shl(_)) => true,
++ (BinOp::Shr(_), BinOp::Shr(_)) => true,
++ (BinOp::Eq(_), BinOp::Eq(_)) => true,
++ (BinOp::Lt(_), BinOp::Lt(_)) => true,
++ (BinOp::Le(_), BinOp::Le(_)) => true,
++ (BinOp::Ne(_), BinOp::Ne(_)) => true,
++ (BinOp::Ge(_), BinOp::Ge(_)) => true,
++ (BinOp::Gt(_), BinOp::Gt(_)) => true,
++ (BinOp::AddEq(_), BinOp::AddEq(_)) => true,
++ (BinOp::SubEq(_), BinOp::SubEq(_)) => true,
++ (BinOp::MulEq(_), BinOp::MulEq(_)) => true,
++ (BinOp::DivEq(_), BinOp::DivEq(_)) => true,
++ (BinOp::RemEq(_), BinOp::RemEq(_)) => true,
++ (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
++ (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
++ (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
++ (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
++ (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Binding {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Binding {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Block {}
++#[cfg(feature = "full")]
++impl PartialEq for Block {
++ fn eq(&self, other: &Self) -> bool {
++ self.stmts == other.stmts
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BoundLifetimes {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BoundLifetimes {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ConstParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ConstParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Constraint {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Constraint {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for Data {}
++#[cfg(feature = "derive")]
++impl PartialEq for Data {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Data::Struct(self0), Data::Struct(other0)) => self0 == other0,
++ (Data::Enum(self0), Data::Enum(other0)) => self0 == other0,
++ (Data::Union(self0), Data::Union(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataEnum {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.variants == other.variants
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataStruct {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataUnion {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DeriveInput {}
++#[cfg(feature = "derive")]
++impl PartialEq for DeriveInput {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.data == other.data
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Expr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Expr {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ #[cfg(feature = "full")]
++ (Expr::Array(self0), Expr::Array(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
++ (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
++ (Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
++ (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
++ (Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::If(self0), Expr::If(other0)) => self0 == other0,
++ (Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
++ (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
++ (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
++ (Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
++ (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
++ (Expr::Verbatim(self0), Expr::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ #[cfg(feature = "full")]
++ (Expr::While(self0), Expr::While(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprArray {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssign {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssign {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.left == other.left && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssignOp {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssignOp {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAsync {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAsync {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.capture == other.capture && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAwait {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAwait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprBinary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprBinary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBox {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBreak {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBreak {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCall {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.func == other.func && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCast {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCast {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprClosure {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprClosure {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.asyncness == other.asyncness
++ && self.movability == other.movability
++ && self.capture == other.capture
++ && self.inputs == other.inputs
++ && self.output == other.output
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprContinue {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprContinue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprField {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprField {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base && self.member == other.member
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprForLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprForLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.pat == other.pat
++ && self.expr == other.expr
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprIf {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprIf {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.cond == other.cond
++ && self.then_branch == other.then_branch
++ && self.else_branch == other.else_branch
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprIndex {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprIndex {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLet {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLet {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprLit {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMatch {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMatch {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMethodCall {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMethodCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.receiver == other.receiver
++ && self.method == other.method
++ && self.turbofish == other.turbofish
++ && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprPath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRange {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.from == other.from
++ && self.limits == other.limits
++ && self.to == other.to
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReference {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRepeat {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRepeat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReturn {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReturn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ && self.rest == other.rest
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTry {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTry {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTryBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTryBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprType {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprUnary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprUnary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprUnsafe {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprUnsafe {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprWhile {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprWhile {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.cond == other.cond
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprYield {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprYield {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Field {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Field {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldPat {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldPat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldValue {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Fields {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Fields {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Fields::Named(self0), Fields::Named(other0)) => self0 == other0,
++ (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0,
++ (Fields::Unit, Fields::Unit) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsNamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsNamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.named == other.named
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsUnnamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsUnnamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.unnamed == other.unnamed
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for File {}
++#[cfg(feature = "full")]
++impl PartialEq for File {
++ fn eq(&self, other: &Self) -> bool {
++ self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FnArg {}
++#[cfg(feature = "full")]
++impl PartialEq for FnArg {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0,
++ (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0,
++ (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0,
++ (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0,
++ (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0,
++ (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericArgument {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Type(self0), GenericArgument::Type(other0)) => self0 == other0,
++ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => self0 == other0,
++ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for GenericMethodArgument {}
++#[cfg(feature = "full")]
++impl PartialEq for GenericMethodArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
++ self0 == other0
++ }
++ (GenericMethodArgument::Const(self0), GenericMethodArgument::Const(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericParam {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
++ (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => self0 == other0,
++ (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Generics {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Generics {
++ fn eq(&self, other: &Self) -> bool {
++ self.lt_token == other.lt_token
++ && self.params == other.params
++ && self.gt_token == other.gt_token
++ && self.where_clause == other.where_clause
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
++ (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
++ (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
++ (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
++ (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Item {}
++#[cfg(feature = "full")]
++impl PartialEq for Item {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Item::Const(self0), Item::Const(other0)) => self0 == other0,
++ (Item::Enum(self0), Item::Enum(other0)) => self0 == other0,
++ (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0,
++ (Item::Fn(self0), Item::Fn(other0)) => self0 == other0,
++ (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
++ (Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
++ (Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
++ (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
++ (Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
++ (Item::Static(self0), Item::Static(other0)) => self0 == other0,
++ (Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
++ (Item::Trait(self0), Item::Trait(other0)) => self0 == other0,
++ (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0,
++ (Item::Type(self0), Item::Type(other0)) => self0 == other0,
++ (Item::Union(self0), Item::Union(other0)) => self0 == other0,
++ (Item::Use(self0), Item::Use(other0)) => self0 == other0,
++ (Item::Verbatim(self0), Item::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemEnum {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.variants == other.variants
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemExternCrate {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemExternCrate {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemForeignMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemForeignMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemImpl {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemImpl {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.defaultness == other.defaultness
++ && self.unsafety == other.unsafety
++ && self.generics == other.generics
++ && self.trait_ == other.trait_
++ && self.self_ty == other.self_ty
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.mac == other.mac
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro2 {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro2 {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.content == other.content
++ && self.semi == other.semi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTrait {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.unsafety == other.unsafety
++ && self.auto_token == other.auto_token
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.supertraits == other.supertraits
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTraitAlias {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTraitAlias {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUnion {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUse {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUse {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.leading_colon == other.leading_colon
++ && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Label {}
++#[cfg(feature = "full")]
++impl PartialEq for Label {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for LifetimeDef {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for LifetimeDef {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lifetime == other.lifetime
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ }
++}
++impl Eq for Lit {}
++impl PartialEq for Lit {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Lit::Str(self0), Lit::Str(other0)) => self0 == other0,
++ (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0,
++ (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0,
++ (Lit::Char(self0), Lit::Char(other0)) => self0 == other0,
++ (Lit::Int(self0), Lit::Int(other0)) => self0 == other0,
++ (Lit::Float(self0), Lit::Float(other0)) => self0 == other0,
++ (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0,
++ (Lit::Verbatim(self0), Lit::Verbatim(other0)) => {
++ self0.to_string() == other0.to_string()
++ }
++ _ => false,
++ }
++ }
++}
++impl Eq for LitBool {}
++impl PartialEq for LitBool {
++ fn eq(&self, other: &Self) -> bool {
++ self.value == other.value
++ }
++}
++impl Eq for LitByte {}
++impl Eq for LitByteStr {}
++impl Eq for LitChar {}
++impl Eq for LitFloat {}
++impl Eq for LitInt {}
++impl Eq for LitStr {}
++#[cfg(feature = "full")]
++impl Eq for Local {}
++#[cfg(feature = "full")]
++impl PartialEq for Local {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Macro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Macro {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path
++ && self.delimiter == other.delimiter
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MacroDelimiter {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MacroDelimiter {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true,
++ (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true,
++ (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Meta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Meta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Meta::Path(self0), Meta::Path(other0)) => self0 == other0,
++ (Meta::List(self0), Meta::List(other0)) => self0 == other0,
++ (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaList {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaList {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.nested == other.nested
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaNameValue {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaNameValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for MethodTurbofish {}
++#[cfg(feature = "full")]
++impl PartialEq for MethodTurbofish {
++ fn eq(&self, other: &Self) -> bool {
++ self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for NestedMeta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for NestedMeta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
++ (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ParenthesizedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ParenthesizedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.inputs == other.inputs && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Pat {}
++#[cfg(feature = "full")]
++impl PartialEq for Pat {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
++ (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
++ (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
++ (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
++ (Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
++ (Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
++ (Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
++ (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
++ (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0,
++ (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0,
++ (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0,
++ (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0,
++ (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0,
++ (Pat::Type(self0), Pat::Type(other0)) => self0 == other0,
++ (Pat::Verbatim(self0), Pat::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatBox {}
++#[cfg(feature = "full")]
++impl PartialEq for PatBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatIdent {}
++#[cfg(feature = "full")]
++impl PartialEq for PatIdent {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.by_ref == other.by_ref
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.subpat == other.subpat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatLit {}
++#[cfg(feature = "full")]
++impl PartialEq for PatLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for PatMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatOr {}
++#[cfg(feature = "full")]
++impl PartialEq for PatOr {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.leading_vert == other.leading_vert
++ && self.cases == other.cases
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatPath {}
++#[cfg(feature = "full")]
++impl PartialEq for PatPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRange {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lo == other.lo
++ && self.limits == other.limits
++ && self.hi == other.hi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatReference {}
++#[cfg(feature = "full")]
++impl PartialEq for PatReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRest {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRest {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatSlice {}
++#[cfg(feature = "full")]
++impl PartialEq for PatSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTupleStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTupleStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatType {}
++#[cfg(feature = "full")]
++impl PartialEq for PatType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatWild {}
++#[cfg(feature = "full")]
++impl PartialEq for PatWild {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Path {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Path {
++ fn eq(&self, other: &Self) -> bool {
++ self.leading_colon == other.leading_colon && self.segments == other.segments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathArguments {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (PathArguments::None, PathArguments::None) => true,
++ (PathArguments::AngleBracketed(self0), PathArguments::AngleBracketed(other0)) => {
++ self0 == other0
++ }
++ (PathArguments::Parenthesized(self0), PathArguments::Parenthesized(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathSegment {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathSegment {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.arguments == other.arguments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateEq {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateEq {
++ fn eq(&self, other: &Self) -> bool {
++ self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateLifetime {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateLifetime {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateType {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.bounded_ty == other.bounded_ty
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for QSelf {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for QSelf {
++ fn eq(&self, other: &Self) -> bool {
++ self.ty == other.ty && self.position == other.position && self.as_token == other.as_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for RangeLimits {}
++#[cfg(feature = "full")]
++impl PartialEq for RangeLimits {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true,
++ (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Receiver {}
++#[cfg(feature = "full")]
++impl PartialEq for Receiver {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.reference == other.reference
++ && self.mutability == other.mutability
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ReturnType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ReturnType {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ReturnType::Default, ReturnType::Default) => true,
++ (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Signature {}
++#[cfg(feature = "full")]
++impl PartialEq for Signature {
++ fn eq(&self, other: &Self) -> bool {
++ self.constness == other.constness
++ && self.asyncness == other.asyncness
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Stmt {}
++#[cfg(feature = "full")]
++impl PartialEq for Stmt {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
++ (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
++ (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
++ (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBound {
++ fn eq(&self, other: &Self) -> bool {
++ self.paren_token == other.paren_token
++ && self.modifier == other.modifier
++ && self.lifetimes == other.lifetimes
++ && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBoundModifier {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitBoundModifier::None, TraitBoundModifier::None) => true,
++ (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItem {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
++ (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
++ (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
++ (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
++ (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.default == other.default
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.sig == other.sig
++ && self.default == other.default
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Type {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Type {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Type::Array(self0), Type::Array(other0)) => self0 == other0,
++ (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0,
++ (Type::Group(self0), Type::Group(other0)) => self0 == other0,
++ (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0,
++ (Type::Infer(self0), Type::Infer(other0)) => self0 == other0,
++ (Type::Macro(self0), Type::Macro(other0)) => self0 == other0,
++ (Type::Never(self0), Type::Never(other0)) => self0 == other0,
++ (Type::Paren(self0), Type::Paren(other0)) => self0 == other0,
++ (Type::Path(self0), Type::Path(other0)) => self0 == other0,
++ (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0,
++ (Type::Reference(self0), Type::Reference(other0)) => self0 == other0,
++ (Type::Slice(self0), Type::Slice(other0)) => self0 == other0,
++ (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0,
++ (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0,
++ (Type::Verbatim(self0), Type::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeArray {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem && self.len == other.len
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeBareFn {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeBareFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeGroup {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeImplTrait {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeImplTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeInfer {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeInfer {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeMacro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.mac == other.mac
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeNever {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeNever {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParamBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParamBound {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => self0 == other0,
++ (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePtr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePtr {
++ fn eq(&self, other: &Self) -> bool {
++ self.const_token == other.const_token
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeReference {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeSlice {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTraitObject {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTraitObject {
++ fn eq(&self, other: &Self) -> bool {
++ self.dyn_token == other.dyn_token && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTuple {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.elems == other.elems
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for UnOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UnOp::Deref(_), UnOp::Deref(_)) => true,
++ (UnOp::Not(_), UnOp::Not(_)) => true,
++ (UnOp::Neg(_), UnOp::Neg(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGlob {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGlob {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseName {}
++#[cfg(feature = "full")]
++impl PartialEq for UseName {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UsePath {}
++#[cfg(feature = "full")]
++impl PartialEq for UsePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseRename {}
++#[cfg(feature = "full")]
++impl PartialEq for UseRename {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseTree {}
++#[cfg(feature = "full")]
++impl PartialEq for UseTree {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0,
++ (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0,
++ (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0,
++ (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0,
++ (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variadic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variadic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variant {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variant {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.fields == other.fields
++ && self.discriminant == other.discriminant
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisCrate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisCrate {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisPublic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisPublic {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisRestricted {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisRestricted {
++ fn eq(&self, other: &Self) -> bool {
++ self.in_token == other.in_token && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Visibility {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Visibility {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
++ (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
++ (Visibility::Restricted(self0), Visibility::Restricted(other0)) => self0 == other0,
++ (Visibility::Inherited, Visibility::Inherited) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WhereClause {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WhereClause {
++ fn eq(&self, other: &Self) -> bool {
++ self.predicates == other.predicates
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WherePredicate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WherePredicate {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (WherePredicate::Type(self0), WherePredicate::Type(other0)) => self0 == other0,
++ (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => self0 == other0,
++ (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
+index f51218b78c..d9dd32a420 100644
+--- a/third_party/rust/syn/src/gen/fold.rs
++++ b/third_party/rust/syn/src/gen/fold.rs
+@@ -2,6 +2,7 @@
+ // It is not intended for manual editing.
+
+ #![allow(unreachable_code, unused_variables)]
++#![allow(clippy::match_wildcard_for_single_variants)]
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::fold::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -26,7 +27,7 @@ macro_rules! full {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"fold"` feature.*
++/// *This trait is available only if Syn is built with the `"fold"` feature.*
+ pub trait Fold {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_abi(&mut self, i: Abi) -> Abi {
+@@ -433,35 +434,27 @@ pub trait Fold {
+ fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
+ fold_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit(&mut self, i: Lit) -> Lit {
+ fold_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
+ fold_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
+ fold_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
+ fold_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
+ fold_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
+ fold_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
+ fold_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
+ fold_lit_str(self, i)
+ }
+@@ -799,10 +792,10 @@ where
+ F: Fold + ?Sized,
+ {
+ AngleBracketedGenericArguments {
+- colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -819,9 +812,9 @@ where
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
++ fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
+ body: Box::new(f.fold_expr(*node.body)),
+- comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
++ comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -842,7 +835,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Attribute {
+- pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
++ pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
+ style: f.fold_attr_style(node.style),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ path: f.fold_path(node.path),
+@@ -859,7 +852,7 @@ where
+ name: (node.name).map(|it| {
+ (
+ f.fold_ident((it).0),
+- Token ! [ : ](tokens_helper(f, &(it).1.spans)),
++ Token ! [:](tokens_helper(f, &(it).1.spans)),
+ )
+ }),
+ ty: f.fold_type(node.ty),
+@@ -871,59 +864,47 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
++ BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXor(_binding_0) => {
+- BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAnd(_binding_0) => {
+- BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::BitOr(_binding_0) => {
+- BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::AddEq(_binding_0) => {
+- BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::SubEq(_binding_0) => {
+- BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::MulEq(_binding_0) => {
+- BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::DivEq(_binding_0) => {
+- BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::RemEq(_binding_0) => {
+- BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
+ }
++ BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXorEq(_binding_0) => {
+- BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAndEq(_binding_0) => {
+- BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitOrEq(_binding_0) => {
+- BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShlEq(_binding_0) => {
+- BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShrEq(_binding_0) => {
+- BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -934,7 +915,7 @@ where
+ {
+ Binding {
+ ident: f.fold_ident(node.ident),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -955,9 +936,9 @@ where
+ {
+ BoundLifetimes {
+ for_token: Token![for](tokens_helper(f, &node.for_token.span)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -969,9 +950,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_expr(it)),
+ }
+ }
+@@ -982,7 +963,7 @@ where
+ {
+ Constraint {
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -1016,7 +997,7 @@ where
+ DataStruct {
+ struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "derive")]
+@@ -1112,7 +1093,7 @@ where
+ ExprAssign {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ left: Box::new(f.fold_expr(*node.left)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+ }
+@@ -1148,7 +1129,7 @@ where
+ ExprAwait {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
+ }
+ }
+@@ -1232,9 +1213,9 @@ where
+ asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
+ movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
+ capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
+- or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
++ or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
+ inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
+- or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
++ or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
+ output: f.fold_return_type(node.output),
+ body: Box::new(f.fold_expr(*node.body)),
+ }
+@@ -1258,7 +1239,7 @@ where
+ ExprField {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ member: f.fold_member(node.member),
+ }
+ }
+@@ -1327,7 +1308,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ let_token: Token![let](tokens_helper(f, &node.let_token.span)),
+ pat: f.fold_pat(node.pat),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+ }
+@@ -1384,7 +1365,7 @@ where
+ ExprMethodCall {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ receiver: Box::new(f.fold_expr(*node.receiver)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ method: f.fold_ident(node.method),
+ turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
+ paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
+@@ -1432,7 +1413,7 @@ where
+ {
+ ExprReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ raw: node.raw,
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ expr: Box::new(f.fold_expr(*node.expr)),
+@@ -1447,7 +1428,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: Box::new(f.fold_expr(*node.len)),
+ }
+ }
+@@ -1484,7 +1465,7 @@ where
+ ExprTry {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
++ question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1517,7 +1498,7 @@ where
+ ExprType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -1576,7 +1557,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -1588,7 +1569,7 @@ where
+ FieldPat {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+ }
+@@ -1600,7 +1581,7 @@ where
+ FieldValue {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ expr: f.fold_expr(node.expr),
+ }
+ }
+@@ -1681,7 +1662,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1692,7 +1673,7 @@ where
+ ForeignItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1706,9 +1687,9 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1721,7 +1702,7 @@ where
+ vis: f.fold_visibility(node.vis),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1779,9 +1760,9 @@ where
+ F: Fold + ?Sized,
+ {
+ Generics {
+- lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
++ lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
+ params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
+- gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
++ gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
+ where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
+ }
+ }
+@@ -1819,11 +1800,11 @@ where
+ defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: f.fold_expr(node.expr),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1834,7 +1815,7 @@ where
+ ImplItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1862,9 +1843,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1913,11 +1894,11 @@ where
+ vis: f.fold_visibility(node.vis),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1952,7 +1933,7 @@ where
+ f.fold_ident((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2011,7 +1992,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2043,7 +2024,7 @@ where
+ FoldHelper::lift((it).1, |it| f.fold_item(it)),
+ )
+ }),
+- semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2057,11 +2038,11 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2076,7 +2057,7 @@ where
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2092,7 +2073,7 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
+ brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
+ items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
+@@ -2109,9 +2090,9 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2125,9 +2106,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2153,9 +2134,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ use_token: Token![use](tokens_helper(f, &node.use_token.span)),
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ tree: f.fold_use_tree(node.tree),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2165,7 +2146,7 @@ where
+ {
+ Label {
+ name: f.fold_lifetime(node.name),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ }
+ }
+ pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
+@@ -2185,11 +2166,10 @@ where
+ LifetimeDef {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
+ where
+ F: Fold + ?Sized,
+@@ -2205,7 +2185,6 @@ where
+ Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
+ where
+ F: Fold + ?Sized,
+@@ -2215,7 +2194,6 @@ where
+ span: f.fold_span(node.span),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
+ where
+ F: Fold + ?Sized,
+@@ -2225,7 +2203,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
+ where
+ F: Fold + ?Sized,
+@@ -2235,7 +2212,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
+ where
+ F: Fold + ?Sized,
+@@ -2245,7 +2221,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
+ where
+ F: Fold + ?Sized,
+@@ -2255,7 +2230,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
+ where
+ F: Fold + ?Sized,
+@@ -2265,7 +2239,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
+ where
+ F: Fold + ?Sized,
+@@ -2286,11 +2259,11 @@ where
+ pat: f.fold_pat(node.pat),
+ init: (node.init).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2361,7 +2334,7 @@ where
+ {
+ MetaNameValue {
+ path: f.fold_path(node.path),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ lit: f.fold_lit(node.lit),
+ }
+ }
+@@ -2371,10 +2344,10 @@ where
+ F: Fold + ?Sized,
+ {
+ MethodTurbofish {
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2449,7 +2422,7 @@ where
+ ident: f.fold_ident(node.ident),
+ subpat: (node.subpat).map(|it| {
+ (
+- Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
++ Token ! [@](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_pat(*(it).1)),
+ )
+ }),
+@@ -2482,7 +2455,7 @@ where
+ {
+ PatOr {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
++ leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
+ cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
+ }
+ }
+@@ -2516,7 +2489,7 @@ where
+ {
+ PatReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+@@ -2585,7 +2558,7 @@ where
+ PatType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ pat: Box::new(f.fold_pat(*node.pat)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -2605,7 +2578,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Path {
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
+ }
+ }
+@@ -2641,7 +2614,7 @@ where
+ {
+ PredicateEq {
+ lhs_ty: f.fold_type(node.lhs_ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ rhs_ty: f.fold_type(node.rhs_ty),
+ }
+ }
+@@ -2652,7 +2625,7 @@ where
+ {
+ PredicateLifetime {
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+@@ -2664,7 +2637,7 @@ where
+ PredicateType {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ bounded_ty: f.fold_type(node.bounded_ty),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -2674,11 +2647,11 @@ where
+ F: Fold + ?Sized,
+ {
+ QSelf {
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ position: node.position,
+ as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2691,7 +2664,7 @@ where
+ RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
+ }
+ RangeLimits::Closed(_binding_0) => {
+- RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
++ RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2704,7 +2677,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ reference: (node.reference).map(|it| {
+ (
+- Token ! [ & ](tokens_helper(f, &(it).0.spans)),
++ Token ! [&](tokens_helper(f, &(it).0.spans)),
+ ((it).1).map(|it| f.fold_lifetime(it)),
+ )
+ }),
+@@ -2720,7 +2693,7 @@ where
+ match node {
+ ReturnType::Default => ReturnType::Default,
+ ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
+- Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
++ Token ! [->](tokens_helper(f, &_binding_0.spans)),
+ Box::new(f.fold_type(*_binding_1)),
+ ),
+ }
+@@ -2761,7 +2734,7 @@ where
+ Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
+ Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
+ f.fold_expr(_binding_0),
+- Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
++ Token ! [;](tokens_helper(f, &_binding_1.spans)),
+ ),
+ }
+ }
+@@ -2785,7 +2758,7 @@ where
+ match node {
+ TraitBoundModifier::None => TraitBoundModifier::None,
+ TraitBoundModifier::Maybe(_binding_0) => {
+- TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
++ TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2812,15 +2785,15 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2831,7 +2804,7 @@ where
+ TraitItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2843,7 +2816,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ sig: f.fold_signature(node.sig),
+ default: (node.default).map(|it| f.fold_block(it)),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2856,15 +2829,15 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_type((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2899,7 +2872,7 @@ where
+ TypeArray {
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ elem: Box::new(f.fold_type(*node.elem)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: f.fold_expr(node.len),
+ }
+ }
+@@ -2974,9 +2947,9 @@ where
+ TypeParam {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: f.fold_ident(node.ident),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_type(it)),
+ }
+ }
+@@ -3018,7 +2991,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypePtr {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3030,7 +3003,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypeReference {
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3072,9 +3045,9 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
+ UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
+- UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3083,7 +3056,7 @@ where
+ F: Fold + ?Sized,
+ {
+ UseGlob {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3112,7 +3085,7 @@ where
+ {
+ UsePath {
+ ident: f.fold_ident(node.ident),
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
+ tree: Box::new(f.fold_use_tree(*node.tree)),
+ }
+ }
+@@ -3147,7 +3120,7 @@ where
+ {
+ Variadic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
++ dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -3161,7 +3134,7 @@ where
+ fields: f.fold_fields(node.fields),
+ discriminant: (node.discriminant).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs
+new file mode 100644
+index 0000000000..9e9e84a7af
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/hash.rs
+@@ -0,0 +1,2691 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++use std::hash::{Hash, Hasher};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Abi {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AngleBracketedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.colon2_token.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Arm {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.guard.hash(state);
++ self.body.hash(state);
++ self.comma.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AttrStyle {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ AttrStyle::Outer => {
++ state.write_u8(0u8);
++ }
++ AttrStyle::Inner(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Attribute {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.style.hash(state);
++ self.path.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BareFnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.name.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BinOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ BinOp::Add(_) => {
++ state.write_u8(0u8);
++ }
++ BinOp::Sub(_) => {
++ state.write_u8(1u8);
++ }
++ BinOp::Mul(_) => {
++ state.write_u8(2u8);
++ }
++ BinOp::Div(_) => {
++ state.write_u8(3u8);
++ }
++ BinOp::Rem(_) => {
++ state.write_u8(4u8);
++ }
++ BinOp::And(_) => {
++ state.write_u8(5u8);
++ }
++ BinOp::Or(_) => {
++ state.write_u8(6u8);
++ }
++ BinOp::BitXor(_) => {
++ state.write_u8(7u8);
++ }
++ BinOp::BitAnd(_) => {
++ state.write_u8(8u8);
++ }
++ BinOp::BitOr(_) => {
++ state.write_u8(9u8);
++ }
++ BinOp::Shl(_) => {
++ state.write_u8(10u8);
++ }
++ BinOp::Shr(_) => {
++ state.write_u8(11u8);
++ }
++ BinOp::Eq(_) => {
++ state.write_u8(12u8);
++ }
++ BinOp::Lt(_) => {
++ state.write_u8(13u8);
++ }
++ BinOp::Le(_) => {
++ state.write_u8(14u8);
++ }
++ BinOp::Ne(_) => {
++ state.write_u8(15u8);
++ }
++ BinOp::Ge(_) => {
++ state.write_u8(16u8);
++ }
++ BinOp::Gt(_) => {
++ state.write_u8(17u8);
++ }
++ BinOp::AddEq(_) => {
++ state.write_u8(18u8);
++ }
++ BinOp::SubEq(_) => {
++ state.write_u8(19u8);
++ }
++ BinOp::MulEq(_) => {
++ state.write_u8(20u8);
++ }
++ BinOp::DivEq(_) => {
++ state.write_u8(21u8);
++ }
++ BinOp::RemEq(_) => {
++ state.write_u8(22u8);
++ }
++ BinOp::BitXorEq(_) => {
++ state.write_u8(23u8);
++ }
++ BinOp::BitAndEq(_) => {
++ state.write_u8(24u8);
++ }
++ BinOp::BitOrEq(_) => {
++ state.write_u8(25u8);
++ }
++ BinOp::ShlEq(_) => {
++ state.write_u8(26u8);
++ }
++ BinOp::ShrEq(_) => {
++ state.write_u8(27u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Binding {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Block {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.stmts.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BoundLifetimes {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ConstParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Constraint {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for Data {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Data::Struct(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Data::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Data::Union(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DeriveInput {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.data.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Expr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Expr::Binary(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Expr::Call(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Expr::Cast(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Expr::Field(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ state.write_u8(16u8);
++ v0.hash(state);
++ }
++ Expr::Index(v0) => {
++ state.write_u8(17u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ state.write_u8(18u8);
++ v0.hash(state);
++ }
++ Expr::Lit(v0) => {
++ state.write_u8(19u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ state.write_u8(20u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ state.write_u8(21u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ state.write_u8(22u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ state.write_u8(23u8);
++ v0.hash(state);
++ }
++ Expr::Paren(v0) => {
++ state.write_u8(24u8);
++ v0.hash(state);
++ }
++ Expr::Path(v0) => {
++ state.write_u8(25u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ state.write_u8(26u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ state.write_u8(27u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ state.write_u8(28u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ state.write_u8(29u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ state.write_u8(30u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ state.write_u8(31u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ state.write_u8(32u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ state.write_u8(33u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ state.write_u8(34u8);
++ v0.hash(state);
++ }
++ Expr::Unary(v0) => {
++ state.write_u8(35u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ state.write_u8(36u8);
++ v0.hash(state);
++ }
++ Expr::Verbatim(v0) => {
++ state.write_u8(37u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ state.write_u8(38u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ state.write_u8(39u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssign {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssignOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAsync {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.capture.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAwait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprBinary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBreak {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.func.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCast {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprClosure {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.asyncness.hash(state);
++ self.movability.hash(state);
++ self.capture.hash(state);
++ self.inputs.hash(state);
++ self.output.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprContinue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprField {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ self.member.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprForLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprIf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.cond.hash(state);
++ self.then_branch.hash(state);
++ self.else_branch.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprIndex {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.index.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLet {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMatch {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.arms.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMethodCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.receiver.hash(state);
++ self.method.hash(state);
++ self.turbofish.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.from.hash(state);
++ self.limits.hash(state);
++ self.to.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRepeat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReturn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ self.rest.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTry {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTryBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprUnary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.op.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprUnsafe {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprWhile {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.cond.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprYield {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Field {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldPat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Fields {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Fields::Named(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Fields::Unnamed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Fields::Unit => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsNamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.named.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsUnnamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.unnamed.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for File {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.shebang.hash(state);
++ self.attrs.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ FnArg::Receiver(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ FnArg::Typed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ForeignItem::Fn(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ForeignItem::Static(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ForeignItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ForeignItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ForeignItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericArgument::Type(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericArgument::Binding(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ GenericArgument::Constraint(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ GenericArgument::Const(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for GenericMethodArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericMethodArgument::Const(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericParam::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericParam::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericParam::Const(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Generics {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lt_token.hash(state);
++ self.params.hash(state);
++ self.gt_token.hash(state);
++ self.where_clause.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ImplItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ImplItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ImplItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ImplItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ImplItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Item {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Item::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Item::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Item::ExternCrate(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Item::Fn(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Item::ForeignMod(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Item::Impl(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Item::Macro(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Item::Macro2(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Item::Mod(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Item::Static(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Item::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Item::Trait(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Item::TraitAlias(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Item::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Item::Union(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ Item::Use(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ Item::Verbatim(v0) => {
++ state.write_u8(16u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemExternCrate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemForeignMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.abi.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemImpl {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.defaultness.hash(state);
++ self.unsafety.hash(state);
++ self.generics.hash(state);
++ self.trait_.hash(state);
++ self.self_ty.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro2 {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ TokenStreamHelper(&self.rules).hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.content.hash(state);
++ self.semi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.unsafety.hash(state);
++ self.auto_token.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.supertraits.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTraitAlias {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUse {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.leading_colon.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Label {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for LifetimeDef {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lifetime.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++impl Hash for Lit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Lit::Str(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Lit::ByteStr(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Lit::Byte(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Lit::Char(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Lit::Int(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Lit::Float(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Lit::Bool(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Lit::Verbatim(v0) => {
++ state.write_u8(7u8);
++ v0.to_string().hash(state);
++ }
++ }
++ }
++}
++impl Hash for LitBool {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.value.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Local {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.init.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Macro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.delimiter.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MacroDelimiter {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ MacroDelimiter::Paren(_) => {
++ state.write_u8(0u8);
++ }
++ MacroDelimiter::Brace(_) => {
++ state.write_u8(1u8);
++ }
++ MacroDelimiter::Bracket(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Meta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Meta::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Meta::List(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Meta::NameValue(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaList {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.nested.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaNameValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for MethodTurbofish {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for NestedMeta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ NestedMeta::Meta(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ NestedMeta::Lit(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ParenthesizedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.inputs.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Pat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Pat::Box(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Pat::Ident(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Pat::Lit(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Pat::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Pat::Or(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Pat::Path(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Pat::Range(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Pat::Reference(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Pat::Rest(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Pat::Slice(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Pat::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Pat::Tuple(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Pat::TupleStruct(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Pat::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Pat::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ Pat::Wild(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatIdent {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.by_ref.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.subpat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatOr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.leading_vert.hash(state);
++ self.cases.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lo.hash(state);
++ self.limits.hash(state);
++ self.hi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRest {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTupleStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatWild {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Path {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.leading_colon.hash(state);
++ self.segments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ PathArguments::None => {
++ state.write_u8(0u8);
++ }
++ PathArguments::AngleBracketed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ PathArguments::Parenthesized(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathSegment {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.arguments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateEq {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lhs_ty.hash(state);
++ self.rhs_ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateLifetime {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.bounded_ty.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for QSelf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ty.hash(state);
++ self.position.hash(state);
++ self.as_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for RangeLimits {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ RangeLimits::HalfOpen(_) => {
++ state.write_u8(0u8);
++ }
++ RangeLimits::Closed(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Receiver {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.reference.hash(state);
++ self.mutability.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ReturnType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ReturnType::Default => {
++ state.write_u8(0u8);
++ }
++ ReturnType::Type(_, v1) => {
++ state.write_u8(1u8);
++ v1.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Signature {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.constness.hash(state);
++ self.asyncness.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Stmt {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Stmt::Local(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Stmt::Item(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Stmt::Expr(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Stmt::Semi(v0, _) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.paren_token.hash(state);
++ self.modifier.hash(state);
++ self.lifetimes.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBoundModifier {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitBoundModifier::None => {
++ state.write_u8(0u8);
++ }
++ TraitBoundModifier::Maybe(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TraitItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ TraitItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ TraitItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ TraitItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.sig.hash(state);
++ self.default.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Type {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Type::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Type::BareFn(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Type::Group(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Type::ImplTrait(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Type::Infer(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Type::Macro(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Type::Never(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Type::Paren(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Type::Path(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Type::Ptr(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Type::Reference(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Type::Slice(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Type::TraitObject(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Type::Tuple(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Type::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeBareFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeImplTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeInfer {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.mac.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeNever {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParamBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TypeParamBound::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePtr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.const_token.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTraitObject {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.dyn_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elems.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for UnOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UnOp::Deref(_) => {
++ state.write_u8(0u8);
++ }
++ UnOp::Not(_) => {
++ state.write_u8(1u8);
++ }
++ UnOp::Neg(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGlob {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseName {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UsePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseRename {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseTree {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UseTree::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ UseTree::Name(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ UseTree::Rename(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ UseTree::Glob(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ UseTree::Group(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variadic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variant {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.fields.hash(state);
++ self.discriminant.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisCrate {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisPublic {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisRestricted {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.in_token.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Visibility {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Visibility::Public(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Visibility::Crate(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Visibility::Restricted(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Visibility::Inherited => {
++ state.write_u8(3u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WhereClause {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.predicates.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WherePredicate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ WherePredicate::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ WherePredicate::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ WherePredicate::Eq(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
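The generated impls added above all follow the same tag-then-payload hashing scheme: every enum variant writes a distinct u8 discriminant into the hasher before hashing its payload, so values of different variants with equal payloads do not collide, and each impl is gated behind the cfg features that make its type available. A minimal standalone sketch of that pattern (using a hypothetical Shape enum, not a syn type) would look roughly like this:

    // Sketch only: the same tag-then-payload hashing pattern the generated
    // syn impls above use for enums; Shape is an illustrative type.
    use std::hash::{Hash, Hasher};

    enum Shape {
        Circle(u32),
        Square(u32),
    }

    impl Hash for Shape {
        fn hash<H>(&self, state: &mut H)
        where
            H: Hasher,
        {
            match self {
                // Write a per-variant discriminant first so Circle(5) and
                // Square(5) hash differently even though the payloads match.
                Shape::Circle(v0) => {
                    state.write_u8(0u8);
                    v0.hash(state);
                }
                Shape::Square(v0) => {
                    state.write_u8(1u8);
                    v0.hash(state);
                }
            }
        }
    }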
+diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
+index b667f530c3..24d34b7480 100644
+--- a/third_party/rust/syn/src/gen/visit.rs
++++ b/third_party/rust/syn/src/gen/visit.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -30,7 +29,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit"` feature.*
++/// *This trait is available only if Syn is built with the `"visit"` feature.*
+ pub trait Visit<'ast> {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi(&mut self, i: &'ast Abi) {
+@@ -434,35 +433,27 @@ pub trait Visit<'ast> {
+ fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
+ visit_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit(&mut self, i: &'ast Lit) {
+ visit_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool(&mut self, i: &'ast LitBool) {
+ visit_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte(&mut self, i: &'ast LitByte) {
+ visit_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
+ visit_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char(&mut self, i: &'ast LitChar) {
+ visit_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float(&mut self, i: &'ast LitFloat) {
+ visit_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int(&mut self, i: &'ast LitInt) {
+ visit_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str(&mut self, i: &'ast LitStr) {
+ visit_lit_str(self, i)
+ }
+@@ -2537,7 +2528,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2569,7 +2559,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2577,37 +2566,31 @@ where
+ skip!(node.value);
+ v.visit_span(&node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
+ where
+ V: Visit<'ast> + ?Sized,
+diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
+index 5cddb827c6..5ce11f0b2e 100644
+--- a/third_party/rust/syn/src/gen/visit_mut.rs
++++ b/third_party/rust/syn/src/gen/visit_mut.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -31,7 +30,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
++/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
+ pub trait VisitMut {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi_mut(&mut self, i: &mut Abi) {
+@@ -438,35 +437,27 @@ pub trait VisitMut {
+ fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
+ visit_lifetime_def_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_mut(&mut self, i: &mut Lit) {
+ visit_lit_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
+ visit_lit_bool_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
+ visit_lit_byte_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
+ visit_lit_byte_str_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
+ visit_lit_char_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
+ visit_lit_float_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
+ visit_lit_int_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
+ visit_lit_str_mut(self, i)
+ }
+@@ -2543,7 +2534,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
+ where
+ V: VisitMut + ?Sized,
+@@ -2575,7 +2565,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
+ where
+ V: VisitMut + ?Sized,
+@@ -2583,37 +2572,31 @@ where
+ skip!(node.value);
+ v.visit_span_mut(&mut node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
+ where
+ V: VisitMut + ?Sized,
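The two hunks above drop the "derive"/"full" cfg gates from the visit_lit* hooks in both Visit and VisitMut (literal types are available in every syn configuration) and tighten the doc wording to "available only if". For orientation, a hedged sketch of how a downstream crate typically uses one of these hooks, assuming syn is built with its "visit", "full", and default "parsing" features; StrCounter and count_strings are illustrative names, not code from the patch:

    // Counts string literals in a source file by overriding one Visit hook
    // and relying on the default walkers for everything else.
    use syn::visit::{self, Visit};
    use syn::{File, LitStr};

    struct StrCounter {
        count: usize,
    }

    impl<'ast> Visit<'ast> for StrCounter {
        fn visit_lit_str(&mut self, node: &'ast LitStr) {
            self.count += 1;
            // Hand back to the default walker; it has nothing further to do
            // for a literal, but keeping the call is conventional.
            visit::visit_lit_str(self, node);
        }
    }

    fn count_strings(source: &str) -> syn::Result<usize> {
        let file: File = syn::parse_str(source)?;
        let mut counter = StrCounter { count: 0 };
        counter.visit_file(&file);
        Ok(counter.count)
    }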
+diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
+index 95ab2e404a..05e8ef5cdf 100644
+--- a/third_party/rust/syn/src/generics.rs
++++ b/third_party/rust/syn/src/generics.rs
+@@ -1,13 +1,16 @@
+ use super::*;
+ use crate::punctuated::{Iter, IterMut, Punctuated};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::fmt::{self, Debug};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// Lifetimes and type parameters attached to a declaration of a function,
+ /// enum, trait, etc.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct Generics {
+ pub lt_token: Option<Token![<]>,
+ pub params: Punctuated<GenericParam, Token![,]>,
+@@ -20,7 +23,7 @@ ast_enum_of_structs! {
+ /// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
+ /// `'a: 'b`, `const LEN: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -28,9 +31,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum GenericParam {
+ /// A generic type parameter: `T: Into<String>`.
+ Type(TypeParam),
+@@ -46,7 +46,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A generic type parameter: `T: Into<String>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParam {
+ pub attrs: Vec<Attribute>,
+@@ -61,7 +61,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct LifetimeDef {
+ pub attrs: Vec<Attribute>,
+@@ -74,7 +74,7 @@ ast_struct! {
+ ast_struct! {
+ /// A const generic parameter: `const LENGTH: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ConstParam {
+ pub attrs: Vec<Attribute>,
+@@ -87,6 +87,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for Generics {
++ fn default() -> Self {
++ Generics {
++ lt_token: None,
++ params: Punctuated::new(),
++ gt_token: None,
++ where_clause: None,
++ }
++ }
++}
++
+ impl Generics {
+ /// Returns an
+ /// <code
+@@ -280,29 +291,23 @@ impl<'a> Iterator for ConstParamsMut<'a> {
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct ImplGenerics<'a>(&'a Generics);
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct TypeGenerics<'a>(&'a Generics);
+
+ /// Returned by `TypeGenerics::as_turbofish`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Turbofish<'a>(&'a Generics);
+
+ #[cfg(feature = "printing")]
+@@ -314,9 +319,8 @@ impl Generics {
+ /// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
+ /// #
+- /// # fn main() {
+- /// # let generics: syn::Generics = Default::default();
+- /// # let name = Ident::new("MyType", Span::call_site());
++ /// # let generics: syn::Generics = Default::default();
++ /// # let name = Ident::new("MyType", Span::call_site());
+ /// #
+ /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ /// quote! {
+@@ -324,11 +328,10 @@ impl Generics {
+ /// // ...
+ /// }
+ /// }
+- /// # ;
+- /// # }
++ /// # ;
+ /// ```
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
+ (
+@@ -339,11 +342,57 @@ impl Generics {
+ }
+ }
+
++#[cfg(feature = "printing")]
++macro_rules! generics_wrapper_impls {
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl<'a> Clone for $ty<'a> {
++ fn clone(&self) -> Self {
++ $ty(self.0)
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Debug for $ty<'a> {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter
++ .debug_tuple(stringify!($ty))
++ .field(self.0)
++ .finish()
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Eq for $ty<'a> {}
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> PartialEq for $ty<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ self.0 == other.0
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Hash for $ty<'a> {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ self.0.hash(state);
++ }
++ }
++ };
++}
++
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(ImplGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(TypeGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(Turbofish);
++
+ #[cfg(feature = "printing")]
+ impl<'a> TypeGenerics<'a> {
+ /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn as_turbofish(&self) -> Turbofish {
+ Turbofish(self.0)
+@@ -353,9 +402,8 @@ impl<'a> TypeGenerics<'a> {
+ ast_struct! {
+ /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct BoundLifetimes {
+ pub for_token: Token![for],
+ pub lt_token: Token![<],
+@@ -364,6 +412,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for BoundLifetimes {
++ fn default() -> Self {
++ BoundLifetimes {
++ for_token: Default::default(),
++ lt_token: Default::default(),
++ lifetimes: Punctuated::new(),
++ gt_token: Default::default(),
++ }
++ }
++}
++
+ impl LifetimeDef {
+ pub fn new(lifetime: Lifetime) -> Self {
+ LifetimeDef {
+@@ -391,7 +450,7 @@ impl From<Ident> for TypeParam {
+ ast_enum_of_structs! {
+ /// A trait or lifetime used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum TypeParamBound {
+ Trait(TraitBound),
+@@ -402,7 +461,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A trait used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct TraitBound {
+ pub paren_token: Option<token::Paren>,
+@@ -418,9 +477,8 @@ ast_enum! {
+ /// A modifier on a trait bound, currently only used for the `?` in
+ /// `?Sized`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum TraitBoundModifier {
+ None,
+ Maybe(Token![?]),
+@@ -431,7 +489,7 @@ ast_struct! {
+ /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
+ /// 'static`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct WhereClause {
+ pub where_token: Token![where],
+@@ -442,7 +500,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -450,9 +508,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum WherePredicate {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ Type(PredicateType),
+@@ -468,7 +523,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateType {
+ /// Any lifetimes from a `for` binding
+@@ -484,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateLifetime {
+ pub lifetime: Lifetime,
+@@ -496,7 +551,7 @@ ast_struct! {
+ ast_struct! {
+ /// An equality predicate in a `where` clause (unsupported).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateEq {
+ pub lhs_ty: Type,
+@@ -521,7 +576,6 @@ pub mod parsing {
+
+ let mut params = Punctuated::new();
+ let mut allow_lifetime_param = true;
+- let mut allow_type_param = true;
+ loop {
+ if input.peek(Token![>]) {
+ break;
+@@ -534,7 +588,7 @@ pub mod parsing {
+ attrs,
+ ..input.parse()?
+ }));
+- } else if allow_type_param && lookahead.peek(Ident) {
++ } else if lookahead.peek(Ident) {
+ allow_lifetime_param = false;
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+@@ -542,7 +596,6 @@ pub mod parsing {
+ }));
+ } else if lookahead.peek(Token![const]) {
+ allow_lifetime_param = false;
+- allow_type_param = false;
+ params.push_value(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+@@ -665,57 +718,53 @@ pub mod parsing {
+
+ impl Parse for TypeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let has_colon;
+- let has_default;
+- Ok(TypeParam {
+- attrs: input.call(Attribute::parse_outer)?,
+- ident: input.parse()?,
+- colon_token: {
+- if input.peek(Token![:]) {
+- has_colon = true;
+- Some(input.parse()?)
+- } else {
+- has_colon = false;
+- None
+- }
+- },
+- bounds: {
+- let mut bounds = Punctuated::new();
+- if has_colon {
+- loop {
+- if input.peek(Token![,])
+- || input.peek(Token![>])
+- || input.peek(Token![=])
+- {
+- break;
+- }
+- let value = input.parse()?;
+- bounds.push_value(value);
+- if !input.peek(Token![+]) {
+- break;
+- }
+- let punct = input.parse()?;
+- bounds.push_punct(punct);
+- }
++ let attrs = input.call(Attribute::parse_outer)?;
++ let ident: Ident = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++
++ let begin_bound = input.fork();
++ let mut is_maybe_const = false;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
++ break;
+ }
+- bounds
+- },
+- eq_token: {
+- if input.peek(Token![=]) {
+- has_default = true;
+- Some(input.parse()?)
+- } else {
+- has_default = false;
+- None
++ if input.peek(Token![?]) && input.peek2(Token![const]) {
++ input.parse::<Token![?]>()?;
++ input.parse::<Token![const]>()?;
++ is_maybe_const = true;
+ }
+- },
+- default: {
+- if has_default {
+- Some(input.parse()?)
+- } else {
+- None
++ let value: TypeParamBound = input.parse()?;
++ bounds.push_value(value);
++ if !input.peek(Token![+]) {
++ break;
+ }
+- },
++ let punct: Token![+] = input.parse()?;
++ bounds.push_punct(punct);
++ }
++ }
++
++ let mut eq_token: Option<Token![=]> = input.parse()?;
++ let mut default = if eq_token.is_some() {
++ Some(input.parse::<Type>()?)
++ } else {
++ None
++ };
++
++ if is_maybe_const {
++ bounds.clear();
++ eq_token = None;
++ default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
++ }
++
++ Ok(TypeParam {
++ attrs,
++ ident,
++ colon_token,
++ bounds,
++ eq_token,
++ default,
+ })
+ }
+ }
+@@ -898,6 +947,8 @@ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
++ #[cfg(feature = "full")]
++ use proc_macro2::TokenTree;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+@@ -1080,9 +1131,25 @@ mod printing {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
++ #[cfg(feature = "full")]
++ {
++ if self.eq_token.is_none() {
++ if let Type::Verbatim(default) = default {
++ let mut iter = default.clone().into_iter();
++ match (iter.next(), iter.next()) {
++ (Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
++ if q.as_char() == '?' && c == "const" =>
++ {
++ return default.to_tokens(tokens);
++ }
++ _ => {}
++ }
++ }
++ }
++ }
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+@@ -1117,9 +1184,9 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
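The generics.rs hunk above replaces derive attributes with hand-written Default/Clone/Debug/Eq/Hash impls (via the local generics_wrapper_impls! macro) and keeps the split_for_impl doc example. As context for that example, a hedged sketch of how a derive-style macro usually consumes split_for_impl, assuming syn with its "derive" and "printing" features plus the quote crate; MyTrait and describe are illustrative names, not part of the patch:

    use proc_macro2::TokenStream;
    use quote::quote;
    use syn::DeriveInput;

    fn expand(input: &DeriveInput) -> TokenStream {
        let name = &input.ident;
        // impl_generics carries the parameter declarations (`<T: Bound>`),
        // ty_generics the bare arguments (`<T>`), and where_clause any
        // trailing `where` predicates, exactly as split by the code above.
        let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
        quote! {
            impl #impl_generics MyTrait for #name #ty_generics #where_clause {
                fn describe(&self) -> &'static str { "derived" }
            }
        }
    }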
+diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
+index ff4485ace9..0d8f7d3ddc 100644
+--- a/third_party/rust/syn/src/item.rs
++++ b/third_party/rust/syn/src/item.rs
+@@ -1,17 +1,15 @@
+ use super::*;
+-use crate::derive::{Data, DeriveInput};
++use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+ use crate::punctuated::Punctuated;
+ use proc_macro2::TokenStream;
+
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
++#[cfg(feature = "parsing")]
++use std::mem;
+
+ ast_enum_of_structs! {
+ /// Things that can appear directly inside of a module or scope.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -21,7 +19,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Item #manual_extra_traits {
++ pub enum Item {
+ /// A constant item: `const MAX: u16 = 65535`.
+ Const(ItemConst),
+
+@@ -83,7 +81,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A constant item: `const MAX: u16 = 65535`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -100,7 +98,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemEnum {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -115,7 +113,7 @@ ast_struct! {
+ ast_struct! {
+ /// An `extern crate` item: `extern crate serde`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemExternCrate {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -131,7 +129,7 @@ ast_struct! {
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ...
+ /// }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -143,7 +141,7 @@ ast_struct! {
+ ast_struct! {
+ /// A block of foreign items: `extern "C" { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemForeignMod {
+ pub attrs: Vec<Attribute>,
+ pub abi: Abi,
+@@ -156,7 +154,7 @@ ast_struct! {
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemImpl {
+ pub attrs: Vec<Attribute>,
+ pub defaultness: Option<Token![default]>,
+@@ -175,7 +173,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMacro {
+ pub attrs: Vec<Attribute>,
+ /// The `example` in `macro_rules! example { ... }`.
+@@ -188,8 +186,8 @@ ast_struct! {
+ ast_struct! {
+ /// A 2.0-style declarative macro introduced by the `macro` keyword.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- pub struct ItemMacro2 #manual_extra_traits {
++ /// *This type is available only if Syn is built with the `"full"` feature.*
++ pub struct ItemMacro2 {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub macro_token: Token![macro],
+@@ -201,7 +199,7 @@ ast_struct! {
+ ast_struct! {
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -215,7 +213,7 @@ ast_struct! {
+ ast_struct! {
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -233,7 +231,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStruct {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -248,7 +246,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait definition: `pub trait Iterator { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTrait {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -267,7 +265,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTraitAlias {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -283,7 +281,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -299,7 +297,7 @@ ast_struct! {
+ ast_struct! {
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUnion {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -313,7 +311,7 @@ ast_struct! {
+ ast_struct! {
+ /// A use declaration: `use std::collections::HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUse {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -324,145 +322,32 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Item {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Item {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Item::Const(this), Item::Const(other)) => this == other,
+- (Item::Enum(this), Item::Enum(other)) => this == other,
+- (Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
+- (Item::Fn(this), Item::Fn(other)) => this == other,
+- (Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
+- (Item::Impl(this), Item::Impl(other)) => this == other,
+- (Item::Macro(this), Item::Macro(other)) => this == other,
+- (Item::Macro2(this), Item::Macro2(other)) => this == other,
+- (Item::Mod(this), Item::Mod(other)) => this == other,
+- (Item::Static(this), Item::Static(other)) => this == other,
+- (Item::Struct(this), Item::Struct(other)) => this == other,
+- (Item::Trait(this), Item::Trait(other)) => this == other,
+- (Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
+- (Item::Type(this), Item::Type(other)) => this == other,
+- (Item::Union(this), Item::Union(other)) => this == other,
+- (Item::Use(this), Item::Use(other)) => this == other,
+- (Item::Verbatim(this), Item::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Item {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
++impl Item {
++ #[cfg(feature = "parsing")]
++ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+- Item::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- Item::Enum(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- Item::ExternCrate(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- Item::Fn(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- Item::ForeignMod(item) => {
+- state.write_u8(4);
+- item.hash(state);
+- }
+- Item::Impl(item) => {
+- state.write_u8(5);
+- item.hash(state);
+- }
+- Item::Macro(item) => {
+- state.write_u8(6);
+- item.hash(state);
+- }
+- Item::Macro2(item) => {
+- state.write_u8(7);
+- item.hash(state);
+- }
+- Item::Mod(item) => {
+- state.write_u8(8);
+- item.hash(state);
+- }
+- Item::Static(item) => {
+- state.write_u8(9);
+- item.hash(state);
+- }
+- Item::Struct(item) => {
+- state.write_u8(10);
+- item.hash(state);
+- }
+- Item::Trait(item) => {
+- state.write_u8(11);
+- item.hash(state);
+- }
+- Item::TraitAlias(item) => {
+- state.write_u8(12);
+- item.hash(state);
+- }
+- Item::Type(item) => {
+- state.write_u8(13);
+- item.hash(state);
+- }
+- Item::Union(item) => {
+- state.write_u8(14);
+- item.hash(state);
+- }
+- Item::Use(item) => {
+- state.write_u8(15);
+- item.hash(state);
+- }
+- Item::Verbatim(item) => {
+- state.write_u8(16);
+- TokenStreamHelper(item).hash(state);
+- }
++ Item::ExternCrate(ItemExternCrate { attrs, .. })
++ | Item::Use(ItemUse { attrs, .. })
++ | Item::Static(ItemStatic { attrs, .. })
++ | Item::Const(ItemConst { attrs, .. })
++ | Item::Fn(ItemFn { attrs, .. })
++ | Item::Mod(ItemMod { attrs, .. })
++ | Item::ForeignMod(ItemForeignMod { attrs, .. })
++ | Item::Type(ItemType { attrs, .. })
++ | Item::Struct(ItemStruct { attrs, .. })
++ | Item::Enum(ItemEnum { attrs, .. })
++ | Item::Union(ItemUnion { attrs, .. })
++ | Item::Trait(ItemTrait { attrs, .. })
++ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
++ | Item::Impl(ItemImpl { attrs, .. })
++ | Item::Macro(ItemMacro { attrs, .. })
++ | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
++ Item::Verbatim(_) => Vec::new(),
+ Item::__Nonexhaustive => unreachable!(),
+ }
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ItemMacro2 {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ItemMacro2 {
+- fn eq(&self, other: &Self) -> bool {
+- self.attrs == other.attrs
+- && self.vis == other.vis
+- && self.macro_token == other.macro_token
+- && self.ident == other.ident
+- && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ItemMacro2 {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.attrs.hash(state);
+- self.vis.hash(state);
+- self.macro_token.hash(state);
+- self.ident.hash(state);
+- TokenStreamHelper(&self.rules).hash(state);
+- }
+-}
+-
+ impl From<DeriveInput> for Item {
+ fn from(input: DeriveInput) -> Item {
+ match input.data {
+@@ -496,10 +381,57 @@ impl From<DeriveInput> for Item {
+ }
+ }
+
++impl From<ItemStruct> for DeriveInput {
++ fn from(input: ItemStruct) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Struct(DataStruct {
++ struct_token: input.struct_token,
++ fields: input.fields,
++ semi_token: input.semi_token,
++ }),
++ }
++ }
++}
++
++impl From<ItemEnum> for DeriveInput {
++ fn from(input: ItemEnum) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Enum(DataEnum {
++ enum_token: input.enum_token,
++ brace_token: input.brace_token,
++ variants: input.variants,
++ }),
++ }
++ }
++}
++
++impl From<ItemUnion> for DeriveInput {
++ fn from(input: ItemUnion) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Union(DataUnion {
++ union_token: input.union_token,
++ fields: input.fields,
++ }),
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -530,7 +462,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UsePath {
+ pub ident: Ident,
+ pub colon2_token: Token![::],
+@@ -541,7 +473,7 @@ ast_struct! {
+ ast_struct! {
+ /// An identifier imported by a `use` item: `HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseName {
+ pub ident: Ident,
+ }
+@@ -550,7 +482,7 @@ ast_struct! {
+ ast_struct! {
+ /// An renamed identifier imported by a `use` item: `HashMap as Map`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseRename {
+ pub ident: Ident,
+ pub as_token: Token![as],
+@@ -561,7 +493,7 @@ ast_struct! {
+ ast_struct! {
+ /// A glob import in a `use` item: `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGlob {
+ pub star_token: Token![*],
+ }
+@@ -570,7 +502,7 @@ ast_struct! {
+ ast_struct! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGroup {
+ pub brace_token: token::Brace,
+ pub items: Punctuated<UseTree, Token![,]>,
+@@ -580,7 +512,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// An item within an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -590,7 +522,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ForeignItem #manual_extra_traits {
++ pub enum ForeignItem {
+ /// A foreign function in an `extern` block.
+ Fn(ForeignItemFn),
+
+@@ -614,7 +546,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A foreign function in an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -626,7 +558,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -642,7 +574,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign type in an `extern` block: `type void`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -655,7 +587,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an extern block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -663,61 +595,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ForeignItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ForeignItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
+- (ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
+- (ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
+- (ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
+- (ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ForeignItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ForeignItem::Fn(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ForeignItem::Static(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ForeignItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ForeignItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ForeignItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ForeignItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item declaration within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -727,7 +608,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum TraitItem #manual_extra_traits {
++ pub enum TraitItem {
+ /// An associated constant within the definition of a trait.
+ Const(TraitItemConst),
+
+@@ -751,7 +632,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemConst {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+@@ -766,7 +647,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait method within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub sig: Signature,
+@@ -778,7 +659,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemType {
+ pub attrs: Vec<Attribute>,
+ pub type_token: Token![type],
+@@ -794,7 +675,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -802,61 +683,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for TraitItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for TraitItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (TraitItem::Const(this), TraitItem::Const(other)) => this == other,
+- (TraitItem::Method(this), TraitItem::Method(other)) => this == other,
+- (TraitItem::Type(this), TraitItem::Type(other)) => this == other,
+- (TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
+- (TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for TraitItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- TraitItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- TraitItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- TraitItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- TraitItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- TraitItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- TraitItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -866,7 +696,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ImplItem #manual_extra_traits {
++ pub enum ImplItem {
+ /// An associated constant within an impl block.
+ Const(ImplItemConst),
+
+@@ -890,7 +720,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -908,7 +738,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -921,7 +751,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -938,7 +768,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -946,62 +776,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ImplItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ImplItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ImplItem::Const(this), ImplItem::Const(other)) => this == other,
+- (ImplItem::Method(this), ImplItem::Method(other)) => this == other,
+- (ImplItem::Type(this), ImplItem::Type(other)) => this == other,
+- (ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
+- (ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ImplItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ImplItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ImplItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ImplItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ImplItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ImplItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ImplItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// A function signature in a trait or implementation: `unsafe fn
+ /// initialize(&self)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Signature {
+ pub constness: Option<Token![const]>,
+ pub asyncness: Option<Token![async]>,
+@@ -1017,13 +796,34 @@ ast_struct! {
+ }
+ }
+
++impl Signature {
++ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
++ pub fn receiver(&self) -> Option<&FnArg> {
++ let arg = self.inputs.first()?;
++ match arg {
++ FnArg::Receiver(_) => Some(arg),
++ FnArg::Typed(PatType { pat, .. }) => {
++ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
++ if ident == "self" {
++ return Some(arg);
++ }
++ }
++ None
++ }
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum FnArg {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
++ ///
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
+ Receiver(Receiver),
+
+ /// A function argument accepted by pattern and type.
+@@ -1035,7 +835,10 @@ ast_struct! {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Receiver {
+ pub attrs: Vec<Attribute>,
+ pub reference: Option<(Token![&], Option<Lifetime>)>,
+@@ -1056,7 +859,8 @@ pub mod parsing {
+
+ use crate::ext::IdentExt;
+ use crate::parse::discouraged::Speculative;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
++ use crate::token::Brace;
+ use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
+ use std::iter::{self, FromIterator};
+
+@@ -1064,18 +868,26 @@ pub mod parsing {
+
+ impl Parse for Item {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![extern]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
++ }
++ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+@@ -1083,8 +895,6 @@ pub mod parsing {
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1094,18 +904,61 @@ pub mod parsing {
+ } else if lookahead.peek(Token![use]) {
+ input.parse().map(Item::Use)
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(Item::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Static(ItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+- input.parse().map(Item::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let vis = input.parse()?;
++ let const_token = input.parse()?;
++ let ident = {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ };
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Const(ItemConst {
++ attrs: Vec::new(),
++ vis,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1117,21 +970,19 @@ pub mod parsing {
+ {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
+- input.parse().map(Item::Impl)
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(Item::Type)
++ parse_item_type(begin, input)
+ } else if lookahead.peek(existential) {
+ input.call(item_existential).map(Item::Verbatim)
+ } else if lookahead.peek(Token![struct]) {
+@@ -1147,14 +998,18 @@ pub mod parsing {
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+- input.parse().map(Item::Impl)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Token![macro]) {
+ input.parse().map(Item::Macro2)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1163,32 +1018,64 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- Item::ExternCrate(item) => &mut item.attrs,
+- Item::Use(item) => &mut item.attrs,
+- Item::Static(item) => &mut item.attrs,
+- Item::Const(item) => &mut item.attrs,
+- Item::Fn(item) => &mut item.attrs,
+- Item::Mod(item) => &mut item.attrs,
+- Item::ForeignMod(item) => &mut item.attrs,
+- Item::Type(item) => &mut item.attrs,
+- Item::Struct(item) => &mut item.attrs,
+- Item::Enum(item) => &mut item.attrs,
+- Item::Union(item) => &mut item.attrs,
+- Item::Trait(item) => &mut item.attrs,
+- Item::TraitAlias(item) => &mut item.attrs,
+- Item::Impl(item) => &mut item.attrs,
+- Item::Macro(item) => &mut item.attrs,
+- Item::Macro2(item) => &mut item.attrs,
+- Item::Verbatim(_) => return Ok(item),
+- Item::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(item)
++ }
++ }
++
++ struct FlexibleItemType {
++ vis: Visibility,
++ defaultness: Option<Token![default]>,
++ type_token: Token![type],
++ ident: Ident,
++ generics: Generics,
++ colon_token: Option<Token![:]>,
++ bounds: Punctuated<TypeParamBound, Token![+]>,
++ ty: Option<(Token![=], Type)>,
++ semi_token: Token![;],
++ }
++
++ impl Parse for FlexibleItemType {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let type_token: Token![type] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let mut generics: Generics = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ bounds.push_value(input.parse::<TypeParamBound>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ bounds.push_punct(input.parse::<Token![+]>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ }
+ }
++ generics.where_clause = input.parse()?;
++ let ty = if let Some(eq_token) = input.parse()? {
++ Some((eq_token, input.parse::<Type>()?))
++ } else {
++ None
++ };
++ let semi_token: Token![;] = input.parse()?;
+
+- Ok(item)
++ Ok(FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ })
+ }
+ }
+
+@@ -1310,7 +1197,6 @@ pub mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+- || lookahead.peek(Token![extern])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ if input.peek(Token![::]) {
+@@ -1392,69 +1278,126 @@ pub mod parsing {
+ }
+ }
+
+- impl Parse for ItemFn {
+- fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let vis: Visibility = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
++ fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
++ let trailing_punct = args.trailing_punct();
+
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+- let variadic = inputs.last().as_ref().and_then(get_variadic);
+-
+- fn get_variadic(input: &&FnArg) -> Option<Variadic> {
+- if let FnArg::Typed(PatType { ty, .. }) = input {
+- if let Type::Verbatim(tokens) = &**ty {
+- if let Ok(dots) = parse2(tokens.clone()) {
+- return Some(Variadic {
+- attrs: Vec::new(),
+- dots,
+- });
+- }
+- }
+- }
+- None
++ let last = match args.last_mut()? {
++ FnArg::Typed(last) => last,
++ _ => return None,
++ };
++
++ let ty = match last.ty.as_ref() {
++ Type::Verbatim(ty) => ty,
++ _ => return None,
++ };
++
++ let mut variadic = Variadic {
++ attrs: Vec::new(),
++ dots: parse2(ty.clone()).ok()?,
++ };
++
++ if let Pat::Verbatim(pat) = last.pat.as_ref() {
++ if pat.to_string() == "..." && !trailing_punct {
++ variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
++ args.pop();
+ }
++ }
+
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ Some(variadic)
++ }
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
++ TokenStream::from_iter(vec![
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[0]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[1]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Alone);
++ dot.set_span(dots.spans[2]);
++ dot
++ }),
++ ])
++ }
+
+- Ok(ItemFn {
+- attrs: private::attrs(outer_attrs, inner_attrs),
+- vis,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Box::new(Block { brace_token, stmts }),
+- })
++ fn peek_signature(input: ParseStream) -> bool {
++ let fork = input.fork();
++ fork.parse::<Option<Token![const]>>().is_ok()
++ && fork.parse::<Option<Token![async]>>().is_ok()
++ && fork.parse::<Option<Token![unsafe]>>().is_ok()
++ && fork.parse::<Option<Abi>>().is_ok()
++ && fork.peek(Token![fn])
++ }
++
++ fn parse_signature(input: ParseStream) -> Result<Signature> {
++ let constness: Option<Token![const]> = input.parse()?;
++ let asyncness: Option<Token![async]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let abi: Option<Abi> = input.parse()?;
++ let fn_token: Token![fn] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let generics: Generics = input.parse()?;
++
++ let content;
++ let paren_token = parenthesized!(content in input);
++ let mut inputs = parse_fn_args(&content)?;
++ let variadic = pop_variadic(&mut inputs);
++
++ let output: ReturnType = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ Ok(Signature {
++ constness,
++ asyncness,
++ unsafety,
++ abi,
++ fn_token,
++ ident,
++ paren_token,
++ inputs,
++ output,
++ variadic,
++ generics: Generics {
++ where_clause,
++ ..generics
++ },
++ })
++ }
++
++ impl Parse for ItemFn {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ parse_rest_of_fn(input, outer_attrs, vis, sig)
+ }
+ }
+
++ fn parse_rest_of_fn(
++ input: ParseStream,
++ outer_attrs: Vec<Attribute>,
++ vis: Visibility,
++ sig: Signature,
++ ) -> Result<ItemFn> {
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let stmts = content.call(Block::parse_within)?;
++
++ Ok(ItemFn {
++ attrs: private::attrs(outer_attrs, inner_attrs),
++ vis,
++ sig,
++ block: Box::new(Block { brace_token, stmts }),
++ })
++ }
++
+ impl Parse for FnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1491,26 +1434,79 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
++ let mut args = Punctuated::new();
++ let mut has_receiver = false;
++
++ while !input.is_empty() {
++ let attrs = input.call(Attribute::parse_outer)?;
++
++ let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
++ FnArg::Typed(PatType {
++ attrs,
++ pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
++ colon_token: Token![:](dots.spans[0]),
++ ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
++ })
++ } else {
++ let mut arg: FnArg = input.parse()?;
++ match &mut arg {
++ FnArg::Receiver(receiver) if has_receiver => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected second method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) if !args.is_empty() => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) => {
++ has_receiver = true;
++ receiver.attrs = attrs;
++ }
++ FnArg::Typed(arg) => arg.attrs = attrs,
++ }
++ arg
++ };
++ args.push_value(arg);
++
++ if input.is_empty() {
++ break;
++ }
++
++ let comma: Token![,] = input.parse()?;
++ args.push_punct(comma);
++ }
++
++ Ok(args)
++ }
++
+ fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
++ // Hack to parse pre-2018 syntax in
++ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
++ // because the rest of the test case is valuable.
++ if input.peek(Ident) && input.peek2(Token![<]) {
++ let span = input.fork().parse::<Ident>()?.span();
++ return Ok(PatType {
++ attrs: Vec::new(),
++ pat: Box::new(Pat::Wild(PatWild {
++ attrs: Vec::new(),
++ underscore_token: Token![_](span),
++ })),
++ colon_token: Token![:](span),
++ ty: input.parse()?,
++ });
++ }
++
+ Ok(PatType {
+ attrs: Vec::new(),
+- pat: input.parse()?,
++ pat: Box::new(pat::parsing::multi_pat(input)?),
+ colon_token: input.parse()?,
+ ty: Box::new(match input.parse::<Option<Token![...]>>()? {
+- Some(dot3) => {
+- let args = vec![
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Alone)),
+- ];
+- let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
+- |(mut arg, span)| {
+- arg.set_span(*span);
+- arg
+- },
+- ));
+- Type::Verbatim(tokens)
+- }
++ Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
+ None => input.parse()?,
+ }),
+ })
+@@ -1581,22 +1577,60 @@ pub mod parsing {
+
+ impl Parse for ForeignItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![fn]) {
+- input.parse().map(ForeignItem::Fn)
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(token::Brace) {
++ let content;
++ braced!(content in input);
++ content.call(Attribute::parse_inner)?;
++ content.call(Block::parse_within)?;
++
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Fn(ForeignItemFn {
++ attrs: Vec::new(),
++ vis,
++ sig,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(ForeignItem::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![=]) {
++ input.parse::<Token![=]>()?;
++ input.parse::<Expr>()?;
++ input.parse::<Token![;]>()?;
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Static(ForeignItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ForeignItem::Type)
++ parse_foreign_item_type(begin, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1605,17 +1639,16 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- ForeignItem::Fn(item) => &mut item.attrs,
+- ForeignItem::Static(item) => &mut item.attrs,
+- ForeignItem::Type(item) => &mut item.attrs,
+- ForeignItem::Macro(item) => &mut item.attrs,
+- ForeignItem::Verbatim(_) | ForeignItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
+- }
++ let item_attrs = match &mut item {
++ ForeignItem::Fn(item) => &mut item.attrs,
++ ForeignItem::Static(item) => &mut item.attrs,
++ ForeignItem::Type(item) => &mut item.attrs,
++ ForeignItem::Macro(item) => &mut item.attrs,
++ ForeignItem::Verbatim(_) => return Ok(item),
++ ForeignItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+
+ Ok(item)
+ }
+@@ -1625,55 +1658,12 @@ pub mod parsing {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let mut inputs = Punctuated::new();
+- let mut variadic = None;
+- while !content.is_empty() {
+- let attrs = content.call(Attribute::parse_outer)?;
+-
+- if let Some(dots) = content.parse()? {
+- variadic = Some(Variadic { attrs, dots });
+- break;
+- }
+-
+- let mut arg = content.call(fn_arg_typed)?;
+- arg.attrs = attrs;
+- inputs.push_value(FnArg::Typed(arg));
+- if content.is_empty() {
+- break;
+- }
+-
+- inputs.push_punct(content.parse()?);
+- }
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+ let semi_token: Token![;] = input.parse()?;
+-
+ Ok(ForeignItemFn {
+ attrs,
+ vis,
+- sig: Signature {
+- constness: None,
+- asyncness: None,
+- unsafety: None,
+- abi: None,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ semi_token,
+ })
+ }
+@@ -1706,6 +1696,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some()
++ || generics.lt_token.is_some()
++ || generics.where_clause.is_some()
++ || colon_token.is_some()
++ || ty.is_some()
++ {
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Type(ForeignItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ForeignItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1742,6 +1763,36 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(Item::Type(ItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty: Box::new(ty),
++ semi_token,
++ }))
++ }
++ }
++
+ #[cfg(not(feature = "printing"))]
+ fn item_existential(input: ParseStream) -> Result<TokenStream> {
+ Err(input.error("existential type is not supported"))
+@@ -1887,7 +1938,7 @@ pub mod parsing {
+
+ impl Parse for ItemTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let attrs = input.call(Attribute::parse_outer)?;
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let auto_token: Option<Token![auto]> = input.parse()?;
+@@ -1896,7 +1947,7 @@ pub mod parsing {
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+- attrs,
++ outer_attrs,
+ vis,
+ unsafety,
+ auto_token,
+@@ -1909,7 +1960,7 @@ pub mod parsing {
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+- attrs: Vec<Attribute>,
++ outer_attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+@@ -1937,13 +1988,14 @@ pub mod parsing {
+
+ let content;
+ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemTrait {
+- attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ vis,
+ unsafety,
+ auto_token,
+@@ -2014,14 +2066,19 @@ pub mod parsing {
+
+ impl Parse for TraitItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
+ let ahead = input.fork();
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![const]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(TraitItem::Method)
++ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.parse().map(TraitItem::Const)
+ } else if lookahead.peek(Token![async])
+ || lookahead.peek(Token![unsafe])
+@@ -2032,18 +2089,11 @@ pub mod parsing {
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(TraitItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(TraitItem::Type)
++ parse_trait_item_type(begin.fork(), input)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::])
+ {
+@@ -2052,18 +2102,20 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- TraitItem::Const(item) => &mut item.attrs,
+- TraitItem::Method(item) => &mut item.attrs,
+- TraitItem::Type(item) => &mut item.attrs,
+- TraitItem::Macro(item) => &mut item.attrs,
+- TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ match (vis, defaultness) {
++ (Visibility::Inherited, None) => {}
++ _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
+ }
+
++ let item_attrs = match &mut item {
++ TraitItem::Const(item) => &mut item.attrs,
++ TraitItem::Method(item) => &mut item.attrs,
++ TraitItem::Type(item) => &mut item.attrs,
++ TraitItem::Macro(item) => &mut item.attrs,
++ TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+ Ok(item)
+ }
+ }
+@@ -2073,7 +2125,14 @@ pub mod parsing {
+ Ok(TraitItemConst {
+ attrs: input.call(Attribute::parse_outer)?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ default: {
+@@ -2093,20 +2152,7 @@ pub mod parsing {
+ impl Parse for TraitItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+
+ let lookahead = input.lookahead1();
+ let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
+@@ -2124,22 +2170,7 @@ pub mod parsing {
+
+ Ok(TraitItemMethod {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ default: brace_token.map(|brace_token| Block { brace_token, stmts }),
+ semi_token,
+ })
+@@ -2188,6 +2219,35 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || vis.is_some() {
++ Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(TraitItem::Type(TraitItemType {
++ attrs: Vec::new(),
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ default: ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for TraitItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2207,52 +2267,67 @@ pub mod parsing {
+
+ impl Parse for ItemImpl {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let defaultness: Option<Token![default]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let impl_token: Token![impl] = input.parse()?;
+-
+- let has_generics = input.peek(Token![<])
+- && (input.peek2(Token![>])
+- || input.peek2(Token![#])
+- || (input.peek2(Ident) || input.peek2(Lifetime))
+- && (input.peek3(Token![:])
+- || input.peek3(Token![,])
+- || input.peek3(Token![>])));
+- let generics: Generics = if has_generics {
+- input.parse()?
+- } else {
+- Generics::default()
+- };
+-
+- let trait_ = {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- if ahead.parse::<Option<Token![!]>>().is_ok()
+- && ahead.parse::<Path>().is_ok()
+- && ahead.parse::<Token![for]>().is_ok()
+- {
+- let polarity: Option<Token![!]> = input.parse()?;
+- let path: Path = input.parse()?;
+- let for_token: Token![for] = input.parse()?;
+- Some((polarity, path, for_token))
+- } else {
+- None
+- }
+- };
+- let self_ty: Type = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let allow_const_impl = false;
++ parse_impl(input, allow_const_impl).map(Option::unwrap)
++ }
++ }
++
++ fn parse_impl(input: ParseStream, allow_const_impl: bool) -> Result<Option<ItemImpl>> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let impl_token: Token![impl] = input.parse()?;
++
++ let has_generics = input.peek(Token![<])
++ && (input.peek2(Token![>])
++ || input.peek2(Token![#])
++ || (input.peek2(Ident) || input.peek2(Lifetime))
++ && (input.peek3(Token![:])
++ || input.peek3(Token![,])
++ || input.peek3(Token![>]))
++ || input.peek2(Token![const]));
++ let generics: Generics = if has_generics {
++ input.parse()?
++ } else {
++ Generics::default()
++ };
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
++ let is_const_impl = allow_const_impl
++ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
++ if is_const_impl {
++ input.parse::<Option<Token![?]>>()?;
++ input.parse::<Token![const]>()?;
++ }
+
+- let mut items = Vec::new();
+- while !content.is_empty() {
+- items.push(content.parse()?);
++ let trait_ = (|| -> Option<_> {
++ let ahead = input.fork();
++ let polarity: Option<Token![!]> = ahead.parse().ok()?;
++ let mut path: Path = ahead.parse().ok()?;
++ if path.segments.last().unwrap().arguments.is_empty() && ahead.peek(token::Paren) {
++ let parenthesized = PathArguments::Parenthesized(ahead.parse().ok()?);
++ path.segments.last_mut().unwrap().arguments = parenthesized;
+ }
++ let for_token: Token![for] = ahead.parse().ok()?;
++ input.advance_to(&ahead);
++ Some((polarity, path, for_token))
++ })();
++
++ let self_ty: Type = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++
++ let mut items = Vec::new();
++ while !content.is_empty() {
++ items.push(content.parse()?);
++ }
+
+- Ok(ItemImpl {
++ if is_const_impl {
++ Ok(None)
++ } else {
++ Ok(Some(ItemImpl {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+ defaultness,
+ unsafety,
+@@ -2265,12 +2340,13 @@ pub mod parsing {
+ self_ty: Box::new(self_ty),
+ brace_token,
+ items,
+- })
++ }))
+ }
+ }
+
+ impl Parse for ImplItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+@@ -2284,28 +2360,38 @@ pub mod parsing {
+ None
+ };
+
+- let mut item = if lookahead.peek(Token![const]) {
+- ahead.parse::<Token![const]>()?;
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(ImplItem::Method)
++ } else if lookahead.peek(Token![const]) {
++ let const_token: Token![const] = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
+- input.parse().map(ImplItem::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.advance_to(&ahead);
++ let ident: Ident = input.call(Ident::parse_any)?;
++ let colon_token: Token![:] = input.parse()?;
++ let ty: Type = input.parse()?;
++ if let Some(eq_token) = input.parse()? {
++ return Ok(ImplItem::Const(ImplItemConst {
++ attrs,
++ vis,
++ defaultness,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }));
++ } else {
++ input.parse::<Token![;]>()?;
++ return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ImplItem::Type)
++ parse_impl_item_type(begin, input)
+ } else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
+ input.call(item_existential).map(ImplItem::Verbatim)
+ } else if vis.is_inherited()
+@@ -2313,7 +2399,6 @@ pub mod parsing {
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -2346,7 +2431,14 @@ pub mod parsing {
+ vis: input.parse()?,
+ defaultness: input.parse()?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+@@ -2358,50 +2450,39 @@ pub mod parsing {
+
+ impl Parse for ImplItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
+-
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ let sig = parse_signature(input)?;
++
++ let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ // Accept methods without a body in an impl block because
++ // rustc's *parser* does not reject them (the compilation error
++ // is emitted later than parsing) and it can be useful for macro
++ // DSLs.
++ let mut punct = Punct::new(';', Spacing::Alone);
++ punct.set_span(semi.span);
++ let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
++ Block {
++ brace_token: Brace::default(),
++ stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
++ }
++ } else {
++ let content;
++ let brace_token = braced!(content in input);
++ attrs.extend(content.call(Attribute::parse_inner)?);
++ Block {
++ brace_token,
++ stmts: content.call(Block::parse_within)?,
++ }
++ };
+
+ Ok(ImplItemMethod {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ vis,
+ defaultness,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Block { brace_token, stmts },
++ sig,
++ block,
+ })
+ }
+ }
+@@ -2426,6 +2507,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if colon_token.is_some() || ty.is_none() {
++ Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(ImplItem::Type(ImplItemType {
++ attrs: Vec::new(),
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ImplItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2471,6 +2583,7 @@ mod printing {
+
+ use crate::attr::FilterAttrs;
+ use crate::print::TokensOrDefault;
++ use crate::punctuated::Pair;
+
+ impl ToTokens for ItemExternCrate {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+@@ -2835,6 +2948,14 @@ mod printing {
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
++ if self.block.stmts.len() == 1 {
++ if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
++ if verbatim.to_string() == ";" {
++ verbatim.to_tokens(tokens);
++ return;
++ }
++ }
++ }
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+@@ -2905,6 +3026,33 @@ mod printing {
+ }
+ }
+
++ fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
++ let arg = match arg {
++ FnArg::Typed(arg) => arg,
++ FnArg::Receiver(receiver) => {
++ receiver.to_tokens(tokens);
++ return false;
++ }
++ };
++
++ match arg.ty.as_ref() {
++ Type::Verbatim(ty) if ty.to_string() == "..." => {
++ match arg.pat.as_ref() {
++ Pat::Verbatim(pat) if pat.to_string() == "..." => {
++ tokens.append_all(arg.attrs.outer());
++ pat.to_tokens(tokens);
++ }
++ _ => arg.to_tokens(tokens),
++ }
++ true
++ }
++ _ => {
++ arg.to_tokens(tokens);
++ false
++ }
++ }
++ }
++
+ impl ToTokens for Signature {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.constness.to_tokens(tokens);
+@@ -2915,11 +3063,24 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+- self.inputs.to_tokens(tokens);
+- if self.variadic.is_some() && !self.inputs.empty_or_trailing() {
+- <Token![,]>::default().to_tokens(tokens);
++ let mut last_is_variadic = false;
++ for input in self.inputs.pairs() {
++ match input {
++ Pair::Punctuated(input, comma) => {
++ maybe_variadic_to_tokens(input, tokens);
++ comma.to_tokens(tokens);
++ }
++ Pair::End(input) => {
++ last_is_variadic = maybe_variadic_to_tokens(input, tokens);
++ }
++ }
++ }
++ if self.variadic.is_some() && !last_is_variadic {
++ if !self.inputs.empty_or_trailing() {
++ <Token![,]>::default().to_tokens(tokens);
++ }
++ self.variadic.to_tokens(tokens);
+ }
+- self.variadic.to_tokens(tokens);
+ });
+ self.output.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
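// A minimal standalone sketch of what the item.rs hunks above enable in syn: an impl
// method with no body (`fn probe(&self);`) now parses instead of erroring, carried as a
// method whose block holds a single verbatim `;` statement. This is illustrative only and
// assumes the patched syn 1.0.40 crate with its default features ("full", "parsing").
use syn::{ImplItem, ItemImpl, Stmt};

fn main() {
    let imp: ItemImpl = syn::parse_str("impl Thing { fn probe(&self); }").unwrap();
    if let ImplItem::Method(method) = &imp.items[0] {
        assert_eq!(method.sig.ident, "probe");
        // The placeholder body is one Stmt::Item(Item::Verbatim(";")), as constructed above.
        assert!(matches!(method.block.stmts[0], Stmt::Item(_)));
    }
}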
+diff --git a/third_party/rust/syn/src/keyword.rs b/third_party/rust/syn/src/keyword.rs
+deleted file mode 100644
+index e69de29bb2..0000000000
+diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs
+index c8ada7e638..3da506731e 100644
+--- a/third_party/rust/syn/src/lib.rs
++++ b/third_party/rust/syn/src/lib.rs
+@@ -1,3 +1,11 @@
++//! [![github]](https://github.com/dtolnay/syn)&ensp;[![crates-io]](https://crates.io/crates/syn)&ensp;[![docs-rs]](https://docs.rs/syn)
++//!
++//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
++//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
++//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
++//!
++//! <br>
++//!
+ //! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
+ //! tree of Rust source code.
+ //!
+@@ -62,8 +70,8 @@
+ //! ```
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use quote::quote;
+ //! use syn::{parse_macro_input, DeriveInput};
+@@ -242,35 +250,48 @@
+ //! dynamic library libproc_macro from rustc toolchain.
+
+ // Syn types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
+ #![deny(clippy::all, clippy::pedantic)]
+ // Ignored clippy lints.
+ #![allow(
+- clippy::block_in_if_condition_stmt,
++ clippy::blocks_in_if_conditions,
+ clippy::cognitive_complexity,
+ clippy::doc_markdown,
+ clippy::eval_order_dependence,
+ clippy::inherent_to_string,
+ clippy::large_enum_variant,
++ clippy::manual_non_exhaustive,
++ clippy::match_like_matches_macro,
++ clippy::match_on_vec_items,
++ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::never_loop,
+ clippy::suspicious_op_assign_impl,
+ clippy::too_many_arguments,
+- clippy::trivially_copy_pass_by_ref
++ clippy::trivially_copy_pass_by_ref,
++ clippy::unnecessary_unwrap
+ )]
+ // Ignored clippy_pedantic lints.
+ #![allow(
+ clippy::cast_possible_truncation,
++ clippy::default_trait_access,
+ clippy::empty_enum,
++ clippy::expl_impl_clone_on_copy,
+ clippy::if_not_else,
+ clippy::items_after_statements,
++ clippy::match_same_arms,
++ clippy::missing_errors_doc,
+ clippy::module_name_repetitions,
++ clippy::must_use_candidate,
++ clippy::option_if_let_else,
+ clippy::shadow_unrelated,
+ clippy::similar_names,
+ clippy::single_match_else,
++ clippy::too_many_lines,
+ clippy::unseparated_literal_suffix,
+ clippy::use_self,
+- clippy::used_underscore_binding
++ clippy::used_underscore_binding,
++ clippy::wildcard_imports
+ )]
+
+ #[cfg(all(
+@@ -284,7 +305,6 @@ extern crate unicode_xid;
+ #[cfg(feature = "printing")]
+ extern crate quote;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[macro_use]
+ mod macros;
+
+@@ -307,7 +327,6 @@ pub use crate::attr::{
+ AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
+ };
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod bigint;
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -364,9 +383,7 @@ pub use crate::file::File;
+ mod lifetime;
+ pub use crate::lifetime::Lifetime;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod lit;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::lit::{
+ Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
+ };
+@@ -441,6 +458,9 @@ pub mod parse_macro_input;
+ #[cfg(all(feature = "parsing", feature = "printing"))]
+ pub mod spanned;
+
++#[cfg(all(feature = "parsing", feature = "full"))]
++mod whitespace;
++
+ mod gen {
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+@@ -482,7 +502,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit"` feature.*
++ /// *This module is available only if Syn is built with the `"visit"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -603,7 +623,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit-mut"`
++ /// *This module is available only if Syn is built with the `"visit-mut"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -702,7 +722,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"fold"` feature.*
++ /// *This module is available only if Syn is built with the `"fold"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -744,6 +764,22 @@ mod gen {
+ #[rustfmt::skip]
+ pub mod fold;
+
++ #[cfg(feature = "clone-impls")]
++ #[rustfmt::skip]
++ mod clone;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod eq;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod hash;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod debug;
++
+ #[cfg(any(feature = "full", feature = "derive"))]
+ #[path = "../gen_helper.rs"]
+ mod helper;
+@@ -757,6 +793,8 @@ pub mod export;
+ mod custom_keyword;
+ mod custom_punctuation;
+ mod sealed;
++mod span;
++mod thread;
+
+ #[cfg(feature = "parsing")]
+ mod lookahead;
+@@ -764,13 +802,15 @@ mod lookahead;
+ #[cfg(feature = "parsing")]
+ pub mod parse;
+
+-mod span;
++#[cfg(feature = "full")]
++mod reserved;
++
++#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
++mod verbatim;
+
+ #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+ mod print;
+
+-mod thread;
+-
+ ////////////////////////////////////////////////////////////////////////////////
+
+ #[allow(dead_code, non_camel_case_types)]
+@@ -800,14 +840,14 @@ pub use crate::error::{Error, Result};
+ ///
+ /// [`syn::parse2`]: parse2
+ ///
+-/// *This function is available if Syn is built with both the `"parsing"` and
++/// *This function is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ ///
+ /// # Examples
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use quote::quote;
+ /// use syn::DeriveInput;
+@@ -847,7 +887,7 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
+ ///
+ /// [`syn::parse`]: parse()
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ parse::Parser::parse2(T::parse, tokens)
+@@ -855,7 +895,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+
+ /// Parse a string of Rust code into the chosen syntax tree node.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ ///
+ /// # Hygiene
+ ///
+@@ -874,9 +914,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(feature = "parsing")]
+ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+@@ -894,7 +932,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ ///
+ /// If present, either of these would be an error using `from_str`.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` and
++/// *This function is available only if Syn is built with the `"parsing"` and
+ /// `"full"` features.*
+ ///
+ /// # Examples
+@@ -918,9 +956,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub fn parse_file(mut content: &str) -> Result<File> {
+@@ -931,13 +967,16 @@ pub fn parse_file(mut content: &str) -> Result<File> {
+ }
+
+ let mut shebang = None;
+- if content.starts_with("#!") && !content.starts_with("#![") {
+- if let Some(idx) = content.find('\n') {
+- shebang = Some(content[..idx].to_string());
+- content = &content[idx..];
+- } else {
+- shebang = Some(content.to_string());
+- content = "";
++ if content.starts_with("#!") {
++ let rest = whitespace::skip(&content[2..]);
++ if !rest.starts_with('[') {
++ if let Some(idx) = content.find('\n') {
++ shebang = Some(content[..idx].to_string());
++ content = &content[idx..];
++ } else {
++ shebang = Some(content.to_string());
++ content = "";
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs
+index d51c48e827..959cc5f9c6 100644
+--- a/third_party/rust/syn/src/lifetime.rs
++++ b/third_party/rust/syn/src/lifetime.rs
+@@ -18,10 +18,8 @@ use crate::lookahead;
+ /// - All following characters must be Unicode code points with the XID_Continue
+ /// property.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+-#[cfg_attr(feature = "extra-traits", derive(Debug))]
+-#[derive(Clone)]
+ pub struct Lifetime {
+ pub apostrophe: Span,
+ pub ident: Ident,
+@@ -72,6 +70,15 @@ impl Display for Lifetime {
+ }
+ }
+
++impl Clone for Lifetime {
++ fn clone(&self) -> Self {
++ Lifetime {
++ apostrophe: self.apostrophe,
++ ident: self.ident.clone(),
++ }
++ }
++}
++
+ impl PartialEq for Lifetime {
+ fn eq(&self, other: &Lifetime) -> bool {
+ self.ident.eq(&other.ident)
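// A minimal standalone sketch of the lifetime.rs hunk above: Lifetime now carries a
// hand-written Clone, and equality still compares only the identifier, ignoring the
// apostrophe span. Illustrative only; assumes the patched syn plus the proc-macro2 crate.
use proc_macro2::Span;
use syn::Lifetime;

fn main() {
    let a = Lifetime::new("'static", Span::call_site());
    let b = a.clone();
    assert!(a == b); // PartialEq looks at `ident`, not at the apostrophe span
    assert_eq!(a.ident, "static");
}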
+diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs
+index f2209a2980..ee77e75bec 100644
+--- a/third_party/rust/syn/src/lit.rs
++++ b/third_party/rust/syn/src/lit.rs
+@@ -22,9 +22,6 @@ use crate::{Error, Result};
+ ast_enum_of_structs! {
+ /// A Rust literal such as a string or integer or boolean.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+- ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+@@ -33,7 +30,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Lit #manual_extra_traits {
++ pub enum Lit {
+ /// A UTF-8 string literal: `"foo"`.
+ Str(LitStr),
+
+@@ -64,61 +61,44 @@ ast_enum_of_structs! {
+
+ ast_struct! {
+ /// A UTF-8 string literal: `"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitStr #manual_extra_traits_debug {
+- repr: Box<LitStrRepr>,
++ pub struct LitStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+-struct LitStrRepr {
+- token: Literal,
+- suffix: Box<str>,
+-}
+-
+ ast_struct! {
+ /// A byte string literal: `b"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByteStr #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByteStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A byte literal: `b'f'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByte #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByte {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A character literal: `'a'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitChar #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitChar {
++ repr: Box<LitRepr>,
+ }
+ }
+
++struct LitRepr {
++ token: Literal,
++ suffix: Box<str>,
++}
++
+ ast_struct! {
+ /// An integer literal: `1` or `1u16`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitInt #manual_extra_traits_debug {
++ pub struct LitInt {
+ repr: Box<LitIntRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitIntRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -129,15 +109,11 @@ ast_struct! {
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinte or NaN.
+ /// Must be finite. May not be infinite or NaN.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitFloat #manual_extra_traits_debug {
++ pub struct LitFloat {
+ repr: Box<LitFloatRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitFloatRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -146,92 +122,27 @@ struct LitFloatRepr {
+
+ ast_struct! {
+ /// A boolean literal: `true` or `false`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitBool #manual_extra_traits_debug {
++ pub struct LitBool {
+ pub value: bool,
+ pub span: Span,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Lit {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Lit {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Lit::Str(this), Lit::Str(other)) => this == other,
+- (Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
+- (Lit::Byte(this), Lit::Byte(other)) => this == other,
+- (Lit::Char(this), Lit::Char(other)) => this == other,
+- (Lit::Int(this), Lit::Int(other)) => this == other,
+- (Lit::Float(this), Lit::Float(other)) => this == other,
+- (Lit::Bool(this), Lit::Bool(other)) => this == other,
+- (Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Lit {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Lit::Str(lit) => {
+- hash.write_u8(0);
+- lit.hash(hash);
+- }
+- Lit::ByteStr(lit) => {
+- hash.write_u8(1);
+- lit.hash(hash);
+- }
+- Lit::Byte(lit) => {
+- hash.write_u8(2);
+- lit.hash(hash);
+- }
+- Lit::Char(lit) => {
+- hash.write_u8(3);
+- lit.hash(hash);
+- }
+- Lit::Int(lit) => {
+- hash.write_u8(4);
+- lit.hash(hash);
+- }
+- Lit::Float(lit) => {
+- hash.write_u8(5);
+- lit.hash(hash);
+- }
+- Lit::Bool(lit) => {
+- hash.write_u8(6);
+- lit.hash(hash);
+- }
+- Lit::Verbatim(lit) => {
+- hash.write_u8(7);
+- lit.to_string().hash(hash);
+- }
+- }
+- }
+-}
+-
+ impl LitStr {
+ pub fn new(value: &str, span: Span) -> Self {
+- let mut lit = Literal::string(value);
+- lit.set_span(span);
++ let mut token = Literal::string(value);
++ token.set_span(span);
+ LitStr {
+- repr: Box::new(LitStrRepr {
+- token: lit,
++ repr: Box::new(LitRepr {
++ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> String {
+- let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_str(&repr);
+ String::from(value)
+ }
+
+@@ -311,7 +222,7 @@ impl LitStr {
+ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+- let stream = respan_token_stream(g.stream().clone(), span);
++ let stream = respan_token_stream(g.stream(), span);
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+@@ -345,19 +256,30 @@ impl LitByteStr {
+ pub fn new(value: &[u8], span: Span) -> Self {
+ let mut token = Literal::byte_string(value);
+ token.set_span(span);
+- LitByteStr { token }
++ LitByteStr {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> Vec<u8> {
+- value::parse_lit_byte_str(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte_str(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -365,19 +287,30 @@ impl LitByte {
+ pub fn new(value: u8, span: Span) -> Self {
+ let mut token = Literal::u8_suffixed(value);
+ token.set_span(span);
+- LitByte { token }
++ LitByte {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> u8 {
+- value::parse_lit_byte(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -385,36 +318,52 @@ impl LitChar {
+ pub fn new(value: char, span: Span) -> Self {
+ let mut token = Literal::character(value);
+ token.set_span(span);
+- LitChar { token }
++ LitChar {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> char {
+- value::parse_lit_char(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_char(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitInt {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_int(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitInt {
+- repr: Box::new(LitIntRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not an integer literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_int(repr) {
++ Some(parse) => parse,
++ None => panic!("Not an integer literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported integer literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -492,18 +441,23 @@ impl Display for LitInt {
+
+ impl LitFloat {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_float(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitFloat {
+- repr: Box::new(LitFloatRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not a float literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_float(repr) {
++ Some(parse) => parse,
++ None => panic!("Not a float literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported float literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -575,7 +529,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByteStr")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -584,7 +538,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByte")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -593,7 +547,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitChar")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -626,15 +580,53 @@ mod debug_impls {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl Clone for LitRepr {
++ fn clone(&self) -> Self {
++ LitRepr {
++ token: self.token.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitIntRepr {
++ fn clone(&self) -> Self {
++ LitIntRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitFloatRepr {
++ fn clone(&self) -> Self {
++ LitFloatRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
+ macro_rules! lit_extra_traits {
+- ($ty:ident, $($field:ident).+) => {
+- #[cfg(feature = "extra-traits")]
+- impl Eq for $ty {}
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $ty {
++ fn clone(&self) -> Self {
++ $ty {
++ repr: self.repr.clone(),
++ }
++ }
++ }
+
+ #[cfg(feature = "extra-traits")]
+ impl PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+- self.$($field).+.to_string() == other.$($field).+.to_string()
++ self.repr.token.to_string() == other.repr.token.to_string()
+ }
+ }
+
+@@ -644,7 +636,7 @@ macro_rules! lit_extra_traits {
+ where
+ H: Hasher,
+ {
+- self.$($field).+.to_string().hash(state);
++ self.repr.token.to_string().hash(state);
+ }
+ }
+
+@@ -657,20 +649,23 @@ macro_rules! lit_extra_traits {
+ };
+ }
+
+-lit_extra_traits!(LitStr, repr.token);
+-lit_extra_traits!(LitByteStr, token);
+-lit_extra_traits!(LitByte, token);
+-lit_extra_traits!(LitChar, token);
+-lit_extra_traits!(LitInt, repr.token);
+-lit_extra_traits!(LitFloat, repr.token);
+-lit_extra_traits!(LitBool, value);
++lit_extra_traits!(LitStr);
++lit_extra_traits!(LitByteStr);
++lit_extra_traits!(LitByte);
++lit_extra_traits!(LitChar);
++lit_extra_traits!(LitInt);
++lit_extra_traits!(LitFloat);
++
++#[cfg(feature = "parsing")]
++#[doc(hidden)]
++#[allow(non_snake_case)]
++pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
++ match marker {}
++}
+
+ ast_enum! {
+ /// The style of a string literal, either plain quoted or a raw string like
+ /// `r##"data"##`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+ pub enum StrStyle #no_visit {
+ /// An ordinary string like `"data"`.
+ Cooked,
+@@ -691,7 +686,9 @@ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
++ use crate::buffer::Cursor;
+ use crate::parse::{Parse, ParseStream, Result};
++ use proc_macro2::Punct;
+
+ impl Parse for Lit {
+ fn parse(input: ParseStream) -> Result<Self> {
+@@ -699,25 +696,73 @@ pub mod parsing {
+ if let Some((lit, rest)) = cursor.literal() {
+ return Ok((Lit::new(lit), rest));
+ }
+- while let Some((ident, rest)) = cursor.ident() {
+- let value = if ident == "true" {
+- true
+- } else if ident == "false" {
+- false
+- } else {
+- break;
+- };
+- let lit_bool = LitBool {
+- value,
+- span: ident.span(),
+- };
+- return Ok((Lit::Bool(lit_bool), rest));
++
++ if let Some((ident, rest)) = cursor.ident() {
++ let value = ident == "true";
++ if value || ident == "false" {
++ let lit_bool = LitBool {
++ value,
++ span: ident.span(),
++ };
++ return Ok((Lit::Bool(lit_bool), rest));
++ }
+ }
++
++ if let Some((punct, rest)) = cursor.punct() {
++ if punct.as_char() == '-' {
++ if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
++ return Ok((lit, rest));
++ }
++ }
++ }
++
+ Err(cursor.error("expected literal"))
+ })
+ }
+ }
+
++ fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
++ let (lit, rest) = cursor.literal()?;
++
++ let mut span = neg.span();
++ span = span.join(lit.span()).unwrap_or(span);
++
++ let mut repr = lit.to_string();
++ repr.insert(0, '-');
++
++ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
++ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
++ if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
++ token.set_span(span);
++ return Some((
++ Lit::Int(LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ));
++ }
++ }
++ }
++
++ let (digits, suffix) = value::parse_lit_float(&repr)?;
++ let mut token = value::to_literal(&repr, &digits, &suffix)?;
++ token.set_span(span);
++ Some((
++ Lit::Float(LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ))
++ }
++
+ impl Parse for LitStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+@@ -803,19 +848,19 @@ mod printing {
+
+ impl ToTokens for LitByteStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitByte {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitChar {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+@@ -855,20 +900,29 @@ mod value {
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_str(&repr);
+ return Lit::Str(LitStr {
+- repr: Box::new(LitStrRepr { token, suffix }),
++ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'b' => match byte(&repr, 1) {
+ b'"' | b'r' => {
+- return Lit::ByteStr(LitByteStr { token });
++ let (_, suffix) = parse_lit_byte_str(&repr);
++ return Lit::ByteStr(LitByteStr {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'\'' => {
+- return Lit::Byte(LitByte { token });
++ let (_, suffix) = parse_lit_byte(&repr);
++ return Lit::Byte(LitByte {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ _ => {}
+ },
+ b'\'' => {
+- return Lit::Char(LitChar { token });
++ let (_, suffix) = parse_lit_char(&repr);
++ return Lit::Char(LitChar {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'0'..=b'9' | b'-' => {
+ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
+@@ -905,6 +959,44 @@ mod value {
+
+ panic!("Unrecognized literal: `{}`", repr);
+ }
++
++ pub fn suffix(&self) -> &str {
++ match self {
++ Lit::Str(lit) => lit.suffix(),
++ Lit::ByteStr(lit) => lit.suffix(),
++ Lit::Byte(lit) => lit.suffix(),
++ Lit::Char(lit) => lit.suffix(),
++ Lit::Int(lit) => lit.suffix(),
++ Lit::Float(lit) => lit.suffix(),
++ Lit::Bool(_) | Lit::Verbatim(_) => "",
++ }
++ }
++
++ pub fn span(&self) -> Span {
++ match self {
++ Lit::Str(lit) => lit.span(),
++ Lit::ByteStr(lit) => lit.span(),
++ Lit::Byte(lit) => lit.span(),
++ Lit::Char(lit) => lit.span(),
++ Lit::Int(lit) => lit.span(),
++ Lit::Float(lit) => lit.span(),
++ Lit::Bool(lit) => lit.span,
++ Lit::Verbatim(lit) => lit.span(),
++ }
++ }
++
++ pub fn set_span(&mut self, span: Span) {
++ match self {
++ Lit::Str(lit) => lit.set_span(span),
++ Lit::ByteStr(lit) => lit.set_span(span),
++ Lit::Byte(lit) => lit.set_span(span),
++ Lit::Char(lit) => lit.set_span(span),
++ Lit::Int(lit) => lit.set_span(span),
++ Lit::Float(lit) => lit.set_span(span),
++ Lit::Bool(lit) => lit.span = span,
++ Lit::Verbatim(lit) => lit.set_span(span),
++ }
++ }
+ }
+
+ /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
+@@ -1004,19 +1096,18 @@ mod value {
+ pounds += 1;
+ }
+ assert_eq!(byte(s, pounds), b'"');
+- assert_eq!(byte(s, s.len() - pounds - 1), b'"');
+- for end in s[s.len() - pounds..].bytes() {
++ let close = s.rfind('"').unwrap();
++ for end in s[close + 1..close + 1 + pounds].bytes() {
+ assert_eq!(end, b'#');
+ }
+
+- let content = s[pounds + 1..s.len() - pounds - 1]
+- .to_owned()
+- .into_boxed_str();
+- let suffix = Box::<str>::default(); // todo
++ let content = s[pounds + 1..close].to_owned().into_boxed_str();
++ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+- pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
++ // Returns (content, suffix).
++ pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ match byte(s, 1) {
+ b'"' => parse_lit_byte_str_cooked(s),
+@@ -1028,25 +1119,25 @@ mod value {
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+- fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s.as_bytes();
++ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+- let byte = match byte(s, 0) {
++ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1057,10 +1148,10 @@ mod value {
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+- let byte = byte(s, 0);
++ let byte = byte(v, 0);
+ let ch = char::from_u32(u32::from(byte)).unwrap();
+ if ch.is_whitespace() {
+- s = &s[1..];
++ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+@@ -1069,42 +1160,45 @@ mod value {
+ }
+ }
+ b'\r' => {
+- assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
+- s = &s[2..];
++ assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
++ v = &v[2..];
+ b'\n'
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+- assert_eq!(s, b"\"");
+- out
++ assert_eq!(byte(v, 0), b'"');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (out, suffix)
+ }
+
+- fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+- String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
++ let (value, suffix) = parse_lit_str_raw(&s[1..]);
++ (String::from(value).into_bytes(), suffix)
+ }
+
+- pub fn parse_lit_byte(s: &str) -> u8 {
++ // Returns (value, suffix).
++ pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'\'');
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s[2..].as_bytes();
++ let mut v = s[2..].as_bytes();
+
+- let b = match byte(s, 0) {
++ let b = match byte(v, 0) {
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1118,16 +1212,18 @@ mod value {
+ }
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+
+- assert_eq!(byte(s, 0), b'\'');
+- b
++ assert_eq!(byte(v, 0), b'\'');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (b, suffix)
+ }
+
+- pub fn parse_lit_char(mut s: &str) -> char {
++ // Returns (value, suffix).
++ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
+ assert_eq!(byte(s, 0), b'\'');
+ s = &s[1..];
+
+@@ -1163,8 +1259,9 @@ mod value {
+ ch
+ }
+ };
+- assert_eq!(s, "\'", "Expected end of char literal");
+- ch
++ assert_eq!(byte(s, 0), b'\'');
++ let suffix = s[1..].to_owned().into_boxed_str();
++ (ch, suffix)
+ }
+
+ fn backslash_x<S>(s: &S) -> (u8, &S)
+@@ -1334,7 +1431,11 @@ mod value {
+ }
+ b'e' | b'E' => {
+ if has_e {
+- return None;
++ if has_exponent {
++ break;
++ } else {
++ return None;
++ }
+ }
+ has_e = true;
+ bytes[write] = b'e';
+@@ -1372,11 +1473,33 @@ mod value {
+ }
+ }
+
+- pub fn to_literal(s: &str) -> Literal {
+- let stream = s.parse::<TokenStream>().unwrap();
+- match stream.into_iter().next().unwrap() {
+- TokenTree::Literal(l) => l,
+- _ => unreachable!(),
++ pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
++ if repr.starts_with('-') {
++ if suffix == "f64" {
++ digits.parse().ok().map(Literal::f64_suffixed)
++ } else if suffix == "f32" {
++ digits.parse().ok().map(Literal::f32_suffixed)
++ } else if suffix == "i64" {
++ digits.parse().ok().map(Literal::i64_suffixed)
++ } else if suffix == "i32" {
++ digits.parse().ok().map(Literal::i32_suffixed)
++ } else if suffix == "i16" {
++ digits.parse().ok().map(Literal::i16_suffixed)
++ } else if suffix == "i8" {
++ digits.parse().ok().map(Literal::i8_suffixed)
++ } else if !suffix.is_empty() {
++ None
++ } else if digits.contains('.') {
++ digits.parse().ok().map(Literal::f64_unsuffixed)
++ } else {
++ digits.parse().ok().map(Literal::i64_unsuffixed)
++ }
++ } else {
++ let stream = repr.parse::<TokenStream>().unwrap();
++ match stream.into_iter().next().unwrap() {
++ TokenTree::Literal(l) => Some(l),
++ _ => unreachable!(),
++ }
+ }
+ }
+ }
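// A minimal standalone sketch of the lit.rs hunks above: a leading `-` is now folded into a
// single negative integer/float literal, and suffixes are exposed on every literal kind via
// Lit::suffix(). Illustrative only; assumes the patched syn with default features.
use syn::Lit;

fn main() {
    let neg: Lit = syn::parse_str("-1.5e3f32").unwrap();
    assert_eq!(neg.suffix(), "f32");
    if let Lit::Float(float) = neg {
        // base10_parse() works on the stored digits, sign included.
        let value: f32 = float.base10_parse().unwrap();
        assert_eq!(value, -1500.0);
    }
}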
+diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs
+index 6c3dcae92a..de288a34e1 100644
+--- a/third_party/rust/syn/src/mac.rs
++++ b/third_party/rust/syn/src/mac.rs
+@@ -2,21 +2,17 @@ use super::*;
+ use crate::token::{Brace, Bracket, Paren};
+ use proc_macro2::TokenStream;
+ #[cfg(feature = "parsing")]
+-use proc_macro2::{Delimiter, Span, TokenTree};
++use proc_macro2::{Delimiter, Group, Span, TokenTree};
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseStream, Parser, Result};
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// A macro invocation: `println!("{}", mac)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Macro #manual_extra_traits {
++ pub struct Macro {
+ pub path: Path,
+ pub bang_token: Token![!],
+ pub delimiter: MacroDelimiter,
+@@ -27,7 +23,7 @@ ast_struct! {
+ ast_enum! {
+ /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum MacroDelimiter {
+ Paren(Paren),
+@@ -36,39 +32,20 @@ ast_enum! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Macro {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Macro {
+- fn eq(&self, other: &Self) -> bool {
+- self.path == other.path
+- && self.bang_token == other.bang_token
+- && self.delimiter == other.delimiter
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Macro {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.path.hash(state);
+- self.bang_token.hash(state);
+- self.delimiter.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
+- match delimiter {
++fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
++ let delimiter = match macro_delimiter {
++ MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
++ MacroDelimiter::Brace(_) => Delimiter::Brace,
++ MacroDelimiter::Bracket(_) => Delimiter::Bracket,
++ };
++ let mut group = Group::new(delimiter, TokenStream::new());
++ group.set_span(match macro_delimiter {
+ MacroDelimiter::Paren(token) => token.span,
+ MacroDelimiter::Brace(token) => token.span,
+ MacroDelimiter::Bracket(token) => token.span,
+- }
++ });
++ group.span_close()
+ }
+
+ impl Macro {
+@@ -163,9 +140,7 @@ impl Macro {
+ /// given parser.
+ #[cfg(feature = "parsing")]
+ pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+- // TODO: see if we can get a group.span_close() span in here as the
+- // scope, rather than the span of the whole group.
+- let scope = delimiter_span(&self.delimiter);
++ let scope = delimiter_span_close(&self.delimiter);
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+ }
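// A minimal standalone sketch of Macro::parse_body_with after the mac.rs hunk above, which
// now scopes end-of-body errors to the closing delimiter instead of the whole group.
// Illustrative only; assumes the patched syn with the "full" and "parsing" features.
use syn::punctuated::Punctuated;
use syn::{Expr, Macro, Token};

fn main() {
    let mac: Macro = syn::parse_str("assert_eq!(left, right)").unwrap();
    // Re-parse the macro body as a comma-separated list of expressions.
    let args = mac
        .parse_body_with(Punctuated::<Expr, Token![,]>::parse_terminated)
        .unwrap();
    assert_eq!(args.len(), 2);
}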
+diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs
+index 9cac5c15df..8060224381 100644
+--- a/third_party/rust/syn/src/macros.rs
++++ b/third_party/rust/syn/src/macros.rs
+@@ -4,15 +4,11 @@ macro_rules! ast_struct {
+ struct $name:ident #full $($rest:tt)*
+ ) => {
+ #[cfg(feature = "full")]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+
+ #[cfg(not(feature = "full"))]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name {
+- _noconstruct: (),
++ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
+ }
+
+ #[cfg(all(not(feature = "full"), feature = "printing"))]
+@@ -23,29 +19,10 @@ macro_rules! ast_struct {
+ }
+ };
+
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits_debug $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+ (
+ [$($attrs_pub:tt)*]
+ struct $name:ident $($rest:tt)*
+ ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+ };
+
+@@ -63,21 +40,10 @@ macro_rules! ast_enum {
+ ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
+ );
+
+- (
+- [$($attrs_pub:tt)*]
+- enum $name:ident #manual_extra_traits $($rest:tt)*
+- ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* enum $name $($rest)*
+- );
+-
+ (
+ [$($attrs_pub:tt)*]
+ enum $name:ident $($rest:tt)*
+ ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* enum $name $($rest)*
+ );
+
+@@ -120,15 +86,9 @@ macro_rules! ast_enum_of_structs_impl {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+- $(
+- $(
+- impl From<$member> for $name {
+- fn from(e: $member) -> $name {
+- $name::$variant(e)
+- }
+- }
+- )*
+- )*
++ $($(
++ ast_enum_from_struct!($name::$variant, $member);
++ )*)*
+
+ #[cfg(feature = "printing")]
+ generate_to_tokens! {
+@@ -140,6 +100,19 @@ macro_rules! ast_enum_of_structs_impl {
+ };
+ }
+
++macro_rules! ast_enum_from_struct {
++ // No From<TokenStream> for verbatim variants.
++ ($name:ident::Verbatim, $member:ident) => {};
++
++ ($name:ident::$variant:ident, $member:ident) => {
++ impl From<$member> for $name {
++ fn from(e: $member) -> $name {
++ $name::$variant(e)
++ }
++ }
++ };
++}
++
+ #[cfg(feature = "printing")]
+ macro_rules! generate_to_tokens {
+ (do_not_generate_to_tokens $($foo:tt)*) => ();
+diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs
+index 49fb853c79..d254673b40 100644
+--- a/third_party/rust/syn/src/op.rs
++++ b/third_party/rust/syn/src/op.rs
+@@ -1,9 +1,8 @@
+ ast_enum! {
+ /// A binary operator: `+`, `+=`, `&`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum BinOp {
+ /// The `+` operator (addition)
+ Add(Token![+]),
+@@ -67,9 +66,8 @@ ast_enum! {
+ ast_enum! {
+ /// A unary operator: `*`, `!`, `-`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum UnOp {
+ /// The `*` operator for dereferencing
+ Deref(Token![*]),
+diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs
+index 7c7b194308..abb4c4c14f 100644
+--- a/third_party/rust/syn/src/parse.rs
++++ b/third_party/rust/syn/src/parse.rs
+@@ -26,8 +26,8 @@
+ //! [`parse_macro_input!`]: ../macro.parse_macro_input.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+ //! use syn::parse::{Parse, ParseStream};
+@@ -109,9 +109,7 @@
+ //! # Ok(())
+ //! # }
+ //! #
+-//! # fn main() {
+-//! # run_parser().unwrap();
+-//! # }
++//! # run_parser().unwrap();
+ //! ```
+ //!
+ //! The [`parse_quote!`] macro also uses this approach.
+@@ -155,8 +153,8 @@
+ //! [`Parser`]: trait.Parser.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::parse::Parser;
+ //! use syn::punctuated::Punctuated;
+@@ -186,7 +184,7 @@
+ //!
+ //! ---
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ #[path = "discouraged.rs"]
+ pub mod discouraged;
+@@ -217,6 +215,11 @@ pub use crate::lookahead::{Lookahead1, Peek};
+
+ /// Parsing interface implemented by all types that can be parsed in a default
+ /// way from a token stream.
++///
++/// Refer to the [module documentation] for details about implementing and using
++/// the `Parse` trait.
++///
++/// [module documentation]: self
+ pub trait Parse: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+ }
+@@ -263,13 +266,16 @@ pub struct ParseBuffer<'a> {
+ // the cell.
+ cell: Cell<Cursor<'static>>,
+ marker: PhantomData<Cursor<'a>>,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
+ }
+
+ impl<'a> Drop for ParseBuffer<'a> {
+ fn drop(&mut self) {
+- if !self.is_empty() && self.unexpected.get().is_none() {
+- self.unexpected.set(Some(self.cursor().span()));
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
++ let (inner, old_span) = inner_unexpected(self);
++ if old_span.is_none() {
++ inner.set(Unexpected::Some(unexpected_span));
++ }
+ }
+ }
+ }
+@@ -324,15 +330,12 @@ impl<'a> Debug for ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+-/// # fn main() {
+-/// # use syn::parse::Parser;
+-/// # let remainder = remainder_after_skipping_past_next_at
+-/// # .parse_str("a @ b c")
+-/// # .unwrap();
+-/// # assert_eq!(remainder.to_string(), "b c");
+-/// # }
++/// # use syn::parse::Parser;
++/// # let remainder = remainder_after_skipping_past_next_at
++/// # .parse_str("a @ b c")
++/// # .unwrap();
++/// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+-#[derive(Copy, Clone)]
+ pub struct StepCursor<'c, 'a> {
+ scope: Span,
+ // This field is covariant in 'c.
+@@ -356,6 +359,14 @@ impl<'c, 'a> Deref for StepCursor<'c, 'a> {
+ }
+ }
+
++impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
++
++impl<'c, 'a> Clone for StepCursor<'c, 'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
+ impl<'c, 'a> StepCursor<'c, 'a> {
+ /// Triggers an error at the current position of the parse stream.
+ ///
+@@ -375,36 +386,81 @@ pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<
+ unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
+ }
+
+-fn skip(input: ParseStream) -> bool {
+- input
+- .step(|cursor| {
+- if let Some((_lifetime, rest)) = cursor.lifetime() {
+- Ok((true, rest))
+- } else if let Some((_token, rest)) = cursor.token_tree() {
+- Ok((true, rest))
+- } else {
+- Ok((false, *cursor))
+- }
+- })
+- .unwrap()
+-}
+-
+ pub(crate) fn new_parse_buffer(
+ scope: Span,
+ cursor: Cursor,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Rc<Cell<Unexpected>>,
+ ) -> ParseBuffer {
+ ParseBuffer {
+ scope,
+ // See comment on `cell` in the struct definition.
+ cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
+ marker: PhantomData,
+- unexpected,
++ unexpected: Cell::new(Some(unexpected)),
++ }
++}
++
++pub(crate) enum Unexpected {
++ None,
++ Some(Span),
++ Chain(Rc<Cell<Unexpected>>),
++}
++
++impl Default for Unexpected {
++ fn default() -> Self {
++ Unexpected::None
+ }
+ }
+
+-pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
+- buffer.unexpected.clone()
++impl Clone for Unexpected {
++ fn clone(&self) -> Self {
++ match self {
++ Unexpected::None => Unexpected::None,
++ Unexpected::Some(span) => Unexpected::Some(*span),
++ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
++ }
++ }
++}
++
++// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
++// swapping in a None is cheap.
++fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
++ let prev = cell.take();
++ let ret = prev.clone();
++ cell.set(prev);
++ ret
++}
++
++fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
++ let mut unexpected = get_unexpected(buffer);
++ loop {
++ match cell_clone(&unexpected) {
++ Unexpected::None => return (unexpected, None),
++ Unexpected::Some(span) => return (unexpected, Some(span)),
++ Unexpected::Chain(next) => unexpected = next,
++ }
++ }
++}
++
++pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
++ cell_clone(&buffer.unexpected).unwrap()
++}
++
++fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
++ if cursor.eof() {
++ return None;
++ }
++ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
++ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
++ return Some(unexpected);
++ }
++ cursor = rest;
++ }
++ if cursor.eof() {
++ None
++ } else {
++ Some(cursor.span())
++ }
+ }
+
+ impl<'a> ParseBuffer<'a> {
+@@ -566,14 +622,17 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// ```
+ pub fn peek2<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor().skip().map_or(false, T::Token::peek)
+ }
+
+ /// Looks at the third-next token in the parse stream.
+ pub fn peek3<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor()
++ .skip()
++ .and_then(Cursor::skip)
++ .map_or(false, T::Token::peek)
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+@@ -615,12 +674,10 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// }
+ /// #
+- /// # fn main() {
+- /// # let input = quote! {
+- /// # struct S(A, B);
+- /// # };
+- /// # syn::parse2::<TupleStruct>(input).unwrap();
+- /// # }
++ /// # let input = quote! {
++ /// # struct S(A, B);
++ /// # };
++ /// # syn::parse2::<TupleStruct>(input).unwrap();
+ /// ```
+ pub fn parse_terminated<T, P: Parse>(
+ &self,
+@@ -847,8 +904,8 @@ impl<'a> ParseBuffer<'a> {
+ cell: self.cell.clone(),
+ marker: PhantomData,
+ // Not the parent's unexpected. Nothing cares whether the clone
+- // parses all the way.
+- unexpected: Rc::new(Cell::new(None)),
++ // parses all the way unless we `advance_to`.
++ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
+ }
+ }
+
+@@ -923,13 +980,11 @@ impl<'a> ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+- /// # fn main() {
+- /// # use syn::parse::Parser;
+- /// # let remainder = remainder_after_skipping_past_next_at
+- /// # .parse_str("a @ b c")
+- /// # .unwrap();
+- /// # assert_eq!(remainder.to_string(), "b c");
+- /// # }
++ /// # use syn::parse::Parser;
++ /// # let remainder = remainder_after_skipping_past_next_at
++ /// # .parse_str("a @ b c")
++ /// # .unwrap();
++ /// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+ pub fn step<F, R>(&self, function: F) -> Result<R>
+ where
+@@ -961,6 +1016,18 @@ impl<'a> ParseBuffer<'a> {
+ Ok(node)
+ }
+
++ /// Returns the `Span` of the next token in the parse stream, or
++ /// `Span::call_site()` if this parse stream has completely exhausted its
++ /// input `TokenStream`.
++ pub fn span(&self) -> Span {
++ let cursor = self.cursor();
++ if cursor.eof() {
++ self.scope
++ } else {
++ crate::buffer::open_span_of_group(cursor)
++ }
++ }
++
+ /// Provides low-level access to the token representation underlying this
+ /// parse stream.
+ ///
+@@ -971,7 +1038,7 @@ impl<'a> ParseBuffer<'a> {
+ }
+
+ fn check_unexpected(&self) -> Result<()> {
+- match self.unexpected.get() {
++ match inner_unexpected(self).1 {
+ Some(span) => Err(Error::new(span, "unexpected token")),
+ None => Ok(()),
+ }
+@@ -1048,7 +1115,7 @@ impl Parse for Literal {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait Parser: Sized {
+ type Output;
+
+@@ -1063,7 +1130,7 @@ pub trait Parser: Sized {
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -1088,6 +1155,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let _ = scope;
+ self.parse2(tokens)
+@@ -1095,6 +1163,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ input.parse().and_then(|tokens| self.parse2(tokens))
+ }
+@@ -1103,7 +1172,7 @@ pub trait Parser: Sized {
+ fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
+ let scope = Span::call_site();
+ let cursor = tokens.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ new_parse_buffer(scope, cursor, unexpected)
+ }
+
+@@ -1118,38 +1187,42 @@ where
+ let state = tokens_to_parse_buffer(&buf);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let buf = TokenBuffer::new2(tokens);
+ let cursor = buf.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let state = new_parse_buffer(scope, cursor, unexpected);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ self(input)
+ }
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
+ f.__parse_scoped(scope, tokens)
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
+ f.__parse_stream(input)
+ }
+@@ -1160,8 +1233,8 @@ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Out
+ /// provided any attribute args.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::parse_macro_input;
+ /// use syn::parse::Nothing;
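// A minimal standalone sketch of the peek2/peek3 change in the parse.rs hunks above: the
// lookahead now walks the token cursor directly instead of forking the whole stream, but the
// caller-visible pattern is unchanged. Illustrative only; assumes the patched syn ("parsing").
use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, Token};

struct KeyValue {
    key: Ident,
    value: Ident,
}

impl Parse for KeyValue {
    fn parse(input: ParseStream) -> Result<Self> {
        // peek2 looks one token past the key without consuming anything.
        if input.peek(Ident) && input.peek2(Token![=]) {
            let key: Ident = input.parse()?;
            let _eq: Token![=] = input.parse()?;
            let value: Ident = input.parse()?;
            Ok(KeyValue { key, value })
        } else {
            Err(input.error("expected `key = value`"))
        }
    }
}

fn main() {
    let kv: KeyValue = syn::parse_str("name = value").unwrap();
    assert_eq!(kv.key, "name");
    assert_eq!(kv.value, "value");
}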
+diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs
+index d6e0725c17..c8fc1cea37 100644
+--- a/third_party/rust/syn/src/parse_macro_input.rs
++++ b/third_party/rust/syn/src/parse_macro_input.rs
+@@ -16,8 +16,8 @@
+ /// #\[proc_macro_attribute\] attribute.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, Result};
+ /// use syn::parse::{Parse, ParseStream};
+@@ -43,7 +43,31 @@
+ /// # "".parse().unwrap()
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++///
++/// <br>
++///
++/// # Expansion
++///
++/// `parse_macro_input!($variable as $Type)` expands to something like:
++///
++/// ```no_run
++/// # extern crate proc_macro;
++/// #
++/// # macro_rules! doc_test {
++/// # ($variable:ident as $Type:ty) => {
++/// match syn::parse::<$Type>($variable) {
++/// Ok(syntax_tree) => syntax_tree,
++/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
++/// }
++/// # };
++/// # }
++/// #
++/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
++/// # let _ = doc_test!(input as syn::Ident);
++/// # proc_macro::TokenStream::new()
++/// # }
++/// ```
++#[macro_export]
+ macro_rules! parse_macro_input {
+ ($tokenstream:ident as $ty:ty) => {
+ match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
+@@ -54,7 +78,7 @@ macro_rules! parse_macro_input {
+ }
+ };
+ ($tokenstream:ident) => {
+- parse_macro_input!($tokenstream as _)
++ $crate::parse_macro_input!($tokenstream as _)
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs
+index 18a47b95c7..66aa818cd0 100644
+--- a/third_party/rust/syn/src/parse_quote.rs
++++ b/third_party/rust/syn/src/parse_quote.rs
+@@ -24,7 +24,7 @@
+ /// }
+ /// ```
+ ///
+-/// *This macro is available if Syn is built with the `"parsing"` feature,
++/// *This macro is available only if Syn is built with the `"parsing"` feature,
+ /// although interpolation of syntax tree nodes into the quoted tokens is only
+ /// supported if Syn is built with the `"printing"` feature as well.*
+ ///
+@@ -56,8 +56,10 @@
+ /// or inner like `#![...]`
+ /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
+ /// `P` with optional trailing punctuation
++/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
+ ///
+ /// [`Punctuated<T, P>`]: punctuated::Punctuated
++/// [`Vec<Stmt>`]: Block::parse_within
+ ///
+ /// # Panics
+ ///
+@@ -67,7 +69,7 @@
+ //
+ // TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
+ // https://github.com/rust-lang/rust/issues/62834
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! parse_quote {
+ ($($tt:tt)*) => {
+ $crate::parse_quote::parse(
+@@ -112,6 +114,8 @@ impl<T: Parse> ParseQuote for T {
+ use crate::punctuated::Punctuated;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::{attr, Attribute};
++#[cfg(feature = "full")]
++use crate::{Block, Stmt};
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl ParseQuote for Attribute {
+@@ -129,3 +133,10 @@ impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
+ Self::parse_terminated(input)
+ }
+ }
++
++#[cfg(feature = "full")]
++impl ParseQuote for Vec<Stmt> {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Block::parse_within(input)
++ }
++}
+diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs
+index 9371e05493..e9576a2361 100644
+--- a/third_party/rust/syn/src/pat.rs
++++ b/third_party/rust/syn/src/pat.rs
+@@ -1,16 +1,12 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// A pattern in a local binding, function signature, match expression, or
+ /// various other places.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Pat #manual_extra_traits {
++ pub enum Pat {
+ /// A box pattern: `box v`.
+ Box(PatBox),
+
+@@ -86,7 +82,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A box pattern: `box v`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatBox {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -97,7 +93,10 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// It may also be a unit struct or struct variant (e.g. `None`), or a
++ /// constant; these cannot be distinguished syntactically.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatIdent {
+ pub attrs: Vec<Attribute>,
+ pub by_ref: Option<Token![ref]>,
+@@ -113,7 +112,7 @@ ast_struct! {
+ /// This holds an `Expr` rather than a `Lit` because negative numbers
+ /// are represented as an `Expr::Unary`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatLit {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -123,7 +122,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in pattern position.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -133,7 +132,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any one of a set of cases.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatOr {
+ pub attrs: Vec<Attribute>,
+ pub leading_vert: Option<Token![|]>,
+@@ -150,7 +149,7 @@ ast_struct! {
+ /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
+ /// associated constants.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+@@ -161,7 +160,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range pattern: `1..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRange {
+ pub attrs: Vec<Attribute>,
+ pub lo: Box<Expr>,
+@@ -173,7 +172,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference pattern: `&mut var`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -185,7 +184,7 @@ ast_struct! {
+ ast_struct! {
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRest {
+ pub attrs: Vec<Attribute>,
+ pub dot2_token: Token![..],
+@@ -195,7 +194,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatSlice {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -206,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -219,7 +218,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple pattern: `(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTuple {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -230,7 +229,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTupleStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -241,7 +240,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription pattern: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatType {
+ pub attrs: Vec<Attribute>,
+ pub pat: Box<Pat>,
+@@ -253,7 +252,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any value: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatWild {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+@@ -266,7 +265,7 @@ ast_struct! {
+ /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
+ /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldPat {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+@@ -275,122 +274,17 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Pat {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Pat {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Pat::Box(this), Pat::Box(other)) => this == other,
+- (Pat::Ident(this), Pat::Ident(other)) => this == other,
+- (Pat::Lit(this), Pat::Lit(other)) => this == other,
+- (Pat::Macro(this), Pat::Macro(other)) => this == other,
+- (Pat::Or(this), Pat::Or(other)) => this == other,
+- (Pat::Path(this), Pat::Path(other)) => this == other,
+- (Pat::Range(this), Pat::Range(other)) => this == other,
+- (Pat::Reference(this), Pat::Reference(other)) => this == other,
+- (Pat::Rest(this), Pat::Rest(other)) => this == other,
+- (Pat::Slice(this), Pat::Slice(other)) => this == other,
+- (Pat::Struct(this), Pat::Struct(other)) => this == other,
+- (Pat::Tuple(this), Pat::Tuple(other)) => this == other,
+- (Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
+- (Pat::Type(this), Pat::Type(other)) => this == other,
+- (Pat::Verbatim(this), Pat::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Pat::Wild(this), Pat::Wild(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Pat {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Pat::Box(pat) => {
+- hash.write_u8(0);
+- pat.hash(hash);
+- }
+- Pat::Ident(pat) => {
+- hash.write_u8(1);
+- pat.hash(hash);
+- }
+- Pat::Lit(pat) => {
+- hash.write_u8(2);
+- pat.hash(hash);
+- }
+- Pat::Macro(pat) => {
+- hash.write_u8(3);
+- pat.hash(hash);
+- }
+- Pat::Or(pat) => {
+- hash.write_u8(4);
+- pat.hash(hash);
+- }
+- Pat::Path(pat) => {
+- hash.write_u8(5);
+- pat.hash(hash);
+- }
+- Pat::Range(pat) => {
+- hash.write_u8(6);
+- pat.hash(hash);
+- }
+- Pat::Reference(pat) => {
+- hash.write_u8(7);
+- pat.hash(hash);
+- }
+- Pat::Rest(pat) => {
+- hash.write_u8(8);
+- pat.hash(hash);
+- }
+- Pat::Slice(pat) => {
+- hash.write_u8(9);
+- pat.hash(hash);
+- }
+- Pat::Struct(pat) => {
+- hash.write_u8(10);
+- pat.hash(hash);
+- }
+- Pat::Tuple(pat) => {
+- hash.write_u8(11);
+- pat.hash(hash);
+- }
+- Pat::TupleStruct(pat) => {
+- hash.write_u8(12);
+- pat.hash(hash);
+- }
+- Pat::Type(pat) => {
+- hash.write_u8(13);
+- pat.hash(hash);
+- }
+- Pat::Verbatim(pat) => {
+- hash.write_u8(14);
+- TokenStreamHelper(pat).hash(hash);
+- }
+- Pat::Wild(pat) => {
+- hash.write_u8(15);
+- pat.hash(hash);
+- }
+- Pat::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-mod parsing {
++pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
+ use crate::path;
+
+ impl Parse for Pat {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ && ({
+@@ -411,7 +305,6 @@ mod parsing {
+ || lookahead.peek(Token![<])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ pat_path_or_macro_or_struct_or_range(input)
+@@ -434,7 +327,7 @@ mod parsing {
+ } else if lookahead.peek(token::Bracket) {
+ input.call(pat_slice).map(Pat::Slice)
+ } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
+- input.call(pat_rest).map(Pat::Rest)
++ pat_range_half_open(input, begin)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -442,10 +335,11 @@ mod parsing {
+ }
+
+ fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
++ let begin = input.fork();
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ if input.peek(Token![..]) {
+- return pat_range(input, qself, path).map(Pat::Range);
++ return pat_range(input, begin, qself, path);
+ }
+
+ if qself.is_some() {
+@@ -487,7 +381,7 @@ mod parsing {
+ } else if input.peek(token::Paren) {
+ pat_tuple_struct(input, path).map(Pat::TupleStruct)
+ } else if input.peek(Token![..]) {
+- pat_range(input, qself, path).map(Pat::Range)
++ pat_range(input, begin, qself, path)
+ } else {
+ Ok(Pat::Path(PatPath {
+ attrs: Vec::new(),
+@@ -546,7 +440,7 @@ mod parsing {
+ while !content.is_empty() && !content.peek(Token![..]) {
+ let value = content.call(field_pat)?;
+ fields.push_value(value);
+- if !content.peek(Token![,]) {
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+@@ -578,6 +472,7 @@ mod parsing {
+ }
+
+ fn field_pat(input: ParseStream) -> Result<FieldPat> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let boxed: Option<Token![box]> = input.parse()?;
+ let by_ref: Option<Token![ref]> = input.parse()?;
+ let mutability: Option<Token![mut]> = input.parse()?;
+@@ -587,10 +482,10 @@ mod parsing {
+ || member.is_unnamed()
+ {
+ return Ok(FieldPat {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token: input.parse()?,
+- pat: input.parse()?,
++ pat: Box::new(multi_pat(input)?),
+ });
+ }
+
+@@ -610,30 +505,57 @@ mod parsing {
+ if let Some(boxed) = boxed {
+ pat = Pat::Box(PatBox {
+ attrs: Vec::new(),
+- pat: Box::new(pat),
+ box_token: boxed,
++ pat: Box::new(pat),
+ });
+ }
+
+ Ok(FieldPat {
++ attrs,
+ member: Member::Named(ident),
+- pat: Box::new(pat),
+- attrs: Vec::new(),
+ colon_token: None,
++ pat: Box::new(pat),
+ })
+ }
+
+- fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
+- Ok(PatRange {
+- attrs: Vec::new(),
+- lo: Box::new(Expr::Path(ExprPath {
++ fn pat_range(
++ input: ParseStream,
++ begin: ParseBuffer,
++ qself: Option<QSelf>,
++ path: Path,
++ ) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
+ attrs: Vec::new(),
+- qself,
+- path,
+- })),
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- })
++ lo: Box::new(Expr::Path(ExprPath {
++ attrs: Vec::new(),
++ qself,
++ path,
++ })),
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
++ }
++
++ fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if hi.is_some() {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ } else {
++ match limits {
++ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
++ attrs: Vec::new(),
++ dot2_token,
++ })),
++ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
++ }
++ }
+ }
+
+ fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
+@@ -642,7 +564,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -668,14 +590,21 @@ mod parsing {
+ }
+
+ fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
+- let lo = input.call(pat_lit_expr)?;
++ let begin = input.fork();
++ let lo = input.call(pat_lit_expr)?.unwrap();
+ if input.peek(Token![..]) {
+- Ok(Pat::Range(PatRange {
+- attrs: Vec::new(),
+- lo,
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- }))
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
++ attrs: Vec::new(),
++ lo,
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Ok(Pat::Lit(PatLit {
+ attrs: Vec::new(),
+@@ -684,7 +613,17 @@ mod parsing {
+ }
+ }
+
+- fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
++ fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
++ if input.is_empty()
++ || input.peek(Token![|])
++ || input.peek(Token![=>])
++ || input.peek(Token![:]) && !input.peek(Token![::])
++ || input.peek(Token![,])
++ || input.peek(Token![;])
++ {
++ return Ok(None);
++ }
++
+ let neg: Option<Token![-]> = input.parse()?;
+
+ let lookahead = input.lookahead1();
+@@ -696,7 +635,6 @@ mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![Self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ {
+ Expr::Path(input.parse()?)
+@@ -704,7 +642,7 @@ mod parsing {
+ return Err(lookahead.error());
+ };
+
+- Ok(Box::new(if let Some(neg) = neg {
++ Ok(Some(Box::new(if let Some(neg) = neg {
+ Expr::Unary(ExprUnary {
+ attrs: Vec::new(),
+ op: UnOp::Neg(neg),
+@@ -712,7 +650,7 @@ mod parsing {
+ })
+ } else {
+ expr
+- }))
++ })))
+ }
+
+ fn pat_slice(input: ParseStream) -> Result<PatSlice> {
+@@ -721,7 +659,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -737,11 +675,35 @@ mod parsing {
+ })
+ }
+
+- fn pat_rest(input: ParseStream) -> Result<PatRest> {
+- Ok(PatRest {
+- attrs: Vec::new(),
+- dot2_token: input.parse()?,
+- })
++ pub fn multi_pat(input: ParseStream) -> Result<Pat> {
++ multi_pat_impl(input, None)
++ }
++
++ pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
++ let leading_vert: Option<Token![|]> = input.parse()?;
++ multi_pat_impl(input, leading_vert)
++ }
++
++ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
++ let mut pat: Pat = input.parse()?;
++ if leading_vert.is_some()
++ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
++ {
++ let mut cases = Punctuated::new();
++ cases.push_value(pat);
++ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
++ let punct = input.parse()?;
++ cases.push_punct(punct);
++ let pat: Pat = input.parse()?;
++ cases.push_value(pat);
++ }
++ pat = Pat::Or(PatOr {
++ attrs: Vec::new(),
++ leading_vert,
++ cases,
++ });
++ }
++ Ok(pat)
+ }
+ }
+
+@@ -756,12 +718,14 @@ mod printing {
+
+ impl ToTokens for PatWild {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatIdent {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.by_ref.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+@@ -774,6 +738,7 @@ mod printing {
+
+ impl ToTokens for PatStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+@@ -788,6 +753,7 @@ mod printing {
+
+ impl ToTokens for PatTupleStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -804,12 +770,14 @@ mod printing {
+
+ impl ToTokens for PatPath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ private::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ impl ToTokens for PatTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -818,6 +786,7 @@ mod printing {
+
+ impl ToTokens for PatBox {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.box_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -825,6 +794,7 @@ mod printing {
+
+ impl ToTokens for PatReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.and_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+@@ -833,18 +803,21 @@ mod printing {
+
+ impl ToTokens for PatRest {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.dot2_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatLit {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.expr.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatRange {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.lo.to_tokens(tokens);
+ match &self.limits {
+ RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+@@ -856,6 +829,7 @@ mod printing {
+
+ impl ToTokens for PatSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -864,12 +838,14 @@ mod printing {
+
+ impl ToTokens for PatMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatOr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.leading_vert.to_tokens(tokens);
+ self.cases.to_tokens(tokens);
+ }
+@@ -877,6 +853,7 @@ mod printing {
+
+ impl ToTokens for FieldPat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ if let Some(colon_token) = &self.colon_token {
+ self.member.to_tokens(tokens);
+ colon_token.to_tokens(tokens);
+diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs
+index 8dda43ee67..15c0fcc664 100644
+--- a/third_party/rust/syn/src/path.rs
++++ b/third_party/rust/syn/src/path.rs
+@@ -2,9 +2,9 @@ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+- /// A path at which a named item is exported: `std::collections::HashMap`.
++ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Path {
+ pub leading_colon: Option<Token![::]>,
+@@ -29,7 +29,7 @@ where
+ ast_struct! {
+ /// A segment of a path together with any path arguments on that segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct PathSegment {
+ pub ident: Ident,
+@@ -52,7 +52,7 @@ where
+ ast_enum! {
+ /// Angle bracketed or parenthesized arguments of a path segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Angle bracketed
+@@ -98,7 +98,7 @@ impl PathArguments {
+ ast_enum! {
+ /// An individual generic argument, like `'a`, `T`, or `Item = T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum GenericArgument {
+ /// A lifetime argument.
+@@ -122,7 +122,7 @@ ast_struct! {
+ /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
+ /// V>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct AngleBracketedGenericArguments {
+ pub colon2_token: Option<Token![::]>,
+@@ -135,7 +135,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binding (equality constraint) on an associated type: `Item = u8`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Binding {
+ pub ident: Ident,
+@@ -147,7 +147,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type bound: `Iterator<Item: Display>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Constraint {
+ pub ident: Ident,
+@@ -160,7 +160,7 @@ ast_struct! {
+ /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
+ /// C`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct ParenthesizedGenericArguments {
+ pub paren_token: token::Paren,
+@@ -189,7 +189,7 @@ ast_struct! {
+ /// ty position = 0
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct QSelf {
+ pub lt_token: Token![<],
+@@ -291,11 +291,7 @@ pub mod parsing {
+
+ impl PathSegment {
+ fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+- if input.peek(Token![super])
+- || input.peek(Token![self])
+- || input.peek(Token![crate])
+- || input.peek(Token![extern])
+- {
++ if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
+ let ident = input.call(Ident::parse_any)?;
+ return Ok(PathSegment::from(ident));
+ }
+@@ -358,7 +354,7 @@ pub mod parsing {
+ impl Path {
+ /// Parse a `Path` containing no path arguments on any of its segments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -400,7 +396,6 @@ pub mod parsing {
+ && !input.peek(Token![self])
+ && !input.peek(Token![Self])
+ && !input.peek(Token![crate])
+- && !input.peek(Token![extern])
+ {
+ break;
+ }
+@@ -433,7 +428,7 @@ pub mod parsing {
+ /// path arguments, and
+ /// - the ident of the first path segment is equal to the given one.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -472,7 +467,7 @@ pub mod parsing {
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ pub fn get_ident(&self) -> Option<&Ident> {
+ if self.leading_colon.is_none()
+diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs
+index 38c7bf4e82..46c82a65b1 100644
+--- a/third_party/rust/syn/src/punctuated.rs
++++ b/third_party/rust/syn/src/punctuated.rs
+@@ -22,6 +22,8 @@
+
+ #[cfg(feature = "extra-traits")]
+ use std::fmt::{self, Debug};
++#[cfg(feature = "extra-traits")]
++use std::hash::{Hash, Hasher};
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use std::iter;
+ use std::iter::FromIterator;
+@@ -41,8 +43,6 @@ use crate::token::Token;
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Punctuated<T, P> {
+ inner: Vec<(T, P)>,
+ last: Option<Box<T>>,
+@@ -76,22 +76,19 @@ impl<T, P> Punctuated<T, P> {
+ self.iter().next()
+ }
+
++ /// Mutably borrows the first element in this sequence.
++ pub fn first_mut(&mut self) -> Option<&mut T> {
++ self.iter_mut().next()
++ }
++
+ /// Borrows the last element in this sequence.
+ pub fn last(&self) -> Option<&T> {
+- if self.last.is_some() {
+- self.last.as_ref().map(Box::as_ref)
+- } else {
+- self.inner.last().map(|pair| &pair.0)
+- }
++ self.iter().next_back()
+ }
+
+ /// Mutably borrows the last element in this sequence.
+ pub fn last_mut(&mut self) -> Option<&mut T> {
+- if self.last.is_some() {
+- self.last.as_mut().map(Box::as_mut)
+- } else {
+- self.inner.last_mut().map(|pair| &mut pair.0)
+- }
++ self.iter_mut().next_back()
+ }
+
+ /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
+@@ -230,13 +227,19 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++ /// Clears the sequence of all values and punctuation, making it empty.
++ pub fn clear(&mut self) {
++ self.inner.clear();
++ self.last = None;
++ }
++
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated(input: ParseStream) -> Result<Self>
+@@ -256,7 +259,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated_with(
+@@ -292,7 +295,7 @@ impl<T, P> Punctuated<T, P> {
+ /// is not followed by a `P`, even if there are remaining tokens in the
+ /// stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
+@@ -312,7 +315,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty_with(
+@@ -338,6 +341,53 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Punctuated<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ Punctuated {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Eq for Punctuated<T, P>
++where
++ T: Eq,
++ P: Eq,
++{
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> PartialEq for Punctuated<T, P>
++where
++ T: PartialEq,
++ P: PartialEq,
++{
++ fn eq(&self, other: &Self) -> bool {
++ let Punctuated { inner, last } = self;
++ *inner == other.inner && *last == other.last
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Hash for Punctuated<T, P>
++where
++ T: Hash,
++ P: Hash,
++{
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ let Punctuated { inner, last } = self;
++ inner.hash(state);
++ last.hash(state);
++ }
++}
++
+ #[cfg(feature = "extra-traits")]
+ impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -536,7 +586,6 @@ impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoPairs<T, P> {
+ inner: vec::IntoIter<(T, P)>,
+ last: option::IntoIter<T>,
+@@ -572,12 +621,24 @@ impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+ }
+ }
+
++impl<T, P> Clone for IntoPairs<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoPairs {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
+ /// An iterator over owned values of type `T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoIter<T> {
+ inner: vec::IntoIter<T>,
+ }
+@@ -606,6 +667,17 @@ impl<T> ExactSizeIterator for IntoIter<T> {
+ }
+ }
+
++impl<T> Clone for IntoIter<T>
++where
++ T: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoIter {
++ inner: self.inner.clone(),
++ }
++ }
++}
++
+ /// An iterator over borrowed values of type `&T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+@@ -799,7 +871,6 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a, T> for I where
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub enum Pair<T, P> {
+ Punctuated(T, P),
+ End(T),
+@@ -856,6 +927,20 @@ impl<T, P> Pair<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Pair<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ match self {
++ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
++ Pair::End(t) => Pair::End(t.clone()),
++ }
++ }
++}
++
+ impl<T, P> Index<usize> for Punctuated<T, P> {
+ type Output = T;
+
+diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs
+new file mode 100644
+index 0000000000..ccfb8b5ad0
+--- /dev/null
++++ b/third_party/rust/syn/src/reserved.rs
+@@ -0,0 +1,42 @@
++// Type for a syntax tree node that is reserved for future use.
++//
++// For example ExprReference contains a field `raw` of type Reserved. If `&raw
++// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
++// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
++// the possibility of breaking any code.
++
++use proc_macro2::Span;
++use std::marker::PhantomData;
++
++#[cfg(feature = "extra-traits")]
++use std::fmt::{self, Debug};
++
++ast_struct! {
++ pub struct Reserved {
++ _private: PhantomData<Span>,
++ }
++}
++
++impl Default for Reserved {
++ fn default() -> Self {
++ Reserved {
++ _private: PhantomData,
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for Reserved {
++ fn clone(&self) -> Self {
++ Reserved {
++ _private: self._private,
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl Debug for Reserved {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter.debug_struct("Reserved").finish()
++ }
++}
+diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs
+index 71ffe26b81..01591cedcb 100644
+--- a/third_party/rust/syn/src/spanned.rs
++++ b/third_party/rust/syn/src/spanned.rs
+@@ -1,7 +1,7 @@
+ //! A trait that can provide the `Span` of the complete contents of a syntax
+ //! tree node.
+ //!
+-//! *This module is available if Syn is built with both the `"parsing"` and
++//! *This module is available only if Syn is built with both the `"parsing"` and
+ //! `"printing"` features.*
+ //!
+ //! <br>
+@@ -97,7 +97,7 @@ use quote::spanned::Spanned as ToTokens;
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with both the `"parsing"` and
++/// *This trait is available only if Syn is built with both the `"parsing"` and
+ /// `"printing"` features.*
+ pub trait Spanned {
+ /// Returns a `Span` covering the complete contents of this syntax tree
+diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs
+index e4277fdbaa..b06e843d75 100644
+--- a/third_party/rust/syn/src/stmt.rs
++++ b/third_party/rust/syn/src/stmt.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A braced block containing Rust statements.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Block {
+ pub brace_token: token::Brace,
+ /// Statements in a block
+@@ -14,7 +14,7 @@ ast_struct! {
+ ast_enum! {
+ /// A statement, usually ending in a semicolon.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum Stmt {
+ /// A local (let) binding.
+ Local(Local),
+@@ -33,7 +33,7 @@ ast_enum! {
+ ast_struct! {
+ /// A local `let` binding: `let x: u64 = s.parse()?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Local {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -47,14 +47,15 @@ ast_struct! {
+ pub mod parsing {
+ use super::*;
+
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+- use crate::punctuated::Punctuated;
++ use proc_macro2::TokenStream;
+
+ impl Block {
+ /// Parse the body of a block as zero or more statements, possibly
+ /// including one trailing expression.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -106,8 +107,8 @@ pub mod parsing {
+ pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
+ let mut stmts = Vec::new();
+ loop {
+- while input.peek(Token![;]) {
+- input.parse::<Token![;]>()?;
++ while let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
+ }
+ if input.is_empty() {
+ break;
+@@ -146,55 +147,55 @@ pub mod parsing {
+ }
+
+ fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- // TODO: optimize using advance_to
++ let mut attrs = input.call(Attribute::parse_outer)?;
++
++ // brace-style macros; paren and bracket macros get parsed as
++ // expression statements.
+ let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
++ if let Ok(path) = ahead.call(Path::parse_mod_style) {
++ if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
++ input.advance_to(&ahead);
++ return stmt_mac(input, attrs, path);
++ }
++ }
+
+- if {
+- let ahead = ahead.fork();
+- // Only parse braces here; paren and bracket will get parsed as
+- // expression statements
+- ahead.call(Path::parse_mod_style).is_ok()
+- && ahead.parse::<Token![!]>().is_ok()
+- && (ahead.peek(token::Brace) || ahead.peek(Ident))
+- } {
+- stmt_mac(input)
+- } else if ahead.peek(Token![let]) {
+- stmt_local(input).map(Stmt::Local)
+- } else if ahead.peek(Token![pub])
+- || ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![use])
+- || ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
+- || ahead.peek(Token![const])
+- || ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
+- || ahead.peek(Token![async])
+- && (ahead.peek2(Token![unsafe])
+- || ahead.peek2(Token![extern])
+- || ahead.peek2(Token![fn]))
+- || ahead.peek(Token![fn])
+- || ahead.peek(Token![mod])
+- || ahead.peek(Token![type])
+- || ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
+- || ahead.peek(Token![struct])
+- || ahead.peek(Token![enum])
+- || ahead.peek(Token![union]) && ahead.peek2(Ident)
+- || ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+- || ahead.peek(Token![trait])
+- || ahead.peek(Token![default])
+- && (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
+- || ahead.peek(Token![impl])
+- || ahead.peek(Token![macro])
++ if input.peek(Token![let]) {
++ stmt_local(input, attrs).map(Stmt::Local)
++ } else if input.peek(Token![pub])
++ || input.peek(Token![crate]) && !input.peek2(Token![::])
++ || input.peek(Token![extern])
++ || input.peek(Token![use])
++ || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
++ || input.peek(Token![const])
++ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
++ || input.peek(Token![async])
++ && (input.peek2(Token![unsafe])
++ || input.peek2(Token![extern])
++ || input.peek2(Token![fn]))
++ || input.peek(Token![fn])
++ || input.peek(Token![mod])
++ || input.peek(Token![type])
++ || input.peek(item::parsing::existential) && input.peek2(Token![type])
++ || input.peek(Token![struct])
++ || input.peek(Token![enum])
++ || input.peek(Token![union]) && input.peek2(Ident)
++ || input.peek(Token![auto]) && input.peek2(Token![trait])
++ || input.peek(Token![trait])
++ || input.peek(Token![default])
++ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
++ || input.peek(Token![impl])
++ || input.peek(Token![macro])
+ {
+- input.parse().map(Stmt::Item)
++ let mut item: Item = input.parse()?;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(Stmt::Item(item))
+ } else {
+- stmt_expr(input, allow_nosemi)
++ stmt_expr(input, allow_nosemi, attrs)
+ }
+ }
+
+- fn stmt_mac(input: ParseStream) -> Result<Stmt> {
+- let attrs = input.call(Attribute::parse_outer)?;
+- let path = input.call(Path::parse_mod_style)?;
++ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
+ let bang_token: Token![!] = input.parse()?;
+ let ident: Option<Ident> = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+@@ -213,33 +214,12 @@ pub mod parsing {
+ })))
+ }
+
+- fn stmt_local(input: ParseStream) -> Result<Local> {
++ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
+ Ok(Local {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ let_token: input.parse()?,
+ pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+ if input.peek(Token![:]) {
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+@@ -265,12 +245,19 @@ pub mod parsing {
+ })
+ }
+
+- fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ fn stmt_expr(
++ input: ParseStream,
++ allow_nosemi: bool,
++ mut attrs: Vec<Attribute>,
++ ) -> Result<Stmt> {
+ let mut e = expr::parsing::expr_early(input)?;
+
+- attrs.extend(e.replace_attrs(Vec::new()));
+- e.replace_attrs(attrs);
++ let mut attr_target = &mut e;
++ while let Expr::Binary(e) = attr_target {
++ attr_target = &mut e.left;
++ }
++ attrs.extend(attr_target.replace_attrs(Vec::new()));
++ attr_target.replace_attrs(attrs);
+
+ if input.peek(Token![;]) {
+ return Ok(Stmt::Semi(e, input.parse()?));
+diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs
+index 0b8c18192f..8539378c5e 100644
+--- a/third_party/rust/syn/src/token.rs
++++ b/third_party/rust/syn/src/token.rs
+@@ -88,7 +88,6 @@
+ //! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+ //! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+
+-use std;
+ #[cfg(feature = "extra-traits")]
+ use std::cmp;
+ #[cfg(feature = "extra-traits")]
+@@ -97,13 +96,13 @@ use std::fmt::{self, Debug};
+ use std::hash::{Hash, Hasher};
+ use std::ops::{Deref, DerefMut};
+
+-#[cfg(feature = "parsing")]
+-use proc_macro2::Delimiter;
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ use proc_macro2::Ident;
+ use proc_macro2::Span;
+ #[cfg(feature = "printing")]
+ use proc_macro2::TokenStream;
++#[cfg(feature = "parsing")]
++use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
+ #[cfg(feature = "printing")]
+ use quote::{ToTokens, TokenStreamExt};
+
+@@ -112,10 +111,8 @@ use self::private::WithSpan;
+ use crate::buffer::Cursor;
+ #[cfg(feature = "parsing")]
+ use crate::error::Result;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lifetime::Lifetime;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
+ #[cfg(feature = "parsing")]
+@@ -155,21 +152,20 @@ mod private {
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for Ident {}
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
++ use crate::parse::Unexpected;
+ use std::cell::Cell;
+ use std::rc::Rc;
+
+ let scope = Span::call_site();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
+ peek(&buffer)
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! impl_token {
+- ($name:ident $display:expr) => {
++ ($display:tt $name:ty) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+@@ -189,24 +185,38 @@ macro_rules! impl_token {
+ };
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lifetime "lifetime");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lit "literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitStr "string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByteStr "byte string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByte "byte literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitChar "character literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitInt "integer literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitFloat "floating point literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitBool "boolean literal");
++impl_token!("lifetime" Lifetime);
++impl_token!("literal" Lit);
++impl_token!("string literal" LitStr);
++impl_token!("byte string literal" LitByteStr);
++impl_token!("byte literal" LitByte);
++impl_token!("character literal" LitChar);
++impl_token!("integer literal" LitInt);
++impl_token!("floating point literal" LitFloat);
++impl_token!("boolean literal" LitBool);
++impl_token!("group token" proc_macro2::Group);
++
++macro_rules! impl_low_level_token {
++ ($display:tt $ty:ident $get:ident) => {
++ #[cfg(feature = "parsing")]
++ impl Token for $ty {
++ fn peek(cursor: Cursor) -> bool {
++ cursor.$get().is_some()
++ }
++
++ fn display() -> &'static str {
++ $display
++ }
++ }
++
++ #[cfg(feature = "parsing")]
++ impl private::Sealed for $ty {}
++ };
++}
++
++impl_low_level_token!("punctuation token" Punct punct);
++impl_low_level_token!("literal" Literal literal);
++impl_low_level_token!("token" TokenTree token_tree);
+
+ // Not public API.
+ #[doc(hidden)]
+@@ -233,7 +243,6 @@ impl<T: CustomToken> Token for T {
+ macro_rules! define_keywords {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ ///
+ /// Don't try to remember the name of this type &mdash; use the
+@@ -260,6 +269,16 @@ macro_rules! define_keywords {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -338,7 +357,6 @@ macro_rules! impl_deref_if_len_is_1 {
+ macro_rules! define_punctuation_structs {
+ ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[repr(C)]
+ #[$doc]
+ ///
+@@ -366,6 +384,16 @@ macro_rules! define_punctuation_structs {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -436,7 +464,6 @@ macro_rules! define_punctuation {
+ macro_rules! define_delimiters {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ pub struct $name {
+ pub span: Span,
+@@ -458,6 +485,16 @@ macro_rules! define_delimiters {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -855,7 +892,7 @@ pub mod parsing {
+ }
+
+ pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
+- let mut spans = [input.cursor().span(); 3];
++ let mut spans = [input.span(); 3];
+ punct_helper(input, token, &mut spans)?;
+ Ok(S::from_spans(&spans))
+ }
+diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs
+index f860eebb4f..8dba0627cd 100644
+--- a/third_party/rust/syn/src/tt.rs
++++ b/third_party/rust/syn/src/tt.rs
+@@ -18,8 +18,8 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
+ _ => return false,
+ }
+
+- let s1 = g1.stream().clone().into_iter();
+- let mut s2 = g2.stream().clone().into_iter();
++ let s1 = g1.stream().into_iter();
++ let mut s2 = g2.stream().into_iter();
+
+ for item1 in s1 {
+ let item2 = match s2.next() {
+@@ -60,7 +60,7 @@ impl<'a> Hash for TokenTreeHelper<'a> {
+ Delimiter::None => 3u8.hash(h),
+ }
+
+- for item in g.stream().clone() {
++ for item in g.stream() {
+ TokenTreeHelper(&item).hash(h);
+ }
+ 0xffu8.hash(h); // terminator w/ a variant we don't normally hash
+diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs
+index 4ee59bda2a..fd7c97eab7 100644
+--- a/third_party/rust/syn/src/ty.rs
++++ b/third_party/rust/syn/src/ty.rs
+@@ -1,15 +1,11 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// The possible types that a Rust value could have.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Type #manual_extra_traits {
++ pub enum Type {
+ /// A fixed size array type: `[T; n]`.
+ Array(TypeArray),
+
+@@ -77,7 +73,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A fixed size array type: `[T; n]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeArray {
+ pub bracket_token: token::Bracket,
+@@ -90,7 +86,7 @@ ast_struct! {
+ ast_struct! {
+ /// A bare function type: `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeBareFn {
+ pub lifetimes: Option<BoundLifetimes>,
+@@ -107,7 +103,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type contained within invisible delimiters.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeGroup {
+ pub group_token: token::Group,
+@@ -119,7 +115,7 @@ ast_struct! {
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeImplTrait {
+ pub impl_token: Token![impl],
+@@ -130,7 +126,7 @@ ast_struct! {
+ ast_struct! {
+ /// Indication that a type should be inferred by the compiler: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeInfer {
+ pub underscore_token: Token![_],
+@@ -140,7 +136,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in the type position.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeMacro {
+ pub mac: Macro,
+@@ -150,7 +146,7 @@ ast_struct! {
+ ast_struct! {
+ /// The never type: `!`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeNever {
+ pub bang_token: Token![!],
+@@ -160,7 +156,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized type equivalent to the inner type.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParen {
+ pub paren_token: token::Paren,
+@@ -172,7 +168,7 @@ ast_struct! {
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePath {
+ pub qself: Option<QSelf>,
+@@ -183,7 +179,7 @@ ast_struct! {
+ ast_struct! {
+ /// A raw pointer type: `*const T` or `*mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePtr {
+ pub star_token: Token![*],
+@@ -196,7 +192,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference type: `&'a T` or `&'a mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeReference {
+ pub and_token: Token![&],
+@@ -209,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice type: `[T]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeSlice {
+ pub bracket_token: token::Bracket,
+@@ -221,7 +217,7 @@ ast_struct! {
+ /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTraitObject {
+ pub dyn_token: Option<Token![dyn]>,
+@@ -232,7 +228,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple type: `(A, B, C, String)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTuple {
+ pub paren_token: token::Paren,
+@@ -240,111 +236,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Type {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Type {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Type::Array(this), Type::Array(other)) => this == other,
+- (Type::BareFn(this), Type::BareFn(other)) => this == other,
+- (Type::Group(this), Type::Group(other)) => this == other,
+- (Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
+- (Type::Infer(this), Type::Infer(other)) => this == other,
+- (Type::Macro(this), Type::Macro(other)) => this == other,
+- (Type::Never(this), Type::Never(other)) => this == other,
+- (Type::Paren(this), Type::Paren(other)) => this == other,
+- (Type::Path(this), Type::Path(other)) => this == other,
+- (Type::Ptr(this), Type::Ptr(other)) => this == other,
+- (Type::Reference(this), Type::Reference(other)) => this == other,
+- (Type::Slice(this), Type::Slice(other)) => this == other,
+- (Type::TraitObject(this), Type::TraitObject(other)) => this == other,
+- (Type::Tuple(this), Type::Tuple(other)) => this == other,
+- (Type::Verbatim(this), Type::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Type {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Type::Array(ty) => {
+- hash.write_u8(0);
+- ty.hash(hash);
+- }
+- Type::BareFn(ty) => {
+- hash.write_u8(1);
+- ty.hash(hash);
+- }
+- Type::Group(ty) => {
+- hash.write_u8(2);
+- ty.hash(hash);
+- }
+- Type::ImplTrait(ty) => {
+- hash.write_u8(3);
+- ty.hash(hash);
+- }
+- Type::Infer(ty) => {
+- hash.write_u8(4);
+- ty.hash(hash);
+- }
+- Type::Macro(ty) => {
+- hash.write_u8(5);
+- ty.hash(hash);
+- }
+- Type::Never(ty) => {
+- hash.write_u8(6);
+- ty.hash(hash);
+- }
+- Type::Paren(ty) => {
+- hash.write_u8(7);
+- ty.hash(hash);
+- }
+- Type::Path(ty) => {
+- hash.write_u8(8);
+- ty.hash(hash);
+- }
+- Type::Ptr(ty) => {
+- hash.write_u8(9);
+- ty.hash(hash);
+- }
+- Type::Reference(ty) => {
+- hash.write_u8(10);
+- ty.hash(hash);
+- }
+- Type::Slice(ty) => {
+- hash.write_u8(11);
+- ty.hash(hash);
+- }
+- Type::TraitObject(ty) => {
+- hash.write_u8(12);
+- ty.hash(hash);
+- }
+- Type::Tuple(ty) => {
+- hash.write_u8(13);
+- ty.hash(hash);
+- }
+- Type::Verbatim(ty) => {
+- hash.write_u8(14);
+- TokenStreamHelper(ty).hash(hash);
+- }
+- Type::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// The binary interface of a function: `extern "C"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Abi {
+ pub extern_token: Token![extern],
+@@ -355,7 +250,7 @@ ast_struct! {
+ ast_struct! {
+ /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct BareFnArg {
+ pub attrs: Vec<Attribute>,
+@@ -377,7 +272,7 @@ ast_struct! {
+ /// }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variadic {
+ pub attrs: Vec<Attribute>,
+@@ -388,7 +283,7 @@ ast_struct! {
+ ast_enum! {
+ /// Return type of a function signature.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum ReturnType {
+ /// Return type is not specified.
+@@ -407,10 +302,13 @@ pub mod parsing {
+ use crate::ext::IdentExt;
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use proc_macro2::{Punct, Spacing, TokenTree};
++ use std::iter::FromIterator;
+
+ impl Parse for Type {
+ fn parse(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, true)
++ let allow_plus = true;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+@@ -421,15 +319,17 @@ pub mod parsing {
+ ///
+ /// This parser does not allow a `+`, while the default parser does.
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, false)
++ let allow_plus = false;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+ fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
+ return input.parse().map(Type::Group);
+ }
+
++ let begin = input.fork();
+ let mut lifetimes = None::<BoundLifetimes>;
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(Token![for]) {
+@@ -524,7 +424,7 @@ pub mod parsing {
+ ..trait_bound
+ })
+ }
+- other => other,
++ other @ TypeParamBound::Lifetime(_) => other,
+ }
+ }
+ _ => break,
+@@ -549,17 +449,20 @@ pub mod parsing {
+ }))
+ } else if lookahead.peek(Token![fn])
+ || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
++ || lookahead.peek(Token![extern])
+ {
+- let mut bare_fn: TypeBareFn = input.parse()?;
+- bare_fn.lifetimes = lifetimes;
+- Ok(Type::BareFn(bare_fn))
++ let allow_mut_self = true;
++ if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
++ bare_fn.lifetimes = lifetimes;
++ Ok(Type::BareFn(bare_fn))
++ } else {
++ Ok(Type::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Ident)
+ || input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![crate])
+- || input.peek(Token![extern])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ {
+@@ -722,38 +625,58 @@ pub mod parsing {
+
+ impl Parse for TypeBareFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let args;
+- let allow_variadic;
+- Ok(TypeBareFn {
+- lifetimes: input.parse()?,
+- unsafety: input.parse()?,
+- abi: input.parse()?,
+- fn_token: input.parse()?,
+- paren_token: parenthesized!(args in input),
+- inputs: {
+- let mut inputs = Punctuated::new();
+- while !args.is_empty() && !args.peek(Token![...]) {
+- inputs.push_value(args.parse()?);
+- if args.is_empty() {
+- break;
+- }
+- inputs.push_punct(args.parse()?);
+- }
+- allow_variadic = inputs.empty_or_trailing();
+- inputs
+- },
+- variadic: {
+- if allow_variadic && args.peek(Token![...]) {
+- Some(Variadic {
+- attrs: Vec::new(),
++ let allow_mut_self = false;
++ parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
++ let args;
++ let mut variadic = None;
++ let mut has_mut_self = false;
++
++ let bare_fn = TypeBareFn {
++ lifetimes: input.parse()?,
++ unsafety: input.parse()?,
++ abi: input.parse()?,
++ fn_token: input.parse()?,
++ paren_token: parenthesized!(args in input),
++ inputs: {
++ let mut inputs = Punctuated::new();
++
++ while !args.is_empty() {
++ let attrs = args.call(Attribute::parse_outer)?;
++
++ if inputs.empty_or_trailing() && args.peek(Token![...]) {
++ variadic = Some(Variadic {
++ attrs,
+ dots: args.parse()?,
+- })
++ });
++ break;
++ }
++
++ if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
++ inputs.push_value(BareFnArg { attrs, ..arg });
+ } else {
+- None
++ has_mut_self = true;
+ }
+- },
+- output: input.call(ReturnType::without_plus)?,
+- })
++ if args.is_empty() {
++ break;
++ }
++
++ inputs.push_punct(args.parse()?);
++ }
++
++ inputs
++ },
++ variadic,
++ output: input.call(ReturnType::without_plus)?,
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(bare_fn))
+ }
+ }
+
+@@ -776,9 +699,27 @@ pub mod parsing {
+ impl Parse for TypeTuple {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
++ let paren_token = parenthesized!(content in input);
++
++ if content.is_empty() {
++ return Ok(TypeTuple {
++ paren_token,
++ elems: Punctuated::new(),
++ });
++ }
++
++ let first: Type = content.parse()?;
+ Ok(TypeTuple {
+- paren_token: parenthesized!(content in input),
+- elems: content.parse_terminated(Type::parse)?,
++ paren_token,
++ elems: {
++ let mut elems = Punctuated::new();
++ elems.push_value(first);
++ elems.push_punct(content.parse()?);
++ let rest: Punctuated<Type, Token![,]> =
++ content.parse_terminated(Parse::parse)?;
++ elems.extend(rest);
++ elems
++ },
+ })
+ }
+ }
+@@ -807,9 +748,11 @@ pub mod parsing {
+
+ impl ReturnType {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ if input.peek(Token![->]) {
+ let arrow = input.parse()?;
+@@ -844,10 +787,12 @@ pub mod parsing {
+
+ impl TypeTraitObject {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
+ // Only allow multiple trait references if allow_plus is true.
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ Ok(TypeTraitObject {
+ dyn_token: input.parse()?,
+@@ -910,7 +855,8 @@ pub mod parsing {
+
+ impl Parse for TypeParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+ }
+
+@@ -926,22 +872,72 @@ pub mod parsing {
+
+ impl Parse for BareFnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Ok(BareFnArg {
+- attrs: input.call(Attribute::parse_outer)?,
+- name: {
+- if (input.peek(Ident) || input.peek(Token![_]))
+- && input.peek2(Token![:])
+- && !input.peek2(Token![::])
+- {
+- let name = input.call(Ident::parse_any)?;
+- let colon: Token![:] = input.parse()?;
+- Some((name, colon))
+- } else {
+- None
+- }
+- },
+- ty: input.parse()?,
+- })
++ let allow_mut_self = false;
++ parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn_arg(
++ input: ParseStream,
++ mut allow_mut_self: bool,
++ ) -> Result<Option<BareFnArg>> {
++ let mut has_mut_self = false;
++ let arg = BareFnArg {
++ attrs: input.call(Attribute::parse_outer)?,
++ name: {
++ if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
++ && input.peek2(Token![:])
++ && !input.peek2(Token![::])
++ {
++ let name = input.call(Ident::parse_any)?;
++ let colon: Token![:] = input.parse()?;
++ Some((name, colon))
++ } else if allow_mut_self
++ && input.peek(Token![mut])
++ && input.peek2(Token![self])
++ && input.peek3(Token![:])
++ && !input.peek3(Token![::])
++ {
++ has_mut_self = true;
++ allow_mut_self = false;
++ input.parse::<Token![mut]>()?;
++ input.parse::<Token![self]>()?;
++ input.parse::<Token![:]>()?;
++ None
++ } else {
++ None
++ }
++ },
++ ty: if !has_mut_self && input.peek(Token![...]) {
++ let dot3 = input.parse::<Token![...]>()?;
++ let args = vec![
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Alone)),
++ ];
++ let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
++ |(mut arg, span)| {
++ arg.set_span(*span);
++ arg
++ },
++ ));
++ Type::Verbatim(tokens)
++ } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
++ has_mut_self = true;
++ input.parse::<Token![mut]>()?;
++ Type::Path(TypePath {
++ qself: None,
++ path: input.parse::<Token![self]>()?.into(),
++ })
++ } else {
++ input.parse()?
++ },
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(arg))
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs
+new file mode 100644
+index 0000000000..0686352f7a
+--- /dev/null
++++ b/third_party/rust/syn/src/verbatim.rs
+@@ -0,0 +1,15 @@
++use crate::parse::{ParseBuffer, ParseStream};
++use proc_macro2::TokenStream;
++use std::iter;
++
++pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
++ let end = end.cursor();
++ let mut cursor = begin.cursor();
++ let mut tokens = TokenStream::new();
++ while cursor != end {
++ let (tt, next) = cursor.token_tree().unwrap();
++ tokens.extend(iter::once(tt));
++ cursor = next;
++ }
++ tokens
++}
+diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs
+new file mode 100644
+index 0000000000..7be082e1a2
+--- /dev/null
++++ b/third_party/rust/syn/src/whitespace.rs
+@@ -0,0 +1,65 @@
++pub fn skip(mut s: &str) -> &str {
++ 'skip: while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ if let Some(i) = s.find('\n') {
++ s = &s[i + 1..];
++ continue;
++ } else {
++ return "";
++ }
++ } else if s.starts_with("/**/") {
++ s = &s[4..];
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ let mut depth = 0;
++ let bytes = s.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ s = &s[i + 2..];
++ continue 'skip;
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++ return s;
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = &s[1..];
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = &s[ch.len_utf8()..];
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
+diff --git a/third_party/rust/syn/tests/clone.sh b/third_party/rust/syn/tests/clone.sh
+deleted file mode 100755
+index 8e0863cba6..0000000000
+--- a/third_party/rust/syn/tests/clone.sh
++++ /dev/null
+@@ -1,16 +0,0 @@
+-#!/bin/bash
+-
+-REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
+-
+-set -euo pipefail
+-cd "$(dirname "${BASH_SOURCE[0]}")"
+-mkdir -p rust
+-touch rust/COMMIT
+-
+-if [ "$(cat rust/COMMIT)" != "$REV" ]; then
+- rm -rf rust
+- mkdir rust
+- curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
+- | tar xz --directory rust --strip-components 1
+- echo "$REV" > rust/COMMIT
+-fi
+diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs
+index 13a6c36ae5..7589a07573 100644
+--- a/third_party/rust/syn/tests/common/eq.rs
++++ b/third_party/rust/syn/tests/common/eq.rs
+@@ -1,36 +1,35 @@
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
++extern crate rustc_span;
+ extern crate rustc_target;
+-extern crate syntax;
+-extern crate syntax_pos;
+
+ use std::mem;
+
+-use self::rustc_data_structures::sync::Lrc;
+-use self::rustc_data_structures::thin_vec::ThinVec;
+-use self::rustc_target::abi::FloatTy;
+-use self::rustc_target::spec::abi::Abi;
+-use self::syntax::ast::{
+- AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
+- AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
+- CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
+- FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
+- GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
+- ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
+- ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
+- MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
+- PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
+- StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
+- TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
+- UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
+- WhereEqPredicate, WherePredicate, WhereRegionPredicate,
++use rustc_ast::ast::{
++ AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
++ AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
++ BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
++ Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
++ FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
++ GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
++ InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
++ ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
++ LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
++ MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
++ Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
++ StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
++ TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
++ VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
++ WhereRegionPredicate,
+ };
+-use self::syntax::parse::lexer::comments;
+-use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::Spanned;
+-use self::syntax::symbol::{sym, Symbol};
+-use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
+-use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
++use rustc_ast::ptr::P;
++use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
++use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
++use rustc_data_structures::sync::Lrc;
++use rustc_data_structures::thin_vec::ThinVec;
++use rustc_span::source_map::Spanned;
++use rustc_span::symbol::Ident;
++use rustc_span::{Span, Symbol, SyntaxContext};
+
+ pub trait SpanlessEq {
+ fn eq(&self, other: &Self) -> bool;
+@@ -86,14 +85,6 @@ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
+ }
+ }
+
+-impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
+- fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&self.0, &other.0)
+- && SpanlessEq::eq(&self.1, &other.1)
+- && SpanlessEq::eq(&self.2, &other.2)
+- }
+-}
+-
+ macro_rules! spanless_eq_true {
+ ($name:ident) => {
+ impl SpanlessEq for $name {
+@@ -126,17 +117,19 @@ spanless_eq_partial_eq!(u16);
+ spanless_eq_partial_eq!(u128);
+ spanless_eq_partial_eq!(usize);
+ spanless_eq_partial_eq!(char);
++spanless_eq_partial_eq!(String);
+ spanless_eq_partial_eq!(Symbol);
+-spanless_eq_partial_eq!(Abi);
++spanless_eq_partial_eq!(CommentKind);
+ spanless_eq_partial_eq!(DelimToken);
++spanless_eq_partial_eq!(InlineAsmOptions);
+
+ macro_rules! spanless_eq_struct {
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ } => {
+- impl SpanlessEq for $name {
++ impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
+ fn eq(&self, other: &Self) -> bool {
+ let $name { $($field,)* $($ignore: _,)* } = self;
+ let $name { $($field: $other,)* $($ignore: _,)* } = other;
+@@ -146,14 +139,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $next:ident
+ $($rest:ident)*
+ $(!$ignore:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ [$next other]
+ $($rest)*
+@@ -162,14 +155,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ !$next:ident
+ $(!$rest:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ $(![$ignore])*
+ ![$next]
+@@ -263,119 +256,131 @@ macro_rules! spanless_eq_enum {
+ };
+ }
+
+-spanless_eq_struct!(AngleBracketedArgs; span args constraints);
++spanless_eq_struct!(AngleBracketedArgs; span args);
+ spanless_eq_struct!(AnonConst; id value);
+-spanless_eq_struct!(Arg; attrs ty pat id span);
+-spanless_eq_struct!(Arm; attrs pats guard body span id);
++spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
+ spanless_eq_struct!(AssocTyConstraint; id ident kind span);
+-spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
+-spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
++spanless_eq_struct!(AttrItem; path args);
++spanless_eq_struct!(Attribute; kind id style span);
++spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
+ spanless_eq_struct!(Block; stmts id rules span);
+-spanless_eq_struct!(Crate; module attrs span);
++spanless_eq_struct!(Crate; module attrs span proc_macros);
+ spanless_eq_struct!(EnumDef; variants);
+-spanless_eq_struct!(Expr; id node span attrs);
+-spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
+-spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
+-spanless_eq_struct!(FnDecl; inputs output c_variadic);
+-spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
+-spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
++spanless_eq_struct!(Expr; id kind span attrs !tokens);
++spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
++spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
++spanless_eq_struct!(FnDecl; inputs output);
++spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
++spanless_eq_struct!(FnSig; header decl span);
+ spanless_eq_struct!(ForeignMod; abi items);
+-spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
++spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
+ spanless_eq_struct!(Generics; params where_clause span);
+ spanless_eq_struct!(GlobalAsm; asm);
+-spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
+-spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
+-spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
+-spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
++spanless_eq_struct!(InlineAsm; template operands options line_spans);
++spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
+ spanless_eq_struct!(Label; ident);
+ spanless_eq_struct!(Lifetime; id ident);
+-spanless_eq_struct!(Lit; token node span);
++spanless_eq_struct!(Lit; token kind span);
++spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
++spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
+ spanless_eq_struct!(Local; pat ty init id span attrs);
+-spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
+-spanless_eq_struct!(MacroDef; tokens legacy);
+-spanless_eq_struct!(MethodSig; header decl);
++spanless_eq_struct!(MacCall; path args prior_type_ascription);
++spanless_eq_struct!(MacCallStmt; mac style attrs);
++spanless_eq_struct!(MacroDef; body macro_rules);
+ spanless_eq_struct!(Mod; inner items inline);
+ spanless_eq_struct!(MutTy; ty mutbl);
++spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
+ spanless_eq_struct!(ParenthesizedArgs; span inputs output);
+-spanless_eq_struct!(Pat; id node span);
++spanless_eq_struct!(Pat; id kind span tokens);
+ spanless_eq_struct!(Path; span segments);
+ spanless_eq_struct!(PathSegment; ident id args);
+ spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
+ spanless_eq_struct!(QSelf; ty path_span position);
+-spanless_eq_struct!(Stmt; id node span);
+-spanless_eq_struct!(StructField; span ident vis id ty attrs);
++spanless_eq_struct!(Stmt; id kind span);
++spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
++spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
+ spanless_eq_struct!(Token; kind span);
+-spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
+ spanless_eq_struct!(TraitRef; path ref_id);
+-spanless_eq_struct!(Ty; id node span);
++spanless_eq_struct!(Ty; id kind span);
+ spanless_eq_struct!(UseTree; prefix kind span);
+-spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
++spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
+ spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
+-spanless_eq_struct!(WhereClause; predicates span);
++spanless_eq_struct!(WhereClause; has_where_token predicates span);
+ spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+ spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
+-spanless_eq_enum!(AsmDialect; Att Intel);
++spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
++spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
++spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
++spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
+ spanless_eq_enum!(AttrStyle; Outer Inner);
+ spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
+ spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
+ spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
++spanless_eq_enum!(BorrowKind; Ref Raw);
+ spanless_eq_enum!(CaptureBy; Value Ref);
+-spanless_eq_enum!(Constness; Const NotConst);
++spanless_eq_enum!(Const; Yes(0) No);
+ spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
+-spanless_eq_enum!(Defaultness; Default Final);
++spanless_eq_enum!(Defaultness; Default(0) Final);
++spanless_eq_enum!(Extern; None Implicit Explicit(0));
+ spanless_eq_enum!(FloatTy; F32 F64);
+-spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
+-spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
++spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
++spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
+ spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
+ spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+-spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
+-spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
+-spanless_eq_enum!(ImplPolarity; Positive Negative);
++spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
++spanless_eq_enum!(ImplPolarity; Positive Negative(0));
++spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
++spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
+ spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
+-spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
+ spanless_eq_enum!(IsAuto; Yes No);
++spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
+ spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
++spanless_eq_enum!(LlvmAsmDialect; Att Intel);
++spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
+ spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
+ spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
+ spanless_eq_enum!(Movability; Static Movable);
+-spanless_eq_enum!(Mutability; Mutable Immutable);
++spanless_eq_enum!(Mutability; Mut Not);
+ spanless_eq_enum!(RangeEnd; Included(0) Excluded);
+ spanless_eq_enum!(RangeLimits; HalfOpen Closed);
+-spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
++spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
+ spanless_eq_enum!(StrStyle; Cooked Raw(0));
+ spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+-spanless_eq_enum!(TraitBoundModifier; None Maybe);
+-spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
++spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
+ spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+ spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
+ spanless_eq_enum!(UnOp; Deref Not Neg);
++spanless_eq_enum!(Unsafe; Yes(0) No);
+ spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
+-spanless_eq_enum!(Unsafety; Unsafe Normal);
+ spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+ spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
+ spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
+ spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+-spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
++spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
+ Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
+ While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
+- Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
+- Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
+- Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
+- Try(0) Yield(0) Err);
+-spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
+- Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
+- Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
+- Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
++ Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
++ Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
++ Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
++ Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
++spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
++ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
++ Sym(expr));
++spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
++ Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
++ Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
++ Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
++ MacCall(0) MacroDef(0));
+ spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
+- Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
++ Float(0 1) Bool(0) Err(0));
+ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
+ Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
+- Paren(0) Mac(0));
++ Paren(0) MacCall(0));
+ spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
+ Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
+- ImplicitSelf Mac(0) Err CVarArgs);
++ ImplicitSelf MacCall(0) Err CVarArgs);
+
+ impl SpanlessEq for Ident {
+ fn eq(&self, other: &Self) -> bool {
+@@ -414,44 +419,20 @@ impl SpanlessEq for TokenKind {
+
+ impl SpanlessEq for TokenStream {
+ fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
+- }
+-}
+-
+-fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
+- let mut tokens = Vec::new();
+- for tt in tts.clone().into_trees() {
+- let c = match tt {
+- TokenTree::Token(Token {
+- kind: TokenKind::DocComment(c),
+- ..
+- }) => c,
+- _ => {
+- tokens.push(tt);
+- continue;
++ let mut this = self.clone().into_trees();
++ let mut other = other.clone().into_trees();
++ loop {
++ let this = match this.next() {
++ None => return other.next().is_none(),
++ Some(val) => val,
++ };
++ let other = match other.next() {
++ None => return false,
++ Some(val) => val,
++ };
++ if !SpanlessEq::eq(&this, &other) {
++ return false;
+ }
+- };
+- let contents = comments::strip_doc_comment_decoration(&c.as_str());
+- let style = comments::doc_comment_style(&c.as_str());
+- tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
+- if style == AttrStyle::Inner {
+- tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
+ }
+- let lit = token::Lit {
+- kind: token::LitKind::Str,
+- symbol: Symbol::intern(&contents),
+- suffix: None,
+- };
+- let tts = vec![
+- TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
+- TokenTree::token(TokenKind::Eq, DUMMY_SP),
+- TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
+- ];
+- tokens.push(TokenTree::Delimited(
+- DelimSpan::dummy(),
+- DelimToken::Bracket,
+- tts.into_iter().collect::<TokenStream>().into(),
+- ));
+ }
+- tokens
+ }
+diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs
+index 8b784beed7..a1cc80a16f 100644
+--- a/third_party/rust/syn/tests/common/mod.rs
++++ b/third_party/rust/syn/tests/common/mod.rs
+@@ -1,5 +1,6 @@
+ #![allow(dead_code)]
+
++use rayon::ThreadPoolBuilder;
+ use std::env;
+
+ pub mod eq;
+@@ -12,3 +13,15 @@ pub fn abort_after() -> usize {
+ Err(_) => usize::max_value(),
+ }
+ }
++
++/// Configure Rayon threadpool.
++pub fn rayon_init() {
++ let stack_size = match env::var("RUST_MIN_STACK") {
++ Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
++ Err(_) => 20 * 1024 * 1024,
++ };
++ ThreadPoolBuilder::new()
++ .stack_size(stack_size)
++ .build_global()
++ .unwrap();
++}
+diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs
+index 41d192f6fb..192828fedd 100644
+--- a/third_party/rust/syn/tests/common/parse.rs
++++ b/third_party/rust/syn/tests/common/parse.rs
+@@ -1,20 +1,20 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
++extern crate rustc_ast;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+-use self::syntax::ast;
+-use self::syntax::parse::{self, ParseSess};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::FilePathMapping;
+-use self::syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+
+ use std::panic;
+
+-pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
++pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
+ match panic::catch_unwind(|| {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- sess.span_diagnostic.set_continue_after_error(false);
+ let e = parse::new_parser_from_source_str(
+ &sess,
+ FileName::Custom("test_precedence".to_string()),
+@@ -32,7 +32,7 @@ pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
+ Ok(Some(e)) => Some(e),
+ Ok(None) => None,
+ Err(_) => {
+- errorf!("libsyntax panicked\n");
++ errorf!("librustc panicked\n");
+ None
+ }
+ }
+diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs
+index 8450c09ecf..85a1a39079 100644
+--- a/third_party/rust/syn/tests/debug/gen.rs
++++ b/third_party/rust/syn/tests/debug/gen.rs
+@@ -2,7 +2,7 @@
+ // It is not intended for manual editing.
+
+ use super::{Lite, RefCast};
+-use std::fmt::{self, Debug};
++use std::fmt::{self, Debug, Display};
+ impl Debug for Lite<syn::Abi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+@@ -1039,9 +1039,9 @@ impl Debug for Lite<syn::Expr> {
+ }
+ syn::Expr::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Expr::While(_val) => {
+@@ -2116,9 +2116,9 @@ impl Debug for Lite<syn::ForeignItem> {
+ }
+ syn::ForeignItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2432,9 +2432,9 @@ impl Debug for Lite<syn::ImplItem> {
+ }
+ syn::ImplItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2940,9 +2940,9 @@ impl Debug for Lite<syn::Item> {
+ }
+ syn::Item::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -3437,9 +3437,9 @@ impl Debug for Lite<syn::Lit> {
+ }
+ syn::Lit::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ }
+@@ -3878,9 +3878,9 @@ impl Debug for Lite<syn::Pat> {
+ }
+ syn::Pat::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Pat::Wild(_val) => {
+@@ -4674,9 +4674,9 @@ impl Debug for Lite<syn::TraitItem> {
+ }
+ syn::TraitItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -5040,9 +5040,9 @@ impl Debug for Lite<syn::Type> {
+ }
+ syn::Type::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs
+index c1180532ec..cefebacef7 100644
+--- a/third_party/rust/syn/tests/debug/mod.rs
++++ b/third_party/rust/syn/tests/debug/mod.rs
+@@ -1,10 +1,7 @@
+-extern crate proc_macro2;
+-extern crate ref_cast;
+-
+ mod gen;
+
+-use self::proc_macro2::{Ident, Literal, TokenStream};
+-use self::ref_cast::RefCast;
++use proc_macro2::{Ident, Literal, TokenStream};
++use ref_cast::RefCast;
+ use std::fmt::{self, Debug};
+ use std::ops::Deref;
+ use syn::punctuated::Punctuated;
+@@ -66,7 +63,15 @@ impl Debug for Lite<Literal> {
+
+ impl Debug for Lite<TokenStream> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- write!(formatter, "`{}`", self.value)
++ let string = self.value.to_string();
++ if string.len() <= 80 {
++ write!(formatter, "TokenStream(`{}`)", self.value)
++ } else {
++ formatter
++ .debug_tuple("TokenStream")
++ .field(&format_args!("`{}`", string))
++ .finish()
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/tests/features/error.rs b/third_party/rust/syn/tests/features/error.rs
+deleted file mode 100644
+index 10ac88965d..0000000000
+--- a/third_party/rust/syn/tests/features/error.rs
++++ /dev/null
+@@ -1 +0,0 @@
+-"Hello! You want: cargo test --release --all-features"
+diff --git a/third_party/rust/syn/tests/features/mod.rs b/third_party/rust/syn/tests/features/mod.rs
+deleted file mode 100644
+index 83fbe13e7e..0000000000
+--- a/third_party/rust/syn/tests/features/mod.rs
++++ /dev/null
+@@ -1,22 +0,0 @@
+-#[allow(unused_macros)]
+-macro_rules! hide_from_rustfmt {
+- ($mod:item) => {
+- $mod
+- };
+-}
+-
+-#[cfg(not(all(
+- feature = "derive",
+- feature = "full",
+- feature = "parsing",
+- feature = "printing",
+- feature = "visit",
+- feature = "visit-mut",
+- feature = "fold",
+- feature = "clone-impls",
+- feature = "extra-traits",
+- feature = "proc-macro",
+-)))]
+-hide_from_rustfmt! {
+- mod error;
+-}
+diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs
+index c72fd01058..3994615fc4 100644
+--- a/third_party/rust/syn/tests/macros/mod.rs
++++ b/third_party/rust/syn/tests/macros/mod.rs
+@@ -1,5 +1,3 @@
+-extern crate proc_macro2;
+-
+ #[path = "../debug/mod.rs"]
+ pub mod debug;
+
+@@ -42,18 +40,18 @@ macro_rules! snapshot_impl {
+ (($expr:ident) as $t:ty, @$snapshot:literal) => {
+ let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
+ let debug = crate::macros::debug::Lite(&$expr);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ };
+ (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
+ let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) , @$snapshot:literal) => {{
+ let syntax_tree = $($expr)*;
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) $next:tt $($rest:tt)*) => {
+diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs
+index c22cb03758..1d3e1f0e74 100644
+--- a/third_party/rust/syn/tests/repo/mod.rs
++++ b/third_party/rust/syn/tests/repo/mod.rs
+@@ -1,8 +1,37 @@
+-extern crate walkdir;
++mod progress;
+
+-use std::process::Command;
++use self::progress::Progress;
++use anyhow::Result;
++use flate2::read::GzDecoder;
++use std::fs;
++use std::path::Path;
++use tar::Archive;
++use walkdir::DirEntry;
+
+-use self::walkdir::DirEntry;
++const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
++
++#[rustfmt::skip]
++static EXCLUDE: &[&str] = &[
++ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
++ "test/ui/const-generics/const-expression-parameter.rs",
++
++ // Deprecated anonymous parameter syntax in traits
++ "test/ui/issues/issue-13105.rs",
++ "test/ui/issues/issue-13775.rs",
++ "test/ui/issues/issue-34074.rs",
++ "test/ui/proc-macro/trait-fn-args-2015.rs",
++
++ // Not actually test cases
++ "test/rustdoc-ui/test-compile-fail2.rs",
++ "test/rustdoc-ui/test-compile-fail3.rs",
++ "test/ui/include-single-expr-helper.rs",
++ "test/ui/include-single-expr-helper-1.rs",
++ "test/ui/issues/auxiliary/issue-21146-inc.rs",
++ "test/ui/json-bom-plus-crlf-multifile-aux.rs",
++ "test/ui/lint/expansion-time-include.rs",
++ "test/ui/macros/auxiliary/macro-comma-support.rs",
++ "test/ui/macros/auxiliary/macro-include-items-expr.rs",
++];
+
+ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ let path = entry.path();
+@@ -12,49 +41,95 @@ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ if path.extension().map(|e| e != "rs").unwrap_or(true) {
+ return false;
+ }
+- let path_string = path.to_string_lossy();
+- let path_string = if cfg!(windows) {
+- path_string.replace('\\', "/").into()
++
++ let mut path_string = path.to_string_lossy();
++ if cfg!(windows) {
++ path_string = path_string.replace('\\', "/").into();
++ }
++ let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
++ path
++ } else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
++ path
+ } else {
+- path_string
++ panic!("unexpected path in Rust dist: {}", path_string);
+ };
++
+ // TODO assert that parsing fails on the parse-fail cases
+- if path_string.starts_with("tests/rust/src/test/parse-fail")
+- || path_string.starts_with("tests/rust/src/test/compile-fail")
+- || path_string.starts_with("tests/rust/src/test/rustfix")
++ if path.starts_with("test/parse-fail")
++ || path.starts_with("test/compile-fail")
++ || path.starts_with("test/rustfix")
+ {
+ return false;
+ }
+
+- if path_string.starts_with("tests/rust/src/test/ui") {
+- let stderr_path = path.with_extension("stderr");
++ if path.starts_with("test/ui") {
++ let stderr_path = entry.path().with_extension("stderr");
+ if stderr_path.exists() {
+ // Expected to fail in some way
+ return false;
+ }
+ }
+
+- match path_string.as_ref() {
+- // Deprecated placement syntax
+- "tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
+- // Deprecated anonymous parameter syntax in traits
+- "tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
+- "tests/rust/src/test/ui/issues/issue-13105.rs" |
+- "tests/rust/src/test/ui/issues/issue-13775.rs" |
+- "tests/rust/src/test/ui/issues/issue-34074.rs" |
+- // Deprecated await macro syntax
+- "tests/rust/src/test/ui/async-await/await-macro.rs" |
+- // 2015-style dyn that libsyntax rejects
+- "tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
+- // not actually test cases
+- "tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
+- "tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
+- "tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
+- _ => true,
++ !EXCLUDE.contains(&path)
++}
++
++#[allow(dead_code)]
++pub fn edition(path: &Path) -> &'static str {
++ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
++ "2015"
++ } else {
++ "2018"
+ }
+ }
+
+ pub fn clone_rust() {
+- let result = Command::new("tests/clone.sh").status().unwrap();
+- assert!(result.success());
++ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
++ Err(_) => true,
++ Ok(contents) => contents.trim() != REVISION,
++ };
++ if needs_clone {
++ download_and_unpack().unwrap();
++ }
++ let mut missing = String::new();
++ let test_src = Path::new("tests/rust/src");
++ for exclude in EXCLUDE {
++ if !test_src.join(exclude).exists() {
++ missing += "\ntests/rust/src/";
++ missing += exclude;
++ }
++ }
++ if !missing.is_empty() {
++ panic!("excluded test file does not exist:{}\n", missing);
++ }
++}
++
++fn download_and_unpack() -> Result<()> {
++ let url = format!(
++ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
++ REVISION
++ );
++ let response = reqwest::blocking::get(&url)?.error_for_status()?;
++ let progress = Progress::new(response);
++ let decoder = GzDecoder::new(progress);
++ let mut archive = Archive::new(decoder);
++ let prefix = format!("rust-{}", REVISION);
++
++ let tests_rust = Path::new("tests/rust");
++ if tests_rust.exists() {
++ fs::remove_dir_all(tests_rust)?;
++ }
++
++ for entry in archive.entries()? {
++ let mut entry = entry?;
++ let path = entry.path()?;
++ if path == Path::new("pax_global_header") {
++ continue;
++ }
++ let relative = path.strip_prefix(&prefix)?;
++ let out = tests_rust.join(relative);
++ entry.unpack(&out)?;
++ }
++
++ fs::write("tests/rust/COMMIT", REVISION)?;
++ Ok(())
+ }
+diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs
+new file mode 100644
+index 0000000000..28c8a44b12
+--- /dev/null
++++ b/third_party/rust/syn/tests/repo/progress.rs
+@@ -0,0 +1,37 @@
++use std::io::{Read, Result};
++use std::time::{Duration, Instant};
++
++pub struct Progress<R> {
++ bytes: usize,
++ tick: Instant,
++ stream: R,
++}
++
++impl<R> Progress<R> {
++ pub fn new(stream: R) -> Self {
++ Progress {
++ bytes: 0,
++ tick: Instant::now() + Duration::from_millis(2000),
++ stream,
++ }
++ }
++}
++
++impl<R: Read> Read for Progress<R> {
++ fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
++ let num = self.stream.read(buf)?;
++ self.bytes += num;
++ let now = Instant::now();
++ if now > self.tick {
++ self.tick = now + Duration::from_millis(500);
++ errorf!("downloading... {} bytes\n", self.bytes);
++ }
++ Ok(num)
++ }
++}
++
++impl<R> Drop for Progress<R> {
++ fn drop(&mut self) {
++ errorf!("done ({} bytes)\n", self.bytes);
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs
+index f868fbcc20..0efef5976f 100644
+--- a/third_party/rust/syn/tests/test_asyncness.rs
++++ b/third_party/rust/syn/tests/test_asyncness.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,16 +8,16 @@ fn test_async_fn() {
+ let input = "async fn process() {}";
+
+ snapshot!(input as Item, @r###"
+- ⋮Item::Fn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ asyncness: Some,
+- ⋮ ident: "process",
+- ⋮ generics: Generics,
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ asyncness: Some,
++ ident: "process",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+ }
+
+@@ -30,12 +26,12 @@ fn test_async_closure() {
+ let input = "async || {}";
+
+ snapshot!(input as Expr, @r###"
+- ⋮Expr::Closure {
+- ⋮ asyncness: Some,
+- ⋮ output: Default,
+- ⋮ body: Expr::Block {
+- ⋮ block: Block,
+- ⋮ },
+- ⋮}
++ Expr::Closure {
++ asyncness: Some,
++ output: Default,
++ body: Expr::Block {
++ block: Block,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs
+index aff6294fc3..c26bd090ec 100644
+--- a/third_party/rust/syn/tests/test_attribute.rs
++++ b/third_party/rust/syn/tests/test_attribute.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -13,14 +9,14 @@ fn test_meta_item_word() {
+ let meta = test("#[foo]");
+
+ snapshot!(meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,17 +25,17 @@ fn test_meta_item_name_value() {
+ let meta = test("#[foo = 5]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+ }
+
+@@ -48,37 +44,37 @@ fn test_meta_item_bool_value() {
+ let meta = test("#[foo = true]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }
+ "###);
+
+ let meta = test("#[foo = false]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: false,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: false,
++ },
++ }
+ "###);
+ }
+
+@@ -87,19 +83,19 @@ fn test_meta_item_list_lit() {
+ let meta = test("#[foo(5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+ }
+
+@@ -108,26 +104,26 @@ fn test_meta_item_list_word() {
+ let meta = test("#[foo(bar)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -136,29 +132,29 @@ fn test_meta_item_list_name_value() {
+ let meta = test("#[foo(bar = 5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -167,31 +163,31 @@ fn test_meta_item_list_bool_value() {
+ let meta = test("#[foo(bar = true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -200,68 +196,68 @@ fn test_meta_item_multiple() {
+ let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -270,21 +266,63 @@ fn test_bool_lit() {
+ let meta = test("#[foo(true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(Lit::Bool {
+- ⋮ value: true,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(Lit::Bool {
++ value: true,
++ }),
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_negative_lit() {
++ let meta = test("#[form(min = -1, max = 200)]");
++
++ snapshot!(meta, @r###"
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "form",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "min",
++ arguments: None,
++ },
++ ],
++ },
++ lit: -1,
++ }),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "max",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 200,
++ }),
++ ],
++ }
+ "###);
+ }
+
+diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs
+index de68240166..bf1ebdb67d 100644
+--- a/third_party/rust/syn/tests/test_derive_input.rs
++++ b/third_party/rust/syn/tests/test_derive_input.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,15 +11,15 @@ fn test_unit() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "Unit",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "Unit",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -39,105 +34,105 @@ fn test_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `( Debug , Clone )`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Item",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("ident"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Ident",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("attrs"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Vec",
+- ⋮ arguments: PathArguments::AngleBracketed {
+- ⋮ args: [
+- ⋮ Type(Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Attribute",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(Debug , Clone)`),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Item",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Visibility::Public,
++ ident: Some("ident"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Ident",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("attrs"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Vec",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Attribute",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Clone",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Clone",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -151,46 +146,46 @@ fn test_union() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "MaybeUninit",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Union {
+- ⋮ fields: FieldsNamed {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("uninit"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Tuple,
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("value"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "MaybeUninit",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Union {
++ fields: FieldsNamed {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("uninit"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ Field {
++ vis: Inherited,
++ ident: Some("value"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+ }
+
+@@ -212,118 +207,118 @@ fn test_enum() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `= r" See the std::result module documentation for details."`,
+- ⋮ },
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Result",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ ident: "E",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Enum {
+- ⋮ variants: [
+- ⋮ Variant {
+- ⋮ ident: "Ok",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Err",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "E",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Surprise",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Lit {
+- ⋮ lit: 0isize,
+- ⋮ }),
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "ProcMacroHack",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Field {
+- ⋮ base: Expr::Tuple {
+- ⋮ elems: [
+- ⋮ Expr::Lit {
+- ⋮ lit: 0,
+- ⋮ },
+- ⋮ Expr::Lit {
+- ⋮ lit: "data",
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ member: Unnamed(Index {
+- ⋮ index: 0,
+- ⋮ }),
+- ⋮ }),
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`= r" See the std::result module documentation for details."`),
++ },
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Result",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ Type(TypeParam {
++ ident: "E",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Enum {
++ variants: [
++ Variant {
++ ident: "Ok",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Err",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "E",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Surprise",
++ fields: Unit,
++ discriminant: Some(Expr::Lit {
++ lit: 0isize,
++ }),
++ },
++ Variant {
++ ident: "ProcMacroHack",
++ fields: Unit,
++ discriminant: Some(Expr::Field {
++ base: Expr::Tuple {
++ elems: [
++ Expr::Lit {
++ lit: 0,
++ },
++ Expr::Lit {
++ lit: "data",
++ },
++ ],
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }),
++ },
++ ],
++ },
++ }
+ "###);
+
+ let meta_items: Vec<_> = input
+@@ -333,27 +328,27 @@ fn test_enum() {
+ .collect();
+
+ snapshot!(meta_items, @r###"
+- ⋮[
+- ⋮ Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: " See the std::result module documentation for details.",
+- ⋮ },
+- ⋮ Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮]
++ [
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ lit: " See the std::result module documentation for details.",
++ },
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ }),
++ ]
+ "###);
+ }
+
+@@ -366,34 +361,34 @@ fn test_attr_with_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ leading_colon: Some,
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "attr_args",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "identity",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `fn main ( ) { assert_eq ! ( foo ( ) , "Hello, world!" ) ; }`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "Dummy",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ leading_colon: Some,
++ segments: [
++ PathSegment {
++ ident: "attr_args",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "identity",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
++ },
++ ],
++ vis: Inherited,
++ ident: "Dummy",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -407,29 +402,29 @@ fn test_attr_with_non_mod_style_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "inert",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `< T >`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inert",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`< T >`),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -443,48 +438,48 @@ fn test_attr_with_mod_style_path_with_self() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -496,55 +491,55 @@ fn test_pub_restricted() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "Z",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "n",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "u8",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "Z",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "n",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "u8",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -555,15 +550,15 @@ fn test_vis_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Crate,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Crate,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -574,24 +569,24 @@ fn test_pub_restricted_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -602,24 +597,24 @@ fn test_pub_restricted_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -630,25 +625,25 @@ fn test_pub_restricted_in_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -659,15 +654,15 @@ fn test_fields_on_unit_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -688,47 +683,47 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -737,38 +732,38 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -779,44 +774,44 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -825,34 +820,34 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -864,34 +859,34 @@ fn test_ambiguous_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "X",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "X",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs
+index c8a11cec2c..b2b65a254f 100644
+--- a/third_party/rust/syn/tests/test_expr.rs
++++ b/third_party/rust/syn/tests/test_expr.rs
+@@ -1,40 +1,302 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+ #[macro_use]
+ mod macros;
+
+-use std::str::FromStr;
+-
+-use proc_macro2::TokenStream;
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
+ use syn::{Expr, ExprRange};
+
+ #[test]
+ fn test_expr_parse() {
+- let code = "..100u32";
+- let tt = TokenStream::from_str(code).unwrap();
+- let expr: Expr = syn::parse2(tt.clone()).unwrap();
+- let expr_range: ExprRange = syn::parse2(tt).unwrap();
+- assert_eq!(expr, Expr::Range(expr_range));
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as Expr, @r###"
++ Expr::Range {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
++
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as ExprRange, @r###"
++ ExprRange {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
+ }
+
+ #[test]
+ fn test_await() {
+ // Must not parse as Expr::Field.
+- let expr = syn::parse_str::<Expr>("fut.await").unwrap();
+-
+- snapshot!(expr, @r###"
+- ⋮Expr::Await {
+- ⋮ base: Expr::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "fut",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ let tokens = quote!(fut.await);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Await {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "fut",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
++
++#[rustfmt::skip]
++#[test]
++fn test_tuple_multi_index() {
++ for &input in &[
++ "tuple.0.0",
++ "tuple .0.0",
++ "tuple. 0.0",
++ "tuple.0 .0",
++ "tuple.0. 0",
++ "tuple . 0 . 0",
++ ] {
++ snapshot!(input as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++
++ for tokens in vec![
++ quote!(tuple.0.0),
++ quote!(tuple .0.0),
++ quote!(tuple. 0.0),
++ quote!(tuple.0 .0),
++ quote!(tuple.0. 0),
++ quote!(tuple . 0 . 0),
++ ] {
++ snapshot!(tokens as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++}
++
++#[test]
++fn test_macro_variable_func() {
++ // mimics the token stream corresponding to `$fn()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ func: Expr::Group {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('#', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "outside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ func: Expr::Group {
++ expr: Expr::Path {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_macro() {
++ // mimics the token stream corresponding to `$macro!()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
++ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Macro {
++ mac: Macro {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ delimiter: Paren,
++ tokens: TokenStream(``),
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_struct() {
++ // mimics the token stream corresponding to `$struct {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Struct {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "S",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_match_arm() {
++ // mimics the token stream corresponding to `match v { _ => $expr }`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("match", Span::call_site())),
++ TokenTree::Ident(Ident::new("v", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('_', Spacing::Alone)),
++ TokenTree::Punct(Punct::new('=', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Match {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "v",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ arms: [
++ Arm {
++ pat: Pat::Wild,
++ body: Expr::Group {
++ expr: Expr::Tuple {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "a",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ },
++ },
++ },
++ ],
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs
+index 55c79e066b..b29434a147 100644
+--- a/third_party/rust/syn/tests/test_generics.rs
++++ b/third_party/rust/syn/tests/test_generics.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,90 +11,90 @@ fn test_split_for_impl() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ }),
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "b",
+- ⋮ },
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "may_dangle",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ ident: "T",
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮ }),
+- ⋮ ],
+- ⋮ eq_token: Some,
+- ⋮ default: Some(Type::Tuple),
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "a",
++ },
++ }),
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "b",
++ },
++ colon_token: Some,
++ bounds: [
++ Lifetime {
++ ident: "a",
++ },
++ ],
++ }),
++ Type(TypeParam {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "may_dangle",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ ident: "T",
++ colon_token: Some,
++ bounds: [
++ Lifetime(Lifetime {
++ ident: "a",
++ }),
++ ],
++ eq_token: Some,
++ default: Some(Type::Tuple),
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let generics = input.generics;
+@@ -131,46 +126,46 @@ fn test_split_for_impl() {
+ fn test_ty_param_bound() {
+ let tokens = quote!('a);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "a",
++ })
+ "###);
+
+ let tokens = quote!('_);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "_",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "_",
++ })
+ "###);
+
+ let tokens = quote!(Debug);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+
+ let tokens = quote!(?Sized);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: Maybe,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Sized",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: Maybe,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Sized",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+ }
+
+@@ -187,76 +182,76 @@ fn test_fn_precedence_in_where_clause() {
+ };
+
+ snapshot!(input as ItemFn, @r###"
+- ⋮ItemFn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ ident: "f",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "G",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "G",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "FnOnce",
+- ⋮ arguments: PathArguments::Parenthesized {
+- ⋮ output: Type(
+- ⋮ Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ),
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Send",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ ItemFn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "G",
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "G",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "FnOnce",
++ arguments: PathArguments::Parenthesized {
++ output: Type(
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ),
++ },
++ },
++ ],
++ },
++ }),
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Send",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+
+ let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
+@@ -270,7 +265,7 @@ fn test_fn_precedence_in_where_clause() {
+ assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
+
+ let first_bound = &predicate.bounds[0];
+- assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
++ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
+
+ let second_bound = &predicate.bounds[1];
+ assert_eq!(quote!(#second_bound).to_string(), "Send");
+diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs
+index 1558a47b4b..a0fe716390 100644
+--- a/third_party/rust/syn/tests/test_grouping.rs
++++ b/third_party/rust/syn/tests/test_grouping.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -28,31 +23,31 @@ fn test_grouping() {
+ TokenTree::Literal(Literal::i32_suffixed(4)),
+ ]);
+
+- assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
++ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
+
+ snapshot!(tokens as Expr, @r###"
+- ⋮Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 1i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Binary {
+- ⋮ left: Expr::Group {
+- ⋮ expr: Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 2i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 3i32,
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ op: Mul,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 4i32,
+- ⋮ },
+- ⋮ },
+- ⋮}
++ Expr::Binary {
++ left: Expr::Lit {
++ lit: 1i32,
++ },
++ op: Add,
++ right: Expr::Binary {
++ left: Expr::Group {
++ expr: Expr::Binary {
++ left: Expr::Lit {
++ lit: 2i32,
++ },
++ op: Add,
++ right: Expr::Lit {
++ lit: 3i32,
++ },
++ },
++ },
++ op: Mul,
++ right: Expr::Lit {
++ lit: 4i32,
++ },
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs
+index bec00a70c9..ee01bfcc9f 100644
+--- a/third_party/rust/syn/tests/test_ident.rs
++++ b/third_party/rust/syn/tests/test_ident.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ use proc_macro2::{Ident, Span, TokenStream};
+ use std::str::FromStr;
+ use syn::Result;
+diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs
+new file mode 100644
+index 0000000000..74ac4baec6
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_item.rs
+@@ -0,0 +1,45 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Item;
++
++#[test]
++fn test_macro_variable_attr() {
++ // mimics the token stream corresponding to `$attr fn f() {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
++ TokenTree::Ident(Ident::new("fn", Span::call_site())),
++ TokenTree::Ident(Ident::new("f", Span::call_site())),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Item, @r###"
++ Item::Fn {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "test",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs
+index 1cf7157e6f..2c8359c157 100644
+--- a/third_party/rust/syn/tests/test_iterators.rs
++++ b/third_party/rust/syn/tests/test_iterators.rs
+@@ -1,10 +1,5 @@
+ use syn::punctuated::{Pair, Punctuated};
+-
+-extern crate quote;
+-#[macro_use]
+-extern crate syn;
+-
+-mod features;
++use syn::Token;
+
+ #[macro_use]
+ mod macros;
+diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs
+index 1e8f49d19b..e995f2287f 100644
+--- a/third_party/rust/syn/tests/test_lit.rs
++++ b/third_party/rust/syn/tests/test_lit.rs
+@@ -1,13 +1,11 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
++#[macro_use]
++mod macros;
+
+-mod features;
+-
+-use proc_macro2::{TokenStream, TokenTree};
++use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
+ use quote::ToTokens;
++use std::iter::FromIterator;
+ use std::str::FromStr;
+-use syn::Lit;
++use syn::{Lit, LitFloat, LitInt};
+
+ fn lit(s: &str) -> Lit {
+ match TokenStream::from_str(s)
+@@ -50,6 +48,9 @@ fn strings() {
+ "contains\nnewlinesescaped newlines",
+ );
+ test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
++ test_string("\"...\"q", "...");
++ test_string("r\"...\"q", "...");
++ test_string("r##\"...\"##q", "...");
+ }
+
+ #[test]
+@@ -79,6 +80,9 @@ fn byte_strings() {
+ b"contains\nnewlinesescaped newlines",
+ );
+ test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
++ test_byte_string("b\"...\"q", b"...");
++ test_byte_string("br\"...\"q", b"...");
++ test_byte_string("br##\"...\"##q", b"...");
+ }
+
+ #[test]
+@@ -100,6 +104,7 @@ fn bytes() {
+ test_byte("b'\\t'", b'\t');
+ test_byte("b'\\''", b'\'');
+ test_byte("b'\"'", b'"');
++ test_byte("b'a'q", b'a');
+ }
+
+ #[test]
+@@ -125,6 +130,7 @@ fn chars() {
+ test_char("'\\''", '\'');
+ test_char("'\"'", '"');
+ test_char("'\\u{1F415}'", '\u{1F415}');
++ test_char("'a'q", 'a');
+ }
+
+ #[test]
+@@ -185,4 +191,59 @@ fn floats() {
+ test_float("5.5e12", 5.5e12, "");
+ test_float("1.0__3e-12", 1.03e-12, "");
+ test_float("1.03e+12", 1.03e12, "");
++ test_float("9e99e99", 9e99, "e99");
++}
++
++#[test]
++fn negative() {
++ let span = Span::call_site();
++ assert_eq!("-1", LitInt::new("-1", span).to_string());
++ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
++ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
++ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
++ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
++ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
++ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
++ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
++}
++
++#[test]
++fn suffix() {
++ fn get_suffix(token: &str) -> String {
++ let lit = syn::parse_str::<Lit>(token).unwrap();
++ match lit {
++ Lit::Str(lit) => lit.suffix().to_owned(),
++ Lit::ByteStr(lit) => lit.suffix().to_owned(),
++ Lit::Byte(lit) => lit.suffix().to_owned(),
++ Lit::Char(lit) => lit.suffix().to_owned(),
++ Lit::Int(lit) => lit.suffix().to_owned(),
++ Lit::Float(lit) => lit.suffix().to_owned(),
++ _ => unimplemented!(),
++ }
++ }
++
++ assert_eq!(get_suffix("\"\"s"), "s");
++ assert_eq!(get_suffix("r\"\"r"), "r");
++ assert_eq!(get_suffix("b\"\"b"), "b");
++ assert_eq!(get_suffix("br\"\"br"), "br");
++ assert_eq!(get_suffix("r#\"\"#r"), "r");
++ assert_eq!(get_suffix("'c'c"), "c");
++ assert_eq!(get_suffix("b'b'b"), "b");
++ assert_eq!(get_suffix("1i32"), "i32");
++ assert_eq!(get_suffix("1_i32"), "i32");
++ assert_eq!(get_suffix("1.0f32"), "f32");
++ assert_eq!(get_suffix("1.0_f32"), "f32");
++}
++
++#[test]
++fn test_deep_group_empty() {
++ let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
++ ))]),
++ ))]);
++
++ snapshot!(tokens as Lit, @r#""hi""# );
+ }
+diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs
+index 547472d6f4..d37dda948a 100644
+--- a/third_party/rust/syn/tests/test_meta.rs
++++ b/third_party/rust/syn/tests/test_meta.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,14 +8,14 @@ fn test_parse_meta_item_word() {
+ let input = "hello";
+
+ snapshot!(input as Meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "hello",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "hello",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,31 +25,31 @@ fn test_parse_meta_name_value() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -65,31 +61,31 @@ fn test_parse_meta_name_value_with_keyword() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -101,31 +97,31 @@ fn test_parse_meta_name_value_with_bool() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -137,35 +133,35 @@ fn test_parse_meta_item_list_lit() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -177,133 +173,133 @@ fn test_parse_meta_item_multiple() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -316,28 +312,28 @@ fn test_parse_nested_meta() {
+
+ let input = "list(name2 = 6)";
+ snapshot!(input as NestedMeta, @r###"
+- ⋮Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮})
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ })
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs
+index f09495187f..57a3c7c38c 100644
+--- a/third_party/rust/syn/tests/test_parse_buffer.rs
++++ b/third_party/rust/syn/tests/test_parse_buffer.rs
+@@ -1,7 +1,7 @@
+-#[macro_use]
+-extern crate syn;
+-
++use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
++use std::iter::FromIterator;
+ use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
++use syn::{parenthesized, Token};
+
+ #[test]
+ #[should_panic(expected = "Fork was not derived from the advancing parse stream")]
+@@ -53,3 +53,38 @@ fn smuggled_speculative_cursor_into_brackets() {
+
+ syn::parse_str::<BreakRules>("()").unwrap();
+ }
++
++#[test]
++fn trailing_empty_none_group() {
++ fn parse(input: ParseStream) -> Result<()> {
++ input.parse::<Token![+]>()?;
++
++ let content;
++ parenthesized!(content in input);
++ content.parse::<Token![+]>()?;
++
++ Ok(())
++ }
++
++ // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(
++ Delimiter::Parenthesis,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ ]),
++ )),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::new(),
++ ))]),
++ )),
++ ]);
++
++ parse.parse2(tokens).unwrap();
++}
+diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs
+new file mode 100644
+index 0000000000..76bd065777
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_parse_stream.rs
+@@ -0,0 +1,12 @@
++use syn::ext::IdentExt;
++use syn::parse::ParseStream;
++use syn::{Ident, Token};
++
++#[test]
++fn test_peek() {
++ let _ = |input: ParseStream| {
++ let _ = input.peek(Ident);
++ let _ = input.peek(Ident::peek_any);
++ let _ = input.peek(Token![::]);
++ };
++}
+diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs
+index 1343aa646f..73388dd79d 100644
+--- a/third_party/rust/syn/tests/test_pat.rs
++++ b/third_party/rust/syn/tests/test_pat.rs
+@@ -1,10 +1,5 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ use quote::quote;
+-use syn::Pat;
++use syn::{Item, Pat, Stmt};
+
+ #[test]
+ fn test_pat_ident() {
+@@ -21,3 +16,23 @@ fn test_pat_path() {
+ value => panic!("expected PatPath, got {:?}", value),
+ }
+ }
++
++#[test]
++fn test_leading_vert() {
++ // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
++
++ syn::parse_str::<Item>("fn f() {}").unwrap();
++ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
++ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
++
++ syn::parse_str::<Stmt>("let | () = ();").unwrap();
++ syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
++ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
++ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
++ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
++}
+diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs
+new file mode 100644
+index 0000000000..2ce12066f5
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_path.rs
+@@ -0,0 +1,52 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::{Expr, Type};
++
++#[test]
++fn parse_interpolated_leading_component() {
++ // mimics the token stream corresponding to `$mod::rest`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
++ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("rest", Span::call_site())),
++ ]);
++
++ snapshot!(tokens.clone() as Expr, @r###"
++ Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs
+index 53ee66e372..a586b3fe48 100644
+--- a/third_party/rust/syn/tests/test_precedence.rs
++++ b/third_party/rust/syn/tests/test_precedence.rs
+@@ -4,35 +4,26 @@
+
+ //! The tests in this module do the following:
+ //!
+-//! 1. Parse a given expression in both `syn` and `libsyntax`.
++//! 1. Parse a given expression in both `syn` and `librustc`.
+ //! 2. Fold over the expression adding brackets around each subexpression (with
+-//! some complications - see the `syn_brackets` and `libsyntax_brackets`
++//! some complications - see the `syn_brackets` and `librustc_brackets`
+ //! methods).
+ //! 3. Serialize the `syn` expression back into a string, and re-parse it with
+-//! `libsyntax`.
++//! `librustc`.
+ //! 4. Respan all of the expressions, replacing the spans with the default
+ //! spans.
+ //! 5. Compare the expressions with one another; if they are not equal, fail.
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate regex;
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
+-extern crate smallvec;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+ use regex::Regex;
+-use smallvec::smallvec;
+-use syntax::ast;
+-use syntax::ptr::P;
+-use syntax_pos::edition::Edition;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_span::edition::Edition;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -73,7 +64,7 @@ fn test_simple_precedence() {
+ continue;
+ };
+
+- let pf = match test_expressions(vec![expr]) {
++ let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
+ (1, 0) => "passed",
+ (0, 1) => {
+ failed += 1;
+@@ -91,8 +82,8 @@ fn test_simple_precedence() {
+
+ /// Test expressions from rustc, like in `test_round_trip`.
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_rustc_precedence() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -118,15 +109,6 @@ fn test_rustc_precedence() {
+ return;
+ }
+
+- // Our version of `libsyntax` can't parse this tests
+- if path
+- .to_str()
+- .unwrap()
+- .ends_with("optional_comma_in_match_arm.rs")
+- {
+- return;
+- }
+-
+ let mut file = File::open(path).unwrap();
+ let mut content = String::new();
+ file.read_to_string(&mut content).unwrap();
+@@ -134,8 +116,9 @@ fn test_rustc_precedence() {
+
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
++ let edition = repo::edition(path).parse().unwrap();
+ let exprs = collect_exprs(file);
+- test_expressions(exprs)
++ test_expressions(edition, exprs)
+ }
+ Err(msg) => {
+ errorf!("syn failed to parse\n{:?}\n", msg);
+@@ -169,36 +152,36 @@ fn test_rustc_precedence() {
+ }
+ }
+
+-fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
++fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
+ let mut passed = 0;
+ let mut failed = 0;
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ for expr in exprs {
+ let raw = quote!(#expr).to_string();
+
+- let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
++ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse raw\n");
++ errorf!("\nFAIL - librustc failed to parse raw\n");
+ continue;
+ };
+
+ let syn_expr = syn_brackets(expr);
+- let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
++ let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
++ errorf!("\nFAIL - librustc failed to parse bracketed\n");
+ continue;
+ };
+
+- if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
++ if SpanlessEq::eq(&syn_ast, &librustc_ast) {
+ passed += 1;
+ } else {
+ failed += 1;
+- errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
++ errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
+ }
+ }
+ });
+@@ -206,54 +189,106 @@ fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
+ (passed, failed)
+ }
+
+-fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+- parse::libsyntax_expr(input).and_then(libsyntax_brackets)
++fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
++ parse::librustc_expr(input).and_then(librustc_brackets)
+ }
+
+ /// Wrap every expression which is not already wrapped in parens with parens, to
+ /// reveal the precedence of the parsed expressions, and produce a stringified
+ /// form of the resulting expression.
+ ///
+-/// This method operates on libsyntax objects.
+-fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++/// This method operates on librustc objects.
++fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++ use rustc_ast::ast::{
++ Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
++ };
++ use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
++ use rustc_data_structures::map_in_place::MapInPlace;
+ use rustc_data_structures::thin_vec::ThinVec;
+- use smallvec::SmallVec;
++ use rustc_span::DUMMY_SP;
+ use std::mem;
+- use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
+- use syntax::mut_visit::{noop_visit_expr, MutVisitor};
+- use syntax_pos::DUMMY_SP;
+
+ struct BracketsVisitor {
+ failed: bool,
+ };
+
++ fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
++ if f.is_shorthand {
++ noop_visit_expr(&mut f.expr, vis);
++ } else {
++ vis.visit_expr(&mut f.expr);
++ }
++ vec![f]
++ }
++
++ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
++ let kind = match stmt.kind {
++ // Don't wrap toplevel expressions in statements.
++ StmtKind::Expr(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Expr(e)
++ }
++ StmtKind::Semi(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Semi(e)
++ }
++ s => s,
++ };
++
++ vec![Stmt { kind, ..stmt }]
++ }
++
++ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
++ use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
++ match &mut e.kind {
++ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
++ ExprKind::Struct(path, fields, expr) => {
++ vis.visit_path(path);
++ fields.flat_map_in_place(|field| flat_map_field(field, vis));
++ visit_opt(expr, |expr| vis.visit_expr(expr));
++ vis.visit_id(&mut e.id);
++ vis.visit_span(&mut e.span);
++ visit_thin_attrs(&mut e.attrs, vis);
++ }
++ _ => noop_visit_expr(e, vis),
++ }
++ }
++
+ impl MutVisitor for BracketsVisitor {
+ fn visit_expr(&mut self, e: &mut P<Expr>) {
+ noop_visit_expr(e, self);
+- match e.node {
++ match e.kind {
+ ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
+ _ => {
+ let inner = mem::replace(
+ e,
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+- node: ExprKind::Err,
++ kind: ExprKind::Err,
+ span: DUMMY_SP,
+ attrs: ThinVec::new(),
++ tokens: None,
+ }),
+ );
+- e.node = ExprKind::Paren(inner);
++ e.kind = ExprKind::Paren(inner);
+ }
+ }
+ }
+
+- fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
+- if f.is_shorthand {
+- noop_visit_expr(&mut f.expr, self);
+- } else {
+- self.visit_expr(&mut f.expr);
++ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
++ _ => noop_visit_generic_arg(arg, self),
+ }
+- SmallVec::from([f])
++ }
++
++ fn visit_block(&mut self, block: &mut P<Block>) {
++ self.visit_id(&mut block.id);
++ block
++ .stmts
++ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
++ self.visit_span(&mut block.span);
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+@@ -267,25 +302,8 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ let _ = ty;
+ }
+
+- fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
+- let node = match stmt.node {
+- // Don't wrap toplevel expressions in statements.
+- StmtKind::Expr(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Expr(e)
+- }
+- StmtKind::Semi(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Semi(e)
+- }
+- s => s,
+- };
+-
+- smallvec![Stmt { node, ..stmt }]
+- }
+-
+- fn visit_mac(&mut self, mac: &mut Mac) {
+- // By default when folding over macros, libsyntax panics. This is
++ fn visit_mac(&mut self, mac: &mut MacCall) {
++ // By default when folding over macros, librustc panics. This is
+ // because it's usually not what you want, you want to run after
+ // macro expansion. We do want to do that (syn doesn't do macro
+ // expansion), so we implement visit_mac to just return the macro
+@@ -295,11 +313,11 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ }
+
+ let mut folder = BracketsVisitor { failed: false };
+- folder.visit_expr(&mut libsyntax_expr);
++ folder.visit_expr(&mut librustc_expr);
+ if folder.failed {
+ None
+ } else {
+- Some(libsyntax_expr)
++ Some(librustc_expr)
+ }
+ }
+
+@@ -318,14 +336,33 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
+ Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
+ fold_expr(self, expr)
+ }
+- node => Expr::Paren(ExprParen {
++ _ => Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+- expr: Box::new(fold_expr(self, node)),
++ expr: Box::new(fold_expr(self, expr)),
+ paren_token: token::Paren::default(),
+ }),
+ }
+ }
+
++ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_argument(self, arg),
++ }
++ }
++
++ fn fold_generic_method_argument(
++ &mut self,
++ arg: GenericMethodArgument,
++ ) -> GenericMethodArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_method_argument(self, arg),
++ }
++ }
++
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ match stmt {
+ // Don't wrap toplevel expressions in statements.
+@@ -360,7 +397,10 @@ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
+ struct CollectExprs(Vec<Expr>);
+ impl Fold for CollectExprs {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+- self.0.push(expr);
++ match expr {
++ Expr::Verbatim(tokens) if tokens.is_empty() => {}
++ _ => self.0.push(expr),
++ }
+
+ Expr::Tuple(ExprTuple {
+ attrs: vec![],
+diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs
+new file mode 100644
+index 0000000000..923df96ba9
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_receiver.rs
+@@ -0,0 +1,127 @@
++use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
++
++#[test]
++fn test_by_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_value(self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_mut_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_mut(mut self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_ref() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_ref(self: &Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_box() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_box(self: Box<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_pin() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_pin(self: Pin<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_explicit_type() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn explicit_type(self: Pin<MyType>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn value_shorthand(self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_mut_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn mut_value_shorthand(mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_shorthand(&self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_mut_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_mut_shorthand(&mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs
+index 2fc9cecd86..260dd0c3d9 100644
+--- a/third_party/rust/syn/tests/test_round_trip.rs
++++ b/third_party/rust/syn/tests/test_round_trip.rs
+@@ -2,22 +2,20 @@
+ #![recursion_limit = "1024"]
+ #![feature(rustc_private)]
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_ast;
++extern crate rustc_errors;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+-use syntax::ast;
+-use syntax::parse::{self, PResult, ParseSess};
+-use syntax::source_map::FilePathMapping;
+-use syntax_pos::edition::Edition;
+-use syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_errors::PResult;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -38,8 +36,8 @@ mod repo;
+ use common::eq::SpanlessEq;
+
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_round_trip() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -78,11 +76,12 @@ fn test_round_trip() {
+ }
+ };
+ let back = quote!(#krate).to_string();
++ let edition = repo::edition(path).parse().unwrap();
+
+ let equal = panic::catch_unwind(|| {
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- let before = match libsyntax_parse(content, &sess) {
++ let before = match librustc_parse(content, &sess) {
+ Ok(before) => before,
+ Err(mut diagnostic) => {
+ diagnostic.cancel();
+@@ -93,7 +92,7 @@ fn test_round_trip() {
+ errorf!("=== {}: ignore\n", path.display());
+ } else {
+ errorf!(
+- "=== {}: ignore - libsyntax failed to parse original content: {}\n",
++ "=== {}: ignore - librustc failed to parse original content: {}\n",
+ path.display(),
+ diagnostic.message()
+ );
+@@ -101,10 +100,10 @@ fn test_round_trip() {
+ return true;
+ }
+ };
+- let after = match libsyntax_parse(back, &sess) {
++ let after = match librustc_parse(back, &sess) {
+ Ok(after) => after,
+ Err(mut diagnostic) => {
+- errorf!("=== {}: libsyntax failed to parse", path.display());
++ errorf!("=== {}: librustc failed to parse", path.display());
+ diagnostic.emit();
+ return false;
+ }
+@@ -130,7 +129,7 @@ fn test_round_trip() {
+ })
+ });
+ match equal {
+- Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
++ Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
+ Ok(true) => {}
+ Ok(false) => {
+ let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
+@@ -147,7 +146,7 @@ fn test_round_trip() {
+ }
+ }
+
+-fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
++fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
+ let name = FileName::Custom("test_round_trip".to_string());
+ parse::parse_crate_from_source_str(name, content, sess)
+ }
+diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs
+new file mode 100644
+index 0000000000..dc26b9aab3
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_shebang.rs
+@@ -0,0 +1,59 @@
++#[macro_use]
++mod macros;
++
++#[test]
++fn test_basic() {
++ let content = "#!/usr/bin/env rustx\nfn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ shebang: Some("#!/usr/bin/env rustx"),
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_comment() {
++ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ attrs: [
++ Attribute {
++ style: Inner,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "allow",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(dead_code)`),
++ },
++ ],
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs
+index aadf42e3af..180d859916 100644
+--- a/third_party/rust/syn/tests/test_should_parse.rs
++++ b/third_party/rust/syn/tests/test_should_parse.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ macro_rules! should_parse {
+ ($name:ident, { $($in:tt)* }) => {
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs
+index 386d4df889..01e8401158 100644
+--- a/third_party/rust/syn/tests/test_size.rs
++++ b/third_party/rust/syn/tests/test_size.rs
+@@ -1,7 +1,5 @@
+ #![cfg(target_pointer_width = "64")]
+
+-mod features;
+-
+ use std::mem;
+ use syn::*;
+
+diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs
+new file mode 100644
+index 0000000000..d68b47fd2f
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_stmt.rs
+@@ -0,0 +1,44 @@
++#[macro_use]
++mod macros;
++
++use syn::Stmt;
++
++#[test]
++fn test_raw_operator() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Verbatim(`& raw const x`)),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_variable() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Expr::Reference {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "raw",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_invalid() {
++ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
++}
+diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs
+index 70a9a72aab..5b00448af8 100644
+--- a/third_party/rust/syn/tests/test_token_trees.rs
++++ b/third_party/rust/syn/tests/test_token_trees.rs
+@@ -1,9 +1,3 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -21,7 +15,11 @@ fn test_struct() {
+ }
+ ";
+
+- snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
++ snapshot!(input as TokenStream, @r###"
++ TokenStream(
++ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
++ )
++ "###);
+ }
+
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs
+new file mode 100644
+index 0000000000..9cbdcd6b99
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_ty.rs
+@@ -0,0 +1,53 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Type;
++
++#[test]
++fn test_mut_self() {
++ syn::parse_str::<Type>("fn(mut self)").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
++}
++
++#[test]
++fn test_macro_variable_type() {
++ // mimics the token stream corresponding to `$ty<T>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
++ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("T", Span::call_site())),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ ]);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "ty",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs
+new file mode 100644
+index 0000000000..c3d0ac7a5b
+--- /dev/null
++++ b/third_party/rust/syn/tests/test_visibility.rs
+@@ -0,0 +1,145 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use std::iter::FromIterator;
++use syn::parse::{Parse, ParseStream};
++use syn::{DeriveInput, Result, Visibility};
++
++#[derive(Debug)]
++struct VisRest {
++ vis: Visibility,
++ rest: TokenStream,
++}
++
++impl Parse for VisRest {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Ok(VisRest {
++ vis: input.parse()?,
++ rest: input.parse()?,
++ })
++ }
++}
++
++macro_rules! assert_vis_parse {
++ ($input:expr, Ok($p:pat)) => {
++ assert_vis_parse!($input, Ok($p) + "");
++ };
++
++ ($input:expr, Ok($p:pat) + $rest:expr) => {
++ let expected = $rest.parse::<TokenStream>().unwrap();
++ let parse: VisRest = syn::parse_str($input).unwrap();
++
++ match parse.vis {
++ $p => {}
++ _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
++ }
++
++ // NOTE: Round-trips through `to_string` to avoid potential whitespace
++ // diffs.
++ assert_eq!(parse.rest.to_string(), expected.to_string());
++ };
++
++ ($input:expr, Err) => {
++ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
++ };
++}
++
++#[test]
++fn test_pub() {
++ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
++}
++
++#[test]
++fn test_crate() {
++ assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
++}
++
++#[test]
++fn test_inherited() {
++ assert_vis_parse!("", Ok(Visibility::Inherited));
++}
++
++#[test]
++fn test_in() {
++ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_crate() {
++ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_self() {
++ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_super() {
++ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_missing_in() {
++ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
++}
++
++#[test]
++fn test_missing_in_path() {
++ assert_vis_parse!("pub(in)", Err);
++}
++
++#[test]
++fn test_crate_path() {
++ assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
++}
++
++#[test]
++fn test_junk_after_in() {
++ assert_vis_parse!("pub(in some::path @@garbage)", Err);
++}
++
++#[test]
++fn test_empty_group_vis() {
++ // mimics `struct S { $vis $field: () }` where $vis is empty
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("struct", Span::call_site())),
++ TokenTree::Ident(Ident::new("S", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
++ "f",
++ Span::call_site(),
++ ))]),
++ )),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as DeriveInput, @r###"
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("f"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs
+index a81b3df4d0..a1a670d9ed 100644
+--- a/third_party/rust/syn/tests/zzz_stable.rs
++++ b/third_party/rust/syn/tests/zzz_stable.rs
+@@ -1,7 +1,5 @@
+ #![cfg(syn_disable_nightly_tests)]
+
+-extern crate termcolor;
+-
+ use std::io::{self, Write};
+ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+@@ -10,7 +8,7 @@ const MSG: &str = "\
+ ‖ WARNING:
+ ‖ This is not a nightly compiler so not all tests were able to
+ ‖ run. Syn includes tests that compare Syn's parser against the
+-‖ compiler's parser, which requires access to unstable libsyntax
++‖ compiler's parser, which requires access to unstable librustc
+ ‖ data structures and a nightly compiler.
+ ‖
+ ";
+--
+2.28.0
+
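For context on the test_precedence.rs changes bundled in the patch above: the test keeps syn's existing approach of making operator precedence visible by wrapping every subexpression in explicit parentheses, printing the syn parse back out, re-parsing it with the compiler, and comparing the two ASTs. Below is a minimal stand-alone sketch of just the paren-wrapping step; it is an illustration, not code from the patch, and the struct name and Cargo features noted in the comment are assumptions.

// Illustration only, not the implementation from the patch. The struct name is
// made up. Cargo deps assumed: quote = "1",
// syn = { version = "1", features = ["full", "fold"] }.
use quote::quote;
use syn::fold::{fold_expr, Fold};
use syn::{token, Expr, ExprParen};

struct ParenthesizeEverything;

impl Fold for ParenthesizeEverything {
    fn fold_expr(&mut self, expr: Expr) -> Expr {
        // Recurse into subexpressions first, then wrap the folded expression in
        // explicit parens so the parser's precedence decisions become visible
        // when the expression is printed back out.
        Expr::Paren(ExprParen {
            attrs: Vec::new(),
            paren_token: token::Paren::default(),
            expr: Box::new(fold_expr(self, expr)),
        })
    }
}

fn main() {
    let expr: Expr = syn::parse_str("1 + 2 * 3").unwrap();
    let wrapped = ParenthesizeEverything.fold_expr(expr);
    // Prints roughly ((1) + ((2) * (3))), modulo token spacing.
    println!("{}", quote!(#wrapped));
}

The real syn_brackets() in the patched test is more careful than this sketch: it leaves if/unsafe/block/let expressions unwrapped, and after this patch it also avoids parenthesizing const generic arguments, since that would be invalid syntax.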
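Several of the new tests (test_path.rs, test_ty.rs, test_visibility.rs) build token streams by hand that contain Delimiter::None groups, mimicking what macro_rules! interpolations such as $ty<T> or an empty $vis expand to, and then check that syn parses through the invisible delimiters. A hypothetical, self-contained sketch of the same pattern follows; the Vec example and the crate versions in the comment are assumptions, not taken from the patch.

// Hypothetical sketch, not from the patch. Cargo deps assumed:
// proc-macro2 = "1", quote = "1", syn = { version = "1", features = ["full"] }.
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Type;

fn main() {
    // Roughly the token stream a macro would produce for `$ty<T>` where the
    // `$ty` fragment captured `Vec`: the capture arrives as a group with
    // invisible (None) delimiters.
    let tokens = TokenStream::from_iter(vec![
        TokenTree::Group(Group::new(Delimiter::None, quote! { Vec })),
        TokenTree::Punct(Punct::new('<', Spacing::Alone)),
        TokenTree::Ident(Ident::new("T", Span::call_site())),
        TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    ]);

    // syn should parse straight through the invisible group and see `Vec<T>`.
    let ty: Type = syn::parse2(tokens).expect("interpolated type should parse");
    assert!(matches!(ty, Type::Path(_)));
}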
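The new test_receiver.rs exercises Signature::receiver(): the tests expect shorthand receivers (self, &self, &mut self) to come back as FnArg::Receiver and explicitly typed ones such as self: Box<Self> as FnArg::Typed. A short usage sketch along the same lines; the method names here are arbitrary and the Cargo feature in the comment is an assumption.

// Illustrative sketch only; method names are not taken from the patched tests.
// Cargo dep assumed: syn = { version = "1", features = ["full"] }.
use syn::{parse_quote, FnArg, TraitItemMethod};

fn main() {
    let shorthand: TraitItemMethod = parse_quote! { fn f(&self); };
    let typed: TraitItemMethod = parse_quote! { fn g(self: Box<Self>); };

    // Shorthand receivers come back as FnArg::Receiver...
    assert!(matches!(shorthand.sig.receiver(), Some(FnArg::Receiver(_))));
    // ...while an explicitly typed `self` is reported as FnArg::Typed.
    assert!(matches!(typed.sig.receiver(), Some(FnArg::Typed(_))));
}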
diff --git a/source/xap/seamonkey/seamonkey.SlackBuild b/source/xap/seamonkey/seamonkey.SlackBuild
index feacb5c1..cd4a58f2 100755
--- a/source/xap/seamonkey/seamonkey.SlackBuild
+++ b/source/xap/seamonkey/seamonkey.SlackBuild
@@ -68,8 +68,8 @@ else
fi
# Choose a compiler (gcc/g++ or clang/clang++):
-export CC=${CC:-clang}
-export CXX=${CXX:-clang++}
+export CC=${CC:-gcc}
+export CXX=${CXX:-g++}
# -Wformat is needed for -Werror=format-security
# -fno-delete-null-pointer-checks disables gcc6 optimization that leads to instability