From: Manos Pitsidianakis
Subject: [RFC PATCH v5 8/8] rust/pl011: vendor dependencies
Date: Mon, 22 Jul 2024 14:43:38 +0300
Signed-off-by: Manos Pitsidianakis <manos.pitsidianakis@linaro.org>
---
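For context: the crates vendored below (bilge, bilge-impl, arbitrary-int, and
their proc-macro dependencies) are presumably what the pl011 device model
pulls in for its register bitfield types. A minimal sketch of that kind of
usage, assuming only the derive and prelude names shipped in the vendored
bilge and arbitrary-int sources (the register layout here is illustrative,
not the pl011 one):

```rust
use bilge::prelude::*;

// Illustrative flag register; field widths must add up to the declared bitsize.
#[bitsize(6)]
#[derive(Clone, Copy, DebugBits, FromBits)]
struct Flags {
    clear_to_send: bool,
    busy: bool,
    rx_fifo_empty: bool,
    tx_fifo_full: bool,
    spare: u2,
}

fn main() {
    // FromBits provides conversions to and from the matching arbitrary-int type.
    let flags = Flags::from(u6::new(0b000_101));
    assert!(flags.clear_to_send());
    assert!(flags.rx_fifo_empty());
    assert!(!flags.busy());
}
```
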
rust/hw/char/pl011/vendor/either/README.rst | 185 +
.../vendor/arbitrary-int/.cargo-checksum.json | 1 +
.../pl011/vendor/arbitrary-int/CHANGELOG.md | 47 +
.../pl011/vendor/arbitrary-int/Cargo.toml | 54 +
.../pl011/vendor/arbitrary-int/LICENSE.txt | 21 +
.../char/pl011/vendor/arbitrary-int/README.md | 72 +
.../pl011/vendor/arbitrary-int/meson.build | 14 +
.../pl011/vendor/arbitrary-int/src/lib.rs | 1489 +++++
.../pl011/vendor/arbitrary-int/tests/tests.rs | 1913 ++++++
.../vendor/bilge-impl/.cargo-checksum.json | 1 +
.../char/pl011/vendor/bilge-impl/Cargo.toml | 54 +
.../hw/char/pl011/vendor/bilge-impl/README.md | 327 ++
.../char/pl011/vendor/bilge-impl/meson.build | 24 +
.../pl011/vendor/bilge-impl/src/bitsize.rs | 187 +
.../vendor/bilge-impl/src/bitsize/split.rs | 185 +
.../vendor/bilge-impl/src/bitsize_internal.rs | 235 +
.../src/bitsize_internal/struct_gen.rs | 402 ++
.../pl011/vendor/bilge-impl/src/debug_bits.rs | 55 +
.../vendor/bilge-impl/src/default_bits.rs | 92 +
.../pl011/vendor/bilge-impl/src/fmt_bits.rs | 112 +
.../pl011/vendor/bilge-impl/src/from_bits.rs | 222 +
.../char/pl011/vendor/bilge-impl/src/lib.rs | 79 +
.../pl011/vendor/bilge-impl/src/shared.rs | 196 +
.../src/shared/discriminant_assigner.rs | 56 +
.../vendor/bilge-impl/src/shared/fallback.rs | 92 +
.../vendor/bilge-impl/src/shared/util.rs | 91 +
.../vendor/bilge-impl/src/try_from_bits.rs | 143 +
.../pl011/vendor/bilge/.cargo-checksum.json | 1 +
rust/hw/char/pl011/vendor/bilge/Cargo.toml | 69 +
.../hw/char/pl011/vendor/bilge/LICENSE-APACHE | 176 +
rust/hw/char/pl011/vendor/bilge/LICENSE-MIT | 17 +
rust/hw/char/pl011/vendor/bilge/README.md | 327 ++
rust/hw/char/pl011/vendor/bilge/meson.build | 17 +
rust/hw/char/pl011/vendor/bilge/src/lib.rs | 80 +
.../pl011/vendor/either/.cargo-checksum.json | 1 +
rust/hw/char/pl011/vendor/either/Cargo.toml | 54 +
.../char/pl011/vendor/either/LICENSE-APACHE | 201 +
rust/hw/char/pl011/vendor/either/LICENSE-MIT | 25 +
.../pl011/vendor/either/README-crates.io.md | 10 +
rust/hw/char/pl011/vendor/either/meson.build | 16 +
.../pl011/vendor/either/src/into_either.rs | 64 +
.../char/pl011/vendor/either/src/iterator.rs | 315 +
rust/hw/char/pl011/vendor/either/src/lib.rs | 1519 +++++
.../pl011/vendor/either/src/serde_untagged.rs | 69 +
.../either/src/serde_untagged_optional.rs | 74 +
.../vendor/itertools/.cargo-checksum.json | 1 +
.../char/pl011/vendor/itertools/CHANGELOG.md | 409 ++
.../hw/char/pl011/vendor/itertools/Cargo.lock | 681 +++
.../hw/char/pl011/vendor/itertools/Cargo.toml | 101 +
.../pl011/vendor/itertools/LICENSE-APACHE | 201 +
.../char/pl011/vendor/itertools/LICENSE-MIT | 25 +
rust/hw/char/pl011/vendor/itertools/README.md | 44 +
.../pl011/vendor/itertools/benches/bench1.rs | 877 +++
.../vendor/itertools/benches/combinations.rs | 125 +
.../benches/combinations_with_replacement.rs | 40 +
.../vendor/itertools/benches/extra/mod.rs | 2 +
.../itertools/benches/extra/zipslices.rs | 188 +
.../itertools/benches/fold_specialization.rs | 73 +
.../vendor/itertools/benches/powerset.rs | 44 +
.../vendor/itertools/benches/tree_fold1.rs | 144 +
.../itertools/benches/tuple_combinations.rs | 113 +
.../pl011/vendor/itertools/benches/tuples.rs | 213 +
.../pl011/vendor/itertools/examples/iris.data | 150 +
.../pl011/vendor/itertools/examples/iris.rs | 137 +
.../char/pl011/vendor/itertools/meson.build | 18 +
.../vendor/itertools/src/adaptors/coalesce.rs | 235 +
.../vendor/itertools/src/adaptors/map.rs | 124 +
.../vendor/itertools/src/adaptors/mod.rs | 1151 ++++
.../itertools/src/adaptors/multi_product.rs | 230 +
.../vendor/itertools/src/combinations.rs | 128 +
.../src/combinations_with_replacement.rs | 109 +
.../pl011/vendor/itertools/src/concat_impl.rs | 23 +
.../vendor/itertools/src/cons_tuples_impl.rs | 64 +
.../char/pl011/vendor/itertools/src/diff.rs | 61 +
.../vendor/itertools/src/duplicates_impl.rs | 216 +
.../vendor/itertools/src/either_or_both.rs | 495 ++
.../vendor/itertools/src/exactly_one_err.rs | 110 +
.../pl011/vendor/itertools/src/extrema_set.rs | 48 +
.../pl011/vendor/itertools/src/flatten_ok.rs | 165 +
.../char/pl011/vendor/itertools/src/format.rs | 168 +
.../char/pl011/vendor/itertools/src/free.rs | 286 +
.../pl011/vendor/itertools/src/group_map.rs | 32 +
.../pl011/vendor/itertools/src/groupbylazy.rs | 579 ++
.../vendor/itertools/src/grouping_map.rs | 535 ++
.../pl011/vendor/itertools/src/impl_macros.rs | 29 +
.../pl011/vendor/itertools/src/intersperse.rs | 118 +
.../pl011/vendor/itertools/src/k_smallest.rs | 20 +
.../pl011/vendor/itertools/src/kmerge_impl.rs | 227 +
.../pl011/vendor/itertools/src/lazy_buffer.rs | 63 +
.../hw/char/pl011/vendor/itertools/src/lib.rs | 3967 +++++++++++++
.../pl011/vendor/itertools/src/merge_join.rs | 220 +
.../char/pl011/vendor/itertools/src/minmax.rs | 115 +
.../vendor/itertools/src/multipeek_impl.rs | 101 +
.../pl011/vendor/itertools/src/pad_tail.rs | 96 +
.../pl011/vendor/itertools/src/peek_nth.rs | 102 +
.../itertools/src/peeking_take_while.rs | 177 +
.../vendor/itertools/src/permutations.rs | 277 +
.../pl011/vendor/itertools/src/powerset.rs | 90 +
.../itertools/src/process_results_impl.rs | 68 +
.../vendor/itertools/src/put_back_n_impl.rs | 61 +
.../pl011/vendor/itertools/src/rciter_impl.rs | 99 +
.../pl011/vendor/itertools/src/repeatn.rs | 59 +
.../pl011/vendor/itertools/src/size_hint.rs | 119 +
.../pl011/vendor/itertools/src/sources.rs | 183 +
.../itertools/src/take_while_inclusive.rs | 68 +
.../hw/char/pl011/vendor/itertools/src/tee.rs | 78 +
.../pl011/vendor/itertools/src/tuple_impl.rs | 331 ++
.../pl011/vendor/itertools/src/unique_impl.rs | 179 +
.../pl011/vendor/itertools/src/unziptuple.rs | 80 +
.../vendor/itertools/src/with_position.rs | 88 +
.../pl011/vendor/itertools/src/zip_eq_impl.rs | 60 +
.../pl011/vendor/itertools/src/zip_longest.rs | 83 +
.../pl011/vendor/itertools/src/ziptuple.rs | 138 +
.../itertools/tests/adaptors_no_collect.rs | 46 +
.../vendor/itertools/tests/flatten_ok.rs | 76 +
.../vendor/itertools/tests/macros_hygiene.rs | 13 +
.../vendor/itertools/tests/merge_join.rs | 108 +
.../itertools/tests/peeking_take_while.rs | 69 +
.../pl011/vendor/itertools/tests/quick.rs | 1849 ++++++
.../vendor/itertools/tests/specializations.rs | 153 +
.../pl011/vendor/itertools/tests/test_core.rs | 317 +
.../pl011/vendor/itertools/tests/test_std.rs | 1184 ++++
.../pl011/vendor/itertools/tests/tuples.rs | 86 +
.../char/pl011/vendor/itertools/tests/zip.rs | 77 +
rust/hw/char/pl011/vendor/meson.build | 18 +
.../.cargo-checksum.json | 1 +
.../vendor/proc-macro-error-attr/Cargo.toml | 33 +
.../proc-macro-error-attr/LICENSE-APACHE | 201 +
.../vendor/proc-macro-error-attr/LICENSE-MIT | 21 +
.../vendor/proc-macro-error-attr/build.rs | 5 +
.../vendor/proc-macro-error-attr/meson.build | 20 +
.../vendor/proc-macro-error-attr/src/lib.rs | 121 +
.../vendor/proc-macro-error-attr/src/parse.rs | 89 +
.../proc-macro-error-attr/src/settings.rs | 72 +
.../proc-macro-error/.cargo-checksum.json | 1 +
.../vendor/proc-macro-error/CHANGELOG.md | 162 +
.../pl011/vendor/proc-macro-error/Cargo.toml | 56 +
.../vendor/proc-macro-error/LICENSE-APACHE | 201 +
.../pl011/vendor/proc-macro-error/LICENSE-MIT | 21 +
.../pl011/vendor/proc-macro-error/README.md | 258 +
.../pl011/vendor/proc-macro-error/build.rs | 11 +
.../pl011/vendor/proc-macro-error/meson.build | 22 +
.../vendor/proc-macro-error/src/diagnostic.rs | 349 ++
.../vendor/proc-macro-error/src/dummy.rs | 150 +
.../proc-macro-error/src/imp/delegate.rs | 69 +
.../proc-macro-error/src/imp/fallback.rs | 30 +
.../pl011/vendor/proc-macro-error/src/lib.rs | 560 ++
.../vendor/proc-macro-error/src/macros.rs | 288 +
.../vendor/proc-macro-error/src/sealed.rs | 3 +
.../proc-macro-error/tests/macro-errors.rs | 8 +
.../pl011/vendor/proc-macro-error/tests/ok.rs | 10 +
.../proc-macro-error/tests/runtime-errors.rs | 13 +
.../vendor/proc-macro-error/tests/ui/abort.rs | 11 +
.../proc-macro-error/tests/ui/abort.stderr | 48 +
.../proc-macro-error/tests/ui/append_dummy.rs | 13 +
.../tests/ui/append_dummy.stderr | 5 +
.../tests/ui/children_messages.rs | 6 +
.../tests/ui/children_messages.stderr | 23 +
.../vendor/proc-macro-error/tests/ui/dummy.rs | 13 +
.../proc-macro-error/tests/ui/dummy.stderr | 5 +
.../vendor/proc-macro-error/tests/ui/emit.rs | 7 +
.../proc-macro-error/tests/ui/emit.stderr | 48 +
.../tests/ui/explicit_span_range.rs | 6 +
.../tests/ui/explicit_span_range.stderr | 5 +
.../proc-macro-error/tests/ui/misuse.rs | 11 +
.../proc-macro-error/tests/ui/misuse.stderr | 13 +
.../tests/ui/multiple_tokens.rs | 6 +
.../tests/ui/multiple_tokens.stderr | 5 +
.../tests/ui/not_proc_macro.rs | 4 +
.../tests/ui/not_proc_macro.stderr | 10 +
.../proc-macro-error/tests/ui/option_ext.rs | 6 +
.../tests/ui/option_ext.stderr | 7 +
.../tests/ui/proc_macro_hack.rs | 10 +
.../tests/ui/proc_macro_hack.stderr | 26 +
.../proc-macro-error/tests/ui/result_ext.rs | 7 +
.../tests/ui/result_ext.stderr | 11 +
.../tests/ui/to_tokens_span.rs | 6 +
.../tests/ui/to_tokens_span.stderr | 11 +
.../tests/ui/unknown_setting.rs | 4 +
.../tests/ui/unknown_setting.stderr | 5 +
.../tests/ui/unrelated_panic.rs | 6 +
.../tests/ui/unrelated_panic.stderr | 7 +
.../vendor/proc-macro2/.cargo-checksum.json | 1 +
.../char/pl011/vendor/proc-macro2/Cargo.toml | 104 +
.../pl011/vendor/proc-macro2/LICENSE-APACHE | 176 +
.../char/pl011/vendor/proc-macro2/LICENSE-MIT | 23 +
.../char/pl011/vendor/proc-macro2/README.md | 94 +
.../hw/char/pl011/vendor/proc-macro2/build.rs | 227 +
.../pl011/vendor/proc-macro2/build/probe.rs | 25 +
.../char/pl011/vendor/proc-macro2/meson.build | 19 +
.../vendor/proc-macro2/rust-toolchain.toml | 2 +
.../pl011/vendor/proc-macro2/src/detection.rs | 75 +
.../pl011/vendor/proc-macro2/src/extra.rs | 151 +
.../pl011/vendor/proc-macro2/src/fallback.rs | 1226 ++++
.../char/pl011/vendor/proc-macro2/src/lib.rs | 1369 +++++
.../pl011/vendor/proc-macro2/src/location.rs | 29 +
.../pl011/vendor/proc-macro2/src/marker.rs | 17 +
.../pl011/vendor/proc-macro2/src/parse.rs | 996 ++++
.../pl011/vendor/proc-macro2/src/rcvec.rs | 145 +
.../pl011/vendor/proc-macro2/src/wrapper.rs | 993 ++++
.../vendor/proc-macro2/tests/comments.rs | 105 +
.../vendor/proc-macro2/tests/features.rs | 8 +
.../pl011/vendor/proc-macro2/tests/marker.rs | 100 +
.../pl011/vendor/proc-macro2/tests/test.rs | 905 +++
.../vendor/proc-macro2/tests/test_fmt.rs | 28 +
.../vendor/proc-macro2/tests/test_size.rs | 73 +
.../pl011/vendor/quote/.cargo-checksum.json | 1 +
rust/hw/char/pl011/vendor/quote/Cargo.toml | 50 +
.../hw/char/pl011/vendor/quote/LICENSE-APACHE | 176 +
rust/hw/char/pl011/vendor/quote/LICENSE-MIT | 23 +
rust/hw/char/pl011/vendor/quote/README.md | 272 +
rust/hw/char/pl011/vendor/quote/meson.build | 17 +
.../pl011/vendor/quote/rust-toolchain.toml | 2 +
rust/hw/char/pl011/vendor/quote/src/ext.rs | 110 +
rust/hw/char/pl011/vendor/quote/src/format.rs | 168 +
.../pl011/vendor/quote/src/ident_fragment.rs | 88 +
rust/hw/char/pl011/vendor/quote/src/lib.rs | 1464 +++++
.../hw/char/pl011/vendor/quote/src/runtime.rs | 530 ++
.../hw/char/pl011/vendor/quote/src/spanned.rs | 50 +
.../char/pl011/vendor/quote/src/to_tokens.rs | 209 +
.../pl011/vendor/quote/tests/compiletest.rs | 7 +
rust/hw/char/pl011/vendor/quote/tests/test.rs | 549 ++
.../ui/does-not-have-iter-interpolated-dup.rs | 9 +
...does-not-have-iter-interpolated-dup.stderr | 11 +
.../ui/does-not-have-iter-interpolated.rs | 9 +
.../ui/does-not-have-iter-interpolated.stderr | 11 +
.../tests/ui/does-not-have-iter-separated.rs | 5 +
.../ui/does-not-have-iter-separated.stderr | 10 +
.../quote/tests/ui/does-not-have-iter.rs | 5 +
.../quote/tests/ui/does-not-have-iter.stderr | 10 +
.../vendor/quote/tests/ui/not-quotable.rs | 7 +
.../vendor/quote/tests/ui/not-quotable.stderr | 20 +
.../vendor/quote/tests/ui/not-repeatable.rs | 8 +
.../quote/tests/ui/not-repeatable.stderr | 34 +
.../vendor/quote/tests/ui/wrong-type-span.rs | 7 +
.../quote/tests/ui/wrong-type-span.stderr | 10 +
.../pl011/vendor/syn/.cargo-checksum.json | 1 +
rust/hw/char/pl011/vendor/syn/Cargo.toml | 260 +
rust/hw/char/pl011/vendor/syn/LICENSE-APACHE | 176 +
rust/hw/char/pl011/vendor/syn/LICENSE-MIT | 23 +
rust/hw/char/pl011/vendor/syn/README.md | 284 +
rust/hw/char/pl011/vendor/syn/benches/file.rs | 57 +
rust/hw/char/pl011/vendor/syn/benches/rust.rs | 182 +
rust/hw/char/pl011/vendor/syn/meson.build | 24 +
rust/hw/char/pl011/vendor/syn/src/attr.rs | 793 +++
rust/hw/char/pl011/vendor/syn/src/bigint.rs | 66 +
rust/hw/char/pl011/vendor/syn/src/buffer.rs | 434 ++
rust/hw/char/pl011/vendor/syn/src/classify.rs | 377 ++
.../pl011/vendor/syn/src/custom_keyword.rs | 260 +
.../vendor/syn/src/custom_punctuation.rs | 304 +
rust/hw/char/pl011/vendor/syn/src/data.rs | 423 ++
rust/hw/char/pl011/vendor/syn/src/derive.rs | 259 +
.../char/pl011/vendor/syn/src/discouraged.rs | 225 +
rust/hw/char/pl011/vendor/syn/src/drops.rs | 58 +
rust/hw/char/pl011/vendor/syn/src/error.rs | 467 ++
rust/hw/char/pl011/vendor/syn/src/export.rs | 73 +
rust/hw/char/pl011/vendor/syn/src/expr.rs | 3960 +++++++++++++
rust/hw/char/pl011/vendor/syn/src/ext.rs | 136 +
rust/hw/char/pl011/vendor/syn/src/file.rs | 130 +
rust/hw/char/pl011/vendor/syn/src/fixup.rs | 218 +
.../hw/char/pl011/vendor/syn/src/gen/clone.rs | 2209 +++++++
.../hw/char/pl011/vendor/syn/src/gen/debug.rs | 3160 ++++++++++
rust/hw/char/pl011/vendor/syn/src/gen/eq.rs | 2242 +++++++
rust/hw/char/pl011/vendor/syn/src/gen/fold.rs | 3779 ++++++++++++
rust/hw/char/pl011/vendor/syn/src/gen/hash.rs | 2807 +++++++++
.../hw/char/pl011/vendor/syn/src/gen/visit.rs | 3858 ++++++++++++
.../pl011/vendor/syn/src/gen/visit_mut.rs | 3855 ++++++++++++
rust/hw/char/pl011/vendor/syn/src/generics.rs | 1286 ++++
rust/hw/char/pl011/vendor/syn/src/group.rs | 291 +
rust/hw/char/pl011/vendor/syn/src/ident.rs | 108 +
rust/hw/char/pl011/vendor/syn/src/item.rs | 3441 +++++++++++
rust/hw/char/pl011/vendor/syn/src/lib.rs | 1019 ++++
rust/hw/char/pl011/vendor/syn/src/lifetime.rs | 156 +
rust/hw/char/pl011/vendor/syn/src/lit.rs | 1830 ++++++
.../hw/char/pl011/vendor/syn/src/lookahead.rs | 169 +
rust/hw/char/pl011/vendor/syn/src/mac.rs | 223 +
rust/hw/char/pl011/vendor/syn/src/macros.rs | 166 +
rust/hw/char/pl011/vendor/syn/src/meta.rs | 427 ++
rust/hw/char/pl011/vendor/syn/src/op.rs | 219 +
rust/hw/char/pl011/vendor/syn/src/parse.rs | 1397 +++++
.../pl011/vendor/syn/src/parse_macro_input.rs | 128 +
.../char/pl011/vendor/syn/src/parse_quote.rs | 210 +
rust/hw/char/pl011/vendor/syn/src/pat.rs | 953 +++
rust/hw/char/pl011/vendor/syn/src/path.rs | 886 +++
.../char/pl011/vendor/syn/src/precedence.rs | 163 +
rust/hw/char/pl011/vendor/syn/src/print.rs | 16 +
.../char/pl011/vendor/syn/src/punctuated.rs | 1132 ++++
.../char/pl011/vendor/syn/src/restriction.rs | 176 +
rust/hw/char/pl011/vendor/syn/src/sealed.rs | 4 +
rust/hw/char/pl011/vendor/syn/src/span.rs | 63 +
rust/hw/char/pl011/vendor/syn/src/spanned.rs | 118 +
rust/hw/char/pl011/vendor/syn/src/stmt.rs | 481 ++
rust/hw/char/pl011/vendor/syn/src/thread.rs | 60 +
rust/hw/char/pl011/vendor/syn/src/token.rs | 1138 ++++
rust/hw/char/pl011/vendor/syn/src/tt.rs | 107 +
rust/hw/char/pl011/vendor/syn/src/ty.rs | 1216 ++++
rust/hw/char/pl011/vendor/syn/src/verbatim.rs | 33 +
.../char/pl011/vendor/syn/src/whitespace.rs | 65 +
.../char/pl011/vendor/syn/tests/common/eq.rs | 900 +++
.../char/pl011/vendor/syn/tests/common/mod.rs | 28 +
.../pl011/vendor/syn/tests/common/parse.rs | 49 +
.../char/pl011/vendor/syn/tests/debug/gen.rs | 5163 +++++++++++++++++
.../char/pl011/vendor/syn/tests/debug/mod.rs | 147 +
.../char/pl011/vendor/syn/tests/macros/mod.rs | 93 +
.../char/pl011/vendor/syn/tests/regression.rs | 5 +
.../vendor/syn/tests/regression/issue1108.rs | 5 +
.../vendor/syn/tests/regression/issue1235.rs | 32 +
.../char/pl011/vendor/syn/tests/repo/mod.rs | 461 ++
.../pl011/vendor/syn/tests/repo/progress.rs | 37 +
.../pl011/vendor/syn/tests/test_asyncness.rs | 43 +
.../pl011/vendor/syn/tests/test_attribute.rs | 225 +
.../vendor/syn/tests/test_derive_input.rs | 781 +++
.../char/pl011/vendor/syn/tests/test_expr.rs | 692 +++
.../pl011/vendor/syn/tests/test_generics.rs | 282 +
.../pl011/vendor/syn/tests/test_grouping.rs | 53 +
.../char/pl011/vendor/syn/tests/test_ident.rs | 87 +
.../char/pl011/vendor/syn/tests/test_item.rs | 332 ++
.../pl011/vendor/syn/tests/test_iterators.rs | 70 +
.../char/pl011/vendor/syn/tests/test_lit.rs | 331 ++
.../char/pl011/vendor/syn/tests/test_meta.rs | 154 +
.../vendor/syn/tests/test_parse_buffer.rs | 103 +
.../vendor/syn/tests/test_parse_quote.rs | 166 +
.../vendor/syn/tests/test_parse_stream.rs | 187 +
.../char/pl011/vendor/syn/tests/test_pat.rs | 152 +
.../char/pl011/vendor/syn/tests/test_path.rs | 130 +
.../pl011/vendor/syn/tests/test_precedence.rs | 537 ++
.../pl011/vendor/syn/tests/test_receiver.rs | 321 +
.../pl011/vendor/syn/tests/test_round_trip.rs | 253 +
.../pl011/vendor/syn/tests/test_shebang.rs | 67 +
.../char/pl011/vendor/syn/tests/test_size.rs | 36 +
.../char/pl011/vendor/syn/tests/test_stmt.rs | 322 +
.../vendor/syn/tests/test_token_trees.rs | 32 +
.../hw/char/pl011/vendor/syn/tests/test_ty.rs | 397 ++
.../pl011/vendor/syn/tests/test_visibility.rs | 185 +
.../char/pl011/vendor/syn/tests/zzz_stable.rs | 33 +
.../vendor/unicode-ident/.cargo-checksum.json | 1 +
.../pl011/vendor/unicode-ident/Cargo.toml | 63 +
.../pl011/vendor/unicode-ident/LICENSE-APACHE | 176 +
.../pl011/vendor/unicode-ident/LICENSE-MIT | 23 +
.../vendor/unicode-ident/LICENSE-UNICODE | 46 +
.../char/pl011/vendor/unicode-ident/README.md | 283 +
.../pl011/vendor/unicode-ident/benches/xid.rs | 124 +
.../pl011/vendor/unicode-ident/meson.build | 14 +
.../pl011/vendor/unicode-ident/src/lib.rs | 269 +
.../pl011/vendor/unicode-ident/src/tables.rs | 651 +++
.../vendor/unicode-ident/tests/compare.rs | 67 +
.../vendor/unicode-ident/tests/fst/mod.rs | 11 +
.../unicode-ident/tests/fst/xid_continue.fst | Bin 0 -> 73249 bytes
.../unicode-ident/tests/fst/xid_start.fst | Bin 0 -> 65487 bytes
.../vendor/unicode-ident/tests/roaring/mod.rs | 21 +
.../vendor/unicode-ident/tests/static_size.rs | 95 +
.../vendor/unicode-ident/tests/tables/mod.rs | 7 +
.../unicode-ident/tests/tables/tables.rs | 347 ++
.../vendor/unicode-ident/tests/trie/mod.rs | 7 +
.../vendor/unicode-ident/tests/trie/trie.rs | 445 ++
.../vendor/version_check/.cargo-checksum.json | 1 +
.../pl011/vendor/version_check/Cargo.toml | 24 +
.../pl011/vendor/version_check/LICENSE-APACHE | 201 +
.../pl011/vendor/version_check/LICENSE-MIT | 19 +
.../char/pl011/vendor/version_check/README.md | 80 +
.../pl011/vendor/version_check/meson.build | 14 +
.../pl011/vendor/version_check/src/channel.rs | 193 +
.../pl011/vendor/version_check/src/date.rs | 203 +
.../pl011/vendor/version_check/src/lib.rs | 493 ++
.../pl011/vendor/version_check/src/version.rs | 316 +
365 files changed, 108770 insertions(+)
create mode 100644 rust/hw/char/pl011/vendor/either/README.rst
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/CHANGELOG.md
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/LICENSE.txt
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/README.md
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/meson.build
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/arbitrary-int/tests/tests.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/README.md
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/meson.build
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/bitsize.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/bitsize/split.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal.rs
create mode 100644
rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal/struct_gen.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/debug_bits.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/default_bits.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/fmt_bits.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/from_bits.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/shared.rs
create mode 100644
rust/hw/char/pl011/vendor/bilge-impl/src/shared/discriminant_assigner.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/shared/fallback.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/shared/util.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge-impl/src/try_from_bits.rs
create mode 100644 rust/hw/char/pl011/vendor/bilge/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/bilge/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/bilge/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/bilge/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/bilge/README.md
create mode 100644 rust/hw/char/pl011/vendor/bilge/meson.build
create mode 100644 rust/hw/char/pl011/vendor/bilge/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/either/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/either/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/either/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/either/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/either/README-crates.io.md
create mode 100644 rust/hw/char/pl011/vendor/either/meson.build
create mode 100644 rust/hw/char/pl011/vendor/either/src/into_either.rs
create mode 100644 rust/hw/char/pl011/vendor/either/src/iterator.rs
create mode 100644 rust/hw/char/pl011/vendor/either/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/either/src/serde_untagged.rs
create mode 100644
rust/hw/char/pl011/vendor/either/src/serde_untagged_optional.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/itertools/CHANGELOG.md
create mode 100644 rust/hw/char/pl011/vendor/itertools/Cargo.lock
create mode 100644 rust/hw/char/pl011/vendor/itertools/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/itertools/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/itertools/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/itertools/README.md
create mode 100644 rust/hw/char/pl011/vendor/itertools/benches/bench1.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/benches/combinations.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/benches/combinations_with_replacement.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/benches/extra/mod.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/benches/extra/zipslices.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/benches/fold_specialization.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/benches/powerset.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/benches/tree_fold1.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/benches/tuple_combinations.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/benches/tuples.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/examples/iris.data
create mode 100644 rust/hw/char/pl011/vendor/itertools/examples/iris.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/meson.build
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/adaptors/coalesce.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/adaptors/map.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/adaptors/mod.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/src/adaptors/multi_product.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/combinations.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/src/combinations_with_replacement.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/concat_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/cons_tuples_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/diff.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/duplicates_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/either_or_both.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/exactly_one_err.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/extrema_set.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/flatten_ok.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/format.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/free.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/group_map.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/groupbylazy.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/grouping_map.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/impl_macros.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/intersperse.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/k_smallest.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/kmerge_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/lazy_buffer.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/merge_join.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/minmax.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/multipeek_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/pad_tail.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/peek_nth.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/src/peeking_take_while.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/permutations.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/powerset.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/src/process_results_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/put_back_n_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/rciter_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/repeatn.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/size_hint.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/sources.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/src/take_while_inclusive.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/tee.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/tuple_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/unique_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/unziptuple.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/with_position.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/zip_eq_impl.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/zip_longest.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/src/ziptuple.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/tests/adaptors_no_collect.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/flatten_ok.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/macros_hygiene.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/merge_join.rs
create mode 100644
rust/hw/char/pl011/vendor/itertools/tests/peeking_take_while.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/quick.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/specializations.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/test_core.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/test_std.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/tuples.rs
create mode 100644 rust/hw/char/pl011/vendor/itertools/tests/zip.rs
create mode 100644 rust/hw/char/pl011/vendor/meson.build
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error-attr/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error-attr/Cargo.toml
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error-attr/build.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error-attr/meson.build
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error-attr/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error-attr/src/parse.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error-attr/src/settings.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/CHANGELOG.md
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/README.md
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/build.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/meson.build
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/src/diagnostic.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/src/dummy.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/src/imp/delegate.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/src/imp/fallback.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/src/macros.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/src/sealed.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/macro-errors.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/tests/ok.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/runtime-errors.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.stderr
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.stderr
create mode 100644 rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.stderr
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.rs
create mode 100644
rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.stderr
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/README.md
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/build.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/build/probe.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/meson.build
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/rust-toolchain.toml
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/detection.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/extra.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/fallback.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/location.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/marker.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/parse.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/rcvec.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/src/wrapper.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/tests/comments.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/tests/features.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/tests/marker.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/tests/test.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/tests/test_fmt.rs
create mode 100644 rust/hw/char/pl011/vendor/proc-macro2/tests/test_size.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/quote/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/quote/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/quote/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/quote/README.md
create mode 100644 rust/hw/char/pl011/vendor/quote/meson.build
create mode 100644 rust/hw/char/pl011/vendor/quote/rust-toolchain.toml
create mode 100644 rust/hw/char/pl011/vendor/quote/src/ext.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/src/format.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/src/ident_fragment.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/src/runtime.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/src/spanned.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/src/to_tokens.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/tests/compiletest.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/tests/test.rs
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.rs
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.rs
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.stderr
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.rs
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.stderr
create mode 100644 rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.rs
create mode 100644 rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.stderr
create mode 100644 rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.rs
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.stderr
create mode 100644 rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.rs
create mode 100644
rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.stderr
create mode 100644 rust/hw/char/pl011/vendor/syn/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/syn/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/syn/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/syn/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/syn/README.md
create mode 100644 rust/hw/char/pl011/vendor/syn/benches/file.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/benches/rust.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/meson.build
create mode 100644 rust/hw/char/pl011/vendor/syn/src/attr.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/bigint.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/buffer.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/classify.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/custom_keyword.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/custom_punctuation.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/data.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/derive.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/discouraged.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/drops.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/error.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/export.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/expr.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/ext.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/file.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/fixup.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/gen/clone.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/gen/debug.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/gen/eq.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/gen/fold.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/gen/hash.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/gen/visit.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/gen/visit_mut.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/generics.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/group.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/ident.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/item.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/lifetime.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/lit.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/lookahead.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/mac.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/macros.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/meta.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/op.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/parse.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/parse_macro_input.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/parse_quote.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/pat.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/path.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/precedence.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/print.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/punctuated.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/restriction.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/sealed.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/span.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/spanned.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/stmt.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/thread.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/token.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/tt.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/ty.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/verbatim.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/src/whitespace.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/common/eq.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/common/mod.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/common/parse.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/debug/gen.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/debug/mod.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/macros/mod.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/regression.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/regression/issue1108.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/regression/issue1235.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/repo/mod.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/repo/progress.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_asyncness.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_attribute.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_derive_input.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_expr.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_generics.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_grouping.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_ident.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_item.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_iterators.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_lit.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_meta.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_parse_buffer.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_parse_quote.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_parse_stream.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_pat.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_path.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_precedence.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_receiver.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_round_trip.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_shebang.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_size.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_stmt.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_token_trees.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_ty.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/test_visibility.rs
create mode 100644 rust/hw/char/pl011/vendor/syn/tests/zzz_stable.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/LICENSE-UNICODE
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/README.md
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/benches/xid.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/meson.build
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/src/tables.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/tests/compare.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/tests/fst/mod.rs
create mode 100644
rust/hw/char/pl011/vendor/unicode-ident/tests/fst/xid_continue.fst
create mode 100644
rust/hw/char/pl011/vendor/unicode-ident/tests/fst/xid_start.fst
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/tests/roaring/mod.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/tests/static_size.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/tests/tables/mod.rs
create mode 100644
rust/hw/char/pl011/vendor/unicode-ident/tests/tables/tables.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/tests/trie/mod.rs
create mode 100644 rust/hw/char/pl011/vendor/unicode-ident/tests/trie/trie.rs
create mode 100644 rust/hw/char/pl011/vendor/version_check/.cargo-checksum.json
create mode 100644 rust/hw/char/pl011/vendor/version_check/Cargo.toml
create mode 100644 rust/hw/char/pl011/vendor/version_check/LICENSE-APACHE
create mode 100644 rust/hw/char/pl011/vendor/version_check/LICENSE-MIT
create mode 100644 rust/hw/char/pl011/vendor/version_check/README.md
create mode 100644 rust/hw/char/pl011/vendor/version_check/meson.build
create mode 100644 rust/hw/char/pl011/vendor/version_check/src/channel.rs
create mode 100644 rust/hw/char/pl011/vendor/version_check/src/date.rs
create mode 100644 rust/hw/char/pl011/vendor/version_check/src/lib.rs
create mode 100644 rust/hw/char/pl011/vendor/version_check/src/version.rs
diff --git a/rust/hw/char/pl011/vendor/either/README.rst b/rust/hw/char/pl011/vendor/either/README.rst
new file mode 100644
index 0000000000..659257fdcd
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/README.rst
@@ -0,0 +1,185 @@
+
+Either
+======
+
+The enum ``Either`` with variants ``Left`` and ``Right`` and trait
+implementations including Iterator, Read, Write.
+
+Either has methods that are similar to Option and Result.
+
+Includes convenience macros ``try_left!()`` and ``try_right!()`` to use for
+short-circuiting logic.
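+
+A minimal usage sketch (illustrative only, and assuming nothing beyond the
+``Left``/``Right`` variants and the ``Iterator`` support described above)::
+
+    use either::Either;
+
+    // Pick one of two iterator types at run time; because both sides yield
+    // i32, the Either value itself implements Iterator.
+    fn digits(reversed: bool) -> impl Iterator<Item = i32> {
+        if reversed {
+            Either::Left((1..=3).rev())
+        } else {
+            Either::Right(1..=3)
+        }
+    }
+
+    fn main() {
+        assert_eq!(digits(true).collect::<Vec<_>>(), vec![3, 2, 1]);
+        assert_eq!(digits(false).collect::<Vec<_>>(), vec![1, 2, 3]);
+    }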
+
+Please read the `API documentation here`__
+
+__ https://docs.rs/either/
+
+|build_status|_ |crates|_
+
+.. |build_status| image:: https://github.com/rayon-rs/either/workflows/CI/badge.svg?branch=main
+.. _build_status: https://github.com/rayon-rs/either/actions
+
+.. |crates| image:: https://img.shields.io/crates/v/either.svg
+.. _crates: https://crates.io/crates/either
+
+How to use with cargo::
+
+ [dependencies]
+ either = "1.12"
+
+
+Recent Changes
+--------------
+
+- 1.12.0
+
+ - **MSRV**: ``either`` now requires Rust 1.37 or later.
+
+ - Specialize ``nth_back`` for ``Either`` and ``IterEither``, by @cuviper
(#106)
+
+- 1.11.0
+
+ - Add new trait ``IntoEither`` that is useful to convert to ``Either`` in
method chains,
+ by @SFM61319 (#101)
+
+- 1.10.0
+
+ - Add new methods ``.factor_iter()``, ``.factor_iter_mut()``, and
``.factor_into_iter()``
+ that return ``Either`` items, plus ``.iter()`` and ``.iter_mut()`` to
convert to direct
+ reference iterators; by @aj-bagwell and @cuviper (#91)
+
+- 1.9.0
+
+ - Add new methods ``.map_either()`` and ``.map_either_with()``, by
@nasadorian (#82)
+
+- 1.8.1
+
+ - Clarified that the multiple licenses are combined with OR.
+
+- 1.8.0
+
+ - **MSRV**: ``either`` now requires Rust 1.36 or later.
+
+ - Add new methods ``.as_pin_ref()`` and ``.as_pin_mut()`` to project a
+ pinned ``Either`` as inner ``Pin`` variants, by @cuviper (#77)
+
+ - Implement the ``Future`` trait, by @cuviper (#77)
+
+ - Specialize more methods of the ``io`` traits, by @Kixunil and @cuviper
(#75)
+
+- 1.7.0
+
+ - **MSRV**: ``either`` now requires Rust 1.31 or later.
+
+ - Export the macro ``for_both!``, by @thomaseizinger (#58)
+
+ - Implement the ``io::Seek`` trait, by @Kerollmops (#60)
+
+ - Add new method ``.either_into()`` for ``Into`` conversion, by
@TonalidadeHidrica (#63)
+
+ - Add new methods ``.factor_ok()``, ``.factor_err()``, and
``.factor_none()``,
+ by @zachs18 (#67)
+
+ - Specialize ``source`` in the ``Error`` implementation, by @thomaseizinger
(#69)
+
+ - Specialize more iterator methods and implement the ``FusedIterator`` trait,
+ by @Ten0 (#66) and @cuviper (#71)
+
+ - Specialize ``Clone::clone_from``, by @cuviper (#72)
+
+- 1.6.1
+
+ - Add new methods ``.expect_left()``, ``.unwrap_left()``,
+ and equivalents on the right, by @spenserblack (#51)
+
+- 1.6.0
+
+ - Add new modules ``serde_untagged`` and ``serde_untagged_optional`` to
customize
+ how ``Either`` fields are serialized in other types, by @MikailBag (#49)
+
+- 1.5.3
+
+ - Add new method ``.map()`` for ``Either<T, T>`` by @nvzqz (#40).
+
+- 1.5.2
+
+ - Add new methods ``.left_or()``, ``.left_or_default()``,
``.left_or_else()``,
+ and equivalents on the right, by @DCjanus (#36)
+
+- 1.5.1
+
+ - Add ``AsRef`` and ``AsMut`` implementations for common unsized types:
+ ``str``, ``[T]``, ``CStr``, ``OsStr``, and ``Path``, by @mexus (#29)
+
+- 1.5.0
+
+ - Add new methods ``.factor_first()``, ``.factor_second()`` and
``.into_inner()``
+ by @mathstuf (#19)
+
+- 1.4.0
+
+ - Add inherent method ``.into_iter()`` by @cuviper (#12)
+
+- 1.3.0
+
+ - Add opt-in serde support by @hcpl
+
+- 1.2.0
+
+ - Add method ``.either_with()`` by @Twey (#13)
+
+- 1.1.0
+
+ - Add methods ``left_and_then``, ``right_and_then`` by @rampantmonkey
+ - Include license files in the repository and released crate
+
+- 1.0.3
+
+ - Add crate categories
+
+- 1.0.2
+
+ - Forward more ``Iterator`` methods
+ - Implement ``Extend`` for ``Either<L, R>`` if ``L, R`` do.
+
+- 1.0.1
+
+ - Fix ``Iterator`` impl for ``Either`` to forward ``.fold()``.
+
+- 1.0.0
+
+ - Add default crate feature ``use_std`` so that you can opt out of linking to
+ std.
+
+- 0.1.7
+
+ - Add methods ``.map_left()``, ``.map_right()`` and ``.either()``.
+ - Add more documentation
+
+- 0.1.3
+
+ - Implement Display, Error
+
+- 0.1.2
+
+ - Add macros ``try_left!`` and ``try_right!``.
+
+- 0.1.1
+
+ - Implement Deref, DerefMut
+
+- 0.1.0
+
+ - Initial release
+ - Support Iterator, Read, Write
+
+License
+-------
+
+Dual-licensed to be compatible with the Rust project.
+
+Licensed under the Apache License, Version 2.0
+https://www.apache.org/licenses/LICENSE-2.0 or the MIT license
+https://opensource.org/licenses/MIT, at your
+option. This file may not be copied, modified, or distributed
+except according to those terms.
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/.cargo-checksum.json b/rust/hw/char/pl011/vendor/arbitrary-int/.cargo-checksum.json
new file mode 100644
index 0000000000..39c2d4d0e0
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"d34e39d5bd6b0ba740cae9b7afe9fdf73ae1bedc080de338d238ef577cffe963","Cargo.toml":"0a410a8ab28d72b00c04feeb289be7b725347732443cee6e1a91fb3f193e907b","LICENSE.txt":"6982f0cd109b04512cbb5f0e0f0ef82154f33a57d2127afe058ecc72039ab88c","README.md":"c3ee6e3ec5365bd9f6daddacf2b49204d7d777d09afe896b57451bb0365bea21","src/lib.rs":"9bda88688cfebe72e386d9fbb0bd4570a7631ccc20eef58a0e14b6aadd4724ea","tests/tests.rs":"116002067e9b697d4f22b5f28f23363ade2ed9dd6b59661388272f7c6d4b20f1"},"package":"c84fc003e338a6f69fbd4f7fe9f92b535ff13e9af8997f3b14b6ddff8b1df46d"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/CHANGELOG.md b/rust/hw/char/pl011/vendor/arbitrary-int/CHANGELOG.md
new file mode 100644
index 0000000000..a31fa94c96
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/CHANGELOG.md
@@ -0,0 +1,47 @@
+# Changelog
+
+## arbitrary-int 1.2.7
+
+### Added
+
+- Support `Step` so that arbitrary-int can be used in a range expression, e.g.
`for n in u3::MIN..=u3::MAX { println!("{n}") }`. Note this trait is currently
unstable, and so is only usable in nightly. Enable this feature with
`step_trait`.
+- Support formatting via [defmt](https://crates.io/crates/defmt). Enable the
option `defmt` feature
+- Support serializing and deserializing via
[serde](https://crates.io/crates/serde). Enable the option `serde` feature
+- Support `Mul`, `MulAssign`, `Div`, `DivAssign`
+- The following new methods were implemented to make arbitrary ints feel more
like built-in types:
+ * `wrapping_add`, `wrapping_sub`, `wrapping_mul`, `wrapping_div`,
`wrapping_shl`, `wrapping_shr`
+ * `saturating_add`, `saturating_sub`, `saturating_mul`, `saturating_div`,
`saturating_pow`
+ * `checked_add`, `checked_sub`, `checked_mul`, `checked_div`,
`checked_shl`, `checked_shr`
+ * `overflowing_add`, `overflowing_sub`, `overflowing_mul`,
`overflowing_div`, `overflowing_shl`, `overflowing_shr`
+
+### Changed
+- In debug builds, `<<` (`Shl`, `ShlAssign`) and `>>` (`Shr`, `ShrAssign`) now
bounds-check the shift amount using the same semantics as built-in shifts. For
example, shifting a u5 by 5 or more bits will now panic as expected.
+
+## arbitrary-int 1.2.6
+
+### Added
+
+- Support `LowerHex`, `UpperHex`, `Octal`, `Binary` so that arbitrary-int can
be printed via e.g. `format!("{:x}", u4::new(12))`
+- Support `Hash` so that arbitrary-int can be used in hash tables
+
+### Changed
+
+- As support for `[const_trait]` has recently been removed from structs like
`From<T>` in upstream Rust, opting-in to the `nightly` feature no longer
enables this behavior as that would break the build. To continue using this
feature with older compiler versions, use `const_convert_and_const_trait_impl`
instead.
+
+## arbitrary-int 1.2.5
+
+### Added
+
+- Types that can be expressed as full bytes (e.g. u24, u48) have the following
new methods:
+ * `swap_bytes()`
+ * `to_le_bytes()`
+ * `to_be_bytes()`
+ * `to_ne_bytes()`
+ * `to_be()`
+ * `to_le()`
+
+### Changed
+
+- `#[inline]` is specified in more places
+
+### Fixed
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/Cargo.toml b/rust/hw/char/pl011/vendor/arbitrary-int/Cargo.toml
new file mode 100644
index 0000000000..810071d602
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/Cargo.toml
@@ -0,0 +1,54 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "arbitrary-int"
+version = "1.2.7"
+authors = ["Daniel Lehmann <danlehmannmuc@gmail.com>"]
+description = "Modern and lightweight implementation of u2, u3, u4, ..., u127."
+readme = "README.md"
+keywords = [
+ "integer",
+ "unaligned",
+ "misaligned",
+]
+categories = [
+ "embedded",
+ "no-std",
+ "data-structures",
+]
+license = "MIT"
+repository = "https://github.com/danlehmann/arbitrary-int"
+
+[dependencies.defmt]
+version = "0.3.5"
+optional = true
+
+[dependencies.num-traits]
+version = "0.2.17"
+optional = true
+default-features = false
+
+[dependencies.serde]
+version = "1.0"
+optional = true
+default-features = false
+
+[dev-dependencies.serde_test]
+version = "1.0"
+
+[features]
+const_convert_and_const_trait_impl = []
+defmt = ["dep:defmt"]
+serde = ["dep:serde"]
+std = []
+step_trait = []
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/LICENSE.txt b/rust/hw/char/pl011/vendor/arbitrary-int/LICENSE.txt
new file mode 100644
index 0000000000..eb8c29c461
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 Daniel Lehmann
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/README.md
b/rust/hw/char/pl011/vendor/arbitrary-int/README.md
new file mode 100644
index 0000000000..d34676fd93
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/README.md
@@ -0,0 +1,72 @@
+# arbitrary-int
+
+This crate implements arbitrary-width integers for Rust. Once included, you can
+use types like `u5` or `u120`.
+
+## Why yet another arbitrary integer crate?
+
+There are quite a few similar crates to this one (the most famous being
+https://crates.io/crates/ux). After trying out a few of them, I realized that
+they are all very heavy: they generate a large number of types and take seconds
+to compile.
+
+This crate is designed to be very short, using const generics. Instead of
+introducing ~123 new structs, this crate only introduces 5 (one backed by each
+of `u8`, `u16`, `u32`, `u64`, `u128`) and uses const generics for the specific
+bit depth. It does introduce 123 new type aliases (`u1`, `u2`, etc.), but these
+don't stress the compiler nearly as much.
+
+Additionally, most of its functions are const, so that they can be used in
const contexts.
+
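+For instance (a minimal sketch mirroring the crate's own tests), a value can be
+constructed and read back inside a `const` item:
+
+```rust
+const LIMIT: u8 = u7::new(127).value();
+```
+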
+## How to use
+
+Unlike primitive data types like `u32`, there is no built-in literal syntax for
+these types (Rust does not allow that). An instance is created as follows:
+
+```rust
+let value9 = u9::new(30);
+```
+
+This will create a value with 9 bits. If the value passed into `new()` doesn't
+fit, `new()` panics. This means that a function that accepts a `u9` as an
+argument can be certain that its contents never exceed the range of a `u9`.
+
+Standard operators are all overloaded, so it is possible to perform calculations
+using this type. Note that addition and subtraction (at least in debug mode)
+perform bounds checks. If this is undesired, see the num-traits section below.
+
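+For example (illustrative, reusing values from the crate's test suite):
+
+```rust
+let sum = u9::new(500) + u9::new(11);   // u9::new(511)
+// u9::new(500) + u9::new(40) would panic in a debug build (result exceeds 511)
+```
+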
+Internally, `u9` holds its data in a `u16`. It is possible to get this value:
+
+```rust
+let value9 = u9::new(30).value();
+```
+
+## Underlying data type
+
+This crate defines types `u1`, `u2`, ..., `u126`, `u127` (skipping the normal
+`u8`, `u16`, `u32`, `u64`, `u128`). Each of those types holds its actual data in
+the next larger data type (e.g. a `u14` internally has a `u16`, a `u120`
+internally has a `u128`). However, the `uXX` names are just type aliases; it is
+also possible to use the actual underlying generic struct:
+
+```rust
+let a = UInt::<u8, 5>::new(0b10101);
+let b = UInt::<u32, 5>::new(0b10101);
+```
+
+In this example, `a` will have 5 bits and be represented by a `u8`. This is
identical to `u5`. `b` however is represented by a
+`u32`, so it is a different type from `u5`.
+
+## Extract
+
+A common way to obtain arbitrary-width integers is to extract them from
+bitfields. For example, if `data` contains 32 bits and we want to extract bits
+`4..=9`, we could perform the following:
+
+```rust
+let a = u6::new(((data >> 4) & 0b111111) as u8);
+```
+
+This is a pretty common operation, but it's easy to get wrong: the number of 1s
+in the mask has to match the width of `u6`. Also, `new()` internally performs a
+bounds check, which can panic. Thirdly, a type cast is often needed.
+To make this easier, various extract methods exist that handle shifting and
masking, for example:
+
+```rust
+let a = u6::extract_u32(data, 4);
+let b = u12::extract_u128(data2, 63);
+```
+
+## num-traits
+
+By default, arbitrary-int doesn't depend on any other crates. It has optional
+support for num-traits, however: it implements `WrappingAdd` and `WrappingSub`,
+which (unlike the regular addition and subtraction operators) don't perform
+bounds checks.
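+
+A minimal sketch of how that looks (mirroring the crate's test suite, with the
+optional `num-traits` feature enabled):
+
+```rust
+let a = num_traits::WrappingAdd::wrapping_add(&u7::new(120), &u7::new(10)); // u7::new(2)
+let b = num_traits::WrappingSub::wrapping_sub(&u7::new(15), &u7::new(20));  // u7::new(123)
+```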
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/meson.build
b/rust/hw/char/pl011/vendor/arbitrary-int/meson.build
new file mode 100644
index 0000000000..e02139a5bc
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/meson.build
@@ -0,0 +1,14 @@
+_arbitrary_int_rs = static_library(
+ 'arbitrary_int',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2021',
+ ],
+ dependencies: [],
+)
+
+dep_arbitrary_int = declare_dependency(
+ link_with: _arbitrary_int_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/src/lib.rs
b/rust/hw/char/pl011/vendor/arbitrary-int/src/lib.rs
new file mode 100644
index 0000000000..4c2b9c3997
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/src/lib.rs
@@ -0,0 +1,1489 @@
+#![cfg_attr(not(feature = "std"), no_std)]
+#![cfg_attr(
+ feature = "const_convert_and_const_trait_impl",
+ feature(const_convert, const_trait_impl)
+)]
+#![cfg_attr(feature = "step_trait", feature(step_trait))]
+
+use core::fmt::{Binary, Debug, Display, Formatter, LowerHex, Octal, UpperHex};
+use core::hash::{Hash, Hasher};
+#[cfg(feature = "step_trait")]
+use core::iter::Step;
+#[cfg(feature = "num-traits")]
+use core::num::Wrapping;
+use core::ops::{
+ Add, AddAssign, BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor,
BitXorAssign, Div, DivAssign,
+ Mul, MulAssign, Not, Shl, ShlAssign, Shr, ShrAssign, Sub, SubAssign,
+};
+#[cfg(feature = "serde")]
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct TryNewError;
+
+impl Display for TryNewError {
+ fn fmt(&self, f: &mut Formatter) -> core::fmt::Result {
+ write!(f, "Value too large to fit within this integer type")
+ }
+}
+
+#[cfg_attr(feature = "const_convert_and_const_trait_impl", const_trait)]
+pub trait Number: Sized {
+ type UnderlyingType: Debug
+ + From<u8>
+ + TryFrom<u16>
+ + TryFrom<u32>
+ + TryFrom<u64>
+ + TryFrom<u128>;
+
+ /// Number of bits that can fit in this type
+ const BITS: usize;
+
+ /// Minimum value that can be represented by this type
+ const MIN: Self;
+
+ /// Maximum value that can be represented by this type
+ const MAX: Self;
+
+ fn new(value: Self::UnderlyingType) -> Self;
+
+ fn try_new(value: Self::UnderlyingType) -> Result<Self, TryNewError>;
+
+ fn value(self) -> Self::UnderlyingType;
+}
+
+#[cfg(feature = "const_convert_and_const_trait_impl")]
+macro_rules! impl_number_native {
+ ($( $type:ty ),+) => {
+ $(
+ impl const Number for $type {
+ type UnderlyingType = $type;
+ const BITS: usize = Self::BITS as usize;
+ const MIN: Self = Self::MIN;
+ const MAX: Self = Self::MAX;
+
+ #[inline]
+ fn new(value: Self::UnderlyingType) -> Self { value }
+
+ #[inline]
+ fn try_new(value: Self::UnderlyingType) -> Result<Self,
TryNewError> { Ok(value) }
+
+ #[inline]
+ fn value(self) -> Self::UnderlyingType { self }
+ }
+ )+
+ };
+}
+
+#[cfg(not(feature = "const_convert_and_const_trait_impl"))]
+macro_rules! impl_number_native {
+ ($( $type:ty ),+) => {
+ $(
+ impl Number for $type {
+ type UnderlyingType = $type;
+ const BITS: usize = Self::BITS as usize;
+ const MIN: Self = Self::MIN;
+ const MAX: Self = Self::MAX;
+
+ #[inline]
+ fn new(value: Self::UnderlyingType) -> Self { value }
+
+ #[inline]
+ fn try_new(value: Self::UnderlyingType) -> Result<Self,
TryNewError> { Ok(value) }
+
+ #[inline]
+ fn value(self) -> Self::UnderlyingType { self }
+ }
+ )+
+ };
+}
+
+impl_number_native!(u8, u16, u32, u64, u128);
+
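+// Compile-time bounds-check helper: referencing SMALLER_OR_EQUAL (e.g. via
+// `let _ = CompileTimeAssert::<A, B>::SMALLER_OR_EQUAL;`) forces the assert!
+// to be evaluated at compile time; widen() and the From impls below rely on it.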
+struct CompileTimeAssert<const A: usize, const B: usize> {}
+
+impl<const A: usize, const B: usize> CompileTimeAssert<A, B> {
+ pub const SMALLER_OR_EQUAL: () = {
+ assert!(A <= B);
+ };
+}
+
+#[derive(Copy, Clone, Eq, PartialEq, Default, Ord, PartialOrd)]
+pub struct UInt<T, const BITS: usize> {
+ value: T,
+}
+
+impl<T: Copy, const BITS: usize> UInt<T, BITS> {
+ pub const BITS: usize = BITS;
+
+ /// Returns the type as a fundamental data type
+ #[inline]
+ pub const fn value(self) -> T {
+ self.value
+ }
+
+ /// Initializes a new value without checking the bounds
+ ///
+ /// # Safety
+    /// Must only be called with a value less than or equal to the
+    /// [Self::MAX](Self::MAX) value.
+ #[inline]
+ pub const unsafe fn new_unchecked(value: T) -> Self {
+ Self { value }
+ }
+}
+
+impl<T, const BITS: usize> UInt<T, BITS>
+where
+ Self: Number,
+ T: Copy,
+{
+ pub const MASK: T = Self::MAX.value;
+}
+
+// Next are specific implementations for u8, u16, u32, u64 and u128. A couple of notes:
+// - The existence of MAX also serves as a neat bounds check for BITS: if BITS is too
+//   large, the subtraction overflows, which fails to compile. This simplifies things a
+//   lot. However, that only works if every constructor also uses MAX somehow
+//   (doing `let _ = MAX;` is enough).
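+//   For example, UInt::<u8, 9> is rejected: computing MAX evaluates
+//   u8::MAX >> (8 - 9), and the underflowing subtraction fails const evaluation
+//   as soon as MAX is referenced (which new(), try_new() and extract_*() all do).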
+
+#[cfg(feature = "const_convert_and_const_trait_impl")]
+macro_rules! uint_impl_num {
+ ($($type:ident),+) => {
+ $(
+ impl<const BITS: usize> const Number for UInt<$type, BITS> {
+ type UnderlyingType = $type;
+
+ const BITS: usize = BITS;
+
+ const MIN: Self = Self { value: 0 };
+
+ // The existence of MAX also serves as a bounds check: If
NUM_BITS is > available bits,
+ // we will get a compiler error right here
+ const MAX: Self = Self { value: (<$type as Number>::MAX >>
(<$type as Number>::BITS - Self::BITS)) };
+
+ #[inline]
+ fn try_new(value: Self::UnderlyingType) -> Result<Self,
TryNewError> {
+ if value <= Self::MAX.value {
+ Ok(Self { value })
+ } else {
+ Err(TryNewError{})
+ }
+ }
+
+ #[inline]
+ fn new(value: $type) -> Self {
+ assert!(value <= Self::MAX.value);
+
+ Self { value }
+ }
+
+ #[inline]
+ fn value(self) -> $type {
+ self.value
+ }
+ }
+ )+
+ };
+}
+
+#[cfg(not(feature = "const_convert_and_const_trait_impl"))]
+macro_rules! uint_impl_num {
+ ($($type:ident),+) => {
+ $(
+ impl<const BITS: usize> Number for UInt<$type, BITS> {
+ type UnderlyingType = $type;
+
+ const BITS: usize = BITS;
+
+ const MIN: Self = Self { value: 0 };
+
+ // The existence of MAX also serves as a bounds check: If
NUM_BITS is > available bits,
+ // we will get a compiler error right here
+ const MAX: Self = Self { value: (<$type as Number>::MAX >>
(<$type as Number>::BITS - Self::BITS)) };
+
+ #[inline]
+ fn try_new(value: Self::UnderlyingType) -> Result<Self,
TryNewError> {
+ if value <= Self::MAX.value {
+ Ok(Self { value })
+ } else {
+ Err(TryNewError{})
+ }
+ }
+
+ #[inline]
+ fn new(value: $type) -> Self {
+ assert!(value <= Self::MAX.value);
+
+ Self { value }
+ }
+
+ #[inline]
+ fn value(self) -> $type {
+ self.value
+ }
+ }
+ )+
+ };
+}
+
+uint_impl_num!(u8, u16, u32, u64, u128);
+
+macro_rules! uint_impl {
+ ($($type:ident),+) => {
+ $(
+ impl<const BITS: usize> UInt<$type, BITS> {
+ /// Creates an instance. Panics if the given value is outside
of the valid range
+ #[inline]
+ pub const fn new(value: $type) -> Self {
+ assert!(value <= Self::MAX.value);
+
+ Self { value }
+ }
+
+ /// Creates an instance or an error if the given value is
outside of the valid range
+ #[inline]
+ pub const fn try_new(value: $type) -> Result<Self,
TryNewError> {
+ if value <= Self::MAX.value {
+ Ok(Self { value })
+ } else {
+ Err(TryNewError {})
+ }
+ }
+
+ #[deprecated(note = "Use one of the specific functions like
extract_u32")]
+ pub const fn extract(value: $type, start_bit: usize) -> Self {
+ assert!(start_bit + BITS <= $type::BITS as usize);
+ // Query MAX to ensure that we get a compiler error if the
current definition is bogus (e.g. <u8, 9>)
+ let _ = Self::MAX;
+
+ Self {
+ value: (value >> start_bit) & Self::MAX.value,
+ }
+ }
+
+ /// Extracts bits from a given value. The extract is
equivalent to: `new((value >> start_bit) & MASK)`
+ /// Unlike new, extract doesn't perform range-checking so it
is slightly more efficient.
+ /// panics if start_bit+<number of bits> doesn't fit within an
u8, e.g. u5::extract_u8(8, 4);
+ #[inline]
+ pub const fn extract_u8(value: u8, start_bit: usize) -> Self {
+ assert!(start_bit + BITS <= 8);
+ // Query MAX to ensure that we get a compiler error if the
current definition is bogus (e.g. <u8, 9>)
+ let _ = Self::MAX;
+
+ Self {
+ value: ((value >> start_bit) as $type) &
Self::MAX.value,
+ }
+ }
+
+ /// Extracts bits from a given value. The extract is
equivalent to: `new((value >> start_bit) & MASK)`
+ /// Unlike new, extract doesn't perform range-checking so it
is slightly more efficient
+ /// panics if start_bit+<number of bits> doesn't fit within a
u16, e.g. u15::extract_u16(8, 2);
+ #[inline]
+ pub const fn extract_u16(value: u16, start_bit: usize) -> Self
{
+ assert!(start_bit + BITS <= 16);
+ // Query MAX to ensure that we get a compiler error if the
current definition is bogus (e.g. <u8, 9>)
+ let _ = Self::MAX;
+
+ Self {
+ value: ((value >> start_bit) as $type) &
Self::MAX.value,
+ }
+ }
+
+ /// Extracts bits from a given value. The extract is
equivalent to: `new((value >> start_bit) & MASK)`
+ /// Unlike new, extract doesn't perform range-checking so it
is slightly more efficient
+ /// panics if start_bit+<number of bits> doesn't fit within a
u32, e.g. u30::extract_u32(8, 4);
+ #[inline]
+ pub const fn extract_u32(value: u32, start_bit: usize) -> Self
{
+ assert!(start_bit + BITS <= 32);
+ // Query MAX to ensure that we get a compiler error if the
current definition is bogus (e.g. <u8, 9>)
+ let _ = Self::MAX;
+
+ Self {
+ value: ((value >> start_bit) as $type) &
Self::MAX.value,
+ }
+ }
+
+ /// Extracts bits from a given value. The extract is
equivalent to: `new((value >> start_bit) & MASK)`
+ /// Unlike new, extract doesn't perform range-checking so it
is slightly more efficient
+ /// panics if start_bit+<number of bits> doesn't fit within a
u64, e.g. u60::extract_u64(8, 5);
+ #[inline]
+ pub const fn extract_u64(value: u64, start_bit: usize) -> Self
{
+ assert!(start_bit + BITS <= 64);
+ // Query MAX to ensure that we get a compiler error if the
current definition is bogus (e.g. <u8, 9>)
+ let _ = Self::MAX;
+
+ Self {
+ value: ((value >> start_bit) as $type) &
Self::MAX.value,
+ }
+ }
+
+ /// Extracts bits from a given value. The extract is
equivalent to: `new((value >> start_bit) & MASK)`
+ /// Unlike new, extract doesn't perform range-checking so it
is slightly more efficient
+            /// panics if start_bit+<number of bits> doesn't fit within a
+            /// u128, e.g. u120::extract_u128(8, 9);
+ #[inline]
+ pub const fn extract_u128(value: u128, start_bit: usize) ->
Self {
+ assert!(start_bit + BITS <= 128);
+ // Query MAX to ensure that we get a compiler error if the
current definition is bogus (e.g. <u8, 9>)
+ let _ = Self::MAX;
+
+ Self {
+ value: ((value >> start_bit) as $type) &
Self::MAX.value,
+ }
+ }
+
+ /// Returns a UInt with a wider bit depth but with the same
base data type
+ pub const fn widen<const BITS_RESULT: usize>(
+ self,
+ ) -> UInt<$type, BITS_RESULT> {
+ let _ = CompileTimeAssert::<BITS,
BITS_RESULT>::SMALLER_OR_EQUAL;
+ // Query MAX of the result to ensure we get a compiler
error if the current definition is bogus (e.g. <u8, 9>)
+ let _ = UInt::<$type, BITS_RESULT>::MAX;
+ UInt::<$type, BITS_RESULT> { value: self.value }
+ }
+
+ pub const fn wrapping_add(self, rhs: Self) -> Self {
+ let sum = self.value.wrapping_add(rhs.value);
+ Self {
+ value: sum & Self::MASK,
+ }
+ }
+
+ pub const fn wrapping_sub(self, rhs: Self) -> Self {
+ let sum = self.value.wrapping_sub(rhs.value);
+ Self {
+ value: sum & Self::MASK,
+ }
+ }
+
+ pub const fn wrapping_mul(self, rhs: Self) -> Self {
+ let sum = self.value.wrapping_mul(rhs.value);
+ Self {
+ value: sum & Self::MASK,
+ }
+ }
+
+ pub const fn wrapping_div(self, rhs: Self) -> Self {
+ let sum = self.value.wrapping_div(rhs.value);
+ Self {
+ // No need to mask here - divisions always produce a
result that is <= self
+ value: sum,
+ }
+ }
+
+ pub const fn wrapping_shl(self, rhs: u32) -> Self {
+ // modulo is expensive on some platforms, so only do it
when necessary
+ let shift_amount = if rhs >= (BITS as u32) {
+ rhs % (BITS as u32)
+ } else {
+ rhs
+ };
+
+ Self {
+                    // We could use wrapping_shl here to make Debug builds
+                    // slightly smaller; the downside would be that on weird
+                    // CPUs that don't do wrapping_shl by default, release
+                    // builds would get slightly worse. Using << should give
+                    // good release performance everywhere.
+ value: (self.value << shift_amount) & Self::MASK,
+ }
+ }
+
+ pub const fn wrapping_shr(self, rhs: u32) -> Self {
+ // modulo is expensive on some platforms, so only do it
when necessary
+ let shift_amount = if rhs >= (BITS as u32) {
+ rhs % (BITS as u32)
+ } else {
+ rhs
+ };
+
+ Self {
+ value: (self.value >> shift_amount),
+ }
+ }
+
+ pub const fn saturating_add(self, rhs: Self) -> Self {
+ let saturated = if core::mem::size_of::<$type>() << 3 ==
BITS {
+                    // We are something like a UInt::<u8, 8>. We can fall back
+                    // to the base implementation.
+ self.value.saturating_add(rhs.value)
+ } else {
+ // We're dealing with fewer bits than the underlying
type (e.g. u7).
+ // That means the addition can never overflow the
underlying type
+ let sum = self.value.wrapping_add(rhs.value);
+ let max = Self::MAX.value();
+ if sum > max { max } else { sum }
+ };
+ Self {
+ value: saturated,
+ }
+ }
+
+ pub const fn saturating_sub(self, rhs: Self) -> Self {
+ // For unsigned numbers, the only difference is when we
reach 0 - which is the same
+ // no matter the data size
+ Self {
+ value: self.value.saturating_sub(rhs.value),
+ }
+ }
+
+ pub const fn saturating_mul(self, rhs: Self) -> Self {
+ let product = if BITS << 1 <=
(core::mem::size_of::<$type>() << 3) {
+ // We have half the bits (e.g. u4 * u4) of the base
type, so we can't overflow the base type
+ // wrapping_mul likely provides the best performance
on all cpus
+ self.value.wrapping_mul(rhs.value)
+ } else {
+ // We have more than half the bits (e.g. u6 * u6)
+ self.value.saturating_mul(rhs.value)
+ };
+
+ let max = Self::MAX.value();
+ let saturated = if product > max { max } else { product };
+ Self {
+ value: saturated,
+ }
+ }
+
+ pub const fn saturating_div(self, rhs: Self) -> Self {
+                // When dividing unsigned numbers, we never need to saturate.
+                // Division by zero in saturating_div panics (in both debug and
+                // release mode), so no need to do anything special there either.
+ Self {
+ value: self.value.saturating_div(rhs.value),
+ }
+ }
+
+ pub const fn saturating_pow(self, exp: u32) -> Self {
+                // It might be possible to hand-write this to be slightly
+                // faster, as saturating_pow already does a bounds check and
+                // then we do a second one.
+ let powed = self.value.saturating_pow(exp);
+ let max = Self::MAX.value();
+ let saturated = if powed > max { max } else { powed };
+ Self {
+ value: saturated,
+ }
+ }
+
+ pub const fn checked_add(self, rhs: Self) -> Option<Self> {
+ if core::mem::size_of::<$type>() << 3 == BITS {
+                    // We are something like a UInt::<u8, 8>. We can fall back
+                    // to the base implementation.
+ match self.value.checked_add(rhs.value) {
+ Some(value) => Some(Self { value }),
+ None => None
+ }
+ } else {
+ // We're dealing with fewer bits than the underlying
type (e.g. u7).
+ // That means the addition can never overflow the
underlying type
+ let sum = self.value.wrapping_add(rhs.value);
+ if sum > Self::MAX.value() { None } else { Some(Self {
value: sum })}
+ }
+ }
+
+ pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
+ match self.value.checked_sub(rhs.value) {
+ Some(value) => Some(Self { value }),
+ None => None
+ }
+ }
+
+ pub const fn checked_mul(self, rhs: Self) -> Option<Self> {
+ let product = if BITS << 1 <=
(core::mem::size_of::<$type>() << 3) {
+ // We have half the bits (e.g. u4 * u4) of the base
type, so we can't overflow the base type
+ // wrapping_mul likely provides the best performance
on all cpus
+ Some(self.value.wrapping_mul(rhs.value))
+ } else {
+ // We have more than half the bits (e.g. u6 * u6)
+ self.value.checked_mul(rhs.value)
+ };
+
+ match product {
+ Some(value) => {
+ if value > Self::MAX.value() {
+ None
+ } else {
+ Some(Self {value})
+ }
+ }
+ None => None
+ }
+ }
+
+ pub const fn checked_div(self, rhs: Self) -> Option<Self> {
+ match self.value.checked_div(rhs.value) {
+ Some(value) => Some(Self { value }),
+ None => None
+ }
+ }
+
+ pub const fn checked_shl(self, rhs: u32) -> Option<Self> {
+ if rhs >= (BITS as u32) {
+ None
+ } else {
+ Some(Self {
+ value: (self.value << rhs) & Self::MASK,
+ })
+ }
+ }
+
+ pub const fn checked_shr(self, rhs: u32) -> Option<Self> {
+ if rhs >= (BITS as u32) {
+ None
+ } else {
+ Some(Self {
+ value: (self.value >> rhs),
+ })
+ }
+ }
+
+ pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) {
+ let (value, overflow) = if core::mem::size_of::<$type>()
<< 3 == BITS {
+                    // We are something like a UInt::<u8, 8>. We can fall back
+                    // to the base implementation.
+ self.value.overflowing_add(rhs.value)
+ } else {
+ // We're dealing with fewer bits than the underlying
type (e.g. u7).
+ // That means the addition can never overflow the
underlying type
+ let sum = self.value.wrapping_add(rhs.value);
+ let masked = sum & Self::MASK;
+ (masked, masked != sum)
+ };
+ (Self { value }, overflow)
+ }
+
+ pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) {
+                // For unsigned numbers, the only difference is when we reach 0,
+                // which is the same no matter the data size. In the case of
+                // overflow we do have to mask the result, though.
+ let (value, overflow) =
self.value.overflowing_sub(rhs.value);
+ (Self { value: value & Self::MASK }, overflow)
+ }
+
+ pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) {
+ let (wrapping_product, overflow) = if BITS << 1 <=
(core::mem::size_of::<$type>() << 3) {
+ // We have half the bits (e.g. u4 * u4) of the base
type, so we can't overflow the base type
+ // wrapping_mul likely provides the best performance
on all cpus
+ self.value.overflowing_mul(rhs.value)
+ } else {
+ // We have more than half the bits (e.g. u6 * u6)
+ self.value.overflowing_mul(rhs.value)
+ };
+
+ let masked = wrapping_product & Self::MASK;
+ let overflow2 = masked != wrapping_product;
+ (Self { value: masked }, overflow || overflow2 )
+ }
+
+ pub const fn overflowing_div(self, rhs: Self) -> (Self, bool) {
+ let value = self.value.wrapping_div(rhs.value);
+ (Self { value }, false )
+ }
+
+ pub const fn overflowing_shl(self, rhs: u32) -> (Self, bool) {
+ if rhs >= (BITS as u32) {
+ (Self { value: self.value << (rhs % (BITS as u32)) },
true)
+ } else {
+ (Self { value: self.value << rhs }, false)
+ }
+ }
+
+ pub const fn overflowing_shr(self, rhs: u32) -> (Self, bool) {
+ if rhs >= (BITS as u32) {
+ (Self { value: self.value >> (rhs % (BITS as u32)) },
true)
+ } else {
+ (Self { value: self.value >> rhs }, false)
+ }
+ }
+
+ /// Reverses the order of bits in the integer. The least
significant bit becomes the most significant bit, second least-significant bit
becomes second most-significant bit, etc.
+ pub const fn reverse_bits(self) -> Self {
+ let shift_right = (core::mem::size_of::<$type>() << 3) -
BITS;
+ Self { value: self.value.reverse_bits() >> shift_right }
+ }
+
+ /// Returns the number of ones in the binary representation of
self.
+ pub const fn count_ones(self) -> u32 {
+ // The upper bits are zero, so we can ignore them
+ self.value.count_ones()
+ }
+
+ /// Returns the number of zeros in the binary representation
of self.
+ pub const fn count_zeros(self) -> u32 {
+                // The upper (filler) bits are always zero, so we have to
+                // subtract them from the result.
+ let filler_bits = ((core::mem::size_of::<$type>() << 3) -
BITS) as u32;
+ self.value.count_zeros() - filler_bits
+ }
+
+ /// Returns the number of leading ones in the binary
representation of self.
+ pub const fn leading_ones(self) -> u32 {
+ let shift = ((core::mem::size_of::<$type>() << 3) - BITS)
as u32;
+ (self.value << shift).leading_ones()
+ }
+
+ /// Returns the number of leading zeros in the binary
representation of self.
+ pub const fn leading_zeros(self) -> u32 {
+ let shift = ((core::mem::size_of::<$type>() << 3) - BITS)
as u32;
+ (self.value << shift).leading_zeros()
+ }
+
+            /// Returns the number of trailing ones in the binary representation of self.
+ pub const fn trailing_ones(self) -> u32 {
+ self.value.trailing_ones()
+ }
+
+            /// Returns the number of trailing zeros in the binary representation of self.
+ pub const fn trailing_zeros(self) -> u32 {
+ self.value.trailing_zeros()
+ }
+
+ /// Shifts the bits to the left by a specified amount, n,
wrapping the truncated bits to the end of the resulting integer.
+ /// Please note this isn't the same operation as the <<
shifting operator!
+ pub const fn rotate_left(self, n: u32) -> Self {
+ let b = BITS as u32;
+ let n = if n >= b { n % b } else { n };
+
+ let moved_bits = (self.value << n) & Self::MASK;
+ let truncated_bits = self.value >> (b - n);
+ Self { value: moved_bits | truncated_bits }
+ }
+
+ /// Shifts the bits to the right by a specified amount, n,
wrapping the truncated bits to the beginning of the resulting integer.
+ /// Please note this isn't the same operation as the >>
shifting operator!
+ pub const fn rotate_right(self, n: u32) -> Self {
+ let b = BITS as u32;
+ let n = if n >= b { n % b } else { n };
+
+ let moved_bits = self.value >> n;
+ let truncated_bits = (self.value << (b - n)) & Self::MASK;
+ Self { value: moved_bits | truncated_bits }
+ }
+ }
+ )+
+ };
+}
+
+uint_impl!(u8, u16, u32, u64, u128);
+
+// Arithmetic implementations
+impl<T, const BITS: usize> Add for UInt<T, BITS>
+where
+ Self: Number,
+ T: PartialEq
+ + Copy
+ + BitAnd<T, Output = T>
+ + Not<Output = T>
+ + Add<T, Output = T>
+ + Sub<T, Output = T>
+ + From<u8>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn add(self, rhs: Self) -> Self::Output {
+ let sum = self.value + rhs.value;
+ #[cfg(debug_assertions)]
+ if (sum & !Self::MASK) != T::from(0) {
+ panic!("attempt to add with overflow");
+ }
+ Self {
+ value: sum & Self::MASK,
+ }
+ }
+}
+
+impl<T, const BITS: usize> AddAssign for UInt<T, BITS>
+where
+ Self: Number,
+ T: PartialEq
+ + Eq
+ + Not<Output = T>
+ + Copy
+ + AddAssign<T>
+ + BitAnd<T, Output = T>
+ + BitAndAssign<T>
+ + From<u8>,
+{
+ fn add_assign(&mut self, rhs: Self) {
+ self.value += rhs.value;
+ #[cfg(debug_assertions)]
+ if (self.value & !Self::MASK) != T::from(0) {
+ panic!("attempt to add with overflow");
+ }
+ self.value &= Self::MASK;
+ }
+}
+
+impl<T, const BITS: usize> Sub for UInt<T, BITS>
+where
+ Self: Number,
+ T: Copy + BitAnd<T, Output = T> + Sub<T, Output = T>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn sub(self, rhs: Self) -> Self::Output {
+ // No need for extra overflow checking as the regular minus operator
already handles it for us
+ Self {
+ value: (self.value - rhs.value) & Self::MASK,
+ }
+ }
+}
+
+impl<T, const BITS: usize> SubAssign for UInt<T, BITS>
+where
+ Self: Number,
+ T: Copy + SubAssign<T> + BitAnd<T, Output = T> + BitAndAssign<T> + Sub<T,
Output = T>,
+{
+ fn sub_assign(&mut self, rhs: Self) {
+ // No need for extra overflow checking as the regular minus operator
already handles it for us
+ self.value -= rhs.value;
+ self.value &= Self::MASK;
+ }
+}
+
+impl<T, const BITS: usize> Mul for UInt<T, BITS>
+where
+ Self: Number,
+ T: PartialEq + Copy + BitAnd<T, Output = T> + Not<Output = T> + Mul<T,
Output = T> + From<u8>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn mul(self, rhs: Self) -> Self::Output {
+ // In debug builds, this will perform two bounds checks: Initial
multiplication, followed by
+ // our bounds check. As wrapping_mul isn't available as a trait bound
(in regular Rust), this
+ // is unavoidable
+ let product = self.value * rhs.value;
+ #[cfg(debug_assertions)]
+ if (product & !Self::MASK) != T::from(0) {
+ panic!("attempt to multiply with overflow");
+ }
+ Self {
+ value: product & Self::MASK,
+ }
+ }
+}
+
+impl<T, const BITS: usize> MulAssign for UInt<T, BITS>
+where
+ Self: Number,
+ T: PartialEq
+ + Eq
+ + Not<Output = T>
+ + Copy
+ + MulAssign<T>
+ + BitAnd<T, Output = T>
+ + BitAndAssign<T>
+ + From<u8>,
+{
+ fn mul_assign(&mut self, rhs: Self) {
+ self.value *= rhs.value;
+ #[cfg(debug_assertions)]
+ if (self.value & !Self::MASK) != T::from(0) {
+ panic!("attempt to multiply with overflow");
+ }
+ self.value &= Self::MASK;
+ }
+}
+
+impl<T, const BITS: usize> Div for UInt<T, BITS>
+where
+ Self: Number,
+ T: PartialEq + Div<T, Output = T>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn div(self, rhs: Self) -> Self::Output {
+ // Integer division can only make the value smaller. And as the result
is same type as
+ // Self, there's no need to range-check or mask
+ Self {
+ value: self.value / rhs.value,
+ }
+ }
+}
+
+impl<T, const BITS: usize> DivAssign for UInt<T, BITS>
+where
+ Self: Number,
+ T: PartialEq + DivAssign<T>,
+{
+ fn div_assign(&mut self, rhs: Self) {
+ self.value /= rhs.value;
+ }
+}
+
+impl<T, const BITS: usize> BitAnd for UInt<T, BITS>
+where
+ Self: Number,
+ T: Copy
+ + BitAnd<T, Output = T>
+ + Sub<T, Output = T>
+ + Shl<usize, Output = T>
+ + Shr<usize, Output = T>
+ + From<u8>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn bitand(self, rhs: Self) -> Self::Output {
+ Self {
+ value: self.value & rhs.value,
+ }
+ }
+}
+
+impl<T, const BITS: usize> BitAndAssign for UInt<T, BITS>
+where
+ T: Copy + BitAndAssign<T> + Sub<T, Output = T> + Shl<usize, Output = T> +
From<u8>,
+{
+ fn bitand_assign(&mut self, rhs: Self) {
+ self.value &= rhs.value;
+ }
+}
+
+impl<T, const BITS: usize> BitOr for UInt<T, BITS>
+where
+ T: Copy + BitOr<T, Output = T> + Sub<T, Output = T> + Shl<usize, Output =
T> + From<u8>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn bitor(self, rhs: Self) -> Self::Output {
+ Self {
+ value: self.value | rhs.value,
+ }
+ }
+}
+
+impl<T, const BITS: usize> BitOrAssign for UInt<T, BITS>
+where
+ T: Copy + BitOrAssign<T> + Sub<T, Output = T> + Shl<usize, Output = T> +
From<u8>,
+{
+ fn bitor_assign(&mut self, rhs: Self) {
+ self.value |= rhs.value;
+ }
+}
+
+impl<T, const BITS: usize> BitXor for UInt<T, BITS>
+where
+ T: Copy + BitXor<T, Output = T> + Sub<T, Output = T> + Shl<usize, Output =
T> + From<u8>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn bitxor(self, rhs: Self) -> Self::Output {
+ Self {
+ value: self.value ^ rhs.value,
+ }
+ }
+}
+
+impl<T, const BITS: usize> BitXorAssign for UInt<T, BITS>
+where
+ T: Copy + BitXorAssign<T> + Sub<T, Output = T> + Shl<usize, Output = T> +
From<u8>,
+{
+ fn bitxor_assign(&mut self, rhs: Self) {
+ self.value ^= rhs.value;
+ }
+}
+
+impl<T, const BITS: usize> Not for UInt<T, BITS>
+where
+ Self: Number,
+ T: Copy
+ + BitAnd<T, Output = T>
+ + BitXor<T, Output = T>
+ + Sub<T, Output = T>
+ + Shl<usize, Output = T>
+ + Shr<usize, Output = T>
+ + From<u8>,
+{
+ type Output = UInt<T, BITS>;
+
+ fn not(self) -> Self::Output {
+ Self {
+ value: self.value ^ Self::MASK,
+ }
+ }
+}
+
+impl<T, TSHIFTBITS, const BITS: usize> Shl<TSHIFTBITS> for UInt<T, BITS>
+where
+ Self: Number,
+ T: Copy
+ + BitAnd<T, Output = T>
+ + Shl<TSHIFTBITS, Output = T>
+ + Sub<T, Output = T>
+ + Shl<usize, Output = T>
+ + Shr<usize, Output = T>
+ + From<u8>,
+ TSHIFTBITS: TryInto<usize> + Copy,
+{
+ type Output = UInt<T, BITS>;
+
+ fn shl(self, rhs: TSHIFTBITS) -> Self::Output {
+ // With debug assertions, the << and >> operators throw an exception
if the shift amount
+ // is larger than the number of bits (in which case the result would
always be 0)
+ #[cfg(debug_assertions)]
+ if rhs.try_into().unwrap_or(usize::MAX) >= BITS {
+ panic!("attempt to shift left with overflow")
+ }
+
+ Self {
+ value: (self.value << rhs) & Self::MASK,
+ }
+ }
+}
+
+impl<T, TSHIFTBITS, const BITS: usize> ShlAssign<TSHIFTBITS> for UInt<T, BITS>
+where
+ Self: Number,
+ T: Copy
+ + BitAnd<T, Output = T>
+ + BitAndAssign<T>
+ + ShlAssign<TSHIFTBITS>
+ + Sub<T, Output = T>
+ + Shr<usize, Output = T>
+ + Shl<usize, Output = T>
+ + From<u8>,
+ TSHIFTBITS: TryInto<usize> + Copy,
+{
+ fn shl_assign(&mut self, rhs: TSHIFTBITS) {
+ // With debug assertions, the << and >> operators throw an exception
if the shift amount
+ // is larger than the number of bits (in which case the result would
always be 0)
+ #[cfg(debug_assertions)]
+ if rhs.try_into().unwrap_or(usize::MAX) >= BITS {
+ panic!("attempt to shift left with overflow")
+ }
+ self.value <<= rhs;
+ self.value &= Self::MASK;
+ }
+}
+
+impl<T, TSHIFTBITS, const BITS: usize> Shr<TSHIFTBITS> for UInt<T, BITS>
+where
+ T: Copy + Shr<TSHIFTBITS, Output = T> + Sub<T, Output = T> + Shl<usize,
Output = T> + From<u8>,
+ TSHIFTBITS: TryInto<usize> + Copy,
+{
+ type Output = UInt<T, BITS>;
+
+ fn shr(self, rhs: TSHIFTBITS) -> Self::Output {
+ // With debug assertions, the << and >> operators throw an exception
if the shift amount
+ // is larger than the number of bits (in which case the result would
always be 0)
+ #[cfg(debug_assertions)]
+ if rhs.try_into().unwrap_or(usize::MAX) >= BITS {
+ panic!("attempt to shift left with overflow")
+ }
+ Self {
+ value: self.value >> rhs,
+ }
+ }
+}
+
+impl<T, TSHIFTBITS, const BITS: usize> ShrAssign<TSHIFTBITS> for UInt<T, BITS>
+where
+ T: Copy + ShrAssign<TSHIFTBITS> + Sub<T, Output = T> + Shl<usize, Output =
T> + From<u8>,
+ TSHIFTBITS: TryInto<usize> + Copy,
+{
+ fn shr_assign(&mut self, rhs: TSHIFTBITS) {
+ // With debug assertions, the << and >> operators throw an exception
if the shift amount
+ // is larger than the number of bits (in which case the result would
always be 0)
+ #[cfg(debug_assertions)]
+ if rhs.try_into().unwrap_or(usize::MAX) >= BITS {
+ panic!("attempt to shift left with overflow")
+ }
+ self.value >>= rhs;
+ }
+}
+
+impl<T, const BITS: usize> Display for UInt<T, BITS>
+where
+ T: Display,
+{
+ #[inline]
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ self.value.fmt(f)
+ }
+}
+
+impl<T, const BITS: usize> Debug for UInt<T, BITS>
+where
+ T: Debug,
+{
+ #[inline]
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ self.value.fmt(f)
+ }
+}
+
+impl<T, const BITS: usize> LowerHex for UInt<T, BITS>
+where
+ T: LowerHex,
+{
+ #[inline]
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ self.value.fmt(f)
+ }
+}
+
+impl<T, const BITS: usize> UpperHex for UInt<T, BITS>
+where
+ T: UpperHex,
+{
+ #[inline]
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ self.value.fmt(f)
+ }
+}
+
+impl<T, const BITS: usize> Octal for UInt<T, BITS>
+where
+ T: Octal,
+{
+ #[inline]
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ self.value.fmt(f)
+ }
+}
+
+impl<T, const BITS: usize> Binary for UInt<T, BITS>
+where
+ T: Binary,
+{
+ #[inline]
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ self.value.fmt(f)
+ }
+}
+
+#[cfg(feature = "defmt")]
+impl<T, const BITS: usize> defmt::Format for UInt<T, BITS>
+where
+ T: defmt::Format,
+{
+ #[inline]
+ fn format(&self, f: defmt::Formatter) {
+ self.value.format(f)
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<T, const BITS: usize> Serialize for UInt<T, BITS>
+where
+ T: Serialize,
+{
+ fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok,
S::Error> {
+ self.value.serialize(serializer)
+ }
+}
+
+// Serde's invalid_value error
(https://rust-lang.github.io/hashbrown/serde/de/trait.Error.html#method.invalid_value)
+// takes an Unexpected
(https://rust-lang.github.io/hashbrown/serde/de/enum.Unexpected.html) which
only accepts a 64 bit
+// unsigned integer. This is a problem for us because we want to support 128
bit unsigned integers. To work around this
+// we define our own error type using the UInt's underlying type which
implements Display and then use
+// serde::de::Error::custom to create an error with our custom type.
+#[cfg(feature = "serde")]
+struct InvalidUIntValueError<T: Display> {
+ value: T,
+ max: T,
+}
+
+#[cfg(feature = "serde")]
+impl<T: Display> Display for InvalidUIntValueError<T> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ write!(
+ f,
+ "invalid value: integer `{}`, expected a value between `0` and
`{}`",
+ self.value, self.max
+ )
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<'de, T: Display, const BITS: usize> Deserialize<'de> for UInt<T, BITS>
+where
+ Self: Number,
+ T: Deserialize<'de> + PartialOrd,
+{
+ fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self,
D::Error> {
+ let value = T::deserialize(deserializer)?;
+
+ if value <= Self::MAX.value {
+ Ok(Self { value })
+ } else {
+ Err(serde::de::Error::custom(InvalidUIntValueError {
+ value,
+ max: Self::MAX.value,
+ }))
+ }
+ }
+}
+
+impl<T, const BITS: usize> Hash for UInt<T, BITS>
+where
+ T: Hash,
+{
+ #[inline]
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.value.hash(state)
+ }
+}
+
+#[cfg(feature = "step_trait")]
+impl<T, const BITS: usize> Step for UInt<T, BITS>
+where
+ Self: Number<UnderlyingType = T>,
+ T: Copy + Step,
+{
+ #[inline]
+ fn steps_between(start: &Self, end: &Self) -> Option<usize> {
+ Step::steps_between(&start.value(), &end.value())
+ }
+
+ #[inline]
+ fn forward_checked(start: Self, count: usize) -> Option<Self> {
+ if let Some(res) = Step::forward_checked(start.value(), count) {
+ Self::try_new(res).ok()
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ fn backward_checked(start: Self, count: usize) -> Option<Self> {
+ if let Some(res) = Step::backward_checked(start.value(), count) {
+ Self::try_new(res).ok()
+ } else {
+ None
+ }
+ }
+}
+
+#[cfg(feature = "num-traits")]
+impl<T, const NUM_BITS: usize> num_traits::WrappingAdd for UInt<T, NUM_BITS>
+where
+ Self: Number,
+ T: PartialEq
+ + Eq
+ + Copy
+ + Add<T, Output = T>
+ + Sub<T, Output = T>
+ + BitAnd<T, Output = T>
+ + Not<Output = T>
+ + Shr<usize, Output = T>
+ + Shl<usize, Output = T>
+ + From<u8>,
+ Wrapping<T>: Add<Wrapping<T>, Output = Wrapping<T>>,
+{
+ #[inline]
+ fn wrapping_add(&self, rhs: &Self) -> Self {
+ let sum = (Wrapping(self.value) + Wrapping(rhs.value)).0;
+ Self {
+ value: sum & Self::MASK,
+ }
+ }
+}
+
+#[cfg(feature = "num-traits")]
+impl<T, const NUM_BITS: usize> num_traits::WrappingSub for UInt<T, NUM_BITS>
+where
+ Self: Number,
+ T: PartialEq
+ + Eq
+ + Copy
+ + Add<T, Output = T>
+ + Sub<T, Output = T>
+ + BitAnd<T, Output = T>
+ + Not<Output = T>
+ + Shr<usize, Output = T>
+ + Shl<usize, Output = T>
+ + From<u8>,
+ Wrapping<T>: Sub<Wrapping<T>, Output = Wrapping<T>>,
+{
+ #[inline]
+ fn wrapping_sub(&self, rhs: &Self) -> Self {
+ let sum = (Wrapping(self.value) - Wrapping(rhs.value)).0;
+ Self {
+ value: sum & Self::MASK,
+ }
+ }
+}
+
+#[cfg(feature = "num-traits")]
+impl<T, const NUM_BITS: usize> num_traits::bounds::Bounded for UInt<T,
NUM_BITS>
+where
+ Self: Number,
+{
+ fn min_value() -> Self {
+ Self::MIN
+ }
+
+ fn max_value() -> Self {
+ Self::MAX
+ }
+}
+
+macro_rules! bytes_operation_impl {
+ ($base_data_type:ty, $bits:expr, [$($indices:expr),+]) => {
+ impl UInt<$base_data_type, $bits>
+ {
+ /// Reverses the byte order of the integer.
+ #[inline]
+ pub const fn swap_bytes(&self) -> Self {
+ // swap_bytes() of the underlying type does most of the work.
Then, we just need to shift
+ const SHIFT_RIGHT: usize =
(core::mem::size_of::<$base_data_type>() << 3) - $bits;
+ Self { value: self.value.swap_bytes() >> SHIFT_RIGHT }
+ }
+
+ pub const fn to_le_bytes(&self) -> [u8; $bits >> 3] {
+ let v = self.value();
+
+ [ $( (v >> ($indices << 3)) as u8, )+ ]
+ }
+
+ pub const fn from_le_bytes(from: [u8; $bits >> 3]) -> Self {
+ let value = { 0 $( | (from[$indices] as $base_data_type) <<
($indices << 3))+ };
+ Self { value }
+ }
+
+ pub const fn to_be_bytes(&self) -> [u8; $bits >> 3] {
+ let v = self.value();
+
+ [ $( (v >> ($bits - 8 - ($indices << 3))) as u8, )+ ]
+ }
+
+ pub const fn from_be_bytes(from: [u8; $bits >> 3]) -> Self {
+ let value = { 0 $( | (from[$indices] as $base_data_type) <<
($bits - 8 - ($indices << 3)))+ };
+ Self { value }
+ }
+
+ #[inline]
+ pub const fn to_ne_bytes(&self) -> [u8; $bits >> 3] {
+ #[cfg(target_endian = "little")]
+ {
+ self.to_le_bytes()
+ }
+ #[cfg(target_endian = "big")]
+ {
+ self.to_be_bytes()
+ }
+ }
+
+ #[inline]
+ pub const fn from_ne_bytes(bytes: [u8; $bits >> 3]) -> Self {
+ #[cfg(target_endian = "little")]
+ {
+ Self::from_le_bytes(bytes)
+ }
+ #[cfg(target_endian = "big")]
+ {
+ Self::from_be_bytes(bytes)
+ }
+ }
+
+ #[inline]
+ pub const fn to_le(self) -> Self {
+ #[cfg(target_endian = "little")]
+ {
+ self
+ }
+ #[cfg(target_endian = "big")]
+ {
+ self.swap_bytes()
+ }
+ }
+
+ #[inline]
+ pub const fn to_be(self) -> Self {
+ #[cfg(target_endian = "little")]
+ {
+ self.swap_bytes()
+ }
+ #[cfg(target_endian = "big")]
+ {
+ self
+ }
+ }
+
+ #[inline]
+ pub const fn from_le(value: Self) -> Self {
+ value.to_le()
+ }
+
+ #[inline]
+ pub const fn from_be(value: Self) -> Self {
+ value.to_be()
+ }
+ }
+ };
+}
+
+bytes_operation_impl!(u32, 24, [0, 1, 2]);
+bytes_operation_impl!(u64, 24, [0, 1, 2]);
+bytes_operation_impl!(u128, 24, [0, 1, 2]);
+bytes_operation_impl!(u64, 40, [0, 1, 2, 3, 4]);
+bytes_operation_impl!(u128, 40, [0, 1, 2, 3, 4]);
+bytes_operation_impl!(u64, 48, [0, 1, 2, 3, 4, 5]);
+bytes_operation_impl!(u128, 48, [0, 1, 2, 3, 4, 5]);
+bytes_operation_impl!(u64, 56, [0, 1, 2, 3, 4, 5, 6]);
+bytes_operation_impl!(u128, 56, [0, 1, 2, 3, 4, 5, 6]);
+bytes_operation_impl!(u128, 72, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
+bytes_operation_impl!(u128, 80, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+bytes_operation_impl!(u128, 88, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+bytes_operation_impl!(u128, 96, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]);
+bytes_operation_impl!(u128, 104, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]);
+bytes_operation_impl!(u128, 112, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
13]);
+bytes_operation_impl!(
+ u128,
+ 120,
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
+);
+
+// Conversions
+
+#[cfg(feature = "const_convert_and_const_trait_impl")]
+macro_rules! from_arbitrary_int_impl {
+ ($from:ty, [$($into:ty),+]) => {
+ $(
+ impl<const BITS: usize, const BITS_FROM: usize> const
From<UInt<$from, BITS_FROM>>
+ for UInt<$into, BITS>
+ {
+ #[inline]
+ fn from(item: UInt<$from, BITS_FROM>) -> Self {
+ let _ = CompileTimeAssert::<BITS_FROM,
BITS>::SMALLER_OR_EQUAL;
+ Self { value: item.value as $into }
+ }
+ }
+ )+
+ };
+}
+
+#[cfg(not(feature = "const_convert_and_const_trait_impl"))]
+macro_rules! from_arbitrary_int_impl {
+ ($from:ty, [$($into:ty),+]) => {
+ $(
+ impl<const BITS: usize, const BITS_FROM: usize> From<UInt<$from,
BITS_FROM>>
+ for UInt<$into, BITS>
+ {
+ #[inline]
+ fn from(item: UInt<$from, BITS_FROM>) -> Self {
+ let _ = CompileTimeAssert::<BITS_FROM,
BITS>::SMALLER_OR_EQUAL;
+ Self { value: item.value as $into }
+ }
+ }
+ )+
+ };
+}
+
+#[cfg(feature = "const_convert_and_const_trait_impl")]
+macro_rules! from_native_impl {
+ ($from:ty, [$($into:ty),+]) => {
+ $(
+ impl<const BITS: usize> const From<$from> for UInt<$into, BITS> {
+ #[inline]
+ fn from(from: $from) -> Self {
+ let _ = CompileTimeAssert::<{ <$from>::BITS as usize },
BITS>::SMALLER_OR_EQUAL;
+ Self { value: from as $into }
+ }
+ }
+
+ impl<const BITS: usize> const From<UInt<$from, BITS>> for $into {
+ #[inline]
+ fn from(from: UInt<$from, BITS>) -> Self {
+ let _ = CompileTimeAssert::<BITS, { <$into>::BITS as usize
}>::SMALLER_OR_EQUAL;
+ from.value as $into
+ }
+ }
+ )+
+ };
+}
+
+#[cfg(not(feature = "const_convert_and_const_trait_impl"))]
+macro_rules! from_native_impl {
+ ($from:ty, [$($into:ty),+]) => {
+ $(
+ impl<const BITS: usize> From<$from> for UInt<$into, BITS> {
+ #[inline]
+ fn from(from: $from) -> Self {
+ let _ = CompileTimeAssert::<{ <$from>::BITS as usize },
BITS>::SMALLER_OR_EQUAL;
+ Self { value: from as $into }
+ }
+ }
+
+ impl<const BITS: usize> From<UInt<$from, BITS>> for $into {
+ #[inline]
+ fn from(from: UInt<$from, BITS>) -> Self {
+ let _ = CompileTimeAssert::<BITS, { <$into>::BITS as usize
}>::SMALLER_OR_EQUAL;
+ from.value as $into
+ }
+ }
+ )+
+ };
+}
+
+from_arbitrary_int_impl!(u8, [u16, u32, u64, u128]);
+from_arbitrary_int_impl!(u16, [u8, u32, u64, u128]);
+from_arbitrary_int_impl!(u32, [u8, u16, u64, u128]);
+from_arbitrary_int_impl!(u64, [u8, u16, u32, u128]);
+from_arbitrary_int_impl!(u128, [u8, u32, u64, u16]);
+
+from_native_impl!(u8, [u8, u16, u32, u64, u128]);
+from_native_impl!(u16, [u8, u16, u32, u64, u128]);
+from_native_impl!(u32, [u8, u16, u32, u64, u128]);
+from_native_impl!(u64, [u8, u16, u32, u64, u128]);
+from_native_impl!(u128, [u8, u16, u32, u64, u128]);
+
+// Define type aliases like u1, u63 and u80 using the smallest possible
underlying data type.
+// These are for convenience only - UInt<u32, 15> is still legal
+macro_rules! type_alias {
+ ($storage:ty, $(($name:ident, $bits:expr)),+) => {
+ $( pub type $name = crate::UInt<$storage, $bits>; )+
+ }
+}
+
+pub use aliases::*;
+
+#[allow(non_camel_case_types)]
+#[rustfmt::skip]
+mod aliases {
+ type_alias!(u8, (u1, 1), (u2, 2), (u3, 3), (u4, 4), (u5, 5), (u6, 6), (u7,
7));
+ type_alias!(u16, (u9, 9), (u10, 10), (u11, 11), (u12, 12), (u13, 13),
(u14, 14), (u15, 15));
+ type_alias!(u32, (u17, 17), (u18, 18), (u19, 19), (u20, 20), (u21, 21),
(u22, 22), (u23, 23), (u24, 24), (u25, 25), (u26, 26), (u27, 27), (u28, 28),
(u29, 29), (u30, 30), (u31, 31));
+ type_alias!(u64, (u33, 33), (u34, 34), (u35, 35), (u36, 36), (u37, 37),
(u38, 38), (u39, 39), (u40, 40), (u41, 41), (u42, 42), (u43, 43), (u44, 44),
(u45, 45), (u46, 46), (u47, 47), (u48, 48), (u49, 49), (u50, 50), (u51, 51),
(u52, 52), (u53, 53), (u54, 54), (u55, 55), (u56, 56), (u57, 57), (u58, 58),
(u59, 59), (u60, 60), (u61, 61), (u62, 62), (u63, 63));
+ type_alias!(u128, (u65, 65), (u66, 66), (u67, 67), (u68, 68), (u69, 69),
(u70, 70), (u71, 71), (u72, 72), (u73, 73), (u74, 74), (u75, 75), (u76, 76),
(u77, 77), (u78, 78), (u79, 79), (u80, 80), (u81, 81), (u82, 82), (u83, 83),
(u84, 84), (u85, 85), (u86, 86), (u87, 87), (u88, 88), (u89, 89), (u90, 90),
(u91, 91), (u92, 92), (u93, 93), (u94, 94), (u95, 95), (u96, 96), (u97, 97),
(u98, 98), (u99, 99), (u100, 100), (u101, 101), (u102, 102), (u103, 103),
(u104, 104), (u105, 105), (u106, 106), (u107, 107), (u108, 108), (u109, 109),
(u110, 110), (u111, 111), (u112, 112), (u113, 113), (u114, 114), (u115, 115),
(u116, 116), (u117, 117), (u118, 118), (u119, 119), (u120, 120), (u121, 121),
(u122, 122), (u123, 123), (u124, 124), (u125, 125), (u126, 126), (u127, 127));
+}
+
+// We need to wrap this in a macro, currently:
https://github.com/rust-lang/rust/issues/67792#issuecomment-1130369066
+
+#[cfg(feature = "const_convert_and_const_trait_impl")]
+macro_rules! boolu1 {
+ () => {
+ impl const From<bool> for u1 {
+ #[inline]
+ fn from(value: bool) -> Self {
+ u1::new(value as u8)
+ }
+ }
+ impl const From<u1> for bool {
+ #[inline]
+ fn from(value: u1) -> Self {
+ match value.value() {
+ 0 => false,
+ 1 => true,
+ _ => panic!("arbitrary_int_type already validates that
this is unreachable"), //TODO: unreachable!() is not const yet
+ }
+ }
+ }
+ };
+}
+
+#[cfg(not(feature = "const_convert_and_const_trait_impl"))]
+macro_rules! boolu1 {
+ () => {
+ impl From<bool> for u1 {
+ #[inline]
+ fn from(value: bool) -> Self {
+ u1::new(value as u8)
+ }
+ }
+ impl From<u1> for bool {
+ #[inline]
+ fn from(value: u1) -> Self {
+ match value.value() {
+ 0 => false,
+ 1 => true,
+ _ => panic!("arbitrary_int_type already validates that
this is unreachable"), //TODO: unreachable!() is not const yet
+ }
+ }
+ }
+ };
+}
+
+boolu1!();
diff --git a/rust/hw/char/pl011/vendor/arbitrary-int/tests/tests.rs
b/rust/hw/char/pl011/vendor/arbitrary-int/tests/tests.rs
new file mode 100644
index 0000000000..e050f00c99
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/arbitrary-int/tests/tests.rs
@@ -0,0 +1,1913 @@
+#![cfg_attr(feature = "step_trait", feature(step_trait))]
+
+extern crate core;
+
+use arbitrary_int::*;
+use std::collections::HashMap;
+#[cfg(feature = "step_trait")]
+use std::iter::Step;
+
+#[test]
+fn constants() {
+ // Make a constant to ensure new().value() works in a const-context
+ const TEST_CONSTANT: u8 = u7::new(127).value();
+ assert_eq!(TEST_CONSTANT, 127u8);
+
+ // Same with widen()
+ const TEST_CONSTANT2: u7 = u6::new(63).widen();
+ assert_eq!(TEST_CONSTANT2, u7::new(63));
+
+    // Same with try_new()
+ const TEST_CONSTANT3A: Result<u6, TryNewError> = u6::try_new(62);
+ assert_eq!(TEST_CONSTANT3A, Ok(u6::new(62)));
+ const TEST_CONSTANT3B: Result<u6, TryNewError> = u6::try_new(64);
+ assert!(TEST_CONSTANT3B.is_err());
+}
+
+#[test]
+fn create_simple() {
+ let value7 = u7::new(123);
+ let value8 = UInt::<u8, 8>::new(189);
+
+ let value13 = u13::new(123);
+ let value16 = UInt::<u16, 16>::new(60000);
+
+ let value23 = u23::new(123);
+ let value67 = u67::new(123);
+
+ assert_eq!(value7.value(), 123);
+ assert_eq!(value8.value(), 189);
+
+ assert_eq!(value13.value(), 123);
+ assert_eq!(value16.value(), 60000);
+
+ assert_eq!(value23.value(), 123);
+ assert_eq!(value67.value(), 123);
+}
+
+#[test]
+fn create_try_new() {
+ assert_eq!(u7::new(123).value(), 123);
+ assert_eq!(u7::try_new(190).expect_err("No error seen"), TryNewError {});
+}
+
+#[test]
+#[should_panic]
+fn create_panic_u7() {
+ u7::new(128);
+}
+
+#[test]
+#[should_panic]
+fn create_panic_u15() {
+ u15::new(32768);
+}
+
+#[test]
+#[should_panic]
+fn create_panic_u31() {
+ u31::new(2147483648);
+}
+
+#[test]
+#[should_panic]
+fn create_panic_u63() {
+ u63::new(0x8000_0000_0000_0000);
+}
+
+#[test]
+#[should_panic]
+fn create_panic_u127() {
+ u127::new(0x8000_0000_0000_0000_0000_0000_0000_0000);
+}
+
+#[test]
+fn add() {
+ assert_eq!(u7::new(10) + u7::new(20), u7::new(30));
+ assert_eq!(u7::new(100) + u7::new(27), u7::new(127));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn add_overflow() {
+ let _ = u7::new(127) + u7::new(3);
+}
+
+#[cfg(not(debug_assertions))]
+#[test]
+fn add_no_overflow() {
+ let _ = u7::new(127) + u7::new(3);
+}
+
+#[cfg(feature = "num-traits")]
+#[test]
+fn num_traits_add_wrapping() {
+ let v1 = u7::new(120);
+ let v2 = u7::new(10);
+ let v3 = num_traits::WrappingAdd::wrapping_add(&v1, &v2);
+ assert_eq!(v3, u7::new(2));
+}
+
+#[cfg(feature = "num-traits")]
+#[test]
+fn num_traits_sub_wrapping() {
+ let v1 = u7::new(15);
+ let v2 = u7::new(20);
+ let v3 = num_traits::WrappingSub::wrapping_sub(&v1, &v2);
+ assert_eq!(v3, u7::new(123));
+}
+
+#[cfg(feature = "num-traits")]
+#[test]
+fn num_traits_bounded() {
+ use num_traits::bounds::Bounded;
+ assert_eq!(u7::MAX, u7::max_value());
+ assert_eq!(u119::MAX, u119::max_value());
+ assert_eq!(u7::new(0), u7::min_value());
+ assert_eq!(u119::new(0), u119::min_value());
+}
+
+#[test]
+fn addassign() {
+ let mut value = u9::new(500);
+ value += u9::new(11);
+ assert_eq!(value, u9::new(511));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn addassign_overflow() {
+ let mut value = u9::new(500);
+ value += u9::new(40);
+}
+
+#[cfg(not(debug_assertions))]
+#[test]
+fn addassign_no_overflow() {
+ let mut value = u9::new(500);
+ value += u9::new(28);
+ assert_eq!(value, u9::new(16));
+}
+
+#[test]
+fn sub() {
+ assert_eq!(u7::new(22) - u7::new(10), u7::new(12));
+ assert_eq!(u7::new(127) - u7::new(127), u7::new(0));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn sub_overflow() {
+ let _ = u7::new(100) - u7::new(127);
+}
+
+#[cfg(not(debug_assertions))]
+#[test]
+fn sub_no_overflow() {
+ let value = u7::new(100) - u7::new(127);
+ assert_eq!(value, u7::new(101));
+}
+
+#[test]
+fn subassign() {
+ let mut value = u9::new(500);
+ value -= u9::new(11);
+ assert_eq!(value, u9::new(489));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn subassign_overflow() {
+ let mut value = u9::new(30);
+ value -= u9::new(40);
+}
+
+#[cfg(not(debug_assertions))]
+#[test]
+fn subassign_no_overflow() {
+ let mut value = u9::new(30);
+ value -= u9::new(40);
+ assert_eq!(value, u9::new(502));
+}
+
+#[test]
+fn mul() {
+ assert_eq!(u7::new(22) * u7::new(4), u7::new(88));
+ assert_eq!(u7::new(127) * u7::new(0), u7::new(0));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn mul_overflow() {
+ let _ = u7::new(100) * u7::new(2);
+}
+
+#[cfg(not(debug_assertions))]
+#[test]
+fn mul_no_overflow() {
+ let result = u7::new(100) * u7::new(2);
+ assert_eq!(result, u7::new(72));
+}
+
+#[test]
+fn mulassign() {
+ let mut value = u9::new(240);
+ value *= u9::new(2);
+ assert_eq!(value, u9::new(480));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn mulassign_overflow() {
+ let mut value = u9::new(500);
+ value *= u9::new(2);
+}
+
+#[cfg(not(debug_assertions))]
+#[test]
+fn mulassign_no_overflow() {
+ let mut value = u9::new(500);
+ value *= u9::new(40);
+ assert_eq!(value, u9::new(32));
+}
+
+#[test]
+fn div() {
+ // div just forwards to the underlying type, so there isn't much to do
+ assert_eq!(u7::new(22) / u7::new(4), u7::new(5));
+ assert_eq!(u7::new(127) / u7::new(1), u7::new(127));
+ assert_eq!(u7::new(127) / u7::new(127), u7::new(1));
+}
+
+#[should_panic]
+#[test]
+fn div_by_zero() {
+ let _ = u7::new(22) / u7::new(0);
+}
+
+#[test]
+fn divassign() {
+ let mut value = u9::new(240);
+ value /= u9::new(2);
+ assert_eq!(value, u9::new(120));
+}
+
+#[should_panic]
+#[test]
+fn divassign_by_zero() {
+ let mut value = u9::new(240);
+ value /= u9::new(0);
+}
+
+#[test]
+fn bitand() {
+ assert_eq!(
+ u17::new(0b11001100) & u17::new(0b01101001),
+ u17::new(0b01001000)
+ );
+ assert_eq!(u17::new(0b11001100) & u17::new(0), u17::new(0));
+ assert_eq!(
+ u17::new(0b11001100) & u17::new(0x1_FFFF),
+ u17::new(0b11001100)
+ );
+}
+
+#[test]
+fn bitandassign() {
+ let mut value = u4::new(0b0101);
+ value &= u4::new(0b0110);
+ assert_eq!(value, u4::new(0b0100));
+}
+
+#[test]
+fn bitor() {
+ assert_eq!(
+ u17::new(0b11001100) | u17::new(0b01101001),
+ u17::new(0b11101101)
+ );
+ assert_eq!(u17::new(0b11001100) | u17::new(0), u17::new(0b11001100));
+ assert_eq!(
+ u17::new(0b11001100) | u17::new(0x1_FFFF),
+ u17::new(0x1_FFFF)
+ );
+}
+
+#[test]
+fn bitorassign() {
+ let mut value = u4::new(0b0101);
+ value |= u4::new(0b0110);
+ assert_eq!(value, u4::new(0b0111));
+}
+
+#[test]
+fn bitxor() {
+ assert_eq!(
+ u17::new(0b11001100) ^ u17::new(0b01101001),
+ u17::new(0b10100101)
+ );
+ assert_eq!(u17::new(0b11001100) ^ u17::new(0), u17::new(0b11001100));
+ assert_eq!(
+ u17::new(0b11001100) ^ u17::new(0x1_FFFF),
+ u17::new(0b1_11111111_00110011)
+ );
+}
+
+#[test]
+fn bitxorassign() {
+ let mut value = u4::new(0b0101);
+ value ^= u4::new(0b0110);
+ assert_eq!(value, u4::new(0b0011));
+}
+
+#[test]
+fn not() {
+ assert_eq!(!u17::new(0), u17::new(0b1_11111111_11111111));
+ assert_eq!(!u5::new(0b10101), u5::new(0b01010));
+}
+
+#[test]
+fn shl() {
+ assert_eq!(u17::new(0b1) << 5u8, u17::new(0b100000));
+ // Ensure bits on the left are shifted out
+ assert_eq!(u9::new(0b11110000) << 3u64, u9::new(0b1_10000000));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shl_too_much8() {
+ let _ = u53::new(123) << 53u8;
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shl_too_much16() {
+ let _ = u53::new(123) << 53u16;
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shl_too_much32() {
+ let _ = u53::new(123) << 53u32;
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shl_too_much64() {
+ let _ = u53::new(123) << 53u64;
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shl_too_much128() {
+ let _ = u53::new(123) << 53u128;
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shl_too_much_usize() {
+ let _ = u53::new(123) << 53usize;
+}
+
+#[test]
+fn shlassign() {
+ let mut value = u9::new(0b11110000);
+ value <<= 3;
+ assert_eq!(value, u9::new(0b1_10000000));
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shlassign_too_much() {
+ let mut value = u9::new(0b11110000);
+ value <<= 9;
+}
+
+#[cfg(debug_assertions)]
+#[test]
+#[should_panic]
+fn shlassign_too_much2() {
+ let mut value = u9::new(0b11110000);
+ value <<= 10;
+}
+
+#[test]
+fn shr() {
+ assert_eq!(u17::new(0b100110) >> 5usize, u17::new(1));
+
+ // Ensure there's no sign extension
+ assert_eq!(u17::new(0b1_11111111_11111111) >> 8, u17::new(0b1_11111111));
+}
+
+#[test]
+fn shrassign() {
+ let mut value = u9::new(0b1_11110000);
+ value >>= 6;
+ assert_eq!(value, u9::new(0b0_00000111));
+}
+
+#[test]
+fn compare() {
+ assert_eq!(true, u4::new(0b1100) > u4::new(0b0011));
+ assert_eq!(true, u4::new(0b1100) >= u4::new(0b0011));
+ assert_eq!(false, u4::new(0b1100) < u4::new(0b0011));
+ assert_eq!(false, u4::new(0b1100) <= u4::new(0b0011));
+ assert_eq!(true, u4::new(0b1100) != u4::new(0b0011));
+ assert_eq!(false, u4::new(0b1100) == u4::new(0b0011));
+
+ assert_eq!(false, u4::new(0b1100) > u4::new(0b1100));
+ assert_eq!(true, u4::new(0b1100) >= u4::new(0b1100));
+ assert_eq!(false, u4::new(0b1100) < u4::new(0b1100));
+ assert_eq!(true, u4::new(0b1100) <= u4::new(0b1100));
+ assert_eq!(false, u4::new(0b1100) != u4::new(0b1100));
+ assert_eq!(true, u4::new(0b1100) == u4::new(0b1100));
+
+ assert_eq!(false, u4::new(0b0011) > u4::new(0b1100));
+ assert_eq!(false, u4::new(0b0011) >= u4::new(0b1100));
+ assert_eq!(true, u4::new(0b0011) < u4::new(0b1100));
+ assert_eq!(true, u4::new(0b0011) <= u4::new(0b1100));
+ assert_eq!(true, u4::new(0b0011) != u4::new(0b1100));
+ assert_eq!(false, u4::new(0b0011) == u4::new(0b1100));
+}
+
+#[test]
+fn min_max() {
+ assert_eq!(0, u4::MIN.value());
+ assert_eq!(0b1111, u4::MAX.value());
+ assert_eq!(u4::new(0b1111), u4::MAX);
+
+ assert_eq!(0, u15::MIN.value());
+ assert_eq!(32767, u15::MAX.value());
+ assert_eq!(u15::new(32767), u15::MAX);
+
+ assert_eq!(0, u31::MIN.value());
+ assert_eq!(2147483647, u31::MAX.value());
+
+ assert_eq!(0, u63::MIN.value());
+ assert_eq!(0x7FFF_FFFF_FFFF_FFFF, u63::MAX.value());
+
+ assert_eq!(0, u127::MIN.value());
+ assert_eq!(0x7FFF_FFFF_FFFF_FFFF_FFFF_FFFF_FFFF_FFFF, u127::MAX.value());
+}
+
+#[test]
+fn bits() {
+ assert_eq!(4, u4::BITS);
+ assert_eq!(12, u12::BITS);
+ assert_eq!(120, u120::BITS);
+ assert_eq!(13, UInt::<u128, 13usize>::BITS);
+
+ assert_eq!(8, u8::BITS);
+ assert_eq!(16, u16::BITS);
+}
+
+#[test]
+fn mask() {
+ assert_eq!(0x1u8, u1::MASK);
+ assert_eq!(0xFu8, u4::MASK);
+ assert_eq!(0x3FFFFu32, u18::MASK);
+ assert_eq!(0x7FFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFFu128, u127::MASK);
+ assert_eq!(0x7FFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFFu128, u127::MASK);
+ assert_eq!(0xFFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFFu128, u128::MAX);
+}
+
+#[test]
+fn min_max_fullwidth() {
+ assert_eq!(u8::MIN, UInt::<u8, 8>::MIN.value());
+ assert_eq!(u8::MAX, UInt::<u8, 8>::MAX.value());
+
+ assert_eq!(u16::MIN, UInt::<u16, 16>::MIN.value());
+ assert_eq!(u16::MAX, UInt::<u16, 16>::MAX.value());
+
+ assert_eq!(u32::MIN, UInt::<u32, 32>::MIN.value());
+ assert_eq!(u32::MAX, UInt::<u32, 32>::MAX.value());
+
+ assert_eq!(u64::MIN, UInt::<u64, 64>::MIN.value());
+ assert_eq!(u64::MAX, UInt::<u64, 64>::MAX.value());
+
+ assert_eq!(u128::MIN, UInt::<u128, 128>::MIN.value());
+ assert_eq!(u128::MAX, UInt::<u128, 128>::MAX.value());
+}
+
+#[allow(deprecated)]
+#[test]
+fn extract() {
+ assert_eq!(u5::new(0b10000), u5::extract(0b11110000, 0));
+ assert_eq!(u5::new(0b11100), u5::extract(0b11110000, 2));
+ assert_eq!(u5::new(0b11110), u5::extract(0b11110000, 3));
+
+ // Use extract with a custom type (5 bits of u32)
+ assert_eq!(
+ UInt::<u32, 5>::new(0b11110),
+ UInt::<u32, 5>::extract(0b11110000, 3)
+ );
+ assert_eq!(
+ u5::new(0b11110),
+ UInt::<u32, 5>::extract(0b11110000, 3).into()
+ );
+}
+
+#[test]
+fn extract_typed() {
+ assert_eq!(u5::new(0b10000), u5::extract_u8(0b11110000, 0));
+ assert_eq!(u5::new(0b00011), u5::extract_u16(0b11110000_11110110, 6));
+ assert_eq!(
+ u5::new(0b01011),
+ u5::extract_u32(0b11110010_11110110_00000000_00000000, 22)
+ );
+ assert_eq!(
+ u5::new(0b01011),
+ u5::extract_u64(
+            0b11110010_11110110_00000000_00000000_00000000_00000000_00000000_00000000,
+ 54
+ )
+ );
+    assert_eq!(
+        u5::new(0b01011),
+        u5::extract_u128(
+            0b11110010_11110110_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000,
+            118
+        )
+    );
+}
+
+#[test]
+fn extract_full_width_typed() {
+ assert_eq!(
+ 0b1010_0011,
+ UInt::<u8, 8>::extract_u8(0b1010_0011, 0).value()
+ );
+ assert_eq!(
+ 0b1010_0011,
+ UInt::<u8, 8>::extract_u16(0b1111_1111_1010_0011, 0).value()
+ );
+}
+
+#[test]
+#[should_panic]
+fn extract_not_enough_bits_8() {
+ let _ = u5::extract_u8(0b11110000, 4);
+}
+
+#[test]
+#[should_panic]
+fn extract_not_enough_bits_8_full_width() {
+ let _ = UInt::<u8, 8>::extract_u8(0b11110000, 1);
+}
+
+#[test]
+#[should_panic]
+fn extract_not_enough_bits_16() {
+ let _ = u5::extract_u16(0b11110000, 12);
+}
+
+#[test]
+#[should_panic]
+fn extract_not_enough_bits_32() {
+ let _ = u5::extract_u32(0b11110000, 28);
+}
+
+#[test]
+#[should_panic]
+fn extract_not_enough_bits_64() {
+ let _ = u5::extract_u64(0b11110000, 60);
+}
+
+#[test]
+#[should_panic]
+fn extract_not_enough_bits_128() {
+ let _ = u5::extract_u128(0b11110000, 124);
+}
+
+#[test]
+fn from_same_bit_widths() {
+ assert_eq!(u5::from(UInt::<u8, 5>::new(0b10101)), u5::new(0b10101));
+ assert_eq!(u5::from(UInt::<u16, 5>::new(0b10101)), u5::new(0b10101));
+ assert_eq!(u5::from(UInt::<u32, 5>::new(0b10101)), u5::new(0b10101));
+ assert_eq!(u5::from(UInt::<u64, 5>::new(0b10101)), u5::new(0b10101));
+ assert_eq!(u5::from(UInt::<u128, 5>::new(0b10101)), u5::new(0b10101));
+
+ assert_eq!(
+ UInt::<u8, 8>::from(UInt::<u128, 8>::new(0b1110_0101)),
+ UInt::<u8, 8>::new(0b1110_0101)
+ );
+
+ assert_eq!(
+ UInt::<u16, 6>::from(UInt::<u8, 5>::new(0b10101)),
+ UInt::<u16, 6>::new(0b10101)
+ );
+ assert_eq!(u15::from(UInt::<u16, 15>::new(0b10101)), u15::new(0b10101));
+ assert_eq!(u15::from(UInt::<u32, 15>::new(0b10101)), u15::new(0b10101));
+ assert_eq!(u15::from(UInt::<u64, 15>::new(0b10101)), u15::new(0b10101));
+ assert_eq!(u15::from(UInt::<u128, 15>::new(0b10101)), u15::new(0b10101));
+
+ assert_eq!(
+ UInt::<u32, 6>::from(u6::new(0b10101)),
+ UInt::<u32, 6>::new(0b10101)
+ );
+ assert_eq!(
+ UInt::<u32, 14>::from(u14::new(0b10101)),
+ UInt::<u32, 14>::new(0b10101)
+ );
+ assert_eq!(u30::from(UInt::<u32, 30>::new(0b10101)), u30::new(0b10101));
+ assert_eq!(u30::from(UInt::<u64, 30>::new(0b10101)), u30::new(0b10101));
+ assert_eq!(u30::from(UInt::<u128, 30>::new(0b10101)), u30::new(0b10101));
+
+ assert_eq!(
+ UInt::<u64, 7>::from(UInt::<u8, 7>::new(0b10101)),
+ UInt::<u64, 7>::new(0b10101)
+ );
+ assert_eq!(
+ UInt::<u64, 12>::from(UInt::<u16, 12>::new(0b10101)),
+ UInt::<u64, 12>::new(0b10101)
+ );
+ assert_eq!(
+ UInt::<u64, 28>::from(UInt::<u32, 28>::new(0b10101)),
+ UInt::<u64, 28>::new(0b10101)
+ );
+ assert_eq!(u60::from(u60::new(0b10101)), u60::new(0b10101));
+ assert_eq!(u60::from(UInt::<u128, 60>::new(0b10101)), u60::new(0b10101));
+
+ assert_eq!(
+ UInt::<u128, 5>::from(UInt::<u8, 5>::new(0b10101)),
+ UInt::<u128, 5>::new(0b10101)
+ );
+ assert_eq!(
+ UInt::<u128, 12>::from(UInt::<u16, 12>::new(0b10101)),
+ UInt::<u128, 12>::new(0b10101)
+ );
+ assert_eq!(
+ UInt::<u128, 26>::from(UInt::<u32, 26>::new(0b10101)),
+ UInt::<u128, 26>::new(0b10101)
+ );
+ assert_eq!(
+ UInt::<u128, 60>::from(UInt::<u64, 60>::new(0b10101)),
+ UInt::<u128, 60>::new(0b10101)
+ );
+ assert_eq!(
+ u120::from(UInt::<u128, 120>::new(0b10101)),
+ u120::new(0b10101)
+ );
+}
+
+#[cfg(feature = "num-traits")]
+#[test]
+fn calculation_with_number_trait() {
+ fn increment_by_1<T: num_traits::WrappingAdd + Number>(foo: T) -> T {
+ foo.wrapping_add(&T::new(1.into()))
+ }
+
+ fn increment_by_512<T: num_traits::WrappingAdd + Number>(
+ foo: T,
+ ) -> Result<T, <<T as Number>::UnderlyingType as TryFrom<u32>>::Error>
+ where
+        <<T as Number>::UnderlyingType as TryFrom<u32>>::Error: core::fmt::Debug,
+ {
+ Ok(foo.wrapping_add(&T::new(512u32.try_into()?)))
+ }
+
+ assert_eq!(increment_by_1(0u16), 1u16);
+ assert_eq!(increment_by_1(u7::new(3)), u7::new(4));
+ assert_eq!(increment_by_1(u15::new(3)), u15::new(4));
+
+ assert_eq!(increment_by_512(0u16), Ok(512u16));
+ assert!(increment_by_512(u7::new(3)).is_err());
+ assert_eq!(increment_by_512(u15::new(3)), Ok(u15::new(515)));
+}
+
+#[test]
+fn from_smaller_bit_widths() {
+    // The code to get more bits from fewer bits (through From) is the same as the code
+    // above for identical bitwidths. Therefore just do a few point checks to ensure
+    // things compile.
+
+    // There are compile-breakers for the opposite direction (e.g. trying to do
+    // u5 = From(u17)), but we can't test compile failures here.
+
+    // From is not yet supported if the bitcounts are different but the base data
+    // types are the same (we'd need fancier Rust features to support that).
+ assert_eq!(u6::from(UInt::<u16, 5>::new(0b10101)), u6::new(0b10101));
+ assert_eq!(u6::from(UInt::<u32, 5>::new(0b10101)), u6::new(0b10101));
+ assert_eq!(u6::from(UInt::<u64, 5>::new(0b10101)), u6::new(0b10101));
+ assert_eq!(u6::from(UInt::<u128, 5>::new(0b10101)), u6::new(0b10101));
+
+ assert_eq!(u15::from(UInt::<u8, 7>::new(0b10101)), u15::new(0b10101));
+ //assert_eq!(u15::from(UInt::<u16, 15>::new(0b10101)), u15::new(0b10101));
+ assert_eq!(u15::from(UInt::<u32, 14>::new(0b10101)), u15::new(0b10101));
+ assert_eq!(u15::from(UInt::<u64, 14>::new(0b10101)), u15::new(0b10101));
+ assert_eq!(u15::from(UInt::<u128, 14>::new(0b10101)), u15::new(0b10101));
+}
+
+#[allow(non_camel_case_types)]
+#[test]
+fn from_native_ints_same_bits() {
+ use std::primitive;
+
+ type u8 = UInt<primitive::u8, 8>;
+ type u16 = UInt<primitive::u16, 16>;
+ type u32 = UInt<primitive::u32, 32>;
+ type u64 = UInt<primitive::u64, 64>;
+ type u128 = UInt<primitive::u128, 128>;
+
+ assert_eq!(u8::from(0x80_u8), u8::new(0x80));
+ assert_eq!(u16::from(0x8000_u16), u16::new(0x8000));
+ assert_eq!(u32::from(0x8000_0000_u32), u32::new(0x8000_0000));
+ assert_eq!(
+ u64::from(0x8000_0000_0000_0000_u64),
+ u64::new(0x8000_0000_0000_0000)
+ );
+ assert_eq!(
+ u128::from(0x8000_0000_0000_0000_0000_0000_0000_0000_u128),
+ u128::new(0x8000_0000_0000_0000_0000_0000_0000_0000)
+ );
+}
+
+#[test]
+fn from_native_ints_fewer_bits() {
+ assert_eq!(u9::from(0x80_u8), u9::new(0x80));
+
+ assert_eq!(u17::from(0x80_u8), u17::new(0x80));
+ assert_eq!(u17::from(0x8000_u16), u17::new(0x8000));
+
+ assert_eq!(u33::from(0x80_u8), u33::new(0x80));
+ assert_eq!(u33::from(0x8000_u16), u33::new(0x8000));
+ assert_eq!(u33::from(0x8000_0000_u32), u33::new(0x8000_0000));
+
+ assert_eq!(u65::from(0x80_u8), u65::new(0x80));
+ assert_eq!(u65::from(0x8000_u16), u65::new(0x8000));
+ assert_eq!(u65::from(0x8000_0000_u32), u65::new(0x8000_0000));
+ assert_eq!(
+ u65::from(0x8000_0000_0000_0000_u64),
+ u65::new(0x8000_0000_0000_0000)
+ );
+}
+
+#[allow(non_camel_case_types)]
+#[test]
+fn into_native_ints_same_bits() {
+ assert_eq!(u8::from(UInt::<u8, 8>::new(0x80)), 0x80);
+ assert_eq!(u16::from(UInt::<u16, 16>::new(0x8000)), 0x8000);
+ assert_eq!(u32::from(UInt::<u32, 32>::new(0x8000_0000)), 0x8000_0000);
+ assert_eq!(
+ u64::from(UInt::<u64, 64>::new(0x8000_0000_0000_0000)),
+ 0x8000_0000_0000_0000
+ );
+ assert_eq!(
+ u128::from(UInt::<u128, 128>::new(
+ 0x8000_0000_0000_0000_0000_0000_0000_0000
+ )),
+ 0x8000_0000_0000_0000_0000_0000_0000_0000
+ );
+}
+
+#[test]
+fn into_native_ints_fewer_bits() {
+ assert_eq!(u8::from(u7::new(0x40)), 0x40);
+ assert_eq!(u16::from(u15::new(0x4000)), 0x4000);
+ assert_eq!(u32::from(u31::new(0x4000_0000)), 0x4000_0000);
+ assert_eq!(
+ u64::from(u63::new(0x4000_0000_0000_0000)),
+ 0x4000_0000_0000_0000
+ );
+ assert_eq!(
+ u128::from(u127::new(0x4000_0000_0000_0000_0000_0000_0000_0000)),
+ 0x4000_0000_0000_0000_0000_0000_0000_0000
+ );
+}
+
+#[test]
+fn from_into_bool() {
+ assert_eq!(u1::from(true), u1::new(1));
+ assert_eq!(u1::from(false), u1::new(0));
+ assert_eq!(bool::from(u1::new(1)), true);
+ assert_eq!(bool::from(u1::new(0)), false);
+}
+
+#[test]
+fn widen() {
+ // As From() can't be used while keeping the base-data-type, there's widen
+
+ assert_eq!(u5::new(0b11011).widen::<6>(), u6::new(0b11011));
+ assert_eq!(u5::new(0b11011).widen::<8>(), UInt::<u8, 8>::new(0b11011));
+ assert_eq!(u10::new(0b11011).widen::<11>(), u11::new(0b11011));
+ assert_eq!(u20::new(0b11011).widen::<24>(), u24::new(0b11011));
+ assert_eq!(u60::new(0b11011).widen::<61>(), u61::new(0b11011));
+ assert_eq!(u80::new(0b11011).widen::<127>().value(), 0b11011);
+}
+
+#[test]
+fn to_string() {
+ assert_eq!("Value: 5", format!("Value: {}", 5u32.to_string()));
+ assert_eq!("Value: 5", format!("Value: {}", u5::new(5).to_string()));
+ assert_eq!("Value: 5", format!("Value: {}", u11::new(5).to_string()));
+ assert_eq!("Value: 5", format!("Value: {}", u17::new(5).to_string()));
+ assert_eq!("Value: 5", format!("Value: {}", u38::new(5).to_string()));
+ assert_eq!("Value: 60", format!("Value: {}", u65::new(60).to_string()));
+}
+
+#[test]
+fn display() {
+ assert_eq!("Value: 5", format!("Value: {}", 5u32));
+ assert_eq!("Value: 5", format!("Value: {}", u5::new(5)));
+ assert_eq!("Value: 5", format!("Value: {}", u11::new(5)));
+ assert_eq!("Value: 5", format!("Value: {}", u17::new(5)));
+ assert_eq!("Value: 5", format!("Value: {}", u38::new(5)));
+ assert_eq!("Value: 60", format!("Value: {}", u65::new(60)));
+}
+
+#[test]
+fn debug() {
+ assert_eq!("Value: 5", format!("Value: {:?}", 5u32));
+ assert_eq!("Value: 5", format!("Value: {:?}", u5::new(5)));
+ assert_eq!("Value: 5", format!("Value: {:?}", u11::new(5)));
+ assert_eq!("Value: 5", format!("Value: {:?}", u17::new(5)));
+ assert_eq!("Value: 5", format!("Value: {:?}", u38::new(5)));
+ assert_eq!("Value: 60", format!("Value: {:?}", u65::new(60)));
+}
+
+#[test]
+fn lower_hex() {
+ assert_eq!("Value: a", format!("Value: {:x}", 10u32));
+ assert_eq!("Value: a", format!("Value: {:x}", u5::new(10)));
+ assert_eq!("Value: a", format!("Value: {:x}", u11::new(10)));
+ assert_eq!("Value: a", format!("Value: {:x}", u17::new(10)));
+ assert_eq!("Value: a", format!("Value: {:x}", u38::new(10)));
+ assert_eq!("Value: 3c", format!("Value: {:x}", 60));
+ assert_eq!("Value: 3c", format!("Value: {:x}", u65::new(60)));
+}
+
+#[test]
+fn upper_hex() {
+ assert_eq!("Value: A", format!("Value: {:X}", 10u32));
+ assert_eq!("Value: A", format!("Value: {:X}", u5::new(10)));
+ assert_eq!("Value: A", format!("Value: {:X}", u11::new(10)));
+ assert_eq!("Value: A", format!("Value: {:X}", u17::new(10)));
+ assert_eq!("Value: A", format!("Value: {:X}", u38::new(10)));
+ assert_eq!("Value: 3C", format!("Value: {:X}", 60));
+ assert_eq!("Value: 3C", format!("Value: {:X}", u65::new(60)));
+}
+
+#[test]
+fn lower_hex_fancy() {
+ assert_eq!("Value: 0xa", format!("Value: {:#x}", 10u32));
+ assert_eq!("Value: 0xa", format!("Value: {:#x}", u5::new(10)));
+ assert_eq!("Value: 0xa", format!("Value: {:#x}", u11::new(10)));
+ assert_eq!("Value: 0xa", format!("Value: {:#x}", u17::new(10)));
+ assert_eq!("Value: 0xa", format!("Value: {:#x}", u38::new(10)));
+ assert_eq!("Value: 0x3c", format!("Value: {:#x}", 60));
+ assert_eq!("Value: 0x3c", format!("Value: {:#x}", u65::new(60)));
+}
+
+#[test]
+fn upper_hex_fancy() {
+ assert_eq!("Value: 0xA", format!("Value: {:#X}", 10u32));
+ assert_eq!("Value: 0xA", format!("Value: {:#X}", u5::new(10)));
+ assert_eq!("Value: 0xA", format!("Value: {:#X}", u11::new(10)));
+ assert_eq!("Value: 0xA", format!("Value: {:#X}", u17::new(10)));
+ assert_eq!("Value: 0xA", format!("Value: {:#X}", u38::new(10)));
+ assert_eq!("Value: 0x3C", format!("Value: {:#X}", 60));
+ assert_eq!("Value: 0x3C", format!("Value: {:#X}", u65::new(60)));
+}
+
+#[test]
+fn debug_lower_hex_fancy() {
+ assert_eq!("Value: 0xa", format!("Value: {:#x?}", 10u32));
+ assert_eq!("Value: 0xa", format!("Value: {:#x?}", u5::new(10)));
+ assert_eq!("Value: 0xa", format!("Value: {:#x?}", u11::new(10)));
+ assert_eq!("Value: 0xa", format!("Value: {:#x?}", u17::new(10)));
+ assert_eq!("Value: 0xa", format!("Value: {:#x?}", u38::new(10)));
+ assert_eq!("Value: 0x3c", format!("Value: {:#x?}", 60));
+ assert_eq!("Value: 0x3c", format!("Value: {:#x?}", u65::new(60)));
+}
+
+#[test]
+fn debug_upper_hex_fancy() {
+ assert_eq!("Value: 0xA", format!("Value: {:#X?}", 10u32));
+ assert_eq!("Value: 0xA", format!("Value: {:#X?}", u5::new(10)));
+ assert_eq!("Value: 0xA", format!("Value: {:#X?}", u11::new(10)));
+ assert_eq!("Value: 0xA", format!("Value: {:#X?}", u17::new(10)));
+ assert_eq!("Value: 0xA", format!("Value: {:#X?}", u38::new(10)));
+ assert_eq!("Value: 0x3C", format!("Value: {:#X?}", 60));
+ assert_eq!("Value: 0x3C", format!("Value: {:#X?}", u65::new(60)));
+}
+
+#[test]
+fn octal() {
+ assert_eq!("Value: 12", format!("Value: {:o}", 10u32));
+ assert_eq!("Value: 12", format!("Value: {:o}", u5::new(10)));
+ assert_eq!("Value: 12", format!("Value: {:o}", u11::new(10)));
+ assert_eq!("Value: 12", format!("Value: {:o}", u17::new(10)));
+ assert_eq!("Value: 12", format!("Value: {:o}", u38::new(10)));
+ assert_eq!("Value: 74", format!("Value: {:o}", 0o74));
+ assert_eq!("Value: 74", format!("Value: {:o}", u65::new(0o74)));
+}
+
+#[test]
+fn binary() {
+ assert_eq!("Value: 1010", format!("Value: {:b}", 10u32));
+ assert_eq!("Value: 1010", format!("Value: {:b}", u5::new(10)));
+ assert_eq!("Value: 1010", format!("Value: {:b}", u11::new(10)));
+ assert_eq!("Value: 1010", format!("Value: {:b}", u17::new(10)));
+ assert_eq!("Value: 1010", format!("Value: {:b}", u38::new(10)));
+ assert_eq!("Value: 111100", format!("Value: {:b}", 0b111100));
+ assert_eq!("Value: 111100", format!("Value: {:b}", u65::new(0b111100)));
+}
+
+#[test]
+fn hash() {
+ let mut hashmap = HashMap::<u5, u7>::new();
+
+ hashmap.insert(u5::new(11), u7::new(9));
+
+ assert_eq!(Some(&u7::new(9)), hashmap.get(&u5::new(11)));
+ assert_eq!(None, hashmap.get(&u5::new(12)));
+}
+
+#[test]
+fn swap_bytes() {
+ assert_eq!(u24::new(0x12_34_56).swap_bytes(), u24::new(0x56_34_12));
+ assert_eq!(
+ UInt::<u64, 24>::new(0x12_34_56).swap_bytes(),
+ UInt::<u64, 24>::new(0x56_34_12)
+ );
+ assert_eq!(
+ UInt::<u128, 24>::new(0x12_34_56).swap_bytes(),
+ UInt::<u128, 24>::new(0x56_34_12)
+ );
+
+ assert_eq!(
+ u40::new(0x12_34_56_78_9A).swap_bytes(),
+ u40::new(0x9A_78_56_34_12)
+ );
+ assert_eq!(
+ UInt::<u128, 40>::new(0x12_34_56_78_9A).swap_bytes(),
+ UInt::<u128, 40>::new(0x9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u48::new(0x12_34_56_78_9A_BC).swap_bytes(),
+ u48::new(0xBC_9A_78_56_34_12)
+ );
+ assert_eq!(
+ UInt::<u128, 48>::new(0x12_34_56_78_9A_BC).swap_bytes(),
+ UInt::<u128, 48>::new(0xBC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u56::new(0x12_34_56_78_9A_BC_DE).swap_bytes(),
+ u56::new(0xDE_BC_9A_78_56_34_12)
+ );
+ assert_eq!(
+ UInt::<u128, 56>::new(0x12_34_56_78_9A_BC_DE).swap_bytes(),
+ UInt::<u128, 56>::new(0xDE_BC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u72::new(0x12_34_56_78_9A_BC_DE_FE_DC).swap_bytes(),
+ u72::new(0xDC_FE_DE_BC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u80::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA).swap_bytes(),
+ u80::new(0xBA_DC_FE_DE_BC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u88::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98).swap_bytes(),
+ u88::new(0x98_BA_DC_FE_DE_BC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u96::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76).swap_bytes(),
+ u96::new(0x76_98_BA_DC_FE_DE_BC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u104::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54).swap_bytes(),
+ u104::new(0x54_76_98_BA_DC_FE_DE_BC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u112::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32).swap_bytes(),
+ u112::new(0x32_54_76_98_BA_DC_FE_DE_BC_9A_78_56_34_12)
+ );
+
+ assert_eq!(
+ u120::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32_10).swap_bytes(),
+ u120::new(0x10_32_54_76_98_BA_DC_FE_DE_BC_9A_78_56_34_12)
+ );
+}
+
+#[test]
+fn to_le_and_be_bytes() {
+ assert_eq!(u24::new(0x12_34_56).to_le_bytes(), [0x56, 0x34, 0x12]);
+ assert_eq!(
+ UInt::<u64, 24>::new(0x12_34_56).to_le_bytes(),
+ [0x56, 0x34, 0x12]
+ );
+ assert_eq!(
+ UInt::<u128, 24>::new(0x12_34_56).to_le_bytes(),
+ [0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(u24::new(0x12_34_56).to_be_bytes(), [0x12, 0x34, 0x56]);
+ assert_eq!(
+ UInt::<u64, 24>::new(0x12_34_56).to_be_bytes(),
+ [0x12, 0x34, 0x56]
+ );
+ assert_eq!(
+ UInt::<u128, 24>::new(0x12_34_56).to_be_bytes(),
+ [0x12, 0x34, 0x56]
+ );
+
+ assert_eq!(
+ u40::new(0x12_34_56_78_9A).to_le_bytes(),
+ [0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+ assert_eq!(
+ UInt::<u128, 40>::new(0x12_34_56_78_9A).to_le_bytes(),
+ [0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u40::new(0x12_34_56_78_9A).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A]
+ );
+ assert_eq!(
+ UInt::<u128, 40>::new(0x12_34_56_78_9A).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A]
+ );
+
+ assert_eq!(
+ u48::new(0x12_34_56_78_9A_BC).to_le_bytes(),
+ [0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+ assert_eq!(
+ UInt::<u128, 48>::new(0x12_34_56_78_9A_BC).to_le_bytes(),
+ [0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u48::new(0x12_34_56_78_9A_BC).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC]
+ );
+ assert_eq!(
+ UInt::<u128, 48>::new(0x12_34_56_78_9A_BC).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC]
+ );
+
+ assert_eq!(
+ u56::new(0x12_34_56_78_9A_BC_DE).to_le_bytes(),
+ [0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+ assert_eq!(
+ UInt::<u128, 56>::new(0x12_34_56_78_9A_BC_DE).to_le_bytes(),
+ [0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u56::new(0x12_34_56_78_9A_BC_DE).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE]
+ );
+ assert_eq!(
+ UInt::<u128, 56>::new(0x12_34_56_78_9A_BC_DE).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE]
+ );
+
+ assert_eq!(
+ u72::new(0x12_34_56_78_9A_BC_DE_FE_DC).to_le_bytes(),
+ [0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u72::new(0x12_34_56_78_9A_BC_DE_FE_DC).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC]
+ );
+
+ assert_eq!(
+ u80::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA).to_le_bytes(),
+ [0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u80::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA]
+ );
+
+ assert_eq!(
+ u88::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98).to_le_bytes(),
+ [0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u88::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98).to_be_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98]
+ );
+
+ assert_eq!(
+ u96::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76).to_le_bytes(),
+        [0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u96::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76).to_be_bytes(),
+        [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76]
+ );
+
+ assert_eq!(
+ u104::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54).to_le_bytes(),
+        [0x54, 0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u104::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54).to_be_bytes(),
+        [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54]
+ );
+
+ assert_eq!(
+ u112::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32).to_le_bytes(),
+        [0x32, 0x54, 0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+
+ assert_eq!(
+ u112::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32).to_be_bytes(),
+        [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32]
+ );
+
+ assert_eq!(
+        u120::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32_10).to_le_bytes(),
+ [
+            0x10, 0x32, 0x54, 0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34,
+ 0x12
+ ]
+ );
+
+ assert_eq!(
+        u120::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32_10).to_be_bytes(),
+ [
+            0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32,
+ 0x10
+ ]
+ );
+}
+
+#[test]
+fn from_le_and_be_bytes() {
+ assert_eq!(u24::from_le_bytes([0x56, 0x34, 0x12]), u24::new(0x12_34_56));
+ assert_eq!(
+ UInt::<u64, 24>::from_le_bytes([0x56, 0x34, 0x12]),
+ UInt::<u64, 24>::new(0x12_34_56)
+ );
+ assert_eq!(
+ UInt::<u128, 24>::from_le_bytes([0x56, 0x34, 0x12]),
+ UInt::<u128, 24>::new(0x12_34_56)
+ );
+
+ assert_eq!(u24::from_be_bytes([0x12, 0x34, 0x56]), u24::new(0x12_34_56));
+ assert_eq!(
+ UInt::<u64, 24>::from_be_bytes([0x12, 0x34, 0x56]),
+ UInt::<u64, 24>::new(0x12_34_56)
+ );
+ assert_eq!(
+ UInt::<u128, 24>::from_be_bytes([0x12, 0x34, 0x56]),
+ UInt::<u128, 24>::new(0x12_34_56)
+ );
+
+ assert_eq!(
+ u40::from_le_bytes([0x9A, 0x78, 0x56, 0x34, 0x12]),
+ u40::new(0x12_34_56_78_9A)
+ );
+ assert_eq!(
+ UInt::<u128, 40>::from_le_bytes([0x9A, 0x78, 0x56, 0x34, 0x12]),
+ UInt::<u128, 40>::new(0x12_34_56_78_9A)
+ );
+
+ assert_eq!(
+ u40::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A]),
+ u40::new(0x12_34_56_78_9A)
+ );
+ assert_eq!(
+ UInt::<u128, 40>::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A]),
+ UInt::<u128, 40>::new(0x12_34_56_78_9A)
+ );
+
+ assert_eq!(
+ u48::from_le_bytes([0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]),
+ u48::new(0x12_34_56_78_9A_BC)
+ );
+ assert_eq!(
+ UInt::<u128, 48>::from_le_bytes([0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]),
+ UInt::<u128, 48>::new(0x12_34_56_78_9A_BC)
+ );
+
+ assert_eq!(
+ u48::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC]),
+ u48::new(0x12_34_56_78_9A_BC)
+ );
+ assert_eq!(
+ UInt::<u128, 48>::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC]),
+ UInt::<u128, 48>::new(0x12_34_56_78_9A_BC)
+ );
+
+ assert_eq!(
+ u56::from_le_bytes([0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]),
+ u56::new(0x12_34_56_78_9A_BC_DE)
+ );
+ assert_eq!(
+        UInt::<u128, 56>::from_le_bytes([0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]),
+ UInt::<u128, 56>::new(0x12_34_56_78_9A_BC_DE)
+ );
+
+ assert_eq!(
+ u56::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE]),
+ u56::new(0x12_34_56_78_9A_BC_DE)
+ );
+ assert_eq!(
+        UInt::<u128, 56>::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE]),
+ UInt::<u128, 56>::new(0x12_34_56_78_9A_BC_DE)
+ );
+
+ assert_eq!(
+        u72::from_le_bytes([0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]),
+ u72::new(0x12_34_56_78_9A_BC_DE_FE_DC)
+ );
+
+ assert_eq!(
+        u72::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC]),
+ u72::new(0x12_34_56_78_9A_BC_DE_FE_DC)
+ );
+
+ assert_eq!(
+        u80::from_le_bytes([0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]),
+ u80::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA)
+ );
+
+ assert_eq!(
+        u80::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA]),
+ u80::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA)
+ );
+
+ assert_eq!(
+        u88::from_le_bytes([0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12]),
+ u88::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98)
+ );
+
+ assert_eq!(
+        u88::from_be_bytes([0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98]),
+ u88::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98)
+ );
+
+ assert_eq!(
+ u96::from_le_bytes([
+            0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12
+ ]),
+ u96::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76)
+ );
+
+ assert_eq!(
+ u96::from_be_bytes([
+            0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76
+ ]),
+ u96::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76)
+ );
+
+ assert_eq!(
+ u104::from_le_bytes([
+            0x54, 0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12
+ ]),
+ u104::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54)
+ );
+
+ assert_eq!(
+ u104::from_be_bytes([
+            0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54
+ ]),
+ u104::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54)
+ );
+
+ assert_eq!(
+ u112::from_le_bytes([
+            0x32, 0x54, 0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12
+ ]),
+ u112::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32)
+ );
+
+ assert_eq!(
+ u112::from_be_bytes([
+            0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32
+ ]),
+ u112::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32)
+ );
+
+ assert_eq!(
+ u120::from_le_bytes([
+            0x10, 0x32, 0x54, 0x76, 0x98, 0xBA, 0xDC, 0xFE, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34,
+ 0x12
+ ]),
+ u120::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32_10)
+ );
+
+ assert_eq!(
+ u120::from_be_bytes([
+            0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32,
+ 0x10
+ ]),
+ u120::new(0x12_34_56_78_9A_BC_DE_FE_DC_BA_98_76_54_32_10)
+ );
+}
+
+#[test]
+fn to_ne_bytes() {
+ if cfg!(target_endian = "little") {
+ assert_eq!(
+ u40::new(0x12_34_56_78_9A).to_ne_bytes(),
+ [0x9A, 0x78, 0x56, 0x34, 0x12]
+ );
+ } else {
+ assert_eq!(
+ u40::new(0x12_34_56_78_9A).to_ne_bytes(),
+ [0x12, 0x34, 0x56, 0x78, 0x9A]
+ );
+ }
+}
+
+#[test]
+fn from_ne_bytes() {
+ if cfg!(target_endian = "little") {
+ assert_eq!(
+ u40::from_ne_bytes([0x9A, 0x78, 0x56, 0x34, 0x12]),
+ u40::new(0x12_34_56_78_9A)
+ );
+ } else {
+ assert_eq!(
+ u40::from_ne_bytes([0x12, 0x34, 0x56, 0x78, 0x9A]),
+ u40::new(0x12_34_56_78_9A)
+ );
+ }
+}
+
+#[test]
+fn simple_le_be() {
+ const REGULAR: u40 = u40::new(0x12_34_56_78_9A);
+ const SWAPPED: u40 = u40::new(0x9A_78_56_34_12);
+ if cfg!(target_endian = "little") {
+ assert_eq!(REGULAR.to_le(), REGULAR);
+ assert_eq!(REGULAR.to_be(), SWAPPED);
+ assert_eq!(u40::from_le(REGULAR), REGULAR);
+ assert_eq!(u40::from_be(REGULAR), SWAPPED);
+ } else {
+ assert_eq!(REGULAR.to_le(), SWAPPED);
+ assert_eq!(REGULAR.to_be(), REGULAR);
+ assert_eq!(u40::from_le(REGULAR), SWAPPED);
+ assert_eq!(u40::from_be(REGULAR), REGULAR);
+ }
+}
+
+#[test]
+fn wrapping_add() {
+ assert_eq!(u7::new(120).wrapping_add(u7::new(1)), u7::new(121));
+ assert_eq!(u7::new(120).wrapping_add(u7::new(10)), u7::new(2));
+ assert_eq!(u7::new(127).wrapping_add(u7::new(127)), u7::new(126));
+}
+
+#[test]
+fn wrapping_sub() {
+ assert_eq!(u7::new(120).wrapping_sub(u7::new(1)), u7::new(119));
+ assert_eq!(u7::new(10).wrapping_sub(u7::new(20)), u7::new(118));
+ assert_eq!(u7::new(0).wrapping_sub(u7::new(1)), u7::new(127));
+}
+
+#[test]
+fn wrapping_mul() {
+ assert_eq!(u7::new(120).wrapping_mul(u7::new(0)), u7::new(0));
+ assert_eq!(u7::new(120).wrapping_mul(u7::new(1)), u7::new(120));
+
+ // Overflow u7
+ assert_eq!(u7::new(120).wrapping_mul(u7::new(2)), u7::new(112));
+
+ // Overflow the underlying type
+ assert_eq!(u7::new(120).wrapping_mul(u7::new(3)), u7::new(104));
+}
+
+#[test]
+fn wrapping_div() {
+ assert_eq!(u7::new(120).wrapping_div(u7::new(1)), u7::new(120));
+ assert_eq!(u7::new(120).wrapping_div(u7::new(2)), u7::new(60));
+ assert_eq!(u7::new(120).wrapping_div(u7::new(120)), u7::new(1));
+ assert_eq!(u7::new(120).wrapping_div(u7::new(121)), u7::new(0));
+}
+
+#[should_panic]
+#[test]
+fn wrapping_div_by_zero() {
+ let _ = u7::new(120).wrapping_div(u7::new(0));
+}
+
+#[test]
+fn wrapping_shl() {
+ assert_eq!(u7::new(0b010_1101).wrapping_shl(0), u7::new(0b010_1101));
+ assert_eq!(u7::new(0b010_1101).wrapping_shl(1), u7::new(0b101_1010));
+ assert_eq!(u7::new(0b010_1101).wrapping_shl(6), u7::new(0b100_0000));
+ assert_eq!(u7::new(0b010_1101).wrapping_shl(7), u7::new(0b010_1101));
+ assert_eq!(u7::new(0b010_1101).wrapping_shl(8), u7::new(0b101_1010));
+ assert_eq!(u7::new(0b010_1101).wrapping_shl(14), u7::new(0b010_1101));
+ assert_eq!(u7::new(0b010_1101).wrapping_shl(15), u7::new(0b101_1010));
+}
+
+#[test]
+fn wrapping_shr() {
+ assert_eq!(u7::new(0b010_1101).wrapping_shr(0), u7::new(0b010_1101));
+ assert_eq!(u7::new(0b010_1101).wrapping_shr(1), u7::new(0b001_0110));
+ assert_eq!(u7::new(0b010_1101).wrapping_shr(5), u7::new(0b000_0001));
+ assert_eq!(u7::new(0b010_1101).wrapping_shr(7), u7::new(0b010_1101));
+ assert_eq!(u7::new(0b010_1101).wrapping_shr(8), u7::new(0b001_0110));
+ assert_eq!(u7::new(0b010_1101).wrapping_shr(14), u7::new(0b010_1101));
+ assert_eq!(u7::new(0b010_1101).wrapping_shr(15), u7::new(0b001_0110));
+}
+
+#[test]
+fn saturating_add() {
+ assert_eq!(u7::new(120).saturating_add(u7::new(1)), u7::new(121));
+ assert_eq!(u7::new(120).saturating_add(u7::new(10)), u7::new(127));
+ assert_eq!(u7::new(127).saturating_add(u7::new(127)), u7::new(127));
+ assert_eq!(
+ UInt::<u8, 8>::new(250).saturating_add(UInt::<u8, 8>::new(10)),
+ UInt::<u8, 8>::new(255)
+ );
+}
+
+#[test]
+fn saturating_sub() {
+ assert_eq!(u7::new(120).saturating_sub(u7::new(30)), u7::new(90));
+ assert_eq!(u7::new(120).saturating_sub(u7::new(119)), u7::new(1));
+ assert_eq!(u7::new(120).saturating_sub(u7::new(120)), u7::new(0));
+ assert_eq!(u7::new(120).saturating_sub(u7::new(121)), u7::new(0));
+ assert_eq!(u7::new(0).saturating_sub(u7::new(127)), u7::new(0));
+}
+
+#[test]
+fn saturating_mul() {
+ // Fast-path: Only the arbitrary int is bounds checked
+ assert_eq!(u4::new(5).saturating_mul(u4::new(2)), u4::new(10));
+ assert_eq!(u4::new(5).saturating_mul(u4::new(3)), u4::new(15));
+ assert_eq!(u4::new(5).saturating_mul(u4::new(4)), u4::new(15));
+ assert_eq!(u4::new(5).saturating_mul(u4::new(5)), u4::new(15));
+ assert_eq!(u4::new(5).saturating_mul(u4::new(6)), u4::new(15));
+ assert_eq!(u4::new(5).saturating_mul(u4::new(7)), u4::new(15));
+
+ // Slow-path (well, one more comparison)
+ assert_eq!(u5::new(5).saturating_mul(u5::new(2)), u5::new(10));
+ assert_eq!(u5::new(5).saturating_mul(u5::new(3)), u5::new(15));
+ assert_eq!(u5::new(5).saturating_mul(u5::new(4)), u5::new(20));
+ assert_eq!(u5::new(5).saturating_mul(u5::new(5)), u5::new(25));
+ assert_eq!(u5::new(5).saturating_mul(u5::new(6)), u5::new(30));
+ assert_eq!(u5::new(5).saturating_mul(u5::new(7)), u5::new(31));
+ assert_eq!(u5::new(30).saturating_mul(u5::new(1)), u5::new(30));
+ assert_eq!(u5::new(30).saturating_mul(u5::new(2)), u5::new(31));
+ assert_eq!(u5::new(30).saturating_mul(u5::new(10)), u5::new(31));
+}
+
+#[test]
+fn saturating_div() {
+ assert_eq!(u4::new(5).saturating_div(u4::new(1)), u4::new(5));
+ assert_eq!(u4::new(5).saturating_div(u4::new(2)), u4::new(2));
+ assert_eq!(u4::new(5).saturating_div(u4::new(3)), u4::new(1));
+ assert_eq!(u4::new(5).saturating_div(u4::new(4)), u4::new(1));
+ assert_eq!(u4::new(5).saturating_div(u4::new(5)), u4::new(1));
+}
+
+#[test]
+#[should_panic]
+fn saturating_divby0() {
+    // saturating_div panics on division by zero
+ let _ = u4::new(5).saturating_div(u4::new(0));
+}
+
+#[test]
+fn saturating_pow() {
+ assert_eq!(u7::new(5).saturating_pow(0), u7::new(1));
+ assert_eq!(u7::new(5).saturating_pow(1), u7::new(5));
+ assert_eq!(u7::new(5).saturating_pow(2), u7::new(25));
+ assert_eq!(u7::new(5).saturating_pow(3), u7::new(125));
+ assert_eq!(u7::new(5).saturating_pow(4), u7::new(127));
+ assert_eq!(u7::new(5).saturating_pow(255), u7::new(127));
+}
+
+#[test]
+fn checked_add() {
+ assert_eq!(u7::new(120).checked_add(u7::new(1)), Some(u7::new(121)));
+ assert_eq!(u7::new(120).checked_add(u7::new(7)), Some(u7::new(127)));
+ assert_eq!(u7::new(120).checked_add(u7::new(10)), None);
+ assert_eq!(u7::new(127).checked_add(u7::new(127)), None);
+ assert_eq!(
+ UInt::<u8, 8>::new(250).checked_add(UInt::<u8, 8>::new(10)),
+ None
+ );
+}
+
+#[test]
+fn checked_sub() {
+ assert_eq!(u7::new(120).checked_sub(u7::new(30)), Some(u7::new(90)));
+ assert_eq!(u7::new(120).checked_sub(u7::new(119)), Some(u7::new(1)));
+ assert_eq!(u7::new(120).checked_sub(u7::new(120)), Some(u7::new(0)));
+ assert_eq!(u7::new(120).checked_sub(u7::new(121)), None);
+ assert_eq!(u7::new(0).checked_sub(u7::new(127)), None);
+}
+
+#[test]
+fn checked_mul() {
+ // Fast-path: Only the arbitrary int is bounds checked
+ assert_eq!(u4::new(5).checked_mul(u4::new(2)), Some(u4::new(10)));
+ assert_eq!(u4::new(5).checked_mul(u4::new(3)), Some(u4::new(15)));
+ assert_eq!(u4::new(5).checked_mul(u4::new(4)), None);
+ assert_eq!(u4::new(5).checked_mul(u4::new(5)), None);
+ assert_eq!(u4::new(5).checked_mul(u4::new(6)), None);
+ assert_eq!(u4::new(5).checked_mul(u4::new(7)), None);
+
+ // Slow-path (well, one more comparison)
+ assert_eq!(u5::new(5).checked_mul(u5::new(2)), Some(u5::new(10)));
+ assert_eq!(u5::new(5).checked_mul(u5::new(3)), Some(u5::new(15)));
+ assert_eq!(u5::new(5).checked_mul(u5::new(4)), Some(u5::new(20)));
+ assert_eq!(u5::new(5).checked_mul(u5::new(5)), Some(u5::new(25)));
+ assert_eq!(u5::new(5).checked_mul(u5::new(6)), Some(u5::new(30)));
+ assert_eq!(u5::new(5).checked_mul(u5::new(7)), None);
+ assert_eq!(u5::new(30).checked_mul(u5::new(1)), Some(u5::new(30)));
+ assert_eq!(u5::new(30).checked_mul(u5::new(2)), None);
+ assert_eq!(u5::new(30).checked_mul(u5::new(10)), None);
+}
+
+#[test]
+fn checked_div() {
+    // checked_div handles division by zero without panicking, unlike saturating_div
+ assert_eq!(u4::new(5).checked_div(u4::new(0)), None);
+ assert_eq!(u4::new(5).checked_div(u4::new(1)), Some(u4::new(5)));
+ assert_eq!(u4::new(5).checked_div(u4::new(2)), Some(u4::new(2)));
+ assert_eq!(u4::new(5).checked_div(u4::new(3)), Some(u4::new(1)));
+ assert_eq!(u4::new(5).checked_div(u4::new(4)), Some(u4::new(1)));
+ assert_eq!(u4::new(5).checked_div(u4::new(5)), Some(u4::new(1)));
+}
+
+#[test]
+fn checked_shl() {
+ assert_eq!(
+ u7::new(0b010_1101).checked_shl(0),
+ Some(u7::new(0b010_1101))
+ );
+ assert_eq!(
+ u7::new(0b010_1101).checked_shl(1),
+ Some(u7::new(0b101_1010))
+ );
+ assert_eq!(
+ u7::new(0b010_1101).checked_shl(6),
+ Some(u7::new(0b100_0000))
+ );
+ assert_eq!(u7::new(0b010_1101).checked_shl(7), None);
+ assert_eq!(u7::new(0b010_1101).checked_shl(8), None);
+ assert_eq!(u7::new(0b010_1101).checked_shl(14), None);
+ assert_eq!(u7::new(0b010_1101).checked_shl(15), None);
+}
+
+#[test]
+fn checked_shr() {
+ assert_eq!(
+ u7::new(0b010_1101).checked_shr(0),
+ Some(u7::new(0b010_1101))
+ );
+ assert_eq!(
+ u7::new(0b010_1101).checked_shr(1),
+ Some(u7::new(0b001_0110))
+ );
+ assert_eq!(
+ u7::new(0b010_1101).checked_shr(5),
+ Some(u7::new(0b000_0001))
+ );
+ assert_eq!(u7::new(0b010_1101).checked_shr(7), None);
+ assert_eq!(u7::new(0b010_1101).checked_shr(8), None);
+ assert_eq!(u7::new(0b010_1101).checked_shr(14), None);
+ assert_eq!(u7::new(0b010_1101).checked_shr(15), None);
+}
+
+#[test]
+fn overflowing_add() {
+ assert_eq!(
+ u7::new(120).overflowing_add(u7::new(1)),
+ (u7::new(121), false)
+ );
+ assert_eq!(
+ u7::new(120).overflowing_add(u7::new(7)),
+ (u7::new(127), false)
+ );
+ assert_eq!(
+ u7::new(120).overflowing_add(u7::new(10)),
+ (u7::new(2), true)
+ );
+ assert_eq!(
+ u7::new(127).overflowing_add(u7::new(127)),
+ (u7::new(126), true)
+ );
+ assert_eq!(
+ UInt::<u8, 8>::new(250).overflowing_add(UInt::<u8, 8>::new(5)),
+ (UInt::<u8, 8>::new(255), false)
+ );
+ assert_eq!(
+ UInt::<u8, 8>::new(250).overflowing_add(UInt::<u8, 8>::new(10)),
+ (UInt::<u8, 8>::new(4), true)
+ );
+}
+
+#[test]
+fn overflowing_sub() {
+ assert_eq!(
+ u7::new(120).overflowing_sub(u7::new(30)),
+ (u7::new(90), false)
+ );
+ assert_eq!(
+ u7::new(120).overflowing_sub(u7::new(119)),
+ (u7::new(1), false)
+ );
+ assert_eq!(
+ u7::new(120).overflowing_sub(u7::new(120)),
+ (u7::new(0), false)
+ );
+ assert_eq!(
+ u7::new(120).overflowing_sub(u7::new(121)),
+ (u7::new(127), true)
+ );
+ assert_eq!(u7::new(0).overflowing_sub(u7::new(127)), (u7::new(1), true));
+}
+
+#[test]
+fn overflowing_mul() {
+ // Fast-path: Only the arbitrary int is bounds checked
+ assert_eq!(u4::new(5).overflowing_mul(u4::new(2)), (u4::new(10), false));
+ assert_eq!(u4::new(5).overflowing_mul(u4::new(3)), (u4::new(15), false));
+ assert_eq!(u4::new(5).overflowing_mul(u4::new(4)), (u4::new(4), true));
+ assert_eq!(u4::new(5).overflowing_mul(u4::new(5)), (u4::new(9), true));
+ assert_eq!(u4::new(5).overflowing_mul(u4::new(6)), (u4::new(14), true));
+ assert_eq!(u4::new(5).overflowing_mul(u4::new(7)), (u4::new(3), true));
+
+ // Slow-path (well, one more comparison)
+ assert_eq!(u5::new(5).overflowing_mul(u5::new(2)), (u5::new(10), false));
+ assert_eq!(u5::new(5).overflowing_mul(u5::new(3)), (u5::new(15), false));
+ assert_eq!(u5::new(5).overflowing_mul(u5::new(4)), (u5::new(20), false));
+ assert_eq!(u5::new(5).overflowing_mul(u5::new(5)), (u5::new(25), false));
+ assert_eq!(u5::new(5).overflowing_mul(u5::new(6)), (u5::new(30), false));
+ assert_eq!(u5::new(5).overflowing_mul(u5::new(7)), (u5::new(3), true));
+ assert_eq!(
+ u5::new(30).overflowing_mul(u5::new(1)),
+ (u5::new(30), false)
+ );
+ assert_eq!(u5::new(30).overflowing_mul(u5::new(2)), (u5::new(28), true));
+ assert_eq!(
+ u5::new(30).overflowing_mul(u5::new(10)),
+ (u5::new(12), true)
+ );
+}
+
+#[test]
+fn overflowing_div() {
+ assert_eq!(u4::new(5).overflowing_div(u4::new(1)), (u4::new(5), false));
+ assert_eq!(u4::new(5).overflowing_div(u4::new(2)), (u4::new(2), false));
+ assert_eq!(u4::new(5).overflowing_div(u4::new(3)), (u4::new(1), false));
+ assert_eq!(u4::new(5).overflowing_div(u4::new(4)), (u4::new(1), false));
+ assert_eq!(u4::new(5).overflowing_div(u4::new(5)), (u4::new(1), false));
+}
+
+#[should_panic]
+#[test]
+fn overflowing_div_by_zero() {
+ let _ = u4::new(5).overflowing_div(u4::new(0));
+}
+
+#[test]
+fn overflowing_shl() {
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shl(0),
+ (u7::new(0b010_1101), false)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shl(1),
+ (u7::new(0b101_1010), false)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shl(6),
+ (u7::new(0b100_0000), false)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shl(7),
+ (u7::new(0b010_1101), true)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shl(8),
+ (u7::new(0b101_1010), true)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shl(14),
+ (u7::new(0b010_1101), true)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shl(15),
+ (u7::new(0b101_1010), true)
+ );
+}
+
+#[test]
+fn overflowing_shr() {
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shr(0),
+ (u7::new(0b010_1101), false)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shr(1),
+ (u7::new(0b001_0110), false)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shr(5),
+ (u7::new(0b000_0001), false)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shr(7),
+ (u7::new(0b010_1101), true)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shr(8),
+ (u7::new(0b001_0110), true)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shr(14),
+ (u7::new(0b010_1101), true)
+ );
+ assert_eq!(
+ u7::new(0b010_1101).overflowing_shr(15),
+ (u7::new(0b001_0110), true)
+ );
+}
+
+#[test]
+fn reverse_bits() {
+ const A: u5 = u5::new(0b11101);
+ const B: u5 = A.reverse_bits();
+ assert_eq!(B, u5::new(0b10111));
+
+ assert_eq!(
+ UInt::<u128, 6>::new(0b101011),
+ UInt::<u128, 6>::new(0b110101).reverse_bits()
+ );
+
+ assert_eq!(u1::new(1).reverse_bits().value(), 1);
+ assert_eq!(u1::new(0).reverse_bits().value(), 0);
+}
+
+#[test]
+fn count_ones_and_zeros() {
+ assert_eq!(4, u5::new(0b10111).count_ones());
+ assert_eq!(1, u5::new(0b10111).count_zeros());
+ assert_eq!(1, u5::new(0b10111).leading_ones());
+ assert_eq!(0, u5::new(0b10111).leading_zeros());
+ assert_eq!(3, u5::new(0b10111).trailing_ones());
+ assert_eq!(0, u5::new(0b10111).trailing_zeros());
+
+ assert_eq!(2, u5::new(0b10100).trailing_zeros());
+ assert_eq!(3, u5::new(0b00011).leading_zeros());
+
+ assert_eq!(0, u5::new(0b00000).count_ones());
+ assert_eq!(5, u5::new(0b00000).count_zeros());
+
+ assert_eq!(5, u5::new(0b11111).count_ones());
+ assert_eq!(0, u5::new(0b11111).count_zeros());
+
+ assert_eq!(3, u127::new(0b111).count_ones());
+ assert_eq!(124, u127::new(0b111).count_zeros());
+}
+
+#[test]
+fn rotate_left() {
+ assert_eq!(u1::new(0b1), u1::new(0b1).rotate_left(1));
+ assert_eq!(u2::new(0b01), u2::new(0b10).rotate_left(1));
+
+ assert_eq!(u5::new(0b10111), u5::new(0b10111).rotate_left(0));
+ assert_eq!(u5::new(0b01111), u5::new(0b10111).rotate_left(1));
+ assert_eq!(u5::new(0b11110), u5::new(0b10111).rotate_left(2));
+ assert_eq!(u5::new(0b11101), u5::new(0b10111).rotate_left(3));
+ assert_eq!(u5::new(0b11011), u5::new(0b10111).rotate_left(4));
+ assert_eq!(u5::new(0b10111), u5::new(0b10111).rotate_left(5));
+ assert_eq!(u5::new(0b01111), u5::new(0b10111).rotate_left(6));
+ assert_eq!(u5::new(0b01111), u5::new(0b10111).rotate_left(556));
+
+ assert_eq!(u24::new(0x0FFEEC), u24::new(0xC0FFEE).rotate_left(4));
+}
+
+#[test]
+fn rotate_right() {
+ assert_eq!(u1::new(0b1), u1::new(0b1).rotate_right(1));
+ assert_eq!(u2::new(0b01), u2::new(0b10).rotate_right(1));
+
+ assert_eq!(u5::new(0b10011), u5::new(0b10011).rotate_right(0));
+ assert_eq!(u5::new(0b11001), u5::new(0b10011).rotate_right(1));
+ assert_eq!(u5::new(0b11100), u5::new(0b10011).rotate_right(2));
+ assert_eq!(u5::new(0b01110), u5::new(0b10011).rotate_right(3));
+ assert_eq!(u5::new(0b00111), u5::new(0b10011).rotate_right(4));
+ assert_eq!(u5::new(0b10011), u5::new(0b10011).rotate_right(5));
+ assert_eq!(u5::new(0b11001), u5::new(0b10011).rotate_right(6));
+
+ assert_eq!(u24::new(0xEC0FFE), u24::new(0xC0FFEE).rotate_right(4));
+}
+
+#[cfg(feature = "step_trait")]
+#[test]
+fn range_agrees_with_underlying() {
+ compare_range(u19::MIN, u19::MAX);
+ compare_range(u37::new(95_993), u37::new(1_994_910));
+ compare_range(u68::new(58_858_348), u68::new(58_860_000));
+ compare_range(u122::new(111_222_333_444), u122::new(111_222_444_555));
+ compare_range(u5::MIN, u5::MAX);
+ compare_range(u23::MIN, u23::MAX);
+ compare_range(u48::new(999_444), u48::new(1_005_000));
+ compare_range(u99::new(12345), u99::new(54321));
+
+    fn compare_range<T, const BITS: usize>(arb_start: UInt<T, BITS>, arb_end: UInt<T, BITS>)
+ where
+ T: Copy + Step,
+ UInt<T, BITS>: Step,
+ {
+ let arbint_range = (arb_start..=arb_end).map(UInt::value);
+ let underlying_range = arb_start.value()..=arb_end.value();
+
+ assert!(arbint_range.eq(underlying_range));
+ }
+}
+
+#[cfg(feature = "step_trait")]
+#[test]
+fn forward_checked() {
+ // In range
+ assert_eq!(Some(u7::new(121)), Step::forward_checked(u7::new(120), 1));
+ assert_eq!(Some(u7::new(127)), Step::forward_checked(u7::new(120), 7));
+
+ // Out of range
+ assert_eq!(None, Step::forward_checked(u7::new(120), 8));
+
+ // Out of range for the underlying type
+ assert_eq!(None, Step::forward_checked(u7::new(120), 140));
+}
+
+#[cfg(feature = "step_trait")]
+#[test]
+fn backward_checked() {
+ // In range
+ assert_eq!(Some(u7::new(1)), Step::backward_checked(u7::new(10), 9));
+ assert_eq!(Some(u7::new(0)), Step::backward_checked(u7::new(10), 10));
+
+    // Out of range (for both the arbitrary int and the underlying type)
+ assert_eq!(None, Step::backward_checked(u7::new(10), 11));
+}
+
+#[cfg(feature = "step_trait")]
+#[test]
+fn steps_between() {
+ assert_eq!(Some(0), Step::steps_between(&u50::new(50), &u50::new(50)));
+
+ assert_eq!(Some(4), Step::steps_between(&u24::new(5), &u24::new(9)));
+ assert_eq!(None, Step::steps_between(&u24::new(9), &u24::new(5)));
+
+    // this assumes usize is <= 64 bits. a test like this one exists in `core::iter::step`.
+ assert_eq!(
+ Some(usize::MAX),
+        Step::steps_between(&u125::new(0x7), &u125::new(0x1_0000_0000_0000_0006))
+ );
+ assert_eq!(
+ None,
+        Step::steps_between(&u125::new(0x7), &u125::new(0x1_0000_0000_0000_0007))
+ );
+}
+
+#[cfg(feature = "serde")]
+#[test]
+fn serde() {
+ use serde_test::{assert_de_tokens_error, assert_tokens, Token};
+
+ let a = u7::new(0b0101_0101);
+ assert_tokens(&a, &[Token::U8(0b0101_0101)]);
+
+ let b = u63::new(0x1234_5678_9ABC_DEFE);
+ assert_tokens(&b, &[Token::U64(0x1234_5678_9ABC_DEFE)]);
+
+    // This requires https://github.com/serde-rs/test/issues/18 (Add Token::I128 and Token::U128 to serde_test)
+ // let c = u127::new(0x1234_5678_9ABC_DEFE_DCBA_9876_5432_1010);
+    // assert_tokens(&c, &[Token::U128(0x1234_5678_9ABC_DEFE_DCBA_9876_5432_1010)]);
+
+ assert_de_tokens_error::<u2>(
+ &[Token::U8(0b0101_0101)],
+ "invalid value: integer `85`, expected a value between `0` and `3`",
+ );
+
+ assert_de_tokens_error::<u100>(
+ &[Token::I64(-1)],
+ "invalid value: integer `-1`, expected u128",
+ );
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/.cargo-checksum.json b/rust/hw/char/pl011/vendor/bilge-impl/.cargo-checksum.json
new file mode 100644
index 0000000000..304736708c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"b1ebf0b5d89b3e8387d70b589b2557196f0dc902364900889acabed886b3ce1f","README.md":"6d4fcc631ed47bbe8e654649185ce987e9630192ea25c84edd264674e30efa4d","src/bitsize.rs":"8a0878699f18889c987954e1ab918d37c434a387a5dec0f1da8864596fcb14b4","src/bitsize/split.rs":"a59469023f48c5675159b6b46c3655033b4d9adefaba3575301fb485b4868e3d","src/bitsize_internal.rs":"30e67efe8e7baff1514b1a63f1a470701dfcfbf9933cc28aaccef663069a37b0","src/bitsize_internal/struct_gen.rs":"e04bd0346cd393467b3821e977c5deddfde11603ccfa9b63b5e1e60d726d51bb","src/debug_bits.rs":"e28a9e9101c2b365d21c2f6777a389d72dde03f2fcf5fc5add2c7aed278fc1a1","src/default_bits.rs":"bd1943f685f590cdb740b0071de302725dd9c8696d5ca83c7ce9e1dea967d573","src/fmt_bits.rs":"e656c5c019081a6322a678b4bc8c259493081b5888be3a982a12b896ce63deb7","src/from_bits.rs":"fa0acec12ccf1692f47f1b44d6b8ecce0f7da5bfdb465a85546304ede15efd4f","src/lib.rs":"e402a6aabc5b3715a1be94e022f27bf8a3760248ac62d3de7b4c0112cf53b7a2","src/shared.rs":"ac0fb16da63e96d7916f3d8e43e65895c0f0bf14f1afdb2196ec0a7ae5aa2aa2","src/shared/discriminant_assigner.rs":"1d719c4c1d8e1111888d32e930dbaf83a532b4df2b774faa8a0f8cdc6050682a","src/shared/fallback.rs":"8e8af0f66991fd93891d0d9eb1379ed7ead68725100568211677529c9007162c","src/shared/util.rs":"3c191d8585837b2ef391c05df1c201c4beedef0161f0bf37e19b292feef7ef5f","src/try_from_bits.rs":"bda602a90dd6df33e308f8ba9433032cd409213649ab5a0d0297199f4d93b2dd"},"package":"feb11e002038ad243af39c2068c8a72bcf147acf05025dcdb916fcc000adb2d8"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/Cargo.toml b/rust/hw/char/pl011/vendor/bilge-impl/Cargo.toml
new file mode 100644
index 0000000000..4cf7c59505
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/Cargo.toml
@@ -0,0 +1,54 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "bilge-impl"
+version = "0.2.0"
+authors = ["Hecatia Elegua"]
+description = "Use bitsized types as if they were a feature of rust."
+documentation = "https://docs.rs/bilge"
+readme = "README.md"
+keywords = [
+ "bilge",
+ "bitfield",
+ "bits",
+ "register",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/hecatia-elegua/bilge"
+
+[lib]
+proc-macro = true
+
+[dependencies.itertools]
+version = "0.11.0"
+
+[dependencies.proc-macro-error]
+version = "1.0"
+default-features = false
+
+[dependencies.proc-macro2]
+version = "1.0"
+
+[dependencies.quote]
+version = "1.0"
+
+[dependencies.syn]
+version = "2.0"
+features = ["full"]
+
+[dev-dependencies.syn-path]
+version = "2.0"
+
+[features]
+default = []
+nightly = []
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/README.md b/rust/hw/char/pl011/vendor/bilge-impl/README.md
new file mode 100644
index 0000000000..48daad0fcb
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/README.md
@@ -0,0 +1,327 @@
+# bilge: the most readable bitfields
+
+[![crates.io](https://img.shields.io/crates/v/bilge.svg)](https://crates.io/crates/bilge)
+[![docs.rs](https://docs.rs/bilge/badge.svg)](https://docs.rs/bilge)
+[![loc](https://tokei.rs/b1/github/hecatia-elegua/bilge?category=code)](https://github.com/Aaronepower/tokei#badges)
+
+_Y e s_, this is yet another bitfield crate, but hear me out:
+
+This is a _**bit**_ better than what we had before.
+
+I wanted a design fitting rust:
+
+- **safe**
+  - types model as much of the functionality as possible and don't allow false usage
+- **fast**
+ - like handwritten bit fiddling code
+- **simple to complex**
+ - obvious and readable basic frontend, like normal structs
+ - only minimally and gradually introduce advanced concepts
+ - provide extension mechanisms
+
+The lib is **no-std** (and fully `const` behind a `"nightly"` feature gate).
+
+For some more explanations on the "why" and "how": [blog post](https://hecatia-elegua.github.io/blog/no-more-bit-fiddling/) and [reddit comments](https://www.reddit.com/r/rust/comments/13ic0mf/no_more_bit_fiddling_and_introducing_bilge/).
+
+## WARNING
+
+Our current version is still pre 1.0, which means nothing is completely stable.
+
+However, constructors, getters, setters and From/TryFrom should stay the same, since their semantics are very clear.
+
+[//]: # (keep this fixed to the version in .github/workflows/ci.yml, rust-toolchain.toml)
+
+The nightly feature is tested on `nightly-2022-11-03` and [will not work on the newest nightly until const_convert comes back](https://github.com/rust-lang/rust/issues/110395#issuecomment-1524775763).
+
+## Usage
+
+To make your life easier:
+
+```rust
+use bilge::prelude::*;
+```
+
+### Infallible (From)
+
+You can just specify bitsized fields like normal fields:
+
+```rust
+#[bitsize(14)]
+struct Register {
+ header: u4,
+ body: u7,
+ footer: Footer,
+}
+```
+
+The attribute `bitsize` generates the bitfield, while `14` works as a failsafe, emitting a compile error if your struct definition doesn't declare 14 bits.
+Let's define the nested struct `Footer` as well:
+
+```rust
+#[bitsize(3)]
+#[derive(FromBits)]
+struct Footer {
+ is_last: bool,
+ code: Code,
+}
+```
+
+As you can see, we added `#[derive(FromBits)]`, which is needed for `Register`'s getters and setters.
+Due to how rust macros work (outside-in), it needs to be below `#[bitsize]`.
+Also, `bool` can be used as one bit.
+
+`Code` is another nesting, this time an enum:
+
+```rust
+#[bitsize(2)]
+#[derive(FromBits)]
+enum Code { Success, Error, IoError, GoodExample }
+```
+
+Now we can construct `Register`:
+
+```rust
+let reg1 = Register::new(
+ u4::new(0b1010),
+ u7::new(0b010_1010),
+ Footer::new(true, Code::GoodExample)
+);
+```
+
+Or, if we add `#[derive(FromBits)]` to `Register` and want to parse a raw register value:
+
+```rust
+let mut reg2 = Register::from(u14::new(0b11_1_0101010_1010));
+```
+
+And getting and setting fields is done like this:
+
+```rust
+let header = reg2.header();
+reg2.set_footer(Footer::new(false, Code::Success));
+```
+
+Any kinds of tuple and array are also supported:
+
+```rust
+#[bitsize(32)]
+#[derive(FromBits)]
+struct InterruptSetEnables([bool; 32]);
+```
+
+Which produces the usual getter and setter, but also element accessors:
+
+```rust
+let mut ise = InterruptSetEnables::from(0b0000_0000_0000_0000_0000_0000_0001_0000);
+let ise5 = ise.val_0_at(4);
+ise.set_val_0_at(2, ise5);
+assert_eq!(0b0000_0000_0000_0000_0000_0000_0001_0100, ise.value);
+```
+
+Depending on what you're working with, only a subset of enum values might be clear, or some values might be reserved.
+In that case, you can use a fallback variant, defined like this:
+
+```rust
+#[bitsize(32)]
+#[derive(FromBits, Debug, PartialEq)]
+enum Subclass {
+ Mouse,
+ Keyboard,
+ Speakers,
+ #[fallback]
+ Reserved,
+}
+```
+
+which will convert any undeclared bits to `Reserved`:
+
+```rust
+assert_eq!(Subclass::Reserved, Subclass::from(3));
+assert_eq!(Subclass::Reserved, Subclass::from(42));
+let num = u32::from(Subclass::from(42));
+assert_eq!(3, num);
+assert_ne!(42, num);
+```
+
+or, if you need to keep the exact number saved, use:
+
+```rust
+#[fallback]
+Reserved(u32),
+```
+
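+For illustration, the full enum could look like this (only a sketch; the name `Subclass2` is assumed here purely to match the assertions below, it is not part of the vendored crate):
+
+```rust
+#[bitsize(32)]
+#[derive(FromBits, Debug, PartialEq)]
+enum Subclass2 {
+    Mouse,
+    Keyboard,
+    Speakers,
+    #[fallback]
+    Reserved(u32), // keeps the exact undeclared value instead of collapsing it
+}
+```
+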
+```rust
+assert_eq!(Subclass2::Reserved(3), Subclass2::from(3));
+assert_eq!(Subclass2::Reserved(42), Subclass2::from(42));
+let num = u32::from(Subclass2::from(42));
+assert_eq!(42, num);
+assert_ne!(3, num);
+```
+
+### Fallible (TryFrom)
+
+In contrast to structs, enums don't have to declare all of their bits:
+
+```rust
+#[bitsize(2)]
+#[derive(TryFromBits)]
+enum Class {
+ Mobile, Semimobile, /* 0x2 undefined */ Stationary = 0x3
+}
+```
+
+meaning this will work:
+
+```rust
+let class = Class::try_from(u2::new(2));
+assert!(class.is_err());
+```
+
+except we first need to `#[derive(Debug, PartialEq)]` on `Class`, since `assert_eq!` needs those.
+
+Let's do that, and use `Class` as a field:
+
+```rust
+#[bitsize(8)]
+#[derive(TryFromBits)]
+struct Device {
+ reserved: u2,
+ class: Class,
+ reserved: u4,
+}
+```
+
+This shows `TryFrom` being propagated upward. There's also another small help: `reserved` fields (which are often used in registers) can all have the same name.
+
+Again, let's try to print this:
+
+```rust
+println!("{:?}", Device::try_from(0b0000_11_00));
+println!("{:?}", Device::new(Class::Mobile));
+```
+
+And again, `Device` doesn't implement `Debug`:
+
+### DebugBits
+
+For structs, you need to add `#[derive(DebugBits)]` to get an output like this:
+
+```rust
+Ok(Device { reserved_i: 0, class: Stationary, reserved_ii: 0 })
+Device { reserved_i: 0, class: Mobile, reserved_ii: 0 }
+```
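+
+As a sketch, the derive is simply added below `#[bitsize]` on the struct shown earlier (reusing the `Device` and `Class` definitions from above):
+
+```rust
+#[bitsize(8)]
+#[derive(TryFromBits, DebugBits)]
+struct Device {
+    reserved: u2,
+    class: Class,
+    reserved: u4,
+}
+```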
+
+For testing + overview, the full readme example code is in `/examples/readme.rs`.
+
+### Custom -Bits derives
+
+One of the main advantages of our approach is that we can keep `#[bitsize]` pretty slim, offloading all the other features to derive macros.
+Besides the derive macros shown above, you can extend `bilge` with your own derive crates working on bitfields.
+An example of this is given in [`/tests/custom_derive.rs`](https://github.com/hecatia-elegua/bilge/blob/main/tests/custom_derive.rs), with its implementation in [`tests/custom_bits`](https://github.com/hecatia-elegua/bilge/blob/1dfb6cf7d278d102d3f96ac31a9374e2b27fafc7/tests/custom_bits/custom_bits_derive/src/lib.rs).
+
+## Back- and Forwards Compatibility
+
+The syntax is kept very similar to usual rust structs for a simple reason:
+
+The end goal of this library is to support the adoption of LLVM's arbitrary bitwidth integers into rust, thereby allowing rust-native bitfields.
+Until then, bilge is using the wonderful [`arbitrary-int` crate by danlehmann](https://github.com/danlehmann/arbitrary-int).
+
+After all attribute expansions, our generated bitfield contains a single field, somewhat like:
+
+```rust
+struct Register { value: u14 }
+```
+
+This means you _could_ modify the inner value directly, but it breaks type safety guarantees (e.g. unfilled or read-only fields).
+So if you need to modify the whole field, instead use the type-safe conversions `u14::from(register)` and `Register::from(u14)`.
+It is possible that this inner type will be made private.
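+
+As a short, illustrative sketch of that round trip (assuming the `Register` bitfield from the usage section above, with `FromBits` derived):
+
+```rust
+let reg = Register::from(u14::new(0b11_1_0101010_1010));
+let raw: u14 = u14::from(reg); // type-safe whole-field access instead of touching `.value`
+let reg_again = Register::from(raw);
+```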
+
+For some more examples and an overview of functionality, take a look at `/examples` and `/tests`.
+
+## Alternatives
+
+### benchmarks, performance, asm line count
+
+First of all, [basic
benchmarking](https://github.com/hecatia-elegua/bilge/blob/main/benches/compared/main.rs)
reveals that all alternatives mentioned here (besides deku) have about the
same performance and line count. This includes a handwritten version.
+
+### build-time
+
+Measuring build time of the crate itself (both with its dependencies and without) yields these numbers on my machine:
+|                       | debug | debug single crate | release   | release single crate |
+|-----------------------|-------|--------------------|-----------|----------------------|
+| bilge 1.67-nightly    | 8     | 1.8                | 6         | 0.8                  |
+| bitbybit 1.69         | 4.5   | 1.3                | 13.5 [^*] | 9.5 [^*]             |
+| modular-bitfield 1.69 | 8     | 2.2                | 7.2       | 1.6                  |
+
+[^*]: This is just a weird rustc regression or something in my setup, not representative.
+
+This was measured with `cargo clean && cargo build [--release] --quiet
--timings`.
+Of course, the actual codegen time on an example project needs to be measured,
too.
+
+
+### handwritten implementation
+
+The common handwritten implementation pattern for bitfields in rust looks
[somewhat like
benches/compared/handmade.rs](https://github.com/hecatia-elegua/bilge/blob/main/benches/compared/handmade.rs),
sometimes also throwing around a lot of consts for field offsets. The problems
with this approach are:
+- readability suffers
+- offset, cast or masking errors could go unnoticed
+- bit fiddling, shifting and masking are done all over the place, in contrast to bitfields
+- beginners suffer, and I would argue even seniors do, since it's more like: "Why do we need to learn and debug bit fiddling if we can get most of it done by using structs?"
+- reimplementing different kinds of _fallible nested-struct enum-tuple array
field access_ might not be so fun
+
+### modular-bitfield
+
+The often used and very inspiring
[`modular-bitfield`](https://github.com/robbepop/modular-bitfield) has a few
+problems:
+- it is unmaintained and has a quirky structure
+- constructors use the builder pattern
+ - makes user code unreadable if you have many fields
+ - can accidentally leave things uninitialized
+- `from_bytes` can easily take invalid arguments, which turns verification
inside-out:
+ - modular-bitfield flow: `u16` -> `PackedData::from_bytes([u16])` ->
`PackedData::status_or_err()?`
+ - needs to check for `Err` on every single access
+ - adds duplicate getters and setters with postfix `_or_err`
+ - reinvents `From<u16>`/`TryFrom<u16>` as a kind of hybrid
+ - bilge: usual type-system centric flow: `u16` ->
`PackedData::try_from(u16)?` -> `PackedData::status()`
+ - just works, needs to check nothing on access
+ - some more general info on this: [Parse, don't
validate](https://lexi-lambda.github.io/blog/2019/11/05/parse-don-t-validate/)
+- big god-macro
+ - powerful, but less readable to the devs of modular-bitfield
+ - needs to cover many derives in itself, like `impl Debug` (other bitfield
crates do this as well)
+ - bilge: solves this by providing a kind of scope for `-Bits`-derives
+
+and implementation differences:
+- underlying type is a byte array
+ - can be useful for bitfields larger than u128
+ - bilge: if your bitfields get larger than u128, you can most often
split them into multiple bitfields of a primitive size (like u64) and put those
in a parent struct which is not a bitfield
+
+Still, modular-bitfield is pretty good, and I set out to build something equal to or hopefully better than it.
+Tell me where I can do better, and I will try.
+
+### bitbybit
+
+One of the libs inspired by the same crate is
[`bitbybit`](https://github.com/danlehmann/bitfield), which is much more
readable and up-to-date. Actually, I even helped and am still helping on that
one as well. After experimenting and hacking around in their code though, I
realized it would need to be severely changed for the features and structure I
had in mind.
+
+implementation differences (as of 26.04.23):
+- it can do read/write-only, array strides and repeat the same bits for
multiple fields
+ - bilge: these will be added the moment someone needs it
+- redundant bit-offset specification, which can help or annoy, the same way
bilge's `reserved` fields can help or annoy
+
+### deku
+
+After looking at a ton of bitfield libs on crates.io, I _didn't_ find
[`deku`](https://github.com/sharksforarms/deku).
+I will still mention it here because it uses a very interesting crate
underneath (bitvec).
+Currently (as of 26.04.23), it generates far more assembly and takes longer to
run, since parts of the API are not `const`.
+I've opened an issue on their repo about that.
+
+### most others
+
+Besides that, many bitfield libs try to imitate or look like C bitfields, even
though these are hated by many.
+I argue most beginners would have the idea to specify bits with basic
primitives like u1, u2, ...
+This also opens up some possibilities for calculation and conversion on those
primitives.
+
+Something similar can be said about `bitflags`, which, under this model, can
be turned into simple structs with bools and enums.
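+
+For instance, a flags-style register could be sketched as a plain struct of bools (a hypothetical example; the struct and field names are made up for illustration):
+
+```rust
+#[bitsize(4)]
+#[derive(FromBits, DebugBits)]
+struct InterruptFlags {
+    rx_ready: bool,
+    tx_empty: bool,
+    overrun: bool,
+    parity_error: bool,
+}
+
+// bit 0 and bit 2 set
+let flags = InterruptFlags::from(u4::new(0b0101));
+assert!(flags.rx_ready());
+assert!(!flags.tx_empty());
+```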
+
+Basically, `bilge` tries to convert bit fiddling, shifting and masking into
more widely known concepts like struct access.
+
+About the name: a bilge is one of the "lowest" parts of a ship, nothing else
to it :)
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/meson.build
b/rust/hw/char/pl011/vendor/bilge-impl/meson.build
new file mode 100644
index 0000000000..11f3dd186f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/meson.build
@@ -0,0 +1,24 @@
+rust = import('rust')
+
+_bilge_impl_rs = rust.proc_macro(
+ 'bilge_impl',
+ files('src/lib.rs'),
+ rust_args: rust_args + [
+ '--edition', '2021',
+ '--cfg', 'use_fallback',
+ '--cfg', 'feature="syn-error"',
+ '--cfg', 'feature="proc-macro"',
+ ],
+ dependencies: [
+ dep_itertools,
+ dep_proc_macro_error_attr,
+ dep_proc_macro_error,
+ dep_quote,
+ dep_syn,
+ dep_proc_macro2,
+ ],
+)
+
+dep_bilge_impl = declare_dependency(
+ link_with: _bilge_impl_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize.rs
new file mode 100644
index 0000000000..66660c3e30
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize.rs
@@ -0,0 +1,187 @@
+mod split;
+
+use proc_macro2::{Ident, TokenStream};
+use proc_macro_error::{abort, abort_call_site};
+use quote::quote;
+use split::SplitAttributes;
+use syn::{punctuated::Iter, spanned::Spanned, Fields, Item, ItemEnum,
ItemStruct, Type, Variant};
+
+use crate::shared::{self, enum_fills_bitsize, is_fallback_attribute,
unreachable, BitSize, MAX_ENUM_BIT_SIZE};
+
+/// Intermediate Representation, just for bundling these together
+struct ItemIr {
+ /// generated item (and size check)
+ expanded: TokenStream,
+}
+
+pub(super) fn bitsize(args: TokenStream, item: TokenStream) -> TokenStream {
+ let (item, declared_bitsize) = parse(item, args);
+ let attrs = SplitAttributes::from_item(&item);
+ let ir = match item {
+ Item::Struct(mut item) => {
+ modify_special_field_names(&mut item.fields);
+ analyze_struct(&item.fields);
+ let expanded = generate_struct(&item, declared_bitsize);
+ ItemIr { expanded }
+ }
+ Item::Enum(item) => {
+ analyze_enum(declared_bitsize, item.variants.iter());
+ let expanded = generate_enum(&item);
+ ItemIr { expanded }
+ }
+ _ => unreachable(()),
+ };
+ generate_common(ir, attrs, declared_bitsize)
+}
+
+fn parse(item: TokenStream, args: TokenStream) -> (Item, BitSize) {
+ let item = syn::parse2(item).unwrap_or_else(unreachable);
+
+ if args.is_empty() {
+ abort_call_site!("missing attribute value"; help = "you need to define
the size like this: `#[bitsize(32)]`")
+ }
+
+ let (declared_bitsize, _arb_int) =
shared::bitsize_and_arbitrary_int_from(args);
+ (item, declared_bitsize)
+}
+
+fn check_type_is_supported(ty: &Type) {
+ use Type::*;
+ match ty {
+ Tuple(tuple) => tuple.elems.iter().for_each(check_type_is_supported),
+ Array(array) => check_type_is_supported(&array.elem),
+ // Probably okay (compilation would validate that this type is also
Bitsized)
+ Path(_) => (),
+ // These don't work with structs or aren't useful in bitfields.
+ BareFn(_) | Group(_) | ImplTrait(_) | Infer(_) | Macro(_) | Never(_) |
+ // We could provide some info on error as to why Ptr/Reference won't
work due to safety.
+ Ptr(_) | Reference(_) |
+ // The bitsize must be known at compile time.
+ Slice(_) |
+ // Something to investigate, but doesn't seem useful/usable here
either.
+ TraitObject(_) |
+ // I have no idea where this is used.
+ Verbatim(_) | Paren(_) => abort!(ty, "This field type is not
supported"),
+ _ => abort!(ty, "This field type is currently not supported"),
+ }
+}
+
+/// Allows you to give multiple fields the name `reserved` or `padding`
+/// by numbering them for you.
+fn modify_special_field_names(fields: &mut Fields) {
+ // We could have just counted up, i.e. `reserved_0`, but people might
interpret this as "reserved to zero".
+ // Using some other, more useful unique info as postfix would be nice.
+    // Also, it might be useful to generate no getters or setters for these fields and skip some calculation.
+ let mut reserved_count = 0;
+ let mut padding_count = 0;
+ let field_idents_mut = fields.iter_mut().filter_map(|field|
field.ident.as_mut());
+ for ident in field_idents_mut {
+ if ident == "reserved" || ident == "_reserved" {
+ reserved_count += 1;
+ let span = ident.span();
+ let name = format!("reserved_{}", "i".repeat(reserved_count));
+ *ident = Ident::new(&name, span)
+ } else if ident == "padding" || ident == "_padding" {
+ padding_count += 1;
+ let span = ident.span();
+ let name = format!("padding_{}", "i".repeat(padding_count));
+ *ident = Ident::new(&name, span)
+ }
+ }
+}
+
+fn analyze_struct(fields: &Fields) {
+ if fields.is_empty() {
+ abort_call_site!("structs without fields are not supported")
+ }
+
+ // don't move this. we validate all nested field types here as well
+ // and later assume this was checked.
+ for field in fields {
+ check_type_is_supported(&field.ty)
+ }
+}
+
+fn analyze_enum(bitsize: BitSize, variants: Iter<Variant>) {
+ if bitsize > MAX_ENUM_BIT_SIZE {
+ abort_call_site!("enum bitsize is limited to {}", MAX_ENUM_BIT_SIZE)
+ }
+
+ let variant_count = variants.clone().count();
+ if variant_count == 0 {
+ abort_call_site!("empty enums are not supported");
+ }
+
+ let has_fallback = variants.flat_map(|variant|
&variant.attrs).any(is_fallback_attribute);
+
+ if !has_fallback {
+ // this has a side-effect of validating the enum count
+ let _ = enum_fills_bitsize(bitsize, variant_count);
+ }
+}
+
+fn generate_struct(item: &ItemStruct, declared_bitsize: u8) -> TokenStream {
+ let ItemStruct { vis, ident, fields, .. } = item;
+ let declared_bitsize = declared_bitsize as usize;
+
+ let computed_bitsize = fields.iter().fold(quote!(0), |acc, next| {
+ let field_size = shared::generate_type_bitsize(&next.ty);
+ quote!(#acc + #field_size)
+ });
+
+ // we could remove this if the whole struct gets passed
+ let is_tuple_struct = fields.iter().any(|field| field.ident.is_none());
+ let fields_def = if is_tuple_struct {
+ let fields = fields.iter();
+ quote! {
+ ( #(#fields,)* );
+ }
+ } else {
+ let fields = fields.iter();
+ quote! {
+ { #(#fields,)* }
+ }
+ };
+
+ quote! {
+ #vis struct #ident #fields_def
+
+ // constness: when we get const blocks evaluated at compile time, add
a const computed_bitsize
+ const _: () = assert!(
+ (#computed_bitsize) == (#declared_bitsize),
+ concat!("struct size and declared bit size differ: ",
+ // stringify!(#computed_bitsize),
+ " != ",
+ stringify!(#declared_bitsize))
+ );
+ }
+}
+
+// attributes are handled in `generate_common`
+fn generate_enum(item: &ItemEnum) -> TokenStream {
+ let ItemEnum { vis, ident, variants, .. } = item;
+ quote! {
+ #vis enum #ident {
+ #variants
+ }
+ }
+}
+
+/// we have _one_ generate_common function, which holds everything that struct
and enum have _in common_.
+/// Everything else has its own generate_ functions.
+fn generate_common(ir: ItemIr, attrs: SplitAttributes, declared_bitsize: u8)
-> TokenStream {
+ let ItemIr { expanded } = ir;
+ let SplitAttributes {
+ before_compression,
+ after_compression,
+ } = attrs;
+
+ let bitsize_internal_attr = quote!
{#[::bilge::bitsize_internal(#declared_bitsize)]};
+
+ quote! {
+ #(#before_compression)*
+ #bitsize_internal_attr
+ #(#after_compression)*
+ #expanded
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize/split.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize/split.rs
new file mode 100644
index 0000000000..3848ba2c24
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize/split.rs
@@ -0,0 +1,185 @@
+use proc_macro_error::{abort, abort_call_site};
+use quote::ToTokens;
+use syn::{meta::ParseNestedMeta, parse_quote, Attribute, Item, Meta, Path};
+
+use crate::shared::{unreachable, util::PathExt};
+
+/// Since we want to be maximally interoperable, we need to handle attributes
in a special way.
+/// We use `#[bitsize]` as a sort of scope for all attributes below it and
+/// the whole family of `-Bits` macros only works when used in that scope.
+///
+/// Let's visualize why this is the case, starting with some user-code:
+/// ```ignore
+/// #[bitsize(6)]
+/// #[derive(Clone, Copy, PartialEq, DebugBits, FromBits)]
+/// struct Example {
+/// field1: u2,
+/// field2: u4,
+/// }
+/// ```
+/// First, the attributes get sorted, depending on their name.
+/// Every attribute in need of field information gets resolved first,
+/// in this case `DebugBits` and `FromBits`.
+///
+/// Now, after resolving all `before_compression` attributes, the
halfway-resolved
+/// code looks like this:
+/// ```ignore
+/// #[::bilge::bitsize_internal(6)]
+/// #[derive(Clone, Copy, PartialEq)]
+/// struct Example {
+/// field1: u2,
+/// field2: u4,
+/// }
+/// ```
+/// This `#[bitsize_internal]` attribute is the one actually doing the
compression and generating
+/// all the getters, setters and a constructor.
+///
+/// Finally, the struct ends up like this (excluding the generated impl
blocks):
+/// ```ignore
+/// struct Example {
+/// value: u6,
+/// }
+/// ```
+pub struct SplitAttributes {
+ pub before_compression: Vec<Attribute>,
+ pub after_compression: Vec<Attribute>,
+}
+
+impl SplitAttributes {
+ /// Split item attributes into those applied before bitfield-compression
and those applied after.
+ /// Also, abort on any invalid configuration.
+ ///
+ /// Any derives with suffix `Bits` will be able to access field
information.
+ /// This way, users of `bilge` can define their own derives working on the
uncompressed bitfield.
+ pub fn from_item(item: &Item) -> SplitAttributes {
+ let attrs = match item {
+ Item::Enum(item) => &item.attrs,
+ Item::Struct(item) => &item.attrs,
+ _ => abort_call_site!("item is not a struct or enum"; help =
"`#[bitsize]` can only be used on structs and enums"),
+ };
+
+ let parsed = attrs.iter().map(parse_attribute);
+
+ let is_struct = matches!(item, Item::Struct(..));
+
+ let mut from_bytes = None;
+ let mut has_frombits = false;
+
+ let mut before_compression = vec![];
+ let mut after_compression = vec![];
+
+ for parsed_attr in parsed {
+ match parsed_attr {
+ ParsedAttribute::DeriveList(derives) => {
+ for mut derive in derives {
+ if derive.matches(&["zerocopy", "FromBytes"]) {
+ from_bytes = Some(derive.clone());
+ } else if derive.matches(&["bilge", "FromBits"]) {
+ has_frombits = true;
+ } else if derive.matches_core_or_std(&["fmt",
"Debug"]) && is_struct {
+ abort!(derive.0, "use derive(DebugBits) for
structs")
+ } else if derive.matches_core_or_std(&["default",
"Default"]) && is_struct {
+ // emit_warning!(derive.0, "use
derive(DefaultBits) for structs")
+ derive.0 = syn::parse_quote!(::bilge::DefaultBits);
+ }
+
+ if derive.is_custom_bitfield_derive() {
+ before_compression.push(derive.into_attribute());
+ } else {
+ // It is most probable that basic derive macros
work if we put them on after compression
+ after_compression.push(derive.into_attribute());
+ }
+ }
+ }
+
+ ParsedAttribute::BitsizeInternal(attr) => {
+ abort!(attr, "remove bitsize_internal"; help = "attribute
bitsize_internal can only be applied internally by the bitsize macros")
+ }
+
+ ParsedAttribute::Other(attr) => {
+ // I don't know with which attrs I can hit Path and
NameValue,
+ // so let's just put them on after compression.
+ after_compression.push(attr.to_owned())
+ }
+ };
+ }
+
+ if let Some(from_bytes) = from_bytes {
+ if !has_frombits {
+ abort!(from_bytes.0, "a bitfield with zerocopy::FromBytes also
needs to have FromBits")
+ }
+ }
+
+ // currently, enums don't need special handling - so just put all
attributes before compression
+ if !is_struct {
+ before_compression.append(&mut after_compression)
+ }
+
+ SplitAttributes {
+ before_compression,
+ after_compression,
+ }
+ }
+}
+
+fn parse_attribute(attribute: &Attribute) -> ParsedAttribute {
+ match &attribute.meta {
+ Meta::List(list) if list.path.is_ident("derive") => {
+ let mut derives = Vec::new();
+ let add_derive = |meta: ParseNestedMeta| {
+ let derive = Derive(meta.path);
+ derives.push(derive);
+
+ Ok(())
+ };
+
+ list.parse_nested_meta(add_derive)
+ .unwrap_or_else(|e| abort!(list.tokens, "failed to parse
derive: {}", e));
+
+ ParsedAttribute::DeriveList(derives)
+ }
+
+ meta if contains_anywhere(meta, "bitsize_internal") =>
ParsedAttribute::BitsizeInternal(attribute),
+
+ _ => ParsedAttribute::Other(attribute),
+ }
+}
+
+/// a crude approximation of things we currently consider in item attributes
+enum ParsedAttribute<'attr> {
+ DeriveList(Vec<Derive>),
+ BitsizeInternal(&'attr Attribute),
+ Other(&'attr Attribute),
+}
+
+/// the path of a single derive attribute, parsed from a list which may have
contained several
+#[derive(Clone)]
+struct Derive(Path);
+
+impl Derive {
+ /// a new `#[derive]` attribute containing only this derive
+ fn into_attribute(self) -> Attribute {
+ let path = self.0;
+ parse_quote! { #[derive(#path)] }
+ }
+
+ /// by `bilge` convention, any derive satisfying this condition is able
+ /// to access bitfield structure information pre-compression,
+ /// allowing for user derives
+ fn is_custom_bitfield_derive(&self) -> bool {
+ let last_segment = self.0.segments.last().unwrap_or_else(||
unreachable(()));
+
+ last_segment.ident.to_string().ends_with("Bits")
+ }
+}
+
+impl PathExt for Derive {
+ fn matches(&self, str_segments: &[&str]) -> bool {
+ self.0.matches(str_segments)
+ }
+}
+
+/// slightly hacky. attempts to recognize cases where an ident is
deeply-nested in the meta.
+fn contains_anywhere(meta: &Meta, ident: &str) -> bool {
+ meta.to_token_stream().to_string().contains(ident)
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal.rs
new file mode 100644
index 0000000000..ad10350372
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal.rs
@@ -0,0 +1,235 @@
+use proc_macro2::{Ident, TokenStream};
+use quote::quote;
+use syn::{Attribute, Field, Item, ItemEnum, ItemStruct, Type};
+
+use crate::shared::{self, unreachable};
+
+pub(crate) mod struct_gen;
+
+/// Intermediate Representation, just for bundling these together
+struct ItemIr<'a> {
+ attrs: &'a Vec<Attribute>,
+ name: &'a Ident,
+ /// generated item (and setters, getters, constructor, impl Bitsized)
+ expanded: TokenStream,
+}
+
+pub(super) fn bitsize_internal(args: TokenStream, item: TokenStream) ->
TokenStream {
+ let (item, arb_int) = parse(item, args);
+ let ir = match item {
+ Item::Struct(ref item) => {
+ let expanded = generate_struct(item, &arb_int);
+ let attrs = &item.attrs;
+ let name = &item.ident;
+ ItemIr { attrs, name, expanded }
+ }
+ Item::Enum(ref item) => {
+ let expanded = generate_enum(item);
+ let attrs = &item.attrs;
+ let name = &item.ident;
+ ItemIr { attrs, name, expanded }
+ }
+ _ => unreachable(()),
+ };
+ generate_common(ir, &arb_int)
+}
+
+fn parse(item: TokenStream, args: TokenStream) -> (Item, TokenStream) {
+ let item = syn::parse2(item).unwrap_or_else(unreachable);
+ let (_declared_bitsize, arb_int) =
shared::bitsize_and_arbitrary_int_from(args);
+ (item, arb_int)
+}
+
+fn generate_struct(struct_data: &ItemStruct, arb_int: &TokenStream) ->
TokenStream {
+ let ItemStruct { vis, ident, fields, .. } = struct_data;
+
+ let mut fieldless_next_int = 0;
+ let mut previous_field_sizes = vec![];
+ let (accessors, (constructor_args, constructor_parts)): (Vec<TokenStream>,
(Vec<TokenStream>, Vec<TokenStream>)) = fields
+ .iter()
+ .map(|field| {
+ // offset is needed for bit-shifting
+ // struct Example { field1: u8, field2: u4, field3: u4 }
+ // previous_field_sizes = [] -> unwrap_or_else -> field_offset
= 0
+ // previous_field_sizes = [8] -> reduce -> field_offset
= 0 + 8 = 8
+ // previous_field_sizes = [8, 4] -> reduce -> field_offset
= 0 + 8 + 4 = 12
+ let field_offset = previous_field_sizes
+ .iter()
+ .cloned()
+ .reduce(|acc, next| quote!(#acc + #next))
+ .unwrap_or_else(|| quote!(0));
+ let field_size = shared::generate_type_bitsize(&field.ty);
+ previous_field_sizes.push(field_size);
+ generate_field(field, &field_offset, &mut fieldless_next_int)
+ })
+ .unzip();
+
+ let const_ = if cfg!(feature = "nightly") { quote!(const) } else {
quote!() };
+
+ quote! {
+ #vis struct #ident {
+ /// WARNING: modifying this value directly can break invariants
+ value: #arb_int,
+ }
+ impl #ident {
+ // #[inline]
+ #[allow(clippy::too_many_arguments, clippy::type_complexity,
unused_parens)]
+ pub #const_ fn new(#( #constructor_args )*) -> Self {
+ type ArbIntOf<T> = <T as Bitsized>::ArbitraryInt;
+ type BaseIntOf<T> = <ArbIntOf<T> as Number>::UnderlyingType;
+
+ let mut offset = 0;
+ let raw_value = #( #constructor_parts )|*;
+ let value = #arb_int::new(raw_value);
+ Self { value }
+ }
+ #( #accessors )*
+ }
+ }
+}
+
+fn generate_field(field: &Field, field_offset: &TokenStream,
fieldless_next_int: &mut usize) -> (TokenStream, (TokenStream, TokenStream)) {
+ let Field { ident, ty, .. } = field;
+ let name = if let Some(ident) = ident {
+ ident.clone()
+ } else {
+ let name = format!("val_{fieldless_next_int}");
+ *fieldless_next_int += 1;
+ syn::parse_str(&name).unwrap_or_else(unreachable)
+ };
+
+ // skip reserved fields in constructors and setters
+ let name_str = name.to_string();
+ if name_str.contains("reserved_") || name_str.contains("padding_") {
+ // needed for `DebugBits`
+ let getter = generate_getter(field, field_offset, &name);
+ let size = shared::generate_type_bitsize(ty);
+ let accessors = quote!(#getter);
+ let constructor_arg = quote!();
+ let constructor_part = quote! { {
+ // we still need to shift by the element's size
+ offset += #size;
+ 0
+ } };
+ return (accessors, (constructor_arg, constructor_part));
+ }
+
+ let getter = generate_getter(field, field_offset, &name);
+ let setter = generate_setter(field, field_offset, &name);
+ let (constructor_arg, constructor_part) = generate_constructor_stuff(ty,
&name);
+
+ let accessors = quote! {
+ #getter
+ #setter
+ };
+
+ (accessors, (constructor_arg, constructor_part))
+}
+
+fn generate_getter(field: &Field, offset: &TokenStream, name: &Ident) ->
TokenStream {
+ let Field { attrs, vis, ty, .. } = field;
+
+ let getter_value = struct_gen::generate_getter_value(ty, offset, false);
+
+ let const_ = if cfg!(feature = "nightly") { quote!(const) } else {
quote!() };
+
+ let array_at = if let Type::Array(array) = ty {
+ let elem_ty = &array.elem;
+ let len_expr = &array.len;
+ let name: Ident =
syn::parse_str(&format!("{name}_at")).unwrap_or_else(unreachable);
+ let getter_value = struct_gen::generate_getter_value(elem_ty, offset,
true);
+ quote! {
+ // #[inline]
+ #(#attrs)*
+ #[allow(clippy::type_complexity, unused_parens)]
+ #vis #const_ fn #name(&self, index: usize) -> #elem_ty {
+ ::core::assert!(index < #len_expr);
+ #getter_value
+ }
+ }
+ } else {
+ quote!()
+ };
+
+ quote! {
+ // #[inline]
+ #(#attrs)*
+ #[allow(clippy::type_complexity, unused_parens)]
+ #vis #const_ fn #name(&self) -> #ty {
+ #getter_value
+ }
+
+ #array_at
+ }
+}
+
+fn generate_setter(field: &Field, offset: &TokenStream, name: &Ident) ->
TokenStream {
+ let Field { attrs, vis, ty, .. } = field;
+ let setter_value = struct_gen::generate_setter_value(ty, offset, false);
+
+ let name: Ident =
syn::parse_str(&format!("set_{name}")).unwrap_or_else(unreachable);
+
+ let const_ = if cfg!(feature = "nightly") { quote!(const) } else {
quote!() };
+
+ let array_at = if let Type::Array(array) = ty {
+ let elem_ty = &array.elem;
+ let len_expr = &array.len;
+ let name: Ident =
syn::parse_str(&format!("{name}_at")).unwrap_or_else(unreachable);
+ let setter_value = struct_gen::generate_setter_value(elem_ty, offset,
true);
+ quote! {
+ // #[inline]
+ #(#attrs)*
+ #[allow(clippy::type_complexity, unused_parens)]
+ #vis #const_ fn #name(&mut self, index: usize, value: #elem_ty) {
+ ::core::assert!(index < #len_expr);
+ #setter_value
+ }
+ }
+ } else {
+ quote!()
+ };
+
+ quote! {
+ // #[inline]
+ #(#attrs)*
+ #[allow(clippy::type_complexity, unused_parens)]
+ #vis #const_ fn #name(&mut self, value: #ty) {
+ #setter_value
+ }
+
+ #array_at
+ }
+}
+
+fn generate_constructor_stuff(ty: &Type, name: &Ident) -> (TokenStream,
TokenStream) {
+ let constructor_arg = quote! {
+ #name: #ty,
+ };
+ let constructor_part = struct_gen::generate_constructor_part(ty, name);
+ (constructor_arg, constructor_part)
+}
+
+fn generate_enum(enum_data: &ItemEnum) -> TokenStream {
+ let ItemEnum { vis, ident, variants, .. } = enum_data;
+ quote! {
+ #vis enum #ident {
+ #variants
+ }
+ }
+}
+
+/// We have _one_ `generate_common` function, which holds everything struct
and enum have _in common_.
+/// Everything else has its own `generate_` functions.
+fn generate_common(ir: ItemIr, arb_int: &TokenStream) -> TokenStream {
+ let ItemIr { attrs, name, expanded } = ir;
+
+ quote! {
+ #(#attrs)*
+ #expanded
+ impl ::bilge::Bitsized for #name {
+ type ArbitraryInt = #arb_int;
+ const BITS: usize = <Self::ArbitraryInt as Bitsized>::BITS;
+ const MAX: Self::ArbitraryInt = <Self::ArbitraryInt as
Bitsized>::MAX;
+ }
+ }
+}
diff --git
a/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal/struct_gen.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal/struct_gen.rs
new file mode 100644
index 0000000000..74cd65fec1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/bitsize_internal/struct_gen.rs
@@ -0,0 +1,402 @@
+//! We're keeping most of the generating together, to ease reading here and in
`cargo_expand`.
+//! For this reason, we also use more locals and types.
+//! These locals, types, casts should be optimized away.
+//! In simple cases they indeed are optimized away, but if some case is not,
please report.
+//!
+//! ## Important
+//!
+//! We often do things like:
+//! ```ignore
+//! quote! {
+//! #value_shifted
+//! value_shifted
+//! }
+//! ```
+//! By convention, `#value_shifted` has its name because we define a `let
value_shifted` inside that `TokenStream`.
+//! So the above code means we're returning the value of `let value_shifted`.
+//! Earlier on, we would have done something like this:
+//! ```ignore
+//! quote! {
+//! let value_shifted = { #value_shifted };
+//! value_shifted
+//! }
+//! ```
+//! which aids in reading this here macro code, but doesn't help reading the
generated code since it introduces
+//! lots of new scopes (curly brackets). We need the scope since
`#value_shifted` expands to multiple lines.
+use super::*;
+
+/// Top-level function which initializes the cursor and offsets it to what we
want to read
+///
+/// `is_array_elem_getter` allows us to generate an array_at getter more easily
+pub(crate) fn generate_getter_value(ty: &Type, offset: &TokenStream,
is_array_elem_getter: bool) -> TokenStream {
+ // if we generate `fn array_at(index)`, we need to offset to the array
element
+ let elem_offset = if is_array_elem_getter {
+ let size = shared::generate_type_bitsize(ty);
+ quote! {
+ let size = #size;
+ // cursor now starts at this element
+ cursor >>= size * index;
+ }
+ } else {
+ quote!()
+ };
+
+ let inner = generate_getter_inner(ty, true);
+ quote! {
+ // for ease of reading
+ type ArbIntOf<T> = <T as Bitsized>::ArbitraryInt;
+ type BaseIntOf<T> = <ArbIntOf<T> as Number>::UnderlyingType;
+ // cursor is the value we read from and starts at the struct's first
field
+ let mut cursor = self.value.value();
+ // this field's offset
+ let field_offset = #offset;
+ // cursor now starts at this field
+ cursor >>= field_offset;
+ #elem_offset
+
+ #inner
+ }
+}
+
+/// We heavily rely on the fact that transmuting into a nested array [[T; N1];
N2] can
+/// be done in the same way as transmuting into an array [T; N1*N2].
+/// Otherwise, nested arrays would generate even more code.
+///
+/// `is_getter` allows us to generate a try_from impl more easily
+pub(crate) fn generate_getter_inner(ty: &Type, is_getter: bool) -> TokenStream
{
+ use Type::*;
+ match ty {
+ Tuple(tuple) => {
+ let unbraced = tuple
+ .elems
+ .iter()
+ .map(|elem| {
+ // for every tuple element, generate its getter code
+ let getter = generate_getter_inner(elem, is_getter);
+ // and add a scope around it
+ quote! { {#getter} }
+ })
+ .reduce(|acc, next| {
+ // join all getter codes with:
+ if is_getter {
+ // comma, to later produce (val_1, val_2, ...)
+ quote!(#acc, #next)
+ } else {
+ // bool-and, since for try_from we just generate bools
+ quote!(#acc && #next)
+ }
+ })
+ // `field: (),` will be handled like this:
+ .unwrap_or_else(|| quote!());
+ // add tuple braces, to produce (val_1, val_2, ...)
+ quote! { (#unbraced) }
+ }
+ Array(array) => {
+ // [[T; N1]; N2] -> (N1*N2, T)
+ let (len_expr, elem_ty) = length_and_type_of_nested_array(array);
+ // generate the getter code for one array element
+ let array_elem = generate_getter_inner(&elem_ty, is_getter);
+ // either generate an array or only check each value
+ if is_getter {
+ quote! {
+ // constness: iter, array::from_fn, for-loop, range are
not const, so we're using while loops
+ // Modified version of the array init example in
[`MaybeUninit`]:
+ let array = {
+ // [T; N1*N2]
+ let mut array: [::core::mem::MaybeUninit<#elem_ty>;
#len_expr] = unsafe {
+ ::core::mem::MaybeUninit::uninit().assume_init()
+ };
+ let mut i = 0;
+ while i < #len_expr {
+ // for every element, get its value
+ let elem_value = {
+ #array_elem
+ };
+ // and write it to the output array
+ array[i].write(elem_value);
+ i += 1;
+ }
+ // [T; N1*N2] -> [[T; N1]; N2]
+ unsafe { ::core::mem::transmute(array) }
+ };
+ array
+ }
+ } else {
+ quote! { {
+ let mut is_filled = true;
+ let mut i = 0;
+ // TODO: this could be simplified for always-filled values
+ while i < #len_expr {
+ // for every element, get its filled check
+ let elem_filled = {
+ #array_elem
+ };
+ // and join it with the others
+ is_filled = is_filled && elem_filled;
+ i += 1;
+ }
+ is_filled
+ } }
+ }
+ }
+ Path(_) => {
+ // get the size, so we can shift to the next element's offset
+ let size = shared::generate_type_bitsize(ty);
+ // get the mask, so we can get this element's value
+ let mask = generate_ty_mask(ty);
+
+ // do all steps until conversion
+ let elem_value = quote! {
+ // the element's mask
+ let mask = #mask;
+ // the cursor starts at this element's offset, now get its
value
+ let raw_value = cursor & mask;
+ // after getting the value, we can shift by the element's size
+ // TODO: we could move this into tuple/array (and try_from,
below)
+ let size = #size;
+ cursor = cursor.wrapping_shr(size as u32);
+ // cast the element value (e.g. u32 -> u8),
+ let raw_value: BaseIntOf<#ty> = raw_value as BaseIntOf<#ty>;
+ // which allows it to be used here (e.g. u4::new(u8))
+ let elem_value = <#ty as
Bitsized>::ArbitraryInt::new(raw_value);
+ };
+
+ if is_getter {
+ // generate the real value from the arbint `elem_value`
+ quote! {
+ #elem_value
+ match #ty::try_from(elem_value) {
+ Ok(v) => v,
+ Err(_) => panic!("unreachable"),
+ }
+ }
+ } else {
+ // generate only the filled check
+ if shared::is_always_filled(ty) {
+ // skip the obviously filled values
+ quote! {
+ // we still need to shift by the element's size
+ let size = #size;
+ cursor = cursor.wrapping_shr(size as u32);
+ true
+ }
+ } else {
+ // handle structs, enums - everything which can be unfilled
+ quote! { {
+ #elem_value
+ // so, has try_from impl
+ // note this is available even if the type is `From`
+ #ty::try_from(elem_value).is_ok()
+ } }
+ }
+ }
+ }
+ _ => unreachable(()),
+ }
+}
+
+/// Top-level function which initializes the offset, masks other values and
combines the final value
+///
+/// `is_array_elem_setter` allows us to generate a set_array_at setter more
easily
+pub(crate) fn generate_setter_value(ty: &Type, offset: &TokenStream,
is_array_elem_setter: bool) -> TokenStream {
+ // if we generate `fn set_array_at(index, value)`, we need to offset to
the array element
+ let elem_offset = if is_array_elem_setter {
+ let size = shared::generate_type_bitsize(ty);
+ quote! {
+ let size = #size;
+ // offset now starts at this element
+ offset += size * index;
+ }
+ } else {
+ quote!()
+ };
+
+ let value_shifted = generate_setter_inner(ty);
+ // get the mask, so we can set this field's value
+ let mask = generate_ty_mask(ty);
+ quote! {
+ type ArbIntOf<T> = <T as Bitsized>::ArbitraryInt;
+ type BaseIntOf<T> = <ArbIntOf<T> as Number>::UnderlyingType;
+
+ // offset now starts at this field
+ let mut offset = #offset;
+ #elem_offset
+
+ let field_mask = #mask;
+ // shift the mask into place
+ let field_mask: BaseIntOf<Self> = field_mask << offset;
+ // all other fields as a mask
+ let others_mask: BaseIntOf<Self> = !field_mask;
+ // the current struct value
+ let struct_value: BaseIntOf<Self> = self.value.value();
+ // mask off the field getting set
+ let others_values: BaseIntOf<Self> = struct_value & others_mask;
+
+ // get the new field value, shifted into place
+ #value_shifted
+
+ // join the values using bit-or
+ let new_struct_value = others_values | value_shifted;
+ self.value = <ArbIntOf<Self>>::new(new_struct_value);
+ }
+}
+
+/// We heavily rely on the fact that transmuting into a nested array [[T; N1];
N2] can
+/// be done in the same way as transmuting into an array [T; N1*N2].
+/// Otherwise, nested arrays would generate even more code.
+fn generate_setter_inner(ty: &Type) -> TokenStream {
+ use Type::*;
+ match ty {
+ Tuple(tuple) => {
+ // to index into the tuple value
+ let mut tuple_index = syn::Index::from(0);
+ let value_shifted = tuple
+ .elems
+ .iter()
+ .map(|elem| {
+ let elem_name = quote!(value.#tuple_index);
+ tuple_index.index += 1;
+ // for every tuple element, generate its setter code
+ let value_shifted = generate_setter_inner(elem);
+ // set the value and add a scope around it
+ quote! { {
+ let value = #elem_name;
+ #value_shifted
+ value_shifted
+ } }
+ })
+ // join all setter codes with bit-or
+ .reduce(|acc, next| quote!(#acc | #next))
+ // `field: (),` will be handled like this:
+ .unwrap_or_else(|| quote!(0));
+ quote! {
+ let value_shifted = #value_shifted;
+ }
+ }
+ Array(array) => {
+ // [[T; N1]; N2] -> (N1*N2, T)
+ let (len_expr, elem_ty) = length_and_type_of_nested_array(array);
+ // generate the setter code for one array element
+ let value_shifted = generate_setter_inner(&elem_ty);
+ quote! {
+ // [[T; N1]; N2] -> [T; N1*N2], for example: [[(u2, u2); 3];
4] -> [(u2, u2); 12]
+ #[allow(clippy::useless_transmute)]
+ let value: [#elem_ty; #len_expr] = unsafe {
::core::mem::transmute(value) };
+ // constness: iter, for-loop, range are not const, so we're
using while loops
+ // [u4; 8] -> u32
+ let mut acc = 0;
+ let mut i = 0;
+ while i < #len_expr {
+ let value = value[i];
+ // for every element, shift its value into its place
+ #value_shifted
+ // and bit-or them together
+ acc |= value_shifted;
+ i += 1;
+ }
+ let value_shifted = acc;
+ }
+ }
+ Path(_) => {
+ // get the size, so we can reach the next element afterwards
+ let size = shared::generate_type_bitsize(ty);
+ quote! {
+                // the element's value as its underlying type
+ let value: BaseIntOf<#ty> =
<ArbIntOf<#ty>>::from(value).value();
+ // cast the element value (e.g. u8 -> u32),
+ // which allows it to be combined with the struct's value later
+ let value: BaseIntOf<Self> = value as BaseIntOf<Self>;
+ let value_shifted = value << offset;
+ // increase the offset to allow the next element to be read
+ offset += #size;
+ }
+ }
+ _ => unreachable(()),
+ }
+}
+
+/// The constructor code just needs every field setter.
+///
+/// [`super::generate_struct`] contains the initialization of `offset`.
+pub(crate) fn generate_constructor_part(ty: &Type, name: &Ident) ->
TokenStream {
+ let value_shifted = generate_setter_inner(ty);
+ // setters look like this: `fn set_field1(&mut self, value: u3)`
+ // constructors like this: `fn new(field1: u3, field2: u4) -> Self`
+ // so we need to rename `field1` -> `value` and put this in a scope
+ quote! { {
+ let value = #name;
+ #value_shifted
+ value_shifted
+ } }
+}
+
+/// We mostly need this in [`generate_setter_value`], to mask the whole field.
+/// It basically combines a bunch of `Bitsized::MAX` values into a mask.
+fn generate_ty_mask(ty: &Type) -> TokenStream {
+ use Type::*;
+ match ty {
+ Tuple(tuple) => {
+ let mut previous_elem_sizes = vec![];
+ tuple
+ .elems
+ .iter()
+ .map(|elem| {
+ // for every element, generate a mask
+ let mask = generate_ty_mask(elem);
+                    // get its size
+ let elem_size = shared::generate_type_bitsize(elem);
+                    // generate its offset from all previous sizes
+ let elem_offset =
previous_elem_sizes.iter().cloned().reduce(|acc, next| quote!((#acc + #next)));
+ previous_elem_sizes.push(elem_size);
+ // the first field doesn't need to be shifted
+ if let Some(elem_offset) = elem_offset {
+ quote!(#mask << #elem_offset)
+ } else {
+ quote!(#mask)
+ }
+ })
+ // join all shifted masks with bit-or
+ .reduce(|acc, next| quote!(#acc | #next))
+ // `field: (),` will be handled like this:
+ .unwrap_or_else(|| quote!(0))
+ }
+ Array(array) => {
+ let elem_ty = &array.elem;
+ let len_expr = &array.len;
+ // generate the mask for one array element
+ let mask = generate_ty_mask(elem_ty);
+ // and the size
+ let ty_size = shared::generate_type_bitsize(elem_ty);
+ quote! { {
+ let mask = #mask;
+ let mut field_mask = 0;
+ let mut i = 0;
+ while i < #len_expr {
+ // for every element, shift its mask into its place
+ // and bit-or them together
+ field_mask |= mask << (i * #ty_size);
+ i += 1;
+ }
+ field_mask
+ } }
+ }
+ Path(_) => quote! {
+ // Casting this is needed in some places, but it might not be
needed in some others.
+ // (u2, u12) -> u8 << 0 | u16 << 2 -> u8 | u16 not possible
+ (<#ty as Bitsized>::MAX.value() as BaseIntOf<Self>)
+ },
+ _ => unreachable(()),
+ }
+}
+
+/// We compute nested length here, to fold [[T; N]; M] to [T; N * M].
+fn length_and_type_of_nested_array(array: &syn::TypeArray) -> (TokenStream,
Type) {
+ let elem_ty = &array.elem;
+ let len_expr = &array.len;
+ if let Type::Array(array) = &**elem_ty {
+ let (child_len, child_ty) = length_and_type_of_nested_array(array);
+ (quote!((#len_expr) * (#child_len)), child_ty)
+ } else {
+ (quote!(#len_expr), *elem_ty.clone())
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/debug_bits.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/debug_bits.rs
new file mode 100644
index 0000000000..95ba9c73c1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/debug_bits.rs
@@ -0,0 +1,55 @@
+use proc_macro2::{Ident, TokenStream};
+use proc_macro_error::abort_call_site;
+use quote::quote;
+use syn::{Data, Fields};
+
+use crate::shared::{self, unreachable};
+
+pub(super) fn debug_bits(item: TokenStream) -> TokenStream {
+ let derive_input = shared::parse_derive(item);
+ let name = &derive_input.ident;
+ let name_str = name.to_string();
+ let mut fieldless_next_int = 0;
+ let struct_data = match derive_input.data {
+ Data::Struct(s) => s,
+ Data::Enum(_) => abort_call_site!("use derive(Debug) for enums"),
+ Data::Union(_) => unreachable(()),
+ };
+
+ let fmt_impl = match struct_data.fields {
+ Fields::Named(fields) => {
+ let calls = fields.named.iter().map(|f| {
+ // We can unwrap since this is a named field
+ let call = f.ident.as_ref().unwrap();
+ let name = call.to_string();
+ quote!(.field(#name, &self.#call()))
+ });
+ quote! {
+ f.debug_struct(#name_str)
+ // .field("field1", &self.field1()).field("field2",
&self.field2()).field("field3", &self.field3()).finish()
+ #(#calls)*.finish()
+ }
+ }
+ Fields::Unnamed(fields) => {
+ let calls = fields.unnamed.iter().map(|_| {
+ let call: Ident = syn::parse_str(&format!("val_{}",
fieldless_next_int)).unwrap_or_else(unreachable);
+ fieldless_next_int += 1;
+ quote!(.field(&self.#call()))
+ });
+ quote! {
+ f.debug_tuple(#name_str)
+ // .field(&self.val0()).field(&self.val1()).finish()
+ #(#calls)*.finish()
+ }
+ }
+ Fields::Unit => todo!("this is a unit struct, which is not supported
right now"),
+ };
+
+ quote! {
+ impl ::core::fmt::Debug for #name {
+ fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) ->
::core::fmt::Result {
+ #fmt_impl
+ }
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/default_bits.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/default_bits.rs
new file mode 100644
index 0000000000..f664accf36
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/default_bits.rs
@@ -0,0 +1,92 @@
+use proc_macro2::{Ident, TokenStream};
+use proc_macro_error::abort_call_site;
+use quote::quote;
+use syn::{Data, DeriveInput, Fields, Type};
+
+use crate::shared::{self, fallback::Fallback, unreachable, BitSize};
+
+pub(crate) fn default_bits(item: TokenStream) -> TokenStream {
+ let derive_input = parse(item);
+ //TODO: does fallback need handling?
+ let (derive_data, _, name, ..) = analyze(&derive_input);
+
+ match derive_data {
+ Data::Struct(data) => generate_struct_default_impl(name, &data.fields),
+ Data::Enum(_) => abort_call_site!("use derive(Default) for enums"),
+ _ => unreachable(()),
+ }
+}
+
+fn generate_struct_default_impl(struct_name: &Ident, fields: &Fields) ->
TokenStream {
+ let default_value = fields
+ .iter()
+ .map(|field| generate_default_inner(&field.ty))
+ .reduce(|acc, next| quote!(#acc | #next));
+
+ quote! {
+ impl ::core::default::Default for #struct_name {
+ fn default() -> Self {
+ let mut offset = 0;
+ let value = #default_value;
+ let value = <#struct_name as
Bitsized>::ArbitraryInt::new(value);
+ Self { value }
+ }
+ }
+ }
+}
+
+fn generate_default_inner(ty: &Type) -> TokenStream {
+ use Type::*;
+ match ty {
+ // TODO?: we could optimize nested arrays here like in `struct_gen.rs`
+ // NOTE: in std, Default is only derived for arrays with up to 32
elements, but we allow more
+ Array(array) => {
+ let len_expr = &array.len;
+ let elem_ty = &*array.elem;
+ // generate the default value code for one array element
+ let value_shifted = generate_default_inner(elem_ty);
+ quote! {{
+ // constness: iter, array::from_fn, for-loop, range are not
const, so we're using while loops
+ let mut acc = 0;
+ let mut i = 0;
+ while i < #len_expr {
+ // for every element, shift its value into its place
+ let value_shifted = #value_shifted;
+ // and bit-or them together
+ acc |= value_shifted;
+ i += 1;
+ }
+ acc
+ }}
+ }
+ Path(path) => {
+ let field_size = shared::generate_type_bitsize(ty);
+ // u2::from(HaveFun::default()).value() as u32;
+ quote! {{
+ let as_int = <#path as Bitsized>::ArbitraryInt::from(<#path as
::core::default::Default>::default()).value();
+ let as_base_int = as_int as <<Self as Bitsized>::ArbitraryInt
as Number>::UnderlyingType;
+ let shifted = as_base_int << offset;
+ offset += #field_size;
+ shifted
+ }}
+ }
+ Tuple(tuple) => {
+ tuple
+ .elems
+ .iter()
+ .map(generate_default_inner)
+ .reduce(|acc, next| quote!(#acc | #next))
+ // `field: (),` will be handled like this:
+ .unwrap_or_else(|| quote!(0))
+ }
+ _ => unreachable(()),
+ }
+}
+
+fn parse(item: TokenStream) -> DeriveInput {
+ shared::parse_derive(item)
+}
+
+fn analyze(derive_input: &DeriveInput) -> (&Data, TokenStream, &Ident,
BitSize, Option<Fallback>) {
+ shared::analyze_derive(derive_input, false)
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/fmt_bits.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/fmt_bits.rs
new file mode 100644
index 0000000000..527691ed65
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/fmt_bits.rs
@@ -0,0 +1,112 @@
+use proc_macro2::{Ident, TokenStream};
+use quote::quote;
+use syn::{punctuated::Iter, Data, DeriveInput, Fields, Variant};
+
+use crate::shared::{self, discriminant_assigner::DiscriminantAssigner,
fallback::Fallback, unreachable, BitSize};
+
+pub(crate) fn binary(item: TokenStream) -> TokenStream {
+ let derive_input = parse(item);
+ let (derive_data, arb_int, name, bitsize, fallback) =
analyze(&derive_input);
+
+ match derive_data {
+ Data::Struct(data) => generate_struct_binary_impl(name, &data.fields),
+ Data::Enum(data) => generate_enum_binary_impl(name,
data.variants.iter(), arb_int, bitsize, fallback),
+ _ => unreachable(()),
+ }
+}
+
+fn generate_struct_binary_impl(struct_name: &Ident, fields: &Fields) ->
TokenStream {
+ let write_underscore = quote! { write!(f, "_")?; };
+
+ // fields are printed from most significant to least significant,
separated by an underscore
+ let writes = fields
+ .iter()
+ .rev()
+ .map(|field| {
+ let field_size = shared::generate_type_bitsize(&field.ty);
+
+ // `extracted` is `field_size` bits of `value`, starting from
index `first_bit_pos` (counting from LSB)
+ quote! {
+ let field_size = #field_size;
+ let field_mask = mask >> (struct_size - field_size);
+ let first_bit_pos = last_bit_pos - field_size;
+ last_bit_pos -= field_size;
+ let extracted = field_mask & (self.value >> first_bit_pos);
+ write!(f, "{:0width$b}", extracted, width = field_size)?;
+ }
+ })
+ .reduce(|acc, next| quote!(#acc #write_underscore #next));
+
+ quote! {
+ impl ::core::fmt::Binary for #struct_name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result
{
+ let struct_size = <#struct_name as Bitsized>::BITS;
+ let mut last_bit_pos = struct_size;
+ let mask = <#struct_name as Bitsized>::MAX;
+ #writes
+ Ok(())
+ }
+ }
+ }
+}
+
+fn generate_enum_binary_impl(
+ enum_name: &Ident, variants: Iter<Variant>, arb_int: TokenStream, bitsize:
BitSize, fallback: Option<Fallback>,
+) -> TokenStream {
+ let to_int_match_arms = generate_to_int_match_arms(variants, enum_name,
bitsize, arb_int, fallback);
+
+ let body = if to_int_match_arms.is_empty() {
+ quote! { Ok(()) }
+ } else {
+ quote! {
+ let value = match self {
+ #( #to_int_match_arms )*
+ };
+ write!(f, "{:0width$b}", value, width = <#enum_name as
Bitsized>::BITS)
+ }
+ };
+
+ quote! {
+ impl ::core::fmt::Binary for #enum_name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result
{
+ #body
+ }
+ }
+ }
+}
+
+/// generates the arms for an (infallible) conversion from an enum to the
enum's underlying arbitrary_int
+fn generate_to_int_match_arms(
+ variants: Iter<Variant>, enum_name: &Ident, bitsize: BitSize, arb_int:
TokenStream, fallback: Option<Fallback>,
+) -> Vec<TokenStream> {
+ let is_value_fallback = |variant_name| {
+ if let Some(Fallback::WithValue(name)) = &fallback {
+ variant_name == name
+ } else {
+ false
+ }
+ };
+
+ let mut assigner = DiscriminantAssigner::new(bitsize);
+
+ variants
+ .map(|variant| {
+ let variant_name = &variant.ident;
+ let variant_value = assigner.assign_unsuffixed(variant);
+
+ if is_value_fallback(variant_name) {
+ quote! { #enum_name::#variant_name(number) => *number, }
+ } else {
+ shared::to_int_match_arm(enum_name, variant_name, &arb_int,
variant_value)
+ }
+ })
+ .collect()
+}
+
+fn parse(item: TokenStream) -> DeriveInput {
+ shared::parse_derive(item)
+}
+
+fn analyze(derive_input: &DeriveInput) -> (&Data, TokenStream, &Ident,
BitSize, Option<Fallback>) {
+ shared::analyze_derive(derive_input, false)
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/from_bits.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/from_bits.rs
new file mode 100644
index 0000000000..e58b921521
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/from_bits.rs
@@ -0,0 +1,222 @@
+use itertools::Itertools;
+use proc_macro2::{Ident, TokenStream};
+use proc_macro_error::{abort, abort_call_site};
+use quote::quote;
+use syn::{punctuated::Iter, Data, DeriveInput, Fields, Type, Variant};
+
+use crate::shared::{
+ self, discriminant_assigner::DiscriminantAssigner, enum_fills_bitsize,
fallback::Fallback,
+ unreachable, BitSize,
+};
+
+pub(super) fn from_bits(item: TokenStream) -> TokenStream {
+ let derive_input = parse(item);
+ let (derive_data, arb_int, name, internal_bitsize, fallback) =
analyze(&derive_input);
+ let expanded = match &derive_data {
+ Data::Struct(struct_data) => generate_struct(arb_int, name,
&struct_data.fields),
+ Data::Enum(enum_data) => {
+ let variants = enum_data.variants.iter();
+ let match_arms = analyze_enum(
+ variants,
+ name,
+ internal_bitsize,
+ fallback.as_ref(),
+ &arb_int,
+ );
+ generate_enum(arb_int, name, match_arms, fallback)
+ }
+ _ => unreachable(()),
+ };
+ generate_common(expanded)
+}
+
+fn parse(item: TokenStream) -> DeriveInput {
+ shared::parse_derive(item)
+}
+
+fn analyze(
+ derive_input: &DeriveInput,
+) -> (&syn::Data, TokenStream, &Ident, BitSize, Option<Fallback>) {
+ shared::analyze_derive(derive_input, false)
+}
+
+fn analyze_enum(
+ variants: Iter<Variant>,
+ name: &Ident,
+ internal_bitsize: BitSize,
+ fallback: Option<&Fallback>,
+ arb_int: &TokenStream,
+) -> (Vec<TokenStream>, Vec<TokenStream>) {
+ validate_enum_variants(variants.clone(), fallback);
+
+ let enum_is_filled = enum_fills_bitsize(internal_bitsize, variants.len());
+ if !enum_is_filled && fallback.is_none() {
+ abort_call_site!("enum doesn't fill its bitsize"; help = "you need to
use `#[derive(TryFromBits)]` instead, or specify one of the variants as
#[fallback]")
+ }
+ if enum_is_filled && fallback.is_some() {
+ // NOTE: I've shortly tried pointing to `#[fallback]` here but it
wasn't easy enough
+ abort_call_site!("enum already has {} variants", variants.len(); help
= "remove the `#[fallback]` attribute")
+ }
+
+ let mut assigner = DiscriminantAssigner::new(internal_bitsize);
+
+ let is_fallback = |variant_name| {
+ if let Some(Fallback::Unit(name) | Fallback::WithValue(name)) =
fallback {
+ variant_name == name
+ } else {
+ false
+ }
+ };
+
+ let is_value_fallback = |variant_name| {
+ if let Some(Fallback::WithValue(name)) = fallback {
+ variant_name == name
+ } else {
+ false
+ }
+ };
+
+ variants
+ .map(|variant| {
+ let variant_name = &variant.ident;
+ let variant_value = assigner.assign_unsuffixed(variant);
+
+ let from_int_match_arm = if is_fallback(variant_name) {
+ // this value will be handled by the catch-all arm
+ quote!()
+ } else {
+ quote! { #variant_value => Self::#variant_name, }
+ };
+
+ let to_int_match_arm = if is_value_fallback(variant_name) {
+ quote! { #name::#variant_name(number) => number, }
+ } else {
+ shared::to_int_match_arm(name, variant_name, arb_int,
variant_value)
+ };
+
+ (from_int_match_arm, to_int_match_arm)
+ })
+ .unzip()
+}
+
+fn generate_enum(
+ arb_int: TokenStream,
+ enum_type: &Ident,
+ match_arms: (Vec<TokenStream>, Vec<TokenStream>),
+ fallback: Option<Fallback>,
+) -> TokenStream {
+ let (from_int_match_arms, to_int_match_arms) = match_arms;
+
+ let const_ = if cfg!(feature = "nightly") {
+ quote!(const)
+ } else {
+ quote!()
+ };
+
+ let from_enum_impl =
+ shared::generate_from_enum_impl(&arb_int, enum_type,
to_int_match_arms, &const_);
+
+ let catch_all_arm = match fallback {
+ Some(Fallback::WithValue(fallback_ident)) => quote! {
+ _ => Self::#fallback_ident(number),
+ },
+ Some(Fallback::Unit(fallback_ident)) => quote! {
+ _ => Self::#fallback_ident,
+ },
+ None => quote! {
+ // constness: unreachable!() is not const yet
+ _ => ::core::panic!("unreachable: arbitrary_int already validates
that this is unreachable")
+ },
+ };
+
+ quote! {
+ impl #const_ ::core::convert::From<#arb_int> for #enum_type {
+ fn from(number: #arb_int) -> Self {
+ match number.value() {
+ #( #from_int_match_arms )*
+ #catch_all_arm
+ }
+ }
+ }
+ #from_enum_impl
+ }
+}
+
+/// a type is considered "filled" if it implements `Bitsized` with `BITS == N`,
+/// and additionally is allowed to have any unsigned value from `0` to `2^N -
1`.
+/// such a type can then safely implement `From<uN>`.
+/// a filled type automatically implements the trait `Filled` thanks to a
blanket impl.
+/// the check generated by this function will prevent compilation if `ty` is
not `Filled`.
+fn generate_filled_check_for(ty: &Type, vec: &mut Vec<TokenStream>) {
+ use Type::*;
+ match ty {
+ Path(_) => {
+ let assume = quote! { ::bilge::assume_filled::<#ty>(); };
+ vec.push(assume);
+ }
+ Tuple(tuple) => {
+ for elem in &tuple.elems {
+ generate_filled_check_for(elem, vec)
+ }
+ }
+ Array(array) => generate_filled_check_for(&array.elem, vec),
+ _ => unreachable(()),
+ }
+}
+
+fn generate_struct(arb_int: TokenStream, struct_type: &Ident, fields: &Fields)
-> TokenStream {
+ let const_ = if cfg!(feature = "nightly") {
+ quote!(const)
+ } else {
+ quote!()
+ };
+
+ let mut assumes = Vec::new();
+ for field in fields {
+ generate_filled_check_for(&field.ty, &mut assumes)
+ }
+
+ // a single check per type is enough, so the checks can be deduped
+ let assumes = assumes.into_iter().unique_by(TokenStream::to_string);
+
+ quote! {
+ impl #const_ ::core::convert::From<#arb_int> for #struct_type {
+ fn from(value: #arb_int) -> Self {
+ #( #assumes )*
+ Self { value }
+ }
+ }
+ impl #const_ ::core::convert::From<#struct_type> for #arb_int {
+ fn from(value: #struct_type) -> Self {
+ value.value
+ }
+ }
+ }
+}
+
+fn generate_common(expanded: TokenStream) -> TokenStream {
+ quote! {
+ #expanded
+ }
+}
+
+fn validate_enum_variants(variants: Iter<Variant>, fallback:
Option<&Fallback>) {
+ for variant in variants {
+ // we've already validated the correctness of the fallback variant,
and that there's at most one such variant.
+ // this means we can safely skip a fallback variant if we find one.
+ if let Some(fallback) = &fallback {
+ if fallback.is_fallback_variant(&variant.ident) {
+ continue;
+ }
+ }
+
+ if !matches!(variant.fields, Fields::Unit) {
+ let help_message = if fallback.is_some() {
+ "change this variant to a unit"
+ } else {
+ "add a fallback variant or change this variant to a unit"
+ };
+ abort!(variant, "FromBits only supports unit variants for variants
without `#[fallback]`"; help = help_message);
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/lib.rs
b/rust/hw/char/pl011/vendor/bilge-impl/src/lib.rs
new file mode 100644
index 0000000000..4b34b4f306
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/lib.rs
@@ -0,0 +1,79 @@
+extern crate itertools;
+extern crate proc_macro_error;
+extern crate proc_macro_error_attr;
+use proc_macro::TokenStream;
+use proc_macro_error::proc_macro_error;
+
+mod bitsize;
+mod bitsize_internal;
+mod debug_bits;
+mod default_bits;
+mod fmt_bits;
+mod from_bits;
+mod try_from_bits;
+
+mod shared;
+
+/// Defines the bitsize of a struct or an enum.
+///
+/// e.g. `#[bitsize(4)]` represents the item as a u4, which is UInt<u8, 4>
underneath.
+/// The size of structs is currently limited to 128 bits.
+/// The size of enums is limited to 64 bits.
+/// Please open an issue if you have a usecase for bigger bitfields.
+#[proc_macro_error]
+#[proc_macro_attribute]
+pub fn bitsize(args: TokenStream, item: TokenStream) -> TokenStream {
+ bitsize::bitsize(args.into(), item.into()).into()
+}
+
+/// This is internally used, not to be used by anything besides `bitsize`.
+/// No guarantees are given.
+#[proc_macro_error]
+#[proc_macro_attribute]
+pub fn bitsize_internal(args: TokenStream, item: TokenStream) -> TokenStream {
+ bitsize_internal::bitsize_internal(args.into(), item.into()).into()
+}
+
+/// Generate an `impl TryFrom<uN>` for unfilled bitfields.
+///
+/// This should be used when your enum or enums nested in
+/// a struct don't fill their given `bitsize`.
+#[proc_macro_error]
+#[proc_macro_derive(TryFromBits, attributes(bitsize_internal, fallback))]
+pub fn derive_try_from_bits(item: TokenStream) -> TokenStream {
+ try_from_bits::try_from_bits(item.into()).into()
+}
+
+/// Generate an `impl From<uN>` for filled bitfields.
+///
+/// This should be used when your enum or enums nested in
+/// a struct fill their given `bitsize` or if you're not
+/// using enums.
+#[proc_macro_error]
+#[proc_macro_derive(FromBits, attributes(bitsize_internal, fallback))]
+pub fn derive_from_bits(item: TokenStream) -> TokenStream {
+ from_bits::from_bits(item.into()).into()
+}
+
+/// Generate an `impl core::fmt::Debug` for bitfield structs.
+///
+/// Please use normal #[derive(Debug)] for enums.
+#[proc_macro_error]
+#[proc_macro_derive(DebugBits, attributes(bitsize_internal))]
+pub fn debug_bits(item: TokenStream) -> TokenStream {
+ debug_bits::debug_bits(item.into()).into()
+}
+
+/// Generate an `impl core::fmt::Binary` for bitfields.
+#[proc_macro_error]
+#[proc_macro_derive(BinaryBits)]
+pub fn derive_binary_bits(item: TokenStream) -> TokenStream {
+ fmt_bits::binary(item.into()).into()
+}
+
+/// Generate an `impl core::default::Default` for bitfield structs.
+#[proc_macro_error]
+#[proc_macro_derive(DefaultBits)]
+pub fn derive_default_bits(item: TokenStream) -> TokenStream {
+ default_bits::default_bits(item.into()).into()
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/shared.rs b/rust/hw/char/pl011/vendor/bilge-impl/src/shared.rs
new file mode 100644
index 0000000000..2e54e0d787
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/shared.rs
@@ -0,0 +1,196 @@
+pub mod discriminant_assigner;
+pub mod fallback;
+pub mod util;
+
+use fallback::{fallback_variant, Fallback};
+use proc_macro2::{Ident, Literal, TokenStream};
+use proc_macro_error::{abort, abort_call_site};
+use quote::quote;
+use syn::{Attribute, DeriveInput, LitInt, Meta, Type};
+use util::PathExt;
+
+/// As arbitrary_int is limited to basic rust primitives, the maximum is u128.
+/// Is there a true usecase for bitfields above this size?
+/// This would also be change-worthy when rust starts supporting LLVM's arbitrary integers.
+pub const MAX_STRUCT_BIT_SIZE: BitSize = 128;
+/// As `#[repr(u128)]` is unstable and currently no real usecase for higher sizes exists, the maximum is u64.
+pub const MAX_ENUM_BIT_SIZE: BitSize = 64;
+pub type BitSize = u8;
+
+pub(crate) fn parse_derive(item: TokenStream) -> DeriveInput {
+ syn::parse2(item).unwrap_or_else(unreachable)
+}
+
+// allow since we want `if try_from` blocks to stand out
+#[allow(clippy::collapsible_if)]
+pub(crate) fn analyze_derive(derive_input: &DeriveInput, try_from: bool) -> (&syn::Data, TokenStream, &Ident, BitSize, Option<Fallback>) {
+ let DeriveInput {
+ attrs,
+ ident,
+ // generics,
+ data,
+ ..
+ } = derive_input;
+
+ if !try_from {
+ if attrs.iter().any(is_non_exhaustive_attribute) {
+ abort_call_site!("Item can't be FromBits and non_exhaustive"; help
= "remove #[non_exhaustive] or derive(FromBits) here")
+ }
+ } else {
+ // currently not allowed, would need some thinking:
+ if let syn::Data::Struct(_) = data {
+ if attrs.iter().any(is_non_exhaustive_attribute) {
+ abort_call_site!("Using #[non_exhaustive] on structs is
currently not supported"; help = "open an issue on our repository if needed")
+ }
+ }
+ }
+
+ // parsing the #[bitsize_internal(num)] attribute macro
+ let args = attrs
+ .iter()
+ .find_map(bitsize_internal_arg)
+ .unwrap_or_else(|| abort_call_site!("add #[bitsize] attribute above
your derive attribute"));
+ let (bitsize, arb_int) = bitsize_and_arbitrary_int_from(args);
+
+ let fallback = fallback_variant(data, bitsize);
+ if fallback.is_some() && try_from {
+ abort_call_site!("fallback is not allowed with `TryFromBits`"; help =
"use `#[derive(FromBits)]` or remove this `#[fallback]`")
+ }
+
+ (data, arb_int, ident, bitsize, fallback)
+}
+
+// If we want to support bitsize(u4) besides bitsize(4), do that here.
+pub fn bitsize_and_arbitrary_int_from(bitsize_arg: TokenStream) -> (BitSize, TokenStream) {
+ let bitsize: LitInt = syn::parse2(bitsize_arg.clone())
+ .unwrap_or_else(|_| abort!(bitsize_arg, "attribute value is not a number"; help = "you need to define the size like this: `#[bitsize(32)]`"));
+ // without postfix
+ let bitsize = bitsize
+ .base10_parse()
+ .ok()
+ .filter(|&n| n != 0 && n <= MAX_STRUCT_BIT_SIZE)
+ .unwrap_or_else(|| abort!(bitsize_arg, "attribute value is not a valid number"; help = "currently, numbers from 1 to {} are allowed", MAX_STRUCT_BIT_SIZE));
+ let arb_int = syn::parse_str(&format!("u{bitsize}")).unwrap_or_else(unreachable);
+ (bitsize, arb_int)
+}
+
+pub fn generate_type_bitsize(ty: &Type) -> TokenStream {
+ use Type::*;
+ match ty {
+ Tuple(tuple) => {
+ tuple
+ .elems
+ .iter()
+ .map(generate_type_bitsize)
+ .reduce(|acc, next| quote!((#acc + #next)))
+ // `field: (),` will be handled like this:
+ .unwrap_or_else(|| quote!(0))
+ }
+ Array(array) => {
+ let elem_bitsize = generate_type_bitsize(&array.elem);
+ let len_expr = &array.len;
+ quote!((#elem_bitsize * #len_expr))
+ }
+ Path(_) => {
+ quote!(<#ty as Bitsized>::BITS)
+ }
+ _ => unreachable(()),
+ }
+}
+
+pub(crate) fn generate_from_enum_impl(
+ arb_int: &TokenStream, enum_type: &Ident, to_int_match_arms: Vec<TokenStream>, const_: &TokenStream,
+) -> TokenStream {
+ quote! {
+ impl #const_ ::core::convert::From<#enum_type> for #arb_int {
+ fn from(enum_value: #enum_type) -> Self {
+ match enum_value {
+ #( #to_int_match_arms )*
+ }
+ }
+ }
+ }
+}
+
+/// Filters fields which are always `FILLED`, meaning all bit-patterns are possible,
+/// meaning they are (should be) From<uN>, not TryFrom<uN>
+///
+/// Currently, this is exactly the set of types we can extract a bitsize out of, just by looking at their ident: `uN` and `bool`.
+pub fn is_always_filled(ty: &Type) -> bool {
+ last_ident_of_path(ty).and_then(bitsize_from_type_ident).is_some()
+}
+
+pub fn last_ident_of_path(ty: &Type) -> Option<&Ident> {
+ if let Type::Path(type_path) = ty {
+ // the type may have a qualified path, so I don't think we can use `get_ident()` here
+ let last_segment = type_path.path.segments.last()?;
+ Some(&last_segment.ident)
+ } else {
+ None
+ }
+}
+
+/// in enums, internal_bitsize <= 64; u64::MAX + 1 = u128
+/// therefore the bitshift would not overflow.
+pub fn enum_fills_bitsize(bitsize: u8, variants_count: usize) -> bool {
+ let max_variants_count = 1u128 << bitsize;
+ if variants_count as u128 > max_variants_count {
+ abort_call_site!("enum overflows its bitsize"; help = "there should
only be at most {} variants defined", max_variants_count);
+ }
+ variants_count as u128 == max_variants_count
+}
+
+#[inline]
+pub fn unreachable<T, U>(_: T) -> U {
+ unreachable!("should have already been validated")
+}
+
+pub fn is_attribute(attr: &Attribute, name: &str) -> bool {
+ if let Meta::Path(path) = &attr.meta {
+ path.is_ident(name)
+ } else {
+ false
+ }
+}
+
+fn is_non_exhaustive_attribute(attr: &Attribute) -> bool {
+ is_attribute(attr, "non_exhaustive")
+}
+
+pub(crate) fn is_fallback_attribute(attr: &Attribute) -> bool {
+ is_attribute(attr, "fallback")
+}
+
+/// attempts to extract the bitsize from an ident equal to `uN` or `bool`.
+/// should return `Result` instead of `Option`, if we decide to add more descriptive error handling.
+pub fn bitsize_from_type_ident(type_name: &Ident) -> Option<BitSize> {
+ let type_name = type_name.to_string();
+
+ if type_name == "bool" {
+ Some(1)
+ } else if let Some(suffix) = type_name.strip_prefix('u') {
+ // characters which may appear in this suffix are digits, letters and underscores.
+ // parse() will reject letters and underscores, so this should be correct.
+ let bitsize = suffix.parse().ok();
+
+ // the namespace contains u2 up to u{MAX_STRUCT_BIT_SIZE}. can't make assumptions about larger values
+ bitsize.filter(|&n| n <= MAX_STRUCT_BIT_SIZE)
+ } else {
+ None
+ }
+}
+
+pub fn to_int_match_arm(enum_name: &Ident, variant_name: &Ident, arb_int: &TokenStream, variant_value: Literal) -> TokenStream {
+ quote! { #enum_name::#variant_name => #arb_int::new(#variant_value), }
+}
+
+pub(crate) fn bitsize_internal_arg(attr: &Attribute) -> Option<TokenStream> {
+ if let Meta::List(list) = &attr.meta {
+ if list.path.matches(&["bilge", "bitsize_internal"]) {
+ let arg = list.tokens.to_owned();
+ return Some(arg);
+ }
+ }
+
+ None
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/shared/discriminant_assigner.rs b/rust/hw/char/pl011/vendor/bilge-impl/src/shared/discriminant_assigner.rs
new file mode 100644
index 0000000000..5825baa4f1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/shared/discriminant_assigner.rs
@@ -0,0 +1,56 @@
+use proc_macro2::Literal;
+use proc_macro_error::abort;
+use syn::{Expr, ExprLit, Lit, Variant};
+
+use super::{unreachable, BitSize};
+
+pub(crate) struct DiscriminantAssigner {
+ bitsize: BitSize,
+ next_expected_assignment: u128,
+}
+
+impl DiscriminantAssigner {
+ pub fn new(bitsize: u8) -> DiscriminantAssigner {
+ DiscriminantAssigner {
+ bitsize,
+ next_expected_assignment: 0,
+ }
+ }
+
+ fn max_value(&self) -> u128 {
+ (1u128 << self.bitsize) - 1
+ }
+
+ fn value_from_discriminant(&self, variant: &Variant) -> Option<u128> {
+ let discriminant = variant.discriminant.as_ref()?;
+ let discriminant_expr = &discriminant.1;
+ let variant_name = &variant.ident;
+
+ let Expr::Lit(ExprLit { lit: Lit::Int(int), .. }) = discriminant_expr else {
+ abort!(
+ discriminant_expr,
+ "variant `{}` is not a number", variant_name;
+ help = "only literal integers currently supported"
+ )
+ };
+
+ let discriminant_value: u128 = int.base10_parse().unwrap_or_else(unreachable);
+ if discriminant_value > self.max_value() {
+ abort!(variant, "Value of variant exceeds the given number of bits")
+ }
+
+ Some(discriminant_value)
+ }
+
+ fn assign(&mut self, variant: &Variant) -> u128 {
+ let value = self.value_from_discriminant(variant).unwrap_or(self.next_expected_assignment);
+ self.next_expected_assignment = value + 1;
+ value
+ }
+
+ /// syn adds a suffix when printing Rust integers. we use an unsuffixed `Literal` for better-looking codegen
+ pub fn assign_unsuffixed(&mut self, variant: &Variant) -> Literal {
+ let next = self.assign(variant);
+ Literal::u128_unsuffixed(next)
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/shared/fallback.rs b/rust/hw/char/pl011/vendor/bilge-impl/src/shared/fallback.rs
new file mode 100644
index 0000000000..893919659e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/shared/fallback.rs
@@ -0,0 +1,92 @@
+use itertools::Itertools;
+use proc_macro2::Ident;
+use proc_macro_error::{abort, abort_call_site};
+use syn::{Data, Variant};
+
+use super::{bitsize_from_type_ident, is_fallback_attribute, last_ident_of_path, unreachable, BitSize};
+
+pub enum Fallback {
+ Unit(Ident),
+ WithValue(Ident),
+}
+
+impl Fallback {
+ fn from_variant(variant: &Variant, enum_bitsize: BitSize, is_last_variant: bool) -> Fallback {
+ use syn::Fields::*;
+
+ let ident = variant.ident.to_owned();
+
+ match &variant.fields {
+ Named(_) => {
+ abort!(variant, "`#[fallback]` does not support variants with
named fields"; help = "use a tuple variant or remove this `#[fallback]`")
+ }
+ Unnamed(fields) => {
+ let variant_fields = fields.unnamed.iter();
+ let Ok(fallback_value) = variant_fields.exactly_one() else {
+ abort!(variant, "fallback variant must have exactly one
field"; help = "use only one field or change to a unit variant")
+ };
+
+ if !is_last_variant {
+ abort!(variant, "value fallback is not the last variant";
help = "a fallback variant with value must be the last variant of the enum")
+ }
+
+ // here we validate that the fallback variant field type matches the bitsize
+ let size_from_type = last_ident_of_path(&fallback_value.ty).and_then(bitsize_from_type_ident);
+
+ match size_from_type {
+ Some(bitsize) if bitsize == enum_bitsize => Fallback::WithValue(ident),
+ Some(bitsize) => abort!(
+ variant.fields,
+ "bitsize of fallback field ({}) does not match bitsize
of enum ({})",
+ bitsize,
+ enum_bitsize
+ ),
+ None => abort!(variant.fields, "`#[fallback]` only supports arbitrary_int or bool types"),
+ }
+ }
+ Unit => Fallback::Unit(ident),
+ }
+ }
+
+ pub fn is_fallback_variant(&self, variant_ident: &Ident) -> bool {
+ matches!(self, Fallback::Unit(fallback_ident) | Fallback::WithValue(fallback_ident) if variant_ident == fallback_ident)
+ }
+}
+
+/// finds a single enum variant with the attribute "fallback".
+/// a "fallback variant" may come in one of two forms:
+/// 1. `#[fallback] Foo`, which we map to `Fallback::Unit`
+/// 2. `#[fallback] Foo(uN)`, where `N` is the enum's bitsize and `Foo` is the enum's last variant,
+/// which we map to `Fallback::WithValue`
+pub fn fallback_variant(data: &Data, enum_bitsize: BitSize) -> Option<Fallback> {
+ match data {
+ Data::Enum(enum_data) => {
+ let variants_with_fallback = enum_data
+ .variants
+ .iter()
+ .filter(|variant| variant.attrs.iter().any(is_fallback_attribute));
+
+ match variants_with_fallback.at_most_one() {
+ Ok(None) => None,
+ Ok(Some(variant)) => {
+ let is_last_variant = variant.ident == enum_data.variants.last().unwrap().ident;
+ let fallback = Fallback::from_variant(variant, enum_bitsize, is_last_variant);
+ Some(fallback)
+ }
+ Err(_) => {
+ abort_call_site!("only one enum variant may be
`#[fallback]`"; help = "remove #[fallback] attributes until you only have one")
+ }
+ }
+ }
+ Data::Struct(struct_data) => {
+ let mut field_attrs = struct_data.fields.iter().flat_map(|field| &field.attrs);
+
+ if field_attrs.any(is_fallback_attribute) {
+ abort_call_site!("`#[fallback]` is only applicable to enums";
help = "remove all `#[fallback]` from this struct")
+ } else {
+ None
+ }
+ }
+ _ => unreachable(()),
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/shared/util.rs b/rust/hw/char/pl011/vendor/bilge-impl/src/shared/util.rs
new file mode 100644
index 0000000000..31a9be1f2a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/shared/util.rs
@@ -0,0 +1,91 @@
+use syn::Path;
+#[cfg(test)]
+use syn_path::path;
+
+pub trait PathExt {
+ /// match path segments. `str_segments` should contain the entire
+ /// qualified path from the crate root, for example `["bilge", "FromBits"]`.
+ /// allows partial matches - `["std", "default", "Default"]` will also match
+ /// the paths `Default` or `default::Default`.
+ fn matches(&self, str_segments: &[&str]) -> bool;
+
+ /// match path segments, but also allow first segment to be either "core" or "std"
+ fn matches_core_or_std(&self, str_segments: &[&str]) -> bool {
+ let mut str_segments = str_segments.to_owned();
+
+ // try matching with "std" as first segment
+ // first, make "std" the first segment
+ match str_segments.first().copied() {
+ None => return false, // since path is non-empty, this is trivially false
+ Some("std") => (),
+ _ => str_segments.insert(0, "std"),
+ };
+
+ if self.matches(&str_segments) {
+ return true;
+ }
+
+ // try matching with "core" as first segment
+ str_segments[0] = "core";
+ self.matches(&str_segments)
+ }
+}
+
+impl PathExt for Path {
+ fn matches(&self, str_segments: &[&str]) -> bool {
+ if self.segments.len() > str_segments.len() {
+ return false;
+ }
+
+ let segments = self.segments.iter().map(|seg| seg.ident.to_string()).rev();
+ let str_segments = str_segments.iter().copied().rev();
+
+ segments.zip(str_segments).all(|(a, b)| a == b)
+ }
+}
+
+#[test]
+fn path_matching() {
+ let paths = [
+ path!(::std::default::Default),
+ path!(std::default::Default),
+ path!(default::Default),
+ path!(Default),
+ ];
+
+ let str_segments = &["std", "default", "Default"];
+
+ for path in paths {
+ assert!(path.matches(str_segments));
+ }
+}
+
+#[test]
+fn partial_does_not_match() {
+ let full_path = path!(std::foo::bar::fizz::Buzz);
+
+ let str_segments = ["std", "foo", "bar", "fizz", "Buzz"];
+
+ for i in 1..str_segments.len() {
+ let partial_str_segments = &str_segments[i..];
+ assert!(!full_path.matches(partial_str_segments))
+ }
+}
+
+#[test]
+fn path_matching_without_root() {
+ let paths = [
+ path!(::core::fmt::Debug),
+ path!(core::fmt::Debug),
+ path!(::std::fmt::Debug),
+ path!(std::fmt::Debug),
+ path!(fmt::Debug),
+ path!(Debug),
+ ];
+
+ let str_segments_without_root = &["fmt", "Debug"];
+
+ for path in paths {
+ assert!(path.matches_core_or_std(str_segments_without_root));
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge-impl/src/try_from_bits.rs b/rust/hw/char/pl011/vendor/bilge-impl/src/try_from_bits.rs
new file mode 100644
index 0000000000..b27a5567c5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge-impl/src/try_from_bits.rs
@@ -0,0 +1,143 @@
+use proc_macro2::{Ident, TokenStream};
+use proc_macro_error::{abort, emit_call_site_warning};
+use quote::quote;
+use syn::{punctuated::Iter, Data, DeriveInput, Fields, Type, Variant};
+
+use crate::shared::{self, discriminant_assigner::DiscriminantAssigner, enum_fills_bitsize, fallback::Fallback, unreachable, BitSize};
+use crate::shared::{bitsize_from_type_ident, last_ident_of_path};
+
+pub(super) fn try_from_bits(item: TokenStream) -> TokenStream {
+ let derive_input = parse(item);
+ let (derive_data, arb_int, name, internal_bitsize, ..) = analyze(&derive_input);
+ match derive_data {
+ Data::Struct(ref data) => codegen_struct(arb_int, name, &data.fields),
+ Data::Enum(ref enum_data) => {
+ let variants = enum_data.variants.iter();
+ let match_arms = analyze_enum(variants, name, internal_bitsize, &arb_int);
+ codegen_enum(arb_int, name, match_arms)
+ }
+ _ => unreachable(()),
+ }
+}
+
+fn parse(item: TokenStream) -> DeriveInput {
+ shared::parse_derive(item)
+}
+
+fn analyze(derive_input: &DeriveInput) -> (&syn::Data, TokenStream, &Ident, BitSize, Option<Fallback>) {
+ shared::analyze_derive(derive_input, true)
+}
+
+fn analyze_enum(variants: Iter<Variant>, name: &Ident, internal_bitsize: BitSize, arb_int: &TokenStream) -> (Vec<TokenStream>, Vec<TokenStream>) {
+ validate_enum_variants(variants.clone());
+
+ if enum_fills_bitsize(internal_bitsize, variants.len()) {
+ emit_call_site_warning!("enum fills its bitsize"; help = "you can use `#[derive(FromBits)]` instead, rust will provide `TryFrom` for you (so you don't necessarily have to update call-sites)");
+ }
+
+ let mut assigner = DiscriminantAssigner::new(internal_bitsize);
+
+ variants
+ .map(|variant| {
+ let variant_name = &variant.ident;
+ let variant_value = assigner.assign_unsuffixed(variant);
+
+ let from_int_match_arm = quote! {
+ #variant_value => Ok(Self::#variant_name),
+ };
+
+ let to_int_match_arm = shared::to_int_match_arm(name, variant_name, arb_int, variant_value);
+
+ (from_int_match_arm, to_int_match_arm)
+ })
+ .unzip()
+}
+
+fn codegen_enum(arb_int: TokenStream, enum_type: &Ident, match_arms: (Vec<TokenStream>, Vec<TokenStream>)) -> TokenStream {
+ let (from_int_match_arms, to_int_match_arms) = match_arms;
+
+ let const_ = if cfg!(feature = "nightly") { quote!(const) } else { quote!() };
+
+ let from_enum_impl = shared::generate_from_enum_impl(&arb_int, enum_type, to_int_match_arms, &const_);
+ quote! {
+ impl #const_ ::core::convert::TryFrom<#arb_int> for #enum_type {
+ type Error = ::bilge::BitsError;
+
+ fn try_from(number: #arb_int) -> ::core::result::Result<Self, Self::Error> {
+ match number.value() {
+ #( #from_int_match_arms )*
+ i => Err(::bilge::give_me_error()),
+ }
+ }
+ }
+
+ // this other direction is needed for get/set/new
+ #from_enum_impl
+ }
+}
+
+fn generate_field_check(ty: &Type) -> TokenStream {
+ // Yes, this is hacky module management.
+ crate::bitsize_internal::struct_gen::generate_getter_inner(ty, false)
+}
+
+fn codegen_struct(arb_int: TokenStream, struct_type: &Ident, fields: &Fields) -> TokenStream {
+ let is_ok: TokenStream = fields
+ .iter()
+ .map(|field| {
+ let ty = &field.ty;
+ let size_from_type = last_ident_of_path(ty).and_then(bitsize_from_type_ident);
+ if let Some(size) = size_from_type {
+ quote! { {
+ // we still need to shift by the element's size
+ let size = #size;
+ cursor = cursor.wrapping_shr(size as u32);
+ true
+ } }
+ } else {
+ generate_field_check(ty)
+ }
+ })
+ .reduce(|acc, next| quote!((#acc && #next)))
+ // `Struct {}` would be handled like this:
+ .unwrap_or_else(|| quote!(true));
+
+ let const_ = if cfg!(feature = "nightly") { quote!(const) } else { quote!() };
+
+ quote! {
+ impl #const_ ::core::convert::TryFrom<#arb_int> for #struct_type {
+ type Error = ::bilge::BitsError;
+
+ // validates all values, which means enums, even in inner structs (TODO: and reserved fields?)
+ fn try_from(value: #arb_int) -> ::core::result::Result<Self, Self::Error> {
+ type ArbIntOf<T> = <T as Bitsized>::ArbitraryInt;
+ type BaseIntOf<T> = <ArbIntOf<T> as Number>::UnderlyingType;
+
+ // cursor starts at value's first field
+ let mut cursor = value.value();
+
+ let is_ok: bool = {#is_ok};
+
+ if is_ok {
+ Ok(Self { value })
+ } else {
+ Err(::bilge::give_me_error())
+ }
+ }
+ }
+
+ impl #const_ ::core::convert::From<#struct_type> for #arb_int {
+ fn from(struct_value: #struct_type) -> Self {
+ struct_value.value
+ }
+ }
+ }
+}
+
+fn validate_enum_variants(variants: Iter<Variant>) {
+ for variant in variants {
+ if !matches!(variant.fields, Fields::Unit) {
+ abort!(variant, "TryFromBits only supports unit variants in
enums"; help = "change this variant to a unit");
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/bilge/.cargo-checksum.json b/rust/hw/char/pl011/vendor/bilge/.cargo-checksum.json
new file mode 100644
index 0000000000..39c4922340
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"3bb4a52531b944f44649567e4308c98efe1a908ca15558eaf9139fe260c22184","LICENSE-APACHE":"2514772e5475f208616174f81b67168179a7c51bdcb9570a96a9dc5962b83116","LICENSE-MIT":"7363fc7e2596998f3fc0109b6908575bf1cd8f6fa2fc97aff6bd9d17177f50bb","README.md":"6d4fcc631ed47bbe8e654649185ce987e9630192ea25c84edd264674e30efa4d","src/lib.rs":"4c8546a19b3255895058b4d5a2e8f17b36d196275bbc6831fe1a8b8cbeb258dc"},"package":"dc707ed8ebf81de5cd6c7f48f54b4c8621760926cdf35a57000747c512e67b57"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/bilge/Cargo.toml b/rust/hw/char/pl011/vendor/bilge/Cargo.toml
new file mode 100644
index 0000000000..3e4900f08c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge/Cargo.toml
@@ -0,0 +1,69 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "bilge"
+version = "0.2.0"
+authors = ["Hecatia Elegua"]
+include = [
+ "src/lib.rs",
+ "LICENSE-*",
+ "README.md",
+]
+description = "Use bitsized types as if they were a feature of rust."
+documentation = "https://docs.rs/bilge"
+readme = "README.md"
+keywords = [
+ "bilge",
+ "bitfield",
+ "bits",
+ "register",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/hecatia-elegua/bilge"
+
+[lib]
+bench = false
+
+[[bench]]
+name = "compared"
+path = "benches/compared/main.rs"
+bench = false
+harness = false
+
+[dependencies.arbitrary-int]
+version = "1.2.6"
+
+[dependencies.bilge-impl]
+version = "=0.2.0"
+
+[dev-dependencies.assert_matches]
+version = "1.5.0"
+
+[dev-dependencies.rustversion]
+version = "1.0"
+
+[dev-dependencies.trybuild]
+version = "1.0"
+
+[dev-dependencies.volatile]
+version = "0.5.1"
+
+[dev-dependencies.zerocopy]
+version = "0.5.0"
+
+[features]
+default = []
+nightly = [
+ "arbitrary-int/const_convert_and_const_trait_impl",
+ "bilge-impl/nightly",
+]
diff --git a/rust/hw/char/pl011/vendor/bilge/LICENSE-APACHE b/rust/hw/char/pl011/vendor/bilge/LICENSE-APACHE
new file mode 100644
index 0000000000..21254fc75d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge/LICENSE-APACHE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/bilge/LICENSE-MIT b/rust/hw/char/pl011/vendor/bilge/LICENSE-MIT
new file mode 100644
index 0000000000..2b1af07674
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge/LICENSE-MIT
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/bilge/README.md b/rust/hw/char/pl011/vendor/bilge/README.md
new file mode 100644
index 0000000000..48daad0fcb
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge/README.md
@@ -0,0 +1,327 @@
+# bilge: the most readable bitfields
+
+[![crates.io](https://img.shields.io/crates/v/bilge.svg)](https://crates.io/crates/bilge)
+[![docs.rs](https://docs.rs/bilge/badge.svg)](https://docs.rs/bilge)
+[![loc](https://tokei.rs/b1/github/hecatia-elegua/bilge?category=code)](https://github.com/Aaronepower/tokei#badges)
+
+_Y e s_, this is yet another bitfield crate, but hear me out:
+
+This is a _**bit**_ better than what we had before.
+
+I wanted a design fitting rust:
+
+- **safe**
+ - types model as much of the functionality as possible and don't allow false usage
+- **fast**
+ - like handwritten bit fiddling code
+- **simple to complex**
+ - obvious and readable basic frontend, like normal structs
+ - only minimally and gradually introduce advanced concepts
+ - provide extension mechanisms
+
+The lib is **no-std** (and fully `const` behind a `"nightly"` feature gate).
+
+For some more explanations on the "why" and "how": [blog post](https://hecatia-elegua.github.io/blog/no-more-bit-fiddling/) and [reddit comments](https://www.reddit.com/r/rust/comments/13ic0mf/no_more_bit_fiddling_and_introducing_bilge/).
+
+## WARNING
+
+Our current version is still pre 1.0, which means nothing is completely stable.
+
+However, constructors, getters, setters and From/TryFrom should stay the same, since their semantics are very clear.
+
+[//]: # (keep this fixed to the version in .github/workflows/ci.yml, rust-toolchain.toml)
+
+The nightly feature is tested on `nightly-2022-11-03` and [will not work on the newest nightly until const_convert comes back](https://github.com/rust-lang/rust/issues/110395#issuecomment-1524775763).
+
+## Usage
+
+To make your life easier:
+
+```rust
+use bilge::prelude::*;
+```
+
+### Infallible (From)
+
+You can just specify bitsized fields like normal fields:
+
+```rust
+#[bitsize(14)]
+struct Register {
+ header: u4,
+ body: u7,
+ footer: Footer,
+}
+```
+
+The attribute `bitsize` generates the bitfield, while `14` works as a failsafe, emitting a compile error if your struct definition doesn't declare 14 bits.
+Let's define the nested struct `Footer` as well:
+
+```rust
+#[bitsize(3)]
+#[derive(FromBits)]
+struct Footer {
+ is_last: bool,
+ code: Code,
+}
+```
+
+As you can see, we added `#[derive(FromBits)]`, which is needed for `Register`'s getters and setters.
+Due to how rust macros work (outside-in), it needs to be below `#[bitsize]`.
+Also, `bool` can be used as one bit.
+
+`Code` is another nesting, this time an enum:
+
+```rust
+#[bitsize(2)]
+#[derive(FromBits)]
+enum Code { Success, Error, IoError, GoodExample }
+```
+
+Now we can construct `Register`:
+
+```rust
+let reg1 = Register::new(
+ u4::new(0b1010),
+ u7::new(0b010_1010),
+ Footer::new(true, Code::GoodExample)
+);
+```
+
+Or, if we add `#[derive(FromBits)]` to `Register` and want to parse a raw register value:
+
+```rust
+let mut reg2 = Register::from(u14::new(0b11_1_0101010_1010));
+```
+
+And getting and setting fields is done like this:
+
+```rust
+let header = reg2.header();
+reg2.set_footer(Footer::new(false, Code::Success));
+```
+
+Any kinds of tuple and array are also supported:
+
+```rust
+#[bitsize(32)]
+#[derive(FromBits)]
+struct InterruptSetEnables([bool; 32]);
+```
+
+Which produces the usual getter and setter, but also element accessors:
+
+```rust
+let mut ise = InterruptSetEnables::from(0b0000_0000_0000_0000_0000_0000_0001_0000);
+let ise5 = ise.val_0_at(4);
+ise.set_val_0_at(2, ise5);
+assert_eq!(0b0000_0000_0000_0000_0000_0000_0001_0100, ise.value);
+```
+
+Depending on what you're working with, only a subset of enum values might be clear, or some values might be reserved.
+In that case, you can use a fallback variant, defined like this:
+
+```rust
+#[bitsize(32)]
+#[derive(FromBits, Debug, PartialEq)]
+enum Subclass {
+ Mouse,
+ Keyboard,
+ Speakers,
+ #[fallback]
+ Reserved,
+}
+```
+
+which will convert any undeclared bits to `Reserved`:
+
+```rust
+assert_eq!(Subclass::Reserved, Subclass::from(3));
+assert_eq!(Subclass::Reserved, Subclass::from(42));
+let num = u32::from(Subclass::from(42));
+assert_eq!(3, num);
+assert_ne!(42, num);
+```
+
+or, if you need to keep the exact number saved, use:
+
+```rust
+#[fallback]
+Reserved(u32),
+```
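+
+For illustration, the complete enum behind the assertions below might look like this (a sketch; the fragment above is all the README spells out):
+
+```rust
+#[bitsize(32)]
+#[derive(FromBits, Debug, PartialEq)]
+enum Subclass2 {
+    Mouse,
+    Keyboard,
+    Speakers,
+    #[fallback]
+    Reserved(u32),
+}
+```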
+
+```rust
+assert_eq!(Subclass2::Reserved(3), Subclass2::from(3));
+assert_eq!(Subclass2::Reserved(42), Subclass2::from(42));
+let num = u32::from(Subclass2::from(42));
+assert_eq!(42, num);
+assert_ne!(3, num);
+```
+
+### Fallible (TryFrom)
+
+In contrast to structs, enums don't have to declare all of their bits:
+
+```rust
+#[bitsize(2)]
+#[derive(TryFromBits)]
+enum Class {
+ Mobile, Semimobile, /* 0x2 undefined */ Stationary = 0x3
+}
+```
+
+meaning this will work:
+
+```rust
+let class = Class::try_from(u2::new(2));
+assert!(class.is_err());
+```
+
+except we first need to `#[derive(Debug, PartialEq)]` on `Class`, since `assert_eq!` needs those.
+
+Let's do that, and use `Class` as a field:
+
+```rust
+#[bitsize(8)]
+#[derive(TryFromBits)]
+struct Device {
+ reserved: u2,
+ class: Class,
+ reserved: u4,
+}
+```
+
+This shows `TryFrom` being propagated upward. There's also another small help: `reserved` fields (which are often used in registers) can all have the same name.
+
+Again, let's try to print this:
+
+```rust
+println!("{:?}", Device::try_from(0b0000_11_00));
+println!("{:?}", Device::new(Class::Mobile));
+```
+
+And again, `Device` doesn't implement `Debug`:
+
+### DebugBits
+
+For structs, you need to add `#[derive(DebugBits)]` to get an output like this:
+
+```rust
+Ok(Device { reserved_i: 0, class: Stationary, reserved_ii: 0 })
+Device { reserved_i: 0, class: Mobile, reserved_ii: 0 }
+```
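+
+For example (a sketch based on the `Device` struct above; the combined derive list is an assumption):
+
+```rust
+#[bitsize(8)]
+#[derive(TryFromBits, DebugBits)]
+struct Device {
+    reserved: u2,
+    class: Class,
+    reserved: u4,
+}
+```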
+
+For testing + overview, the full readme example code is in `/examples/readme.rs`.
+
+### Custom -Bits derives
+
+One of the main advantages of our approach is that we can keep `#[bitsize]` pretty slim, offloading all the other features to derive macros.
+Besides the derive macros shown above, you can extend `bilge` with your own derive crates working on bitfields.
+An example of this is given in [`/tests/custom_derive.rs`](https://github.com/hecatia-elegua/bilge/blob/main/tests/custom_derive.rs), with its implementation in [`tests/custom_bits`](https://github.com/hecatia-elegua/bilge/blob/1dfb6cf7d278d102d3f96ac31a9374e2b27fafc7/tests/custom_bits/custom_bits_derive/src/lib.rs).
+
+## Back- and Forwards Compatibility
+
+The syntax is kept very similar to usual rust structs for a simple reason:
+
+The endgoal of this library is to support the adoption of LLVM's arbitrary bitwidth integers into rust,
+thereby allowing rust-native bitfields.
+Until then, bilge is using the wonderful [`arbitrary-int` crate by danlehmann](https://github.com/danlehmann/arbitrary-int).
+
+After all attribute expansions, our generated bitfield contains a single field, somewhat like:
+
+```rust
+struct Register { value: u14 }
+```
+
+This means you _could_ modify the inner value directly, but it breaks type safety guarantees (e.g. unfilled or read-only fields).
+So if you need to modify the whole field, instead use the type-safe conversions `u14::from(register)` and `Register::from(u14)`.
+It is possible that this inner type will be made private.
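+
+A minimal sketch of that type-safe round trip (assuming the `Register` struct from the usage section above):
+
+```rust
+let reg = Register::from(u14::new(0b11_1_0101010_1010));
+let raw: u14 = u14::from(reg);  // read out the whole field
+let reg2 = Register::from(raw); // rebuild the bitfield from the raw value
+```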
+
+For some more examples and an overview of functionality, take a look at `/examples` and `/tests`.
+
+## Alternatives
+
+### benchmarks, performance, asm line count
+
+First of all, [basic benchmarking](https://github.com/hecatia-elegua/bilge/blob/main/benches/compared/main.rs) reveals that all alternatives mentioned here (besides deku) have about the same performance and line count. This includes a handwritten version.
+
+### build-time
+
+Measuring build time of the crate itself (both with its dependencies and without) yields these numbers on my machine:
+
+|                       | debug | debug single crate | release   | release single crate |
+|-----------------------|-------|--------------------|-----------|----------------------|
+| bilge 1.67-nightly    | 8     | 1.8                | 6         | 0.8                  |
+| bitbybit 1.69         | 4.5   | 1.3                | 13.5 [^*] | 9.5 [^*]             |
+| modular-bitfield 1.69 | 8     | 2.2                | 7.2       | 1.6                  |
+
+[^*]: This is just a weird rustc regression or my setup or sth, not representative.
+
+This was measured with `cargo clean && cargo build [--release] --quiet --timings`.
+Of course, the actual codegen time on an example project needs to be measured, too.
+
+
+### handwritten implementation
+
+The common handwritten implementation pattern for bitfields in rust looks [somewhat like benches/compared/handmade.rs](https://github.com/hecatia-elegua/bilge/blob/main/benches/compared/handmade.rs), sometimes also throwing around a lot of consts for field offsets. The problems with this approach are:
+- readability suffers
+- offset, cast or masking errors could go unnoticed
+- bit fiddling, shifting and masking is done all over the place, in contrast to bitfields
+- beginners suffer, although I would argue even seniors, since it's more like: "Why do we need to learn and debug bit fiddling if we can get most of it done by using structs?"
+- reimplementing different kinds of _fallible nested-struct enum-tuple array field access_ might not be so fun
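+
+For illustration, the handwritten pattern referred to above usually looks something like this (a sketch, not the actual `benches/compared/handmade.rs`):
+
+```rust
+const CLASS_OFFSET: u8 = 2;
+const CLASS_MASK: u8 = 0b11;
+
+// manual getter: shift the field down, then mask it out
+fn class(raw: u8) -> u8 {
+    (raw >> CLASS_OFFSET) & CLASS_MASK
+}
+
+// manual setter: clear the field's bits, then or in the new value
+fn set_class(raw: u8, class: u8) -> u8 {
+    (raw & !(CLASS_MASK << CLASS_OFFSET)) | ((class & CLASS_MASK) << CLASS_OFFSET)
+}
+```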
+
+### modular-bitfield
+
+The often used and very inspiring [`modular-bitfield`](https://github.com/robbepop/modular-bitfield) has a few
+problems:
+- it is unmaintained and has a quirky structure
+- constructors use the builder pattern
+ - makes user code unreadable if you have many fields
+ - can accidentally leave things uninitialized
+- `from_bytes` can easily take invalid arguments, which turns verification inside-out:
+ - modular-bitfield flow: `u16` -> `PackedData::from_bytes([u16])` -> `PackedData::status_or_err()?`
+ - needs to check for `Err` on every single access
+ - adds duplicate getters and setters with postfix `_or_err`
+ - reinvents `From<u16>`/`TryFrom<u16>` as a kind of hybrid
+ - bilge: usual type-system centric flow: `u16` -> `PackedData::try_from(u16)?` -> `PackedData::status()`
+ - just works, needs to check nothing on access
+ - some more general info on this: [Parse, don't validate](https://lexi-lambda.github.io/blog/2019/11/05/parse-don-t-validate/)
+- big god-macro
+ - powerful, but less readable to the devs of modular-bitfield
+ - needs to cover many derives in itself, like `impl Debug` (other bitfield crates do this as well)
+ - bilge: solves this by providing a kind of scope for `-Bits`-derives
+
+and implementation differences:
+- underlying type is a byte array
+ - can be useful for bitfields larger than u128
+ - bilge: if your bitfields get larger than u128, you can most often split them into multiple bitfields of a primitive size (like u64) and put those in a parent struct which is not a bitfield
+
+Still, modular-bitfield is pretty good and I had set out to build something equal or hopefully better than it.
+Tell me where I can do better, I will try.
+
+### bitbybit
+
+One of the libs inspired by the same crate is [`bitbybit`](https://github.com/danlehmann/bitfield), which is much more readable and up-to-date. Actually, I even helped and am still helping on that one as well. After experimenting and hacking around in their code though, I realized it would need to be severely changed for the features and structure I had in mind.
+
+implementation differences (as of 26.04.23):
+- it can do read/write-only, array strides and repeat the same bits for multiple fields
+ - bilge: these will be added the moment someone needs it
+- redundant bit-offset specification, which can help or annoy, the same way bilge's `reserved` fields can help or annoy
+
+### deku
+
+After looking at a ton of bitfield libs on crates.io, I _didn't_ find [`deku`](https://github.com/sharksforarms/deku).
+I will still mention it here because it uses a very interesting crate underneath (bitvec).
+Currently (as of 26.04.23), it generates far more assembly and takes longer to run, since parts of the API are not `const`.
+I've opened an issue on their repo about that.
+
+### most others
+
+Besides that, many bitfield libs try to imitate or look like C bitfields, even though these are hated by many.
+I argue most beginners would have the idea to specify bits with basic primitives like u1, u2, ...
+This also opens up some possibilities for calculation and conversion on those primitives.
+
+Something similar can be said about `bitflags`, which, under this model, can be turned into simple structs with bools and enums.
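+
+A sketch of that idea (the struct and field names are made up):
+
+```rust
+#[bitsize(3)]
+#[derive(FromBits)]
+struct Permissions {
+    read: bool,
+    write: bool,
+    execute: bool,
+}
+```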
+
+Basically, `bilge` tries to convert bit fiddling, shifting and masking into more widely known concepts like struct access.
+
+About the name: a bilge is one of the "lowest" parts of a ship, nothing else to it :)
diff --git a/rust/hw/char/pl011/vendor/bilge/meson.build b/rust/hw/char/pl011/vendor/bilge/meson.build
new file mode 100644
index 0000000000..906cec7764
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge/meson.build
@@ -0,0 +1,17 @@
+_bilge_rs = static_library(
+ 'bilge',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2021',
+ ],
+ dependencies: [
+ dep_arbitrary_int,
+ dep_bilge_impl,
+ ],
+)
+
+dep_bilge = declare_dependency(
+ link_with: _bilge_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/bilge/src/lib.rs b/rust/hw/char/pl011/vendor/bilge/src/lib.rs
new file mode 100644
index 0000000000..c6c9752ea5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/bilge/src/lib.rs
@@ -0,0 +1,80 @@
+#![cfg_attr(not(doctest), doc = include_str!("../README.md"))]
+#![no_std]
+
+#[doc(no_inline)]
+pub use arbitrary_int;
+pub use bilge_impl::{bitsize, bitsize_internal, BinaryBits, DebugBits, DefaultBits, FromBits, TryFromBits};
+
+/// used for `use bilge::prelude::*;`
+pub mod prelude {
+ #[rustfmt::skip]
+ #[doc(no_inline)]
+ pub use super::{
+ bitsize, Bitsized,
+ FromBits, TryFromBits, DebugBits, BinaryBits, DefaultBits,
+ // we control the version, so this should not be a problem
+ arbitrary_int::*,
+ };
+}
+
+/// This is internally used, but might be useful. No guarantees are given (for now).
+pub trait Bitsized {
+ type ArbitraryInt;
+ const BITS: usize;
+ const MAX: Self::ArbitraryInt;
+}
+
+/// Internally used marker trait.
+/// # Safety
+///
+/// Avoid implementing this for your types. Implementing this trait could break invariants.
+pub unsafe trait Filled: Bitsized {}
+unsafe impl<T> Filled for T where T: Bitsized + From<<T as Bitsized>::ArbitraryInt> {}
+
+/// This is generated to statically validate that a type implements `FromBits`.
+pub const fn assume_filled<T: Filled>() {}
+
+#[non_exhaustive]
+#[derive(Debug, PartialEq)]
+pub struct BitsError;
+
+/// Internally used for generating the `Result::Err` type in `TryFrom`.
+///
+/// This is needed since we don't want users to be able to create `BitsError` right now.
+/// We'll be able to turn `BitsError` into an enum later, or anything else really.
+pub const fn give_me_error() -> BitsError {
+ BitsError
+}
+
+/// Only basing this on Number did not work, as bool and others are not Number.
+/// We could remove the whole macro_rules thing if it worked, though.
+/// Maybe there is some way to do this, I'm not deep into types.
+/// Finding some way to combine Number and Bitsized would be good as well.
+impl<BaseType, const BITS: usize> Bitsized for arbitrary_int::UInt<BaseType, BITS>
+where
+ arbitrary_int::UInt<BaseType, BITS>: arbitrary_int::Number,
+{
+ type ArbitraryInt = Self;
+ const BITS: usize = BITS;
+ const MAX: Self::ArbitraryInt = <Self as arbitrary_int::Number>::MAX;
+}
+
+macro_rules! bitsized_impl {
+ ($(($name:ident, $bits:expr)),+) => {
+ $(
+ impl Bitsized for $name {
+ type ArbitraryInt = Self;
+ const BITS: usize = $bits;
+ const MAX: Self::ArbitraryInt = <Self as arbitrary_int::Number>::MAX;
+ }
+ )+
+ };
+}
+bitsized_impl!((u8, 8), (u16, 16), (u32, 32), (u64, 64), (u128, 128));
+
+/// Handle bool as a u1
+impl Bitsized for bool {
+ type ArbitraryInt = arbitrary_int::u1;
+ const BITS: usize = 1;
+ const MAX: Self::ArbitraryInt = <arbitrary_int::u1 as arbitrary_int::Number>::MAX;
+}
diff --git a/rust/hw/char/pl011/vendor/either/.cargo-checksum.json b/rust/hw/char/pl011/vendor/either/.cargo-checksum.json
new file mode 100644
index 0000000000..d145aae980
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"96ca858a773ab30021cc60d1838bfccfc83b10e1279d8148187c8a049f18dbd6","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7576269ea71f767b99297934c0b2367532690f8c4badc695edf8e04ab6a1e545","README-crates.io.md":"b775991a01ab4a0a8de6169f597775319d9ce8178f5c74ccdc634f13a286b20c","README.rst":"4fef58c3451b2eac9fd941f1fa0135d5df8183c124d75681497fa14bd1872b8b","src/into_either.rs":"0477f226bbba78ef017de08b87d421d3cd99fbc95b90ba4e6e3e803e3d15254e","src/iterator.rs":"fa2a6d14141980ce8a0bfcf7df2113d1e056d0f9815773dc9c2fb92a88923f4a","src/lib.rs":"4fbfa03b22b84d877610dfce1c7f279c97d80f4dc2c079c7dda364e4cf56ef13","src/serde_untagged.rs":"e826ee0ab31616e49c3e3f3711c8441001ee424b3e7a8c4c466cfcc4f8a7701a","src/serde_untagged_optional.rs":"86265f09d0795428bb2ce013b070d1badf1e2210217844a9ff3f04b2795868ab"},"package":"3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/either/Cargo.toml b/rust/hw/char/pl011/vendor/either/Cargo.toml
new file mode 100644
index 0000000000..1bfc7d42f1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/Cargo.toml
@@ -0,0 +1,54 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+rust-version = "1.37"
+name = "either"
+version = "1.12.0"
+authors = ["bluss"]
+description = """
+The enum `Either` with variants `Left` and `Right` is a general purpose sum type with two cases.
+"""
+documentation = "https://docs.rs/either/1/"
+readme = "README-crates.io.md"
+keywords = [
+ "data-structure",
+ "no_std",
+]
+categories = [
+ "data-structures",
+ "no-std",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rayon-rs/either"
+
+[package.metadata.docs.rs]
+features = ["serde"]
+
+[package.metadata.playground]
+features = ["serde"]
+
+[package.metadata.release]
+no-dev-version = true
+tag-name = "{{version}}"
+
+[dependencies.serde]
+version = "1.0"
+features = ["derive"]
+optional = true
+
+[dev-dependencies.serde_json]
+version = "1.0.0"
+
+[features]
+default = ["use_std"]
+use_std = []
diff --git a/rust/hw/char/pl011/vendor/either/LICENSE-APACHE b/rust/hw/char/pl011/vendor/either/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/rust/hw/char/pl011/vendor/either/LICENSE-MIT b/rust/hw/char/pl011/vendor/either/LICENSE-MIT
new file mode 100644
index 0000000000..9203baa055
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2015
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/either/README-crates.io.md b/rust/hw/char/pl011/vendor/either/README-crates.io.md
new file mode 100644
index 0000000000..d36890278b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/README-crates.io.md
@@ -0,0 +1,10 @@
+The enum `Either` with variants `Left` and `Right` is a general purpose
+sum type with two cases.
+
+Either has methods that are similar to Option and Result, and it also implements
+traits like `Iterator`.
+
+Includes macros `try_left!()` and `try_right!()` to use for
+short-circuiting logic, similar to how the `?` operator is used with `Result`.
+Note that `Either` is general purpose. For describing success or error, use the
+regular `Result`.
diff --git a/rust/hw/char/pl011/vendor/either/meson.build b/rust/hw/char/pl011/vendor/either/meson.build
new file mode 100644
index 0000000000..2d2d3057bc
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/meson.build
@@ -0,0 +1,16 @@
+_either_rs = static_library(
+ 'either',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2018',
+ '--cfg', 'feature="use_std"',
+ '--cfg', 'feature="use_alloc"',
+ ],
+ dependencies: [],
+)
+
+dep_either = declare_dependency(
+ link_with: _either_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/either/src/into_either.rs b/rust/hw/char/pl011/vendor/either/src/into_either.rs
new file mode 100644
index 0000000000..73746c80f1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/src/into_either.rs
@@ -0,0 +1,64 @@
+//! The trait [`IntoEither`] provides methods for converting a type `Self`, whose
+//! size is constant and known at compile-time, into an [`Either`] variant.
+
+use super::{Either, Left, Right};
+
+/// Provides methods for converting a type `Self` into either a [`Left`] or [`Right`]
+/// variant of [`Either<Self, Self>`](Either).
+///
+/// The [`into_either`](IntoEither::into_either) method takes a [`bool`] to determine
+/// whether to convert to [`Left`] or [`Right`].
+///
+/// The [`into_either_with`](IntoEither::into_either_with) method takes a
+/// [predicate function](FnOnce) to determine whether to convert to [`Left`] or [`Right`].
+pub trait IntoEither: Sized {
+ /// Converts `self` into a [`Left`] variant of [`Either<Self, Self>`](Either)
+ /// if `into_left` is `true`.
+ /// Converts `self` into a [`Right`] variant of [`Either<Self, Self>`](Either)
+ /// otherwise.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use either::{IntoEither, Left, Right};
+ ///
+ /// let x = 0;
+ /// assert_eq!(x.into_either(true), Left(x));
+ /// assert_eq!(x.into_either(false), Right(x));
+ /// ```
+ fn into_either(self, into_left: bool) -> Either<Self, Self> {
+ if into_left {
+ Left(self)
+ } else {
+ Right(self)
+ }
+ }
+
+ /// Converts `self` into a [`Left`] variant of [`Either<Self, Self>`](Either)
+ /// if `into_left(&self)` returns `true`.
+ /// Converts `self` into a [`Right`] variant of [`Either<Self, Self>`](Either)
+ /// otherwise.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use either::{IntoEither, Left, Right};
+ ///
+ /// fn is_even(x: &u8) -> bool {
+ /// x % 2 == 0
+ /// }
+ ///
+ /// let x = 0;
+ /// assert_eq!(x.into_either_with(is_even), Left(x));
+ /// assert_eq!(x.into_either_with(|x| !is_even(x)), Right(x));
+ /// ```
+ fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
+ where
+ F: FnOnce(&Self) -> bool,
+ {
+ let into_left = into_left(&self);
+ self.into_either(into_left)
+ }
+}
+
+impl<T> IntoEither for T {}
diff --git a/rust/hw/char/pl011/vendor/either/src/iterator.rs b/rust/hw/char/pl011/vendor/either/src/iterator.rs
new file mode 100644
index 0000000000..9c5a83f9a5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/src/iterator.rs
@@ -0,0 +1,315 @@
+use super::{for_both, Either, Left, Right};
+use core::iter;
+
+macro_rules! wrap_either {
+ ($value:expr => $( $tail:tt )*) => {
+ match $value {
+ Left(inner) => inner.map(Left) $($tail)*,
+ Right(inner) => inner.map(Right) $($tail)*,
+ }
+ };
+}
+
+/// Iterator that maps left or right iterators to corresponding `Either`-wrapped items.
+///
+/// This struct is created by the [`Either::factor_into_iter`],
+/// [`factor_iter`][Either::factor_iter],
+/// and [`factor_iter_mut`][Either::factor_iter_mut] methods.
+#[derive(Clone, Debug)]
+pub struct IterEither<L, R> {
+ inner: Either<L, R>,
+}
+
+impl<L, R> IterEither<L, R> {
+ pub(crate) fn new(inner: Either<L, R>) -> Self {
+ IterEither { inner }
+ }
+}
+
+impl<L, R, A> Extend<A> for Either<L, R>
+where
+ L: Extend<A>,
+ R: Extend<A>,
+{
+ fn extend<T>(&mut self, iter: T)
+ where
+ T: IntoIterator<Item = A>,
+ {
+ for_both!(*self, ref mut inner => inner.extend(iter))
+ }
+}
+
+/// `Either<L, R>` is an iterator if both `L` and `R` are iterators.
+impl<L, R> Iterator for Either<L, R>
+where
+ L: Iterator,
+ R: Iterator<Item = L::Item>,
+{
+ type Item = L::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ for_both!(*self, ref mut inner => inner.next())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ for_both!(*self, ref inner => inner.size_hint())
+ }
+
+ fn fold<Acc, G>(self, init: Acc, f: G) -> Acc
+ where
+ G: FnMut(Acc, Self::Item) -> Acc,
+ {
+ for_both!(self, inner => inner.fold(init, f))
+ }
+
+ fn for_each<F>(self, f: F)
+ where
+ F: FnMut(Self::Item),
+ {
+ for_both!(self, inner => inner.for_each(f))
+ }
+
+ fn count(self) -> usize {
+ for_both!(self, inner => inner.count())
+ }
+
+ fn last(self) -> Option<Self::Item> {
+ for_both!(self, inner => inner.last())
+ }
+
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ for_both!(*self, ref mut inner => inner.nth(n))
+ }
+
+ fn collect<B>(self) -> B
+ where
+ B: iter::FromIterator<Self::Item>,
+ {
+ for_both!(self, inner => inner.collect())
+ }
+
+ fn partition<B, F>(self, f: F) -> (B, B)
+ where
+ B: Default + Extend<Self::Item>,
+ F: FnMut(&Self::Item) -> bool,
+ {
+ for_both!(self, inner => inner.partition(f))
+ }
+
+ fn all<F>(&mut self, f: F) -> bool
+ where
+ F: FnMut(Self::Item) -> bool,
+ {
+ for_both!(*self, ref mut inner => inner.all(f))
+ }
+
+ fn any<F>(&mut self, f: F) -> bool
+ where
+ F: FnMut(Self::Item) -> bool,
+ {
+ for_both!(*self, ref mut inner => inner.any(f))
+ }
+
+ fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
+ where
+ P: FnMut(&Self::Item) -> bool,
+ {
+ for_both!(*self, ref mut inner => inner.find(predicate))
+ }
+
+ fn find_map<B, F>(&mut self, f: F) -> Option<B>
+ where
+ F: FnMut(Self::Item) -> Option<B>,
+ {
+ for_both!(*self, ref mut inner => inner.find_map(f))
+ }
+
+ fn position<P>(&mut self, predicate: P) -> Option<usize>
+ where
+ P: FnMut(Self::Item) -> bool,
+ {
+ for_both!(*self, ref mut inner => inner.position(predicate))
+ }
+}
+
+impl<L, R> DoubleEndedIterator for Either<L, R>
+where
+ L: DoubleEndedIterator,
+ R: DoubleEndedIterator<Item = L::Item>,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ for_both!(*self, ref mut inner => inner.next_back())
+ }
+
+ fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
+ for_both!(*self, ref mut inner => inner.nth_back(n))
+ }
+
+ fn rfold<Acc, G>(self, init: Acc, f: G) -> Acc
+ where
+ G: FnMut(Acc, Self::Item) -> Acc,
+ {
+ for_both!(self, inner => inner.rfold(init, f))
+ }
+
+ fn rfind<P>(&mut self, predicate: P) -> Option<Self::Item>
+ where
+ P: FnMut(&Self::Item) -> bool,
+ {
+ for_both!(*self, ref mut inner => inner.rfind(predicate))
+ }
+}
+
+impl<L, R> ExactSizeIterator for Either<L, R>
+where
+ L: ExactSizeIterator,
+ R: ExactSizeIterator<Item = L::Item>,
+{
+ fn len(&self) -> usize {
+ for_both!(*self, ref inner => inner.len())
+ }
+}
+
+impl<L, R> iter::FusedIterator for Either<L, R>
+where
+ L: iter::FusedIterator,
+ R: iter::FusedIterator<Item = L::Item>,
+{
+}
+
+impl<L, R> Iterator for IterEither<L, R>
+where
+ L: Iterator,
+ R: Iterator,
+{
+ type Item = Either<L::Item, R::Item>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ Some(map_either!(self.inner, ref mut inner => inner.next()?))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ for_both!(self.inner, ref inner => inner.size_hint())
+ }
+
+ fn fold<Acc, G>(self, init: Acc, f: G) -> Acc
+ where
+ G: FnMut(Acc, Self::Item) -> Acc,
+ {
+ wrap_either!(self.inner => .fold(init, f))
+ }
+
+ fn for_each<F>(self, f: F)
+ where
+ F: FnMut(Self::Item),
+ {
+ wrap_either!(self.inner => .for_each(f))
+ }
+
+ fn count(self) -> usize {
+ for_both!(self.inner, inner => inner.count())
+ }
+
+ fn last(self) -> Option<Self::Item> {
+ Some(map_either!(self.inner, inner => inner.last()?))
+ }
+
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ Some(map_either!(self.inner, ref mut inner => inner.nth(n)?))
+ }
+
+ fn collect<B>(self) -> B
+ where
+ B: iter::FromIterator<Self::Item>,
+ {
+ wrap_either!(self.inner => .collect())
+ }
+
+ fn partition<B, F>(self, f: F) -> (B, B)
+ where
+ B: Default + Extend<Self::Item>,
+ F: FnMut(&Self::Item) -> bool,
+ {
+ wrap_either!(self.inner => .partition(f))
+ }
+
+ fn all<F>(&mut self, f: F) -> bool
+ where
+ F: FnMut(Self::Item) -> bool,
+ {
+ wrap_either!(&mut self.inner => .all(f))
+ }
+
+ fn any<F>(&mut self, f: F) -> bool
+ where
+ F: FnMut(Self::Item) -> bool,
+ {
+ wrap_either!(&mut self.inner => .any(f))
+ }
+
+ fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
+ where
+ P: FnMut(&Self::Item) -> bool,
+ {
+ wrap_either!(&mut self.inner => .find(predicate))
+ }
+
+ fn find_map<B, F>(&mut self, f: F) -> Option<B>
+ where
+ F: FnMut(Self::Item) -> Option<B>,
+ {
+ wrap_either!(&mut self.inner => .find_map(f))
+ }
+
+ fn position<P>(&mut self, predicate: P) -> Option<usize>
+ where
+ P: FnMut(Self::Item) -> bool,
+ {
+ wrap_either!(&mut self.inner => .position(predicate))
+ }
+}
+
+impl<L, R> DoubleEndedIterator for IterEither<L, R>
+where
+ L: DoubleEndedIterator,
+ R: DoubleEndedIterator,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ Some(map_either!(self.inner, ref mut inner => inner.next_back()?))
+ }
+
+ fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
+ Some(map_either!(self.inner, ref mut inner => inner.nth_back(n)?))
+ }
+
+ fn rfold<Acc, G>(self, init: Acc, f: G) -> Acc
+ where
+ G: FnMut(Acc, Self::Item) -> Acc,
+ {
+ wrap_either!(self.inner => .rfold(init, f))
+ }
+
+ fn rfind<P>(&mut self, predicate: P) -> Option<Self::Item>
+ where
+ P: FnMut(&Self::Item) -> bool,
+ {
+ wrap_either!(&mut self.inner => .rfind(predicate))
+ }
+}
+
+impl<L, R> ExactSizeIterator for IterEither<L, R>
+where
+ L: ExactSizeIterator,
+ R: ExactSizeIterator,
+{
+ fn len(&self) -> usize {
+ for_both!(self.inner, ref inner => inner.len())
+ }
+}
+
+impl<L, R> iter::FusedIterator for IterEither<L, R>
+where
+ L: iter::FusedIterator,
+ R: iter::FusedIterator,
+{
+}
diff --git a/rust/hw/char/pl011/vendor/either/src/lib.rs b/rust/hw/char/pl011/vendor/either/src/lib.rs
new file mode 100644
index 0000000000..e0792f2631
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/src/lib.rs
@@ -0,0 +1,1519 @@
+//! The enum [`Either`] with variants `Left` and `Right` is a general purpose
+//! sum type with two cases.
+//!
+//! [`Either`]: enum.Either.html
+//!
+//! **Crate features:**
+//!
+//! * `"use_std"`
+//! Enabled by default. Disable to make the library `#![no_std]`.
+//!
+//! * `"serde"`
+//! Disabled by default. Enable to `#[derive(Serialize, Deserialize)]` for `Either`
+//!
+
+#![doc(html_root_url = "https://docs.rs/either/1/")]
+#![no_std]
+
+#[cfg(any(test, feature = "use_std"))]
+extern crate std;
+
+#[cfg(feature = "serde")]
+pub mod serde_untagged;
+
+#[cfg(feature = "serde")]
+pub mod serde_untagged_optional;
+
+use core::convert::{AsMut, AsRef};
+use core::fmt;
+use core::future::Future;
+use core::ops::Deref;
+use core::ops::DerefMut;
+use core::pin::Pin;
+
+#[cfg(any(test, feature = "use_std"))]
+use std::error::Error;
+#[cfg(any(test, feature = "use_std"))]
+use std::io::{self, BufRead, Read, Seek, SeekFrom, Write};
+
+pub use crate::Either::{Left, Right};
+
+/// The enum `Either` with variants `Left` and `Right` is a general purpose
+/// sum type with two cases.
+///
+/// The `Either` type is symmetric and treats its variants the same way, without
+/// preference.
+/// (For representing success or error, use the regular `Result` enum instead.)
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+pub enum Either<L, R> {
+ /// A value of type `L`.
+ Left(L),
+ /// A value of type `R`.
+ Right(R),
+}
+
+/// Evaluate the provided expression for both [`Either::Left`] and [`Either::Right`].
+///
+/// This macro is useful in cases where both sides of [`Either`] can be interacted with
+/// in the same way even though they don't share the same type.
+///
+/// Syntax: `either::for_both!(` *expression* `,` *pattern* `=>` *expression* `)`
+///
+/// # Example
+///
+/// ```
+/// use either::Either;
+///
+/// fn length(owned_or_borrowed: Either<String, &'static str>) -> usize {
+/// either::for_both!(owned_or_borrowed, s => s.len())
+/// }
+///
+/// fn main() {
+/// let borrowed = Either::Right("Hello world!");
+/// let owned = Either::Left("Hello world!".to_owned());
+///
+/// assert_eq!(length(borrowed), 12);
+/// assert_eq!(length(owned), 12);
+/// }
+/// ```
+#[macro_export]
+macro_rules! for_both {
+ ($value:expr, $pattern:pat => $result:expr) => {
+ match $value {
+ $crate::Either::Left($pattern) => $result,
+ $crate::Either::Right($pattern) => $result,
+ }
+ };
+}
+
+/// Macro for unwrapping the left side of an [`Either`], which fails early
+/// with the opposite side. Can only be used in functions that return
+/// `Either` because of the early return of `Right` that it provides.
+///
+/// See also [`try_right!`] for its dual, which applies the same just to the
+/// right side.
+///
+/// # Example
+///
+/// ```
+/// use either::{Either, Left, Right};
+///
+/// fn twice(wrapper: Either<u32, &str>) -> Either<u32, &str> {
+/// let value = either::try_left!(wrapper);
+/// Left(value * 2)
+/// }
+///
+/// fn main() {
+/// assert_eq!(twice(Left(2)), Left(4));
+/// assert_eq!(twice(Right("ups")), Right("ups"));
+/// }
+/// ```
+#[macro_export]
+macro_rules! try_left {
+ ($expr:expr) => {
+ match $expr {
+ $crate::Left(val) => val,
+ $crate::Right(err) => return $crate::Right(::core::convert::From::from(err)),
+ }
+ };
+}
+
+/// Dual to [`try_left!`], see its documentation for more information.
+#[macro_export]
+macro_rules! try_right {
+ ($expr:expr) => {
+ match $expr {
+ $crate::Left(err) => return $crate::Left(::core::convert::From::from(err)),
+ $crate::Right(val) => val,
+ }
+ };
+}
+
+macro_rules! map_either {
+ ($value:expr, $pattern:pat => $result:expr) => {
+ match $value {
+ Left($pattern) => Left($result),
+ Right($pattern) => Right($result),
+ }
+ };
+}
+
+mod iterator;
+pub use self::iterator::IterEither;
+
+mod into_either;
+pub use self::into_either::IntoEither;
+
+impl<L: Clone, R: Clone> Clone for Either<L, R> {
+ fn clone(&self) -> Self {
+ match self {
+ Left(inner) => Left(inner.clone()),
+ Right(inner) => Right(inner.clone()),
+ }
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ match (self, source) {
+ (Left(dest), Left(source)) => dest.clone_from(source),
+ (Right(dest), Right(source)) => dest.clone_from(source),
+ (dest, source) => *dest = source.clone(),
+ }
+ }
+}
+
+impl<L, R> Either<L, R> {
+ /// Return true if the value is the `Left` variant.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let values = [Left(1), Right("the right value")];
+ /// assert_eq!(values[0].is_left(), true);
+ /// assert_eq!(values[1].is_left(), false);
+ /// ```
+ pub fn is_left(&self) -> bool {
+ match *self {
+ Left(_) => true,
+ Right(_) => false,
+ }
+ }
+
+ /// Return true if the value is the `Right` variant.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let values = [Left(1), Right("the right value")];
+ /// assert_eq!(values[0].is_right(), false);
+ /// assert_eq!(values[1].is_right(), true);
+ /// ```
+ pub fn is_right(&self) -> bool {
+ !self.is_left()
+ }
+
+ /// Convert the left side of `Either<L, R>` to an `Option<L>`.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, ()> = Left("some value");
+ /// assert_eq!(left.left(), Some("some value"));
+ ///
+ /// let right: Either<(), _> = Right(321);
+ /// assert_eq!(right.left(), None);
+ /// ```
+ pub fn left(self) -> Option<L> {
+ match self {
+ Left(l) => Some(l),
+ Right(_) => None,
+ }
+ }
+
+ /// Convert the right side of `Either<L, R>` to an `Option<R>`.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, ()> = Left("some value");
+ /// assert_eq!(left.right(), None);
+ ///
+ /// let right: Either<(), _> = Right(321);
+ /// assert_eq!(right.right(), Some(321));
+ /// ```
+ pub fn right(self) -> Option<R> {
+ match self {
+ Left(_) => None,
+ Right(r) => Some(r),
+ }
+ }
+
+ /// Convert `&Either<L, R>` to `Either<&L, &R>`.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, ()> = Left("some value");
+ /// assert_eq!(left.as_ref(), Left(&"some value"));
+ ///
+ /// let right: Either<(), _> = Right("some value");
+ /// assert_eq!(right.as_ref(), Right(&"some value"));
+ /// ```
+ pub fn as_ref(&self) -> Either<&L, &R> {
+ match *self {
+ Left(ref inner) => Left(inner),
+ Right(ref inner) => Right(inner),
+ }
+ }
+
+ /// Convert `&mut Either<L, R>` to `Either<&mut L, &mut R>`.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// fn mutate_left(value: &mut Either<u32, u32>) {
+ /// if let Some(l) = value.as_mut().left() {
+ /// *l = 999;
+ /// }
+ /// }
+ ///
+ /// let mut left = Left(123);
+ /// let mut right = Right(123);
+ /// mutate_left(&mut left);
+ /// mutate_left(&mut right);
+ /// assert_eq!(left, Left(999));
+ /// assert_eq!(right, Right(123));
+ /// ```
+ pub fn as_mut(&mut self) -> Either<&mut L, &mut R> {
+ match *self {
+ Left(ref mut inner) => Left(inner),
+ Right(ref mut inner) => Right(inner),
+ }
+ }
+
+ /// Convert `Pin<&Either<L, R>>` to `Either<Pin<&L>, Pin<&R>>`,
+ /// pinned projections of the inner variants.
+ pub fn as_pin_ref(self: Pin<&Self>) -> Either<Pin<&L>, Pin<&R>> {
+ // SAFETY: We can use `new_unchecked` because the `inner` parts are
+ // guaranteed to be pinned, as they come from `self` which is pinned.
+ unsafe {
+ match *Pin::get_ref(self) {
+ Left(ref inner) => Left(Pin::new_unchecked(inner)),
+ Right(ref inner) => Right(Pin::new_unchecked(inner)),
+ }
+ }
+ }
+
+ /// Convert `Pin<&mut Either<L, R>>` to `Either<Pin<&mut L>, Pin<&mut R>>`,
+ /// pinned projections of the inner variants.
+ pub fn as_pin_mut(self: Pin<&mut Self>) -> Either<Pin<&mut L>, Pin<&mut R>> {
+ // SAFETY: `get_unchecked_mut` is fine because we don't move anything.
+ // We can use `new_unchecked` because the `inner` parts are guaranteed
+ // to be pinned, as they come from `self` which is pinned, and we never
+ // offer an unpinned `&mut L` or `&mut R` through `Pin<&mut Self>`. We
+ // also don't have an implementation of `Drop`, nor manual `Unpin`.
+ unsafe {
+ match *Pin::get_unchecked_mut(self) {
+ Left(ref mut inner) => Left(Pin::new_unchecked(inner)),
+ Right(ref mut inner) => Right(Pin::new_unchecked(inner)),
+ }
+ }
+ }
+
+ /// Convert `Either<L, R>` to `Either<R, L>`.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, ()> = Left(123);
+ /// assert_eq!(left.flip(), Right(123));
+ ///
+ /// let right: Either<(), _> = Right("some value");
+ /// assert_eq!(right.flip(), Left("some value"));
+ /// ```
+ pub fn flip(self) -> Either<R, L> {
+ match self {
+ Left(l) => Right(l),
+ Right(r) => Left(r),
+ }
+ }
+
+ /// Apply the function `f` on the value in the `Left` variant if it is present, rewrapping the
+ /// result in `Left`.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, u32> = Left(123);
+ /// assert_eq!(left.map_left(|x| x * 2), Left(246));
+ ///
+ /// let right: Either<u32, _> = Right(123);
+ /// assert_eq!(right.map_left(|x| x * 2), Right(123));
+ /// ```
+ pub fn map_left<F, M>(self, f: F) -> Either<M, R>
+ where
+ F: FnOnce(L) -> M,
+ {
+ match self {
+ Left(l) => Left(f(l)),
+ Right(r) => Right(r),
+ }
+ }
+
+ /// Apply the function `f` on the value in the `Right` variant if it is present, rewrapping the
+ /// result in `Right`.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, u32> = Left(123);
+ /// assert_eq!(left.map_right(|x| x * 2), Left(123));
+ ///
+ /// let right: Either<u32, _> = Right(123);
+ /// assert_eq!(right.map_right(|x| x * 2), Right(246));
+ /// ```
+ pub fn map_right<F, S>(self, f: F) -> Either<L, S>
+ where
+ F: FnOnce(R) -> S,
+ {
+ match self {
+ Left(l) => Left(l),
+ Right(r) => Right(f(r)),
+ }
+ }
+
+ /// Apply the functions `f` and `g` to the `Left` and `Right` variants
+ /// respectively. This is equivalent to
+ /// [bimap](https://hackage.haskell.org/package/bifunctors-5/docs/Data-Bifunctor.html)
+ /// in functional programming.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let f = |s: String| s.len();
+ /// let g = |u: u8| u.to_string();
+ ///
+ /// let left: Either<String, u8> = Left("loopy".into());
+ /// assert_eq!(left.map_either(f, g), Left(5));
+ ///
+ /// let right: Either<String, u8> = Right(42);
+ /// assert_eq!(right.map_either(f, g), Right("42".into()));
+ /// ```
+ pub fn map_either<F, G, M, S>(self, f: F, g: G) -> Either<M, S>
+ where
+ F: FnOnce(L) -> M,
+ G: FnOnce(R) -> S,
+ {
+ match self {
+ Left(l) => Left(f(l)),
+ Right(r) => Right(g(r)),
+ }
+ }
+
+ /// Similar to [`map_either`][Self::map_either], with an added context `ctx` accessible to
+ /// both functions.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let mut sum = 0;
+ ///
+ /// // Both closures want to update the same value, so pass it as context.
+ /// let mut f = |sum: &mut usize, s: String| { *sum += s.len(); s.to_uppercase() };
+ /// let mut g = |sum: &mut usize, u: usize| { *sum += u; u.to_string() };
+ ///
+ /// let left: Either<String, usize> = Left("loopy".into());
+ /// assert_eq!(left.map_either_with(&mut sum, &mut f, &mut g), Left("LOOPY".into()));
+ ///
+ /// let right: Either<String, usize> = Right(42);
+ /// assert_eq!(right.map_either_with(&mut sum, &mut f, &mut g), Right("42".into()));
+ ///
+ /// assert_eq!(sum, 47);
+ /// ```
+ pub fn map_either_with<Ctx, F, G, M, S>(self, ctx: Ctx, f: F, g: G) -> Either<M, S>
+ where
+ F: FnOnce(Ctx, L) -> M,
+ G: FnOnce(Ctx, R) -> S,
+ {
+ match self {
+ Left(l) => Left(f(ctx, l)),
+ Right(r) => Right(g(ctx, r)),
+ }
+ }
+
+ /// Apply one of two functions depending on contents, unifying their result. If the value is
+ /// `Left(L)` then the first function `f` is applied; if it is `Right(R)` then the second
+ /// function `g` is applied.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// fn square(n: u32) -> i32 { (n * n) as i32 }
+ /// fn negate(n: i32) -> i32 { -n }
+ ///
+ /// let left: Either<u32, i32> = Left(4);
+ /// assert_eq!(left.either(square, negate), 16);
+ ///
+ /// let right: Either<u32, i32> = Right(-4);
+ /// assert_eq!(right.either(square, negate), 4);
+ /// ```
+ pub fn either<F, G, T>(self, f: F, g: G) -> T
+ where
+ F: FnOnce(L) -> T,
+ G: FnOnce(R) -> T,
+ {
+ match self {
+ Left(l) => f(l),
+ Right(r) => g(r),
+ }
+ }
+
+ /// Like [`either`][Self::either], but provide some context to whichever of the
+ /// functions ends up being called.
+ ///
+ /// ```
+ /// // In this example, the context is a mutable reference
+ /// use either::*;
+ ///
+ /// let mut result = Vec::new();
+ ///
+ /// let values = vec![Left(2), Right(2.7)];
+ ///
+ /// for value in values {
+ /// value.either_with(&mut result,
+ /// |ctx, integer| ctx.push(integer),
+ /// |ctx, real| ctx.push(f64::round(real) as i32));
+ /// }
+ ///
+ /// assert_eq!(result, vec![2, 3]);
+ /// ```
+ pub fn either_with<Ctx, F, G, T>(self, ctx: Ctx, f: F, g: G) -> T
+ where
+ F: FnOnce(Ctx, L) -> T,
+ G: FnOnce(Ctx, R) -> T,
+ {
+ match self {
+ Left(l) => f(ctx, l),
+ Right(r) => g(ctx, r),
+ }
+ }
+
+ /// Apply the function `f` on the value in the `Left` variant if it is present.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, u32> = Left(123);
+ /// assert_eq!(left.left_and_then::<_,()>(|x| Right(x * 2)), Right(246));
+ ///
+ /// let right: Either<u32, _> = Right(123);
+ /// assert_eq!(right.left_and_then(|x| Right::<(), _>(x * 2)), Right(123));
+ /// ```
+ pub fn left_and_then<F, S>(self, f: F) -> Either<S, R>
+ where
+ F: FnOnce(L) -> Either<S, R>,
+ {
+ match self {
+ Left(l) => f(l),
+ Right(r) => Right(r),
+ }
+ }
+
+ /// Apply the function `f` on the value in the `Right` variant if it is present.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, u32> = Left(123);
+ /// assert_eq!(left.right_and_then(|x| Right(x * 2)), Left(123));
+ ///
+ /// let right: Either<u32, _> = Right(123);
+ /// assert_eq!(right.right_and_then(|x| Right(x * 2)), Right(246));
+ /// ```
+ pub fn right_and_then<F, S>(self, f: F) -> Either<L, S>
+ where
+ F: FnOnce(R) -> Either<L, S>,
+ {
+ match self {
+ Left(l) => Left(l),
+ Right(r) => f(r),
+ }
+ }
+
+ /// Convert the inner value to an iterator.
+ ///
+ /// This requires the `Left` and `Right` iterators to have the same item type.
+ /// See [`factor_into_iter`][Either::factor_into_iter] to iterate different types.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, Vec<u32>> = Left(vec![1, 2, 3, 4, 5]);
+ /// let mut right: Either<Vec<u32>, _> = Right(vec![]);
+ /// right.extend(left.into_iter());
+ /// assert_eq!(right, Right(vec![1, 2, 3, 4, 5]));
+ /// ```
+ #[allow(clippy::should_implement_trait)]
+ pub fn into_iter(self) -> Either<L::IntoIter, R::IntoIter>
+ where
+ L: IntoIterator,
+ R: IntoIterator<Item = L::Item>,
+ {
+ map_either!(self, inner => inner.into_iter())
+ }
+
+ /// Borrow the inner value as an iterator.
+ ///
+ /// This requires the `Left` and `Right` iterators to have the same item type.
+ /// See [`factor_iter`][Either::factor_iter] to iterate different types.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, &[u32]> = Left(vec![2, 3]);
+ /// let mut right: Either<Vec<u32>, _> = Right(&[4, 5][..]);
+ /// let mut all = vec![1];
+ /// all.extend(left.iter());
+ /// all.extend(right.iter());
+ /// assert_eq!(all, vec![1, 2, 3, 4, 5]);
+ /// ```
+ pub fn iter(&self) -> Either<<&L as IntoIterator>::IntoIter, <&R as IntoIterator>::IntoIter>
+ where
+ for<'a> &'a L: IntoIterator,
+ for<'a> &'a R: IntoIterator<Item = <&'a L as IntoIterator>::Item>,
+ {
+ map_either!(self, inner => inner.into_iter())
+ }
+
+ /// Mutably borrow the inner value as an iterator.
+ ///
+ /// This requires the `Left` and `Right` iterators to have the same item type.
+ /// See [`factor_iter_mut`][Either::factor_iter_mut] to iterate different types.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let mut left: Either<_, &mut [u32]> = Left(vec![2, 3]);
+ /// for l in left.iter_mut() {
+ /// *l *= *l
+ /// }
+ /// assert_eq!(left, Left(vec![4, 9]));
+ ///
+ /// let mut inner = [4, 5];
+ /// let mut right: Either<Vec<u32>, _> = Right(&mut inner[..]);
+ /// for r in right.iter_mut() {
+ /// *r *= *r
+ /// }
+ /// assert_eq!(inner, [16, 25]);
+ /// ```
+ pub fn iter_mut(
+ &mut self,
+ ) -> Either<<&mut L as IntoIterator>::IntoIter, <&mut R as IntoIterator>::IntoIter>
+ where
+ for<'a> &'a mut L: IntoIterator,
+ for<'a> &'a mut R: IntoIterator<Item = <&'a mut L as IntoIterator>::Item>,
+ {
+ map_either!(self, inner => inner.into_iter())
+ }
+
+ /// Converts an `Either` of `Iterator`s to be an `Iterator` of `Either`s
+ ///
+ /// Unlike [`into_iter`][Either::into_iter], this does not require the
+ /// `Left` and `Right` iterators to have the same item type.
+ ///
+ /// ```
+ /// use either::*;
+ /// let left: Either<_, Vec<u8>> = Left(&["hello"]);
+ /// assert_eq!(left.factor_into_iter().next(), Some(Left(&"hello")));
+
+ /// let right: Either<&[&str], _> = Right(vec![0, 1]);
+ /// assert_eq!(right.factor_into_iter().collect::<Vec<_>>(), vec![Right(0), Right(1)]);
+ ///
+ /// ```
+ // TODO(MSRV): doc(alias) was stabilized in Rust 1.48
+ // #[doc(alias = "transpose")]
+ pub fn factor_into_iter(self) -> IterEither<L::IntoIter, R::IntoIter>
+ where
+ L: IntoIterator,
+ R: IntoIterator,
+ {
+ IterEither::new(map_either!(self, inner => inner.into_iter()))
+ }
+
+ /// Borrows an `Either` of `Iterator`s to be an `Iterator` of `Either`s
+ ///
+ /// Unlike [`iter`][Either::iter], this does not require the
+ /// `Left` and `Right` iterators to have the same item type.
+ ///
+ /// ```
+ /// use either::*;
+ /// let left: Either<_, Vec<u8>> = Left(["hello"]);
+ /// assert_eq!(left.factor_iter().next(), Some(Left(&"hello")));
+
+ /// let right: Either<[&str; 2], _> = Right(vec![0, 1]);
+ /// assert_eq!(right.factor_iter().collect::<Vec<_>>(), vec![Right(&0), Right(&1)]);
+ ///
+ /// ```
+ pub fn factor_iter(
+ &self,
+ ) -> IterEither<<&L as IntoIterator>::IntoIter, <&R as IntoIterator>::IntoIter>
+ where
+ for<'a> &'a L: IntoIterator,
+ for<'a> &'a R: IntoIterator,
+ {
+ IterEither::new(map_either!(self, inner => inner.into_iter()))
+ }
+
+ /// Mutably borrows an `Either` of `Iterator`s to be an `Iterator` of `Either`s
+ ///
+ /// Unlike [`iter_mut`][Either::iter_mut], this does not require the
+ /// `Left` and `Right` iterators to have the same item type.
+ ///
+ /// ```
+ /// use either::*;
+ /// let mut left: Either<_, Vec<u8>> = Left(["hello"]);
+ /// left.factor_iter_mut().for_each(|x| *x.unwrap_left() = "goodbye");
+ /// assert_eq!(left, Left(["goodbye"]));
+
+ /// let mut right: Either<[&str; 2], _> = Right(vec![0, 1, 2]);
+ /// right.factor_iter_mut().for_each(|x| if let Right(r) = x { *r = -*r; });
+ /// assert_eq!(right, Right(vec![0, -1, -2]));
+ ///
+ /// ```
+ pub fn factor_iter_mut(
+ &mut self,
+ ) -> IterEither<<&mut L as IntoIterator>::IntoIter, <&mut R as IntoIterator>::IntoIter>
+ where
+ for<'a> &'a mut L: IntoIterator,
+ for<'a> &'a mut R: IntoIterator,
+ {
+ IterEither::new(map_either!(self, inner => inner.into_iter()))
+ }
+
+ /// Return left value or given value
+ ///
+ /// Arguments passed to `left_or` are eagerly evaluated; if you are passing
+ /// the result of a function call, it is recommended to use
+ /// [`left_or_else`][Self::left_or_else], which is lazily evaluated.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let left: Either<&str, &str> = Left("left");
+ /// assert_eq!(left.left_or("foo"), "left");
+ ///
+ /// let right: Either<&str, &str> = Right("right");
+ /// assert_eq!(right.left_or("left"), "left");
+ /// ```
+ pub fn left_or(self, other: L) -> L {
+ match self {
+ Either::Left(l) => l,
+ Either::Right(_) => other,
+ }
+ }
+
+ /// Return left or a default
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let left: Either<String, u32> = Left("left".to_string());
+ /// assert_eq!(left.left_or_default(), "left");
+ ///
+ /// let right: Either<String, u32> = Right(42);
+ /// assert_eq!(right.left_or_default(), String::default());
+ /// ```
+ pub fn left_or_default(self) -> L
+ where
+ L: Default,
+ {
+ match self {
+ Either::Left(l) => l,
+ Either::Right(_) => L::default(),
+ }
+ }
+
+ /// Returns left value or computes it from a closure
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let left: Either<String, u32> = Left("3".to_string());
+ /// assert_eq!(left.left_or_else(|_| unreachable!()), "3");
+ ///
+ /// let right: Either<String, u32> = Right(3);
+ /// assert_eq!(right.left_or_else(|x| x.to_string()), "3");
+ /// ```
+ pub fn left_or_else<F>(self, f: F) -> L
+ where
+ F: FnOnce(R) -> L,
+ {
+ match self {
+ Either::Left(l) => l,
+ Either::Right(r) => f(r),
+ }
+ }
+
+ /// Return right value or given value
+ ///
+ /// Arguments passed to `right_or` are eagerly evaluated; if you are passing
+ /// the result of a function call, it is recommended to use
+ /// [`right_or_else`][Self::right_or_else], which is lazily evaluated.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let right: Either<&str, &str> = Right("right");
+ /// assert_eq!(right.right_or("foo"), "right");
+ ///
+ /// let left: Either<&str, &str> = Left("left");
+ /// assert_eq!(left.right_or("right"), "right");
+ /// ```
+ pub fn right_or(self, other: R) -> R {
+ match self {
+ Either::Left(_) => other,
+ Either::Right(r) => r,
+ }
+ }
+
+ /// Return right or a default
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let left: Either<String, u32> = Left("left".to_string());
+ /// assert_eq!(left.right_or_default(), u32::default());
+ ///
+ /// let right: Either<String, u32> = Right(42);
+ /// assert_eq!(right.right_or_default(), 42);
+ /// ```
+ pub fn right_or_default(self) -> R
+ where
+ R: Default,
+ {
+ match self {
+ Either::Left(_) => R::default(),
+ Either::Right(r) => r,
+ }
+ }
+
+ /// Returns right value or computes it from a closure
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let left: Either<String, u32> = Left("3".to_string());
+ /// assert_eq!(left.right_or_else(|x| x.parse().unwrap()), 3);
+ ///
+ /// let right: Either<String, u32> = Right(3);
+ /// assert_eq!(right.right_or_else(|_| unreachable!()), 3);
+ /// ```
+ pub fn right_or_else<F>(self, f: F) -> R
+ where
+ F: FnOnce(L) -> R,
+ {
+ match self {
+ Either::Left(l) => f(l),
+ Either::Right(r) => r,
+ }
+ }
+
+ /// Returns the left value
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let left: Either<_, ()> = Left(3);
+ /// assert_eq!(left.unwrap_left(), 3);
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// When `Either` is a `Right` value
+ ///
+ /// ```should_panic
+ /// # use either::*;
+ /// let right: Either<(), _> = Right(3);
+ /// right.unwrap_left();
+ /// ```
+ pub fn unwrap_left(self) -> L
+ where
+ R: core::fmt::Debug,
+ {
+ match self {
+ Either::Left(l) => l,
+ Either::Right(r) => {
+ panic!("called `Either::unwrap_left()` on a `Right` value: {:?}", r)
+ }
+ }
+ }
+
+ /// Returns the right value
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let right: Either<(), _> = Right(3);
+ /// assert_eq!(right.unwrap_right(), 3);
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// When `Either` is a `Left` value
+ ///
+ /// ```should_panic
+ /// # use either::*;
+ /// let left: Either<_, ()> = Left(3);
+ /// left.unwrap_right();
+ /// ```
+ pub fn unwrap_right(self) -> R
+ where
+ L: core::fmt::Debug,
+ {
+ match self {
+ Either::Right(r) => r,
+ Either::Left(l) => panic!("called `Either::unwrap_right()` on a `Left` value: {:?}", l),
+ }
+ }
+
+ /// Returns the left value
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let left: Either<_, ()> = Left(3);
+ /// assert_eq!(left.expect_left("value was Right"), 3);
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// When `Either` is a `Right` value
+ ///
+ /// ```should_panic
+ /// # use either::*;
+ /// let right: Either<(), _> = Right(3);
+ /// right.expect_left("value was Right");
+ /// ```
+ pub fn expect_left(self, msg: &str) -> L
+ where
+ R: core::fmt::Debug,
+ {
+ match self {
+ Either::Left(l) => l,
+ Either::Right(r) => panic!("{}: {:?}", msg, r),
+ }
+ }
+
+ /// Returns the right value
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// let right: Either<(), _> = Right(3);
+ /// assert_eq!(right.expect_right("value was Left"), 3);
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// When `Either` is a `Left` value
+ ///
+ /// ```should_panic
+ /// # use either::*;
+ /// let left: Either<_, ()> = Left(3);
+ /// left.expect_right("value was Left");
+ /// ```
+ pub fn expect_right(self, msg: &str) -> R
+ where
+ L: core::fmt::Debug,
+ {
+ match self {
+ Either::Right(r) => r,
+ Either::Left(l) => panic!("{}: {:?}", msg, l),
+ }
+ }
+
+ /// Convert the contained value into `T`
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use either::*;
+ /// // Both u16 and u32 can be converted to u64.
+ /// let left: Either<u16, u32> = Left(3u16);
+ /// assert_eq!(left.either_into::<u64>(), 3u64);
+ /// let right: Either<u16, u32> = Right(7u32);
+ /// assert_eq!(right.either_into::<u64>(), 7u64);
+ /// ```
+ pub fn either_into<T>(self) -> T
+ where
+ L: Into<T>,
+ R: Into<T>,
+ {
+ match self {
+ Either::Left(l) => l.into(),
+ Either::Right(r) => r.into(),
+ }
+ }
+}
+
+impl<L, R> Either<Option<L>, Option<R>> {
+ /// Factors out `None` from an `Either` of [`Option`].
+ ///
+ /// ```
+ /// use either::*;
+ /// let left: Either<_, Option<String>> = Left(Some(vec![0]));
+ /// assert_eq!(left.factor_none(), Some(Left(vec![0])));
+ ///
+ /// let right: Either<Option<Vec<u8>>, _> = Right(Some(String::new()));
+ /// assert_eq!(right.factor_none(), Some(Right(String::new())));
+ /// ```
+ // TODO(MSRV): doc(alias) was stabilized in Rust 1.48
+ // #[doc(alias = "transpose")]
+ pub fn factor_none(self) -> Option<Either<L, R>> {
+ match self {
+ Left(l) => l.map(Either::Left),
+ Right(r) => r.map(Either::Right),
+ }
+ }
+}
+
+impl<L, R, E> Either<Result<L, E>, Result<R, E>> {
+ /// Factors out a homogeneous type from an `Either` of [`Result`].
+ ///
+ /// Here, the homogeneous type is the `Err` type of the [`Result`].
+ ///
+ /// ```
+ /// use either::*;
+ /// let left: Either<_, Result<String, u32>> = Left(Ok(vec![0]));
+ /// assert_eq!(left.factor_err(), Ok(Left(vec![0])));
+ ///
+ /// let right: Either<Result<Vec<u8>, u32>, _> = Right(Ok(String::new()));
+ /// assert_eq!(right.factor_err(), Ok(Right(String::new())));
+ /// ```
+ // TODO(MSRV): doc(alias) was stabilized in Rust 1.48
+ // #[doc(alias = "transpose")]
+ pub fn factor_err(self) -> Result<Either<L, R>, E> {
+ match self {
+ Left(l) => l.map(Either::Left),
+ Right(r) => r.map(Either::Right),
+ }
+ }
+}
+
+impl<T, L, R> Either<Result<T, L>, Result<T, R>> {
+ /// Factors out a homogeneous type from an `Either` of [`Result`].
+ ///
+ /// Here, the homogeneous type is the `Ok` type of the [`Result`].
+ ///
+ /// ```
+ /// use either::*;
+ /// let left: Either<_, Result<u32, String>> = Left(Err(vec![0]));
+ /// assert_eq!(left.factor_ok(), Err(Left(vec![0])));
+ ///
+ /// let right: Either<Result<u32, Vec<u8>>, _> = Right(Err(String::new()));
+ /// assert_eq!(right.factor_ok(), Err(Right(String::new())));
+ /// ```
+ // TODO(MSRV): doc(alias) was stabilized in Rust 1.48
+ // #[doc(alias = "transpose")]
+ pub fn factor_ok(self) -> Result<T, Either<L, R>> {
+ match self {
+ Left(l) => l.map_err(Either::Left),
+ Right(r) => r.map_err(Either::Right),
+ }
+ }
+}
+
+impl<T, L, R> Either<(T, L), (T, R)> {
+ /// Factor out a homogeneous type from an either of pairs.
+ ///
+ /// Here, the homogeneous type is the first element of the pairs.
+ ///
+ /// ```
+ /// use either::*;
+ /// let left: Either<_, (u32, String)> = Left((123, vec![0]));
+ /// assert_eq!(left.factor_first().0, 123);
+ ///
+ /// let right: Either<(u32, Vec<u8>), _> = Right((123, String::new()));
+ /// assert_eq!(right.factor_first().0, 123);
+ /// ```
+ pub fn factor_first(self) -> (T, Either<L, R>) {
+ match self {
+ Left((t, l)) => (t, Left(l)),
+ Right((t, r)) => (t, Right(r)),
+ }
+ }
+}
+
+impl<T, L, R> Either<(L, T), (R, T)> {
+ /// Factor out a homogeneous type from an either of pairs.
+ ///
+ /// Here, the homogeneous type is the second element of the pairs.
+ ///
+ /// ```
+ /// use either::*;
+ /// let left: Either<_, (String, u32)> = Left((vec![0], 123));
+ /// assert_eq!(left.factor_second().1, 123);
+ ///
+ /// let right: Either<(Vec<u8>, u32), _> = Right((String::new(), 123));
+ /// assert_eq!(right.factor_second().1, 123);
+ /// ```
+ pub fn factor_second(self) -> (Either<L, R>, T) {
+ match self {
+ Left((l, t)) => (Left(l), t),
+ Right((r, t)) => (Right(r), t),
+ }
+ }
+}
+
+impl<T> Either<T, T> {
+ /// Extract the value of an either over two equivalent types.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let left: Either<_, u32> = Left(123);
+ /// assert_eq!(left.into_inner(), 123);
+ ///
+ /// let right: Either<u32, _> = Right(123);
+ /// assert_eq!(right.into_inner(), 123);
+ /// ```
+ pub fn into_inner(self) -> T {
+ for_both!(self, inner => inner)
+ }
+
+ /// Map `f` over the contained value and return the result in the
+ /// corresponding variant.
+ ///
+ /// ```
+ /// use either::*;
+ ///
+ /// let value: Either<_, i32> = Right(42);
+ ///
+ /// let other = value.map(|x| x * 2);
+ /// assert_eq!(other, Right(84));
+ /// ```
+ pub fn map<F, M>(self, f: F) -> Either<M, M>
+ where
+ F: FnOnce(T) -> M,
+ {
+ match self {
+ Left(l) => Left(f(l)),
+ Right(r) => Right(f(r)),
+ }
+ }
+}
+
+/// Convert from `Result` to `Either` with `Ok => Right` and `Err => Left`.
+impl<L, R> From<Result<R, L>> for Either<L, R> {
+ fn from(r: Result<R, L>) -> Self {
+ match r {
+ Err(e) => Left(e),
+ Ok(o) => Right(o),
+ }
+ }
+}
+
+/// Convert from `Either` to `Result` with `Right => Ok` and `Left => Err`.
+#[allow(clippy::from_over_into)] // From requires RFC 2451, Rust 1.41
+impl<L, R> Into<Result<R, L>> for Either<L, R> {
+ fn into(self) -> Result<R, L> {
+ match self {
+ Left(l) => Err(l),
+ Right(r) => Ok(r),
+ }
+ }
+}
+
+/// `Either<L, R>` is a future if both `L` and `R` are futures.
+impl<L, R> Future for Either<L, R>
+where
+ L: Future,
+ R: Future<Output = L::Output>,
+{
+ type Output = L::Output;
+
+ fn poll(
+ self: Pin<&mut Self>,
+ cx: &mut core::task::Context<'_>,
+ ) -> core::task::Poll<Self::Output> {
+ for_both!(self.as_pin_mut(), inner => inner.poll(cx))
+ }
+}
+
+#[cfg(any(test, feature = "use_std"))]
+/// `Either<L, R>` implements `Read` if both `L` and `R` do.
+///
+/// Requires crate feature `"use_std"`
+impl<L, R> Read for Either<L, R>
+where
+ L: Read,
+ R: Read,
+{
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ for_both!(*self, ref mut inner => inner.read(buf))
+ }
+
+ fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
+ for_both!(*self, ref mut inner => inner.read_exact(buf))
+ }
+
+ fn read_to_end(&mut self, buf: &mut std::vec::Vec<u8>) -> io::Result<usize> {
+ for_both!(*self, ref mut inner => inner.read_to_end(buf))
+ }
+
+ fn read_to_string(&mut self, buf: &mut std::string::String) -> io::Result<usize> {
+ for_both!(*self, ref mut inner => inner.read_to_string(buf))
+ }
+}
+
+#[cfg(any(test, feature = "use_std"))]
+/// `Either<L, R>` implements `Seek` if both `L` and `R` do.
+///
+/// Requires crate feature `"use_std"`
+impl<L, R> Seek for Either<L, R>
+where
+ L: Seek,
+ R: Seek,
+{
+ fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
+ for_both!(*self, ref mut inner => inner.seek(pos))
+ }
+}
+
+#[cfg(any(test, feature = "use_std"))]
+/// Requires crate feature `"use_std"`
+impl<L, R> BufRead for Either<L, R>
+where
+ L: BufRead,
+ R: BufRead,
+{
+ fn fill_buf(&mut self) -> io::Result<&[u8]> {
+ for_both!(*self, ref mut inner => inner.fill_buf())
+ }
+
+ fn consume(&mut self, amt: usize) {
+ for_both!(*self, ref mut inner => inner.consume(amt))
+ }
+
+ fn read_until(&mut self, byte: u8, buf: &mut std::vec::Vec<u8>) -> io::Result<usize> {
+ for_both!(*self, ref mut inner => inner.read_until(byte, buf))
+ }
+
+ fn read_line(&mut self, buf: &mut std::string::String) -> io::Result<usize> {
+ for_both!(*self, ref mut inner => inner.read_line(buf))
+ }
+}
+
+#[cfg(any(test, feature = "use_std"))]
+/// `Either<L, R>` implements `Write` if both `L` and `R` do.
+///
+/// Requires crate feature `"use_std"`
+impl<L, R> Write for Either<L, R>
+where
+ L: Write,
+ R: Write,
+{
+ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+ for_both!(*self, ref mut inner => inner.write(buf))
+ }
+
+ fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
+ for_both!(*self, ref mut inner => inner.write_all(buf))
+ }
+
+ fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> {
+ for_both!(*self, ref mut inner => inner.write_fmt(fmt))
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ for_both!(*self, ref mut inner => inner.flush())
+ }
+}
+
+impl<L, R, Target> AsRef<Target> for Either<L, R>
+where
+ L: AsRef<Target>,
+ R: AsRef<Target>,
+{
+ fn as_ref(&self) -> &Target {
+ for_both!(*self, ref inner => inner.as_ref())
+ }
+}
+
+macro_rules! impl_specific_ref_and_mut {
+ ($t:ty, $($attr:meta),* ) => {
+ $(#[$attr])*
+ impl<L, R> AsRef<$t> for Either<L, R>
+ where L: AsRef<$t>, R: AsRef<$t>
+ {
+ fn as_ref(&self) -> &$t {
+ for_both!(*self, ref inner => inner.as_ref())
+ }
+ }
+
+ $(#[$attr])*
+ impl<L, R> AsMut<$t> for Either<L, R>
+ where L: AsMut<$t>, R: AsMut<$t>
+ {
+ fn as_mut(&mut self) -> &mut $t {
+ for_both!(*self, ref mut inner => inner.as_mut())
+ }
+ }
+ };
+}
+
+impl_specific_ref_and_mut!(str,);
+impl_specific_ref_and_mut!(
+ ::std::path::Path,
+ cfg(feature = "use_std"),
+ doc = "Requires crate feature `use_std`."
+);
+impl_specific_ref_and_mut!(
+ ::std::ffi::OsStr,
+ cfg(feature = "use_std"),
+ doc = "Requires crate feature `use_std`."
+);
+impl_specific_ref_and_mut!(
+ ::std::ffi::CStr,
+ cfg(feature = "use_std"),
+ doc = "Requires crate feature `use_std`."
+);
+
+impl<L, R, Target> AsRef<[Target]> for Either<L, R>
+where
+ L: AsRef<[Target]>,
+ R: AsRef<[Target]>,
+{
+ fn as_ref(&self) -> &[Target] {
+ for_both!(*self, ref inner => inner.as_ref())
+ }
+}
+
+impl<L, R, Target> AsMut<Target> for Either<L, R>
+where
+ L: AsMut<Target>,
+ R: AsMut<Target>,
+{
+ fn as_mut(&mut self) -> &mut Target {
+ for_both!(*self, ref mut inner => inner.as_mut())
+ }
+}
+
+impl<L, R, Target> AsMut<[Target]> for Either<L, R>
+where
+ L: AsMut<[Target]>,
+ R: AsMut<[Target]>,
+{
+ fn as_mut(&mut self) -> &mut [Target] {
+ for_both!(*self, ref mut inner => inner.as_mut())
+ }
+}
+
+impl<L, R> Deref for Either<L, R>
+where
+ L: Deref,
+ R: Deref<Target = L::Target>,
+{
+ type Target = L::Target;
+
+ fn deref(&self) -> &Self::Target {
+ for_both!(*self, ref inner => &**inner)
+ }
+}
+
+impl<L, R> DerefMut for Either<L, R>
+where
+ L: DerefMut,
+ R: DerefMut<Target = L::Target>,
+{
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ for_both!(*self, ref mut inner => &mut *inner)
+ }
+}
+
+#[cfg(any(test, feature = "use_std"))]
+/// `Either` implements `Error` if *both* `L` and `R` implement it.
+///
+/// Requires crate feature `"use_std"`
+impl<L, R> Error for Either<L, R>
+where
+ L: Error,
+ R: Error,
+{
+ fn source(&self) -> Option<&(dyn Error + 'static)> {
+ for_both!(*self, ref inner => inner.source())
+ }
+
+ #[allow(deprecated)]
+ fn description(&self) -> &str {
+ for_both!(*self, ref inner => inner.description())
+ }
+
+ #[allow(deprecated)]
+ fn cause(&self) -> Option<&dyn Error> {
+ for_both!(*self, ref inner => inner.cause())
+ }
+}
+
+impl<L, R> fmt::Display for Either<L, R>
+where
+ L: fmt::Display,
+ R: fmt::Display,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for_both!(*self, ref inner => inner.fmt(f))
+ }
+}
+
+#[test]
+fn basic() {
+ let mut e = Left(2);
+ let r = Right(2);
+ assert_eq!(e, Left(2));
+ e = r;
+ assert_eq!(e, Right(2));
+ assert_eq!(e.left(), None);
+ assert_eq!(e.right(), Some(2));
+ assert_eq!(e.as_ref().right(), Some(&2));
+ assert_eq!(e.as_mut().right(), Some(&mut 2));
+}
+
+#[test]
+fn macros() {
+ use std::string::String;
+
+ fn a() -> Either<u32, u32> {
+ let x: u32 = try_left!(Right(1337u32));
+ Left(x * 2)
+ }
+ assert_eq!(a(), Right(1337));
+
+ fn b() -> Either<String, &'static str> {
+ Right(try_right!(Left("foo bar")))
+ }
+ assert_eq!(b(), Left(String::from("foo bar")));
+}
+
+#[test]
+fn deref() {
+ use std::string::String;
+
+ fn is_str(_: &str) {}
+ let value: Either<String, &str> = Left(String::from("test"));
+ is_str(&*value);
+}
+
+#[test]
+fn iter() {
+ let x = 3;
+ let mut iter = match x {
+ 3 => Left(0..10),
+ _ => Right(17..),
+ };
+
+ assert_eq!(iter.next(), Some(0));
+ assert_eq!(iter.count(), 9);
+}
+
+#[test]
+fn seek() {
+ use std::io;
+
+ let use_empty = false;
+ let mut mockdata = [0x00; 256];
+ for i in 0..256 {
+ mockdata[i] = i as u8;
+ }
+
+ let mut reader = if use_empty {
+ // Empty didn't impl Seek until Rust 1.51
+ Left(io::Cursor::new([]))
+ } else {
+ Right(io::Cursor::new(&mockdata[..]))
+ };
+
+ let mut buf = [0u8; 16];
+ assert_eq!(reader.read(&mut buf).unwrap(), buf.len());
+ assert_eq!(buf, mockdata[..buf.len()]);
+
+ // the first read should advance the cursor and return the next 16 bytes thus the `ne`
+ assert_eq!(reader.read(&mut buf).unwrap(), buf.len());
+ assert_ne!(buf, mockdata[..buf.len()]);
+
+ // if the seek operation fails it should read 16..31 instead of 0..15
+ reader.seek(io::SeekFrom::Start(0)).unwrap();
+ assert_eq!(reader.read(&mut buf).unwrap(), buf.len());
+ assert_eq!(buf, mockdata[..buf.len()]);
+}
+
+#[test]
+fn read_write() {
+ use std::io;
+
+ let use_stdio = false;
+ let mockdata = [0xff; 256];
+
+ let mut reader = if use_stdio {
+ Left(io::stdin())
+ } else {
+ Right(&mockdata[..])
+ };
+
+ let mut buf = [0u8; 16];
+ assert_eq!(reader.read(&mut buf).unwrap(), buf.len());
+ assert_eq!(&buf, &mockdata[..buf.len()]);
+
+ let mut mockbuf = [0u8; 256];
+ let mut writer = if use_stdio {
+ Left(io::stdout())
+ } else {
+ Right(&mut mockbuf[..])
+ };
+
+ let buf = [1u8; 16];
+ assert_eq!(writer.write(&buf).unwrap(), buf.len());
+}
+
+#[test]
+fn error() {
+ let invalid_utf8 = b"\xff";
+ #[allow(invalid_from_utf8)]
+ let res = if let Err(error) = ::std::str::from_utf8(invalid_utf8) {
+ Err(Left(error))
+ } else if let Err(error) = "x".parse::<i32>() {
+ Err(Right(error))
+ } else {
+ Ok(())
+ };
+ assert!(res.is_err());
+ #[allow(deprecated)]
+ res.unwrap_err().description(); // make sure this can be called
+}
+
+/// A helper macro to check if AsRef and AsMut are implemented for a given type.
+macro_rules! check_t {
+ ($t:ty) => {{
+ fn check_ref<T: AsRef<$t>>() {}
+ fn propagate_ref<T1: AsRef<$t>, T2: AsRef<$t>>() {
+ check_ref::<Either<T1, T2>>()
+ }
+ fn check_mut<T: AsMut<$t>>() {}
+ fn propagate_mut<T1: AsMut<$t>, T2: AsMut<$t>>() {
+ check_mut::<Either<T1, T2>>()
+ }
+ }};
+}
+
+// This "unused" method is here to ensure that compilation doesn't fail on given types.
+fn _unsized_ref_propagation() {
+ check_t!(str);
+
+ fn check_array_ref<T: AsRef<[Item]>, Item>() {}
+ fn check_array_mut<T: AsMut<[Item]>, Item>() {}
+
+ fn propagate_array_ref<T1: AsRef<[Item]>, T2: AsRef<[Item]>, Item>() {
+ check_array_ref::<Either<T1, T2>, _>()
+ }
+
+ fn propagate_array_mut<T1: AsMut<[Item]>, T2: AsMut<[Item]>, Item>() {
+ check_array_mut::<Either<T1, T2>, _>()
+ }
+}
+
+// This "unused" method is here to ensure that compilation doesn't fail on given types.
+#[cfg(feature = "use_std")]
+fn _unsized_std_propagation() {
+ check_t!(::std::path::Path);
+ check_t!(::std::ffi::OsStr);
+ check_t!(::std::ffi::CStr);
+}
diff --git a/rust/hw/char/pl011/vendor/either/src/serde_untagged.rs b/rust/hw/char/pl011/vendor/either/src/serde_untagged.rs
new file mode 100644
index 0000000000..72078c3ec8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/src/serde_untagged.rs
@@ -0,0 +1,69 @@
+//! Untagged serialization/deserialization support for Either<L, R>.
+//!
+//! `Either` uses default, externally-tagged representation.
+//! However, sometimes it is useful to support several alternative types.
+//! For example, we may have a field which is generally Map<String, i32>
+//! but in typical cases Vec<String> would suffice, too.
+//!
+//! ```rust
+//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
+//! use either::Either;
+//! use std::collections::HashMap;
+//!
+//! #[derive(serde::Serialize, serde::Deserialize, Debug)]
+//! #[serde(transparent)]
+//! struct IntOrString {
+//! #[serde(with = "either::serde_untagged")]
+//! inner: Either<Vec<String>, HashMap<String, i32>>
+//! };
+//!
+//! // serialization
+//! let data = IntOrString {
+//! inner: Either::Left(vec!["Hello".to_string()])
+//! };
+//! // notice: no tags are emitted.
+//! assert_eq!(serde_json::to_string(&data)?, r#"["Hello"]"#);
+//!
+//! // deserialization
+//! let data: IntOrString = serde_json::from_str(
+//! r#"{"a": 0, "b": 14}"#
+//! )?;
+//! println!("found {:?}", data);
+//! # Ok(())
+//! # }
+//! ```
+
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
+#[derive(serde::Serialize, serde::Deserialize)]
+#[serde(untagged)]
+enum Either<L, R> {
+ Left(L),
+ Right(R),
+}
+
+pub fn serialize<L, R, S>(this: &super::Either<L, R>, serializer: S) -> Result<S::Ok, S::Error>
+where
+ S: Serializer,
+ L: Serialize,
+ R: Serialize,
+{
+ let untagged = match this {
+ super::Either::Left(left) => Either::Left(left),
+ super::Either::Right(right) => Either::Right(right),
+ };
+ untagged.serialize(serializer)
+}
+
+pub fn deserialize<'de, L, R, D>(deserializer: D) -> Result<super::Either<L, R>, D::Error>
+where
+ D: Deserializer<'de>,
+ L: Deserialize<'de>,
+ R: Deserialize<'de>,
+{
+ match Either::deserialize(deserializer) {
+ Ok(Either::Left(left)) => Ok(super::Either::Left(left)),
+ Ok(Either::Right(right)) => Ok(super::Either::Right(right)),
+ Err(error) => Err(error),
+ }
+}
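A short usage sketch (not part of the vendored file; assumes the optional `serde` feature of `either` plus serde_json, as in the doctest above) showing that the untagged helper picks the variant from the JSON shape alone:

use either::Either;
use std::collections::HashMap;

#[derive(serde::Serialize, serde::Deserialize, Debug)]
#[serde(transparent)]
struct Field {
    #[serde(with = "either::serde_untagged")]
    inner: Either<Vec<String>, HashMap<String, i32>>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A JSON array deserializes into the Left (Vec<String>) variant...
    let left: Field = serde_json::from_str(r#"["a", "b"]"#)?;
    assert!(left.inner.is_left());
    // ...while a JSON object deserializes into the Right (HashMap) variant.
    let right: Field = serde_json::from_str(r#"{"a": 1}"#)?;
    assert!(right.inner.is_right());
    Ok(())
}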
diff --git a/rust/hw/char/pl011/vendor/either/src/serde_untagged_optional.rs b/rust/hw/char/pl011/vendor/either/src/serde_untagged_optional.rs
new file mode 100644
index 0000000000..fb3239ace1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/either/src/serde_untagged_optional.rs
@@ -0,0 +1,74 @@
+//! Untagged serialization/deserialization support for Option<Either<L, R>>.
+//!
+//! `Either` uses default, externally-tagged representation.
+//! However, sometimes it is useful to support several alternative types.
+//! For example, we may have a field which is generally Map<String, i32>
+//! but in typical cases Vec<String> would suffice, too.
+//!
+//! ```rust
+//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
+//! use either::Either;
+//! use std::collections::HashMap;
+//!
+//! #[derive(serde::Serialize, serde::Deserialize, Debug)]
+//! #[serde(transparent)]
+//! struct IntOrString {
+//! #[serde(with = "either::serde_untagged_optional")]
+//! inner: Option<Either<Vec<String>, HashMap<String, i32>>>
+//! };
+//!
+//! // serialization
+//! let data = IntOrString {
+//! inner: Some(Either::Left(vec!["Hello".to_string()]))
+//! };
+//! // notice: no tags are emitted.
+//! assert_eq!(serde_json::to_string(&data)?, r#"["Hello"]"#);
+//!
+//! // deserialization
+//! let data: IntOrString = serde_json::from_str(
+//! r#"{"a": 0, "b": 14}"#
+//! )?;
+//! println!("found {:?}", data);
+//! # Ok(())
+//! # }
+//! ```
+
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
+#[derive(Serialize, Deserialize)]
+#[serde(untagged)]
+enum Either<L, R> {
+ Left(L),
+ Right(R),
+}
+
+pub fn serialize<L, R, S>(
+ this: &Option<super::Either<L, R>>,
+ serializer: S,
+) -> Result<S::Ok, S::Error>
+where
+ S: Serializer,
+ L: Serialize,
+ R: Serialize,
+{
+ let untagged = match this {
+ Some(super::Either::Left(left)) => Some(Either::Left(left)),
+ Some(super::Either::Right(right)) => Some(Either::Right(right)),
+ None => None,
+ };
+ untagged.serialize(serializer)
+}
+
+pub fn deserialize<'de, L, R, D>(deserializer: D) -> Result<Option<super::Either<L, R>>, D::Error>
+where
+ D: Deserializer<'de>,
+ L: Deserialize<'de>,
+ R: Deserialize<'de>,
+{
+ match Option::deserialize(deserializer) {
+ Ok(Some(Either::Left(left))) => Ok(Some(super::Either::Left(left))),
+ Ok(Some(Either::Right(right))) => Ok(Some(super::Either::Right(right))),
+ Ok(None) => Ok(None),
+ Err(error) => Err(error),
+ }
+}
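A short usage sketch (not part of the vendored file; assumes serde and serde_json, mirroring the doctest above) showing that with the optional helper a missing value round-trips through plain JSON null:

use either::Either;
use std::collections::HashMap;

#[derive(serde::Serialize, serde::Deserialize, Debug)]
#[serde(transparent)]
struct Field {
    #[serde(with = "either::serde_untagged_optional")]
    inner: Option<Either<Vec<String>, HashMap<String, i32>>>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let none = Field { inner: None };
    // None serializes as plain `null`, with no tag...
    assert_eq!(serde_json::to_string(&none)?, "null");
    // ...and `null` deserializes back to None.
    let back: Field = serde_json::from_str("null")?;
    assert!(back.inner.is_none());
    Ok(())
}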
diff --git a/rust/hw/char/pl011/vendor/itertools/.cargo-checksum.json b/rust/hw/char/pl011/vendor/itertools/.cargo-checksum.json
new file mode 100644
index 0000000000..327f66ceb5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"9f94a3c5bdd8dd758864440205c84d73005b8619cd20833449db54f1f484c6bf","Cargo.lock":"b0443f54560491073ca861d8ed664a07a8039872568a527b2add8f362dd9734b","Cargo.toml":"e64e6e088ab537ba843f25a111af102dd434fd58cea3d446dff314cf42ad33e2","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7576269ea71f767b99297934c0b2367532690f8c4badc695edf8e04ab6a1e545","README.md":"9de81a03c86ca4573d5d0a98eaa4d938bc6c538816f421d1b7499301efb5a454","benches/bench1.rs":"bb06f39db0544b1380cd4929139ccf521a9eecab7ca3f910b9499f965ec0a047","benches/combinations.rs":"51523ee1ca438a56f14711f0b04ee943895062d35859fbe23a2714d2fca3289d","benches/combinations_with_replacement.rs":"11f29160652a2d90ce7ca4b1c339c4457888ab6867e2456ce1c62e3adf9be737","benches/extra/mod.rs":"6ca290d72302a1945078621610b5788060b0de29639decebbdc557a80044aa97","benches/extra/zipslices.rs":"40e9f68a7c00f8429193fca463caef18851fa49b33355cc136bad3ccc840d655","benches/fold_specialization.rs":"5a517bbe29d366a15f6f751660e17ab1aa3e7b21552a1983048c662e34f0d69e","benches/powerset.rs":"6fd9d69a3483b37dc2411f99fb4efa6131577696f2dbdc8d1de9e4d7642fe3a3","benches/tree_fold1.rs":"539232e74f9aaea295a42069ac5af707811e90dc1c71c6e0a9064ffc731999de","benches/tuple_combinations.rs":"16366158743307a0289fc1df423a3cec45009807d410a9fe9922d5b6f8b7d002","benches/tuples.rs":"5a620783ae203e9ff9623d10d2c7fe9911d8b6c811cbad7613afa30e390c759d","examples/iris.data":"596ffd580471ca4d4880f8e439c7281f3b50d8249a5960353cb200b1490f63a0","examples/iris.rs":"1b465ed6a417180913104bc95a545fd9d1a3d67d121871ab737ad87e31b8be37","src/adaptors/coalesce.rs":"a0073325d40f297d29101538d18a267aef81889a999338dc09cb43a31cb4ec8b","src/adaptors/map.rs":"241971e856e468d71323071fb4a09867fbcedb83877320be132dc03516fe60e8","src/adaptors/mod.rs":"7f3bd7d011a348ce5e4bea486ef2e6346b64c7fe27540334d56d3f147f981d59","src/adaptors/multi_product.rs":"bb43e6dce68c815c21006d5b01c56e038d54b0c3bb8ee6bb8a4de11e2952c7ad","src/combinations.rs":"fb25babb459389093f886721016c72bf9f00e51d02735f638d871bb3a447ffd0","src/combinations_with_replacement.rs":"463011a574facbdd84278386b533a90e4dd517f0417e05adb82d182049db1f50","src/concat_impl.rs":"03b1ed61cbed242c286c3c4c5c848dbd57e02ab83fcef264f3a592b58107f324","src/cons_tuples_impl.rs":"c253d03b861831c01d62cacc57b49715ee62f6171e69f6886bb5a6ca0863bc3a","src/diff.rs":"a7800e9ce7a87b53ebe2338481335751fb43d44fa6a1ca719aceaaab40e5c8fe","src/duplicates_impl.rs":"f62fe4b642f501f785721ce5a505cf622a771e457210726dd0fb8b30be7ebbbc","src/either_or_both.rs":"c7ffe60772350c470fb42a5e4ff5087587985206733fe9814eeefa249983239a","src/exactly_one_err.rs":"aa50081f6a31b5109b30e3ed305e3ec2413c6908dedc8990ec5378a99cee2b39","src/extrema_set.rs":"2a25b0b86eed2fd5d05622d591a3085cab823973d450816c2c3b8cb76e9c187e","src/flatten_ok.rs":"fe209fd886ecd9cb98d99625aa0c7274af7e644eff4a10de15b4dec8bbbc934a","src/format.rs":"20fbbe35a98315ceb77ad910ff92319e163ae16452b0c24a8f1eccbc71c9e171","src/free.rs":"dfc57b7f56a08d4986a96b679018b41346576a7a34b668e008cc01109e728750","src/group_map.rs":"f7b02c964f63505d3e36280cfdc1755e05287714201efe983dacf702eee61434","src/groupbylazy.rs":"57ebf7d8a5a752045f94b76db8b80073f46964c28cc0919510fbdea102244918","src/grouping_map.rs":"cbc45ac563345c96f3ac50c78f73c83d870523436a7ab88c1c9a685d204461d3","src/impl_macros.rs":"4f829b458873bed556f1aff2ae4e88dbd576766e2b5bcc07ff3ac8756758e6f4","src/intersperse.rs":"b9717242495846a4a979c95d93d5681caccb7c07a0d889eab763ad3d49a46125","src/k_smallest.rs":"603eb34314c01769ff7f6def2a24cf7a7b38507e
6f3658b7aafc23a3b2e9b322","src/kmerge_impl.rs":"a347b0f6fa7715afd8a54d85ce139ed5b14c9e58a16c2b3648f5b288fdb5375f","src/lazy_buffer.rs":"834f6ef7fdf9f00c8a6329beb38eaefb706847ceeec309c221dce705c2c1e05b","src/lib.rs":"703fa755955007c2ddf1c1abe6a20e9a762ba09746c4eeae905e6d417bf3bf31","src/merge_join.rs":"20574fbb0ca610a6ac0ad89fb7e856a629235a14f285954760386cd0de3dc687","src/minmax.rs":"96d3897c28c8c63284d4729becc9ada6855e0953cac6e1bd35cf6f38c50b0ec0","src/multipeek_impl.rs":"35162bca4456bfa20a08e8d40e4d1cc6783dc662778789fdcded60371e975122","src/pad_tail.rs":"04be2ca73abb85815b06b5524c99d6feb2919180c486a4646f9cc6c87462f67b","src/peek_nth.rs":"6a0a51f2f373ce14d3d58595c46464878a14976bf00841a7396c03f9f9ab07ac","src/peeking_take_while.rs":"2293eaba60142f427a8bd1fa6d347b21469cadaaef69a70f28daed3a4166c1b4","src/permutations.rs":"97831e7e26904c3cae68c97e74f7c6981ceb2fb2f2217282a0e5e54083a565fc","src/powerset.rs":"e0ee6b1316b4dd314c1e81502b90ae8113e1cda12168322520c5a65410e584b2","src/process_results_impl.rs":"fd51b2a4785c3b65145703dea4c088c822e5592de939cf228917c6275bee0778","src/put_back_n_impl.rs":"821e047fecd6ca0036290029f4febe7638a3abf1faa05e1e747a3bf9d80ff464","src/rciter_impl.rs":"5b156082ef2d25a94a4ad01d94cba2813c4b3e72e212515a8ad0fc8588f8045d","src/repeatn.rs":"bfc8f9145c9d8a3ea651f012b7d5a8d2fbbcbefdee76eafd098d02e7c54cda90","src/size_hint.rs":"021e57aad7df8f1e70ef588e9e9b8a1695aab183b1098f1848561f96c5dc9bcb","src/sources.rs":"61637f32c2cea2290ecfc1980c0b2d0f68463839ac09bd81006f8258ab8ecaae","src/take_while_inclusive.rs":"f567e91a7f25ed785c3132ff408e3f17b59dce98909041a8c40cd14c0f350f55","src/tee.rs":"665832aa547389a420c3441470ff2494249f0ed2841be0c6a578367fe9dbd381","src/tuple_impl.rs":"8d6c52850bf7f3b9d03fcbaed0e60e5a5becc2f8421ca4bc79e876659804a258","src/unique_impl.rs":"3b89cdd668b74cc0a0eabb1522489e2305a0d2d8da25d6a1884e8626bbdb5959","src/unziptuple.rs":"84b50e5d29b9ddbf21a46a1cc2fd7877729c7f7da9bdc8ae1966dbaf2d2f6f60","src/with_position.rs":"a3652e3e97de78c5c7eeb9a5306225b5ce517d6165b96663820b5f00fae1bff9","src/zip_eq_impl.rs":"4a41dc6dfe99359585d50ce648bdc85f15276c602048872b1d152e90841d8cad","src/zip_longest.rs":"f7cf5fffc3ca053ee80b410a05b27de1a475021f6de3181aea981010d7e8453f","src/ziptuple.rs":"7f9df12bf6556f382bbd4ad8cf17eb8b60c1c47fadbce016141133ba0f3384a1","tests/adaptors_no_collect.rs":"f459f36d54f5d475b2b2e83f5a1c98109c15062756ae822fa379486f3eeed666","tests/flatten_ok.rs":"b7894874132918b8229c7150b2637511d8e3e14197d8eeb9382d46b2a514efa2","tests/macros_hygiene.rs":"522afa0106e3f11a5149e9218f89c2329e405546d2ef0ea756d6a27e8a0e9ca3","tests/merge_join.rs":"b08c4ee6529d234c68d411a413b8781455d18a1eab17872d1828bb75a4fcf79b","tests/peeking_take_while.rs":"f834361c5520dda15eb9e9ebe87507c905462201412b21859d9f83dab91d0e0b","tests/quick.rs":"203619d7de9ae068a5c0c61c398f65f15a878b6ac759cc4575d19f0c90dfd9fa","tests/specializations.rs":"fdd16dc663330033fedcc478609b393d4aa369dc07dc8cda31a75219fb793087","tests/test_core.rs":"32576ba90aa8e5db985b6e6ffe30e3046bc6a11d392db8f6b4bdd2ba48d9b24d","tests/test_std.rs":"16a03cfe359a570685b48b80473d1947a89a49ec9ef744ea175252e2b95c0336","tests/tuples.rs":"014e4da776174bfe923270e2a359cd9c95b372fce4b952b8138909d6e2c52762","tests/zip.rs":"99af365fe6054ef1c6089d3e604e34da8fea66e55861ae4be9e7336ec8de4b56"},"package":"b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/itertools/CHANGELOG.md b/rust/hw/char/pl011/vendor/itertools/CHANGELOG.md
new file mode 100644
index 0000000000..8d7404e759
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/CHANGELOG.md
@@ -0,0 +1,409 @@
+# Changelog
+
+## 0.11.0
+
+### Breaking
+- Make `Itertools::merge_join_by` also accept functions returning bool (#704)
+- Implement `PeekingNext` transitively over mutable references (#643)
+- Change `with_position` to yield `(Position, Item)` instead of `Position<Item>` (#699)
+
+### Added
+- Add `Itertools::take_while_inclusive` (#616)
+- Implement `PeekingNext` for `PeekingTakeWhile` (#644)
+- Add `EitherOrBoth::{just_left, just_right, into_left, into_right, as_deref, as_deref_mut, left_or_insert, right_or_insert, left_or_insert_with, right_or_insert_with, insert_left, insert_right, insert_both}` (#629)
+- Implement `Clone` for `CircularTupleWindows` (#686)
+- Implement `Clone` for `Chunks` (#683)
+- Add `Itertools::process_results` (#680)
+
+### Changed
+- Use `Cell` instead of `RefCell` in `Format` and `FormatWith` (#608)
+- CI tweaks (#674, #675)
+- Document and test the difference between stable and unstable sorts (#653)
+- Fix documentation error on `Itertools::max_set_by_key` (#692)
+- Move MSRV metadata to `Cargo.toml` (#672)
+- Implement `equal` with `Iterator::eq` (#591)
+
+## 0.10.5
+ - Maintenance
+
+## 0.10.4
+ - Add `EitherOrBoth::or` and `EitherOrBoth::or_else` (#593)
+ - Add `min_set`, `max_set` et al. (#613, #323)
+ - Use `either/use_std` (#628)
+ - Documentation fixes (#612, #625, #632, #633, #634, #638)
+ - Code maintenance (#623, #624, #627, #630)
+
+## 0.10.3
+ - Maintenance
+
+## 0.10.2
+ - Add `Itertools::multiunzip` (#362, #565)
+ - Add `intersperse` and `intersperse_with` free functions (#555)
+ - Add `Itertools::sorted_by_cached_key` (#424, #575)
+ - Specialize `ProcessResults::fold` (#563)
+ - Fix subtraction overflow in `DuplicatesBy::size_hint` (#552)
+ - Fix specialization tests (#574)
+ - More `Debug` impls (#573)
+ - Deprecate `fold1` (use `reduce` instead) (#580)
+ - Documentation fixes (`HomogenousTuple`, `into_group_map`, `into_group_map_by`, `MultiPeek::peek`) (#543 et al.)
+
+## 0.10.1
+ - Add `Itertools::contains` (#514)
+ - Add `Itertools::counts_by` (#515)
+ - Add `Itertools::partition_result` (#511)
+ - Add `Itertools::all_unique` (#241)
+ - Add `Itertools::duplicates` and `Itertools::duplicates_by` (#502)
+ - Add `chain!` (#525)
+ - Add `Itertools::at_most_one` (#523)
+ - Add `Itertools::flatten_ok` (#527)
+ - Add `EitherOrBoth::or_default` (#583)
+ - Add `Itertools::find_or_last` and `Itertools::find_or_first` (#535)
+ - Implement `FusedIterator` for `FilterOk`, `FilterMapOk`, `InterleaveShortest`, `KMergeBy`, `MergeBy`, `PadUsing`, `Positions`, `Product`, `RcIter`, `TupleWindows`, `Unique`, `UniqueBy`, `Update`, `WhileSome`, `Combinations`, `CombinationsWithReplacement`, `Powerset`, `RepeatN`, and `WithPosition` (#550)
+ - Implement `FusedIterator` for `Interleave`, `IntersperseWith`, and `ZipLongest` (#548)
+
+## 0.10.0
+ - **Increase minimum supported Rust version to 1.32.0**
+ - Improve macro hygiene (#507)
+ - Add `Itertools::powerset` (#335)
+ - Add `Itertools::sorted_unstable`, `Itertools::sorted_unstable_by`, and `Itertools::sorted_unstable_by_key` (#494)
+ - Implement `Error` for `ExactlyOneError` (#484)
+ - Undeprecate `Itertools::fold_while` (#476)
+ - Tuple-related adapters work for tuples of arity up to 12 (#475)
+ - `use_alloc` feature for users who have `alloc`, but not `std` (#474)
+ - Add `Itertools::k_smallest` (#473)
+ - Add `Itertools::into_grouping_map` and `GroupingMap` (#465)
+ - Add `Itertools::into_grouping_map_by` and `GroupingMapBy` (#465)
+ - Add `Itertools::counts` (#468)
+ - Add implementation of `DoubleEndedIterator` for `Unique` (#442)
+ - Add implementation of `DoubleEndedIterator` for `UniqueBy` (#442)
+ - Add implementation of `DoubleEndedIterator` for `Zip` (#346)
+ - Add `Itertools::multipeek` (#435)
+ - Add `Itertools::dedup_with_count` and `DedupWithCount` (#423)
+ - Add `Itertools::dedup_by_with_count` and `DedupByWithCount` (#423)
+ - Add `Itertools::intersperse_with` and `IntersperseWith` (#381)
+ - Add `Itertools::filter_ok` and `FilterOk` (#377)
+ - Add `Itertools::filter_map_ok` and `FilterMapOk` (#377)
+ - Deprecate `Itertools::fold_results`, use `Itertools::fold_ok` instead (#377)
+ - Deprecate `Itertools::map_results`, use `Itertools::map_ok` instead (#377)
+ - Deprecate `FoldResults`, use `FoldOk` instead (#377)
+ - Deprecate `MapResults`, use `MapOk` instead (#377)
+ - Add `Itertools::circular_tuple_windows` and `CircularTupleWindows` (#350)
+ - Add `peek_nth` and `PeekNth` (#303)
+
+## 0.9.0
+ - Fix potential overflow in `MergeJoinBy::size_hint` (#385)
+ - Add `derive(Clone)` where possible (#382)
+ - Add `try_collect` method (#394)
+ - Add `HomogeneousTuple` trait (#389)
+ - Fix `combinations(0)` and `combinations_with_replacement(0)` (#383)
+ - Don't require `ParitalEq` to the `Item` of `DedupBy` (#397)
+ - Implement missing specializations on the `PutBack` adaptor and on the `MergeJoinBy` iterator (#372)
+ - Add `position_*` methods (#412)
+ - Derive `Hash` for `EitherOrBoth` (#417)
+ - Increase minimum supported Rust version to 1.32.0
+
+## 0.8.2
+ - Use `slice::iter` instead of `into_iter` to avoid future breakage (#378, by @LukasKalbertodt)
+## 0.8.1
+ - Added a
[`.exactly_one()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.exactly_one)
iterator method that, on success, extracts the single value of an iterator ;
by @Xaeroxe
+ - Added combinatory iterator adaptors:
+ -
[`.permutations(k)`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.permutations):
+
+ `[0, 1, 2].iter().permutations(2)` yields
+
+ ```rust
+ [
+ vec![0, 1],
+ vec![0, 2],
+ vec![1, 0],
+ vec![1, 2],
+ vec![2, 0],
+ vec![2, 1],
+ ]
+ ```
+
+ ; by @tobz1000
+
+ -
[`.combinations_with_replacement(k)`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.combinations_with_replacement):
+
+ `[0, 1, 2].iter().combinations_with_replacement(2)` yields
+
+ ```rust
+ [
+ vec![0, 0],
+ vec![0, 1],
+ vec![0, 2],
+ vec![1, 1],
+ vec![1, 2],
+ vec![2, 2],
+ ]
+ ```
+
+ ; by @tommilligan
+
+ - For reference, these methods join the already existing
[`.combinations(k)`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.combinations):
+
+ `[0, 1, 2].iter().combinations(2)` yields
+
+ ```rust
+ [
+ vec![0, 1],
+ vec![0, 2],
+ vec![1, 2],
+ ]
+ ```
+
+ - Improved the performance of
[`.fold()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.fold)-based
internal iteration for the
[`.intersperse()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.intersperse)
iterator ; by @jswrenn
+ - Added
[`.dedup_by()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.dedup_by),
[`.merge_by()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.merge_by)
and
[`.kmerge_by()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.kmerge_by)
adaptors that work like
[`.dedup()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.dedup),
[`.merge()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.merge)
and
[`.kmerge()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.kmerge),
but taking an additional custom comparison closure parameter. ; by @phimuemue
+ - Improved the performance of
[`.all_equal()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.all_equal)
; by @fyrchik
+ - Loosened the bounds on
[`.partition_map()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.partition_map)
to take just a `FnMut` closure rather than a `Fn` closure, and made its
implementation use internal iteration for better performance ; by
@danielhenrymantilla
+ - Added convenience methods to
[`EitherOrBoth`](https://docs.rs/itertools/0.8.1/itertools/enum.EitherOrBoth.html)
elements yielded from the
[`.zip_longest()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.zip_longest)
iterator adaptor ; by @Avi-D-coder
+ - Added
[`.sum1()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.sum1)
and
[`.product1()`](https://docs.rs/itertools/0.8.1/itertools/trait.Itertools.html#method.product1)
iterator methods that respectively try to return the sum and the product of
the elements of an iterator **when it is not empty**, otherwise they return
`None` ; by @Emerentius
+## 0.8.0
+ - Added new adaptor `.map_into()` for conversions using `Into` by @vorner
+ - Improved `Itertools` docs by @JohnHeitmann
+ - The return type of `.sorted_by_by_key()` is now an iterator, not a Vec.
+ - The return type of the `izip!(x, y)` macro with exactly two arguments is
now the usual `Iterator::zip`.
+ - Remove `.flatten()` in favour of std's `.flatten()`
+ - Deprecate `.foreach()` in favour of std's `.for_each()`
+ - Deprecate `.step()` in favour of std's `.step_by()`
+ - Deprecate `repeat_call` in favour of std's `repeat_with`
+ - Deprecate `.fold_while()` in favour of std's `.try_fold()`
+ - Require Rust 1.24 as minimal version.
+## 0.7.11
+ - Add convenience methods to `EitherOrBoth`, making it more similar to
`Option` and `Either` by @jethrogb
+## 0.7.10
+ - No changes.
+## 0.7.9
+ - New inclusion policy: See the readme about suggesting features for std
before accepting them in itertools.
+ - The `FoldWhile` type now implements `Eq` and `PartialEq` by @jturner314
+## 0.7.8
+ - Add new iterator method `.tree_fold1()` which is like `.fold1()` except
items are combined in a tree structure (see its docs). By @scottmcm
+ - Add more `Debug` impls by @phimuemue: KMerge, KMergeBy, MergeJoinBy,
ConsTuples, Intersperse, ProcessResults, RcIter, Tee, TupleWindows, Tee,
ZipLongest, ZipEq, Zip.
+## 0.7.7
+ - Add new iterator method `.into_group_map() -> HashMap<K, Vec<V>>` which
turns an iterator of `(K, V)` elements into such a hash table, where values are
grouped by key. By @tobz1000
+ - Add new free function `flatten` for the `.flatten()` adaptor. **NOTE:**
recent Rust nightlies have `Iterator::flatten` and thus a clash with our
flatten adaptor. One workaround is to use the itertools `flatten` free function.
+## 0.7.6
+ - Add new adaptor `.multi_cartesian_product()` which is an n-ary product
iterator by @tobz1000
+ - Add new method `.sorted_by_key()` by @Xion
+ - Provide simpler and faster `.count()` for `.unique()` and `.unique_by()`
+## 0.7.5
+ - `.multipeek()` now implements `PeekingNext`, by @nicopap.
+## 0.7.4
+ - Add new adaptor `.update()` by @lucasem; this adaptor is used to modify an
element before passing it on in an iterator chain.
+## 0.7.3
+ - Add new method `.collect_tuple()` by @matklad; it makes a tuple out of the
iterator's elements if the number of them matches **exactly**.
+ - Implement `fold` and `collect` for `.map_results()` which means it reuses
the code of the standard `.map()` for these methods.
+## 0.7.2
+ - Add new adaptor `.merge_join_by` by @srijs; a heterogeneous merge join for
two ordered sequences.
+## 0.7.1
+ - Iterator adaptors and iterators in itertools now use the same `must_use`
reminder that the standard library adaptors do, by @matematikaedit and @bluss
*“iterator adaptors are lazy and do nothing unless consumed”*.
+## 0.7.0
+ - Faster `izip!()` by @krdln
+ - `izip!()` is now a wrapper for repeated regular `.zip()` and a single
`.map()`. This means it optimizes as well as the standard library `.zip()` it
uses. **Note:** `multizip` and `izip!()` are now different! The former has a
named type but the latter optimizes better.
+ - Faster `.unique()`
+ - `no_std` support, which is opt-in!
+ - Many lovable features are still there without std, like `izip!()` or
`.format()` or `.merge()`, but not those that use collections.
+ - Trait bounds were required up front instead of just on the type:
`group_by`'s `PartialEq` by @Phlosioneer and `repeat_call`'s `FnMut`.
+ - Removed deprecated constructor `Zip::new` — use `izip!()` or `multizip()`
+## 0.6.5
+ - Fix bug in `.cartesian_product()`'s fold (which only was visible for
unfused iterators).
+## 0.6.4
+ - Add specific `fold` implementations for `.cartesian_product()` and
`cons_tuples()`, which improves their performance in fold, foreach, and
iterator consumers derived from them.
+## 0.6.3
+ - Add iterator adaptor `.positions(predicate)` by @tmccombs
+## 0.6.2
+ - Add function `process_results` which can “lift” a function of the regular
values of an iterator so that it can process the `Ok` values from an iterator
of `Results` instead, by @shepmaster
+ - Add iterator method `.concat()` which combines all iterator elements into
a single collection using the `Extend` trait, by @srijs
+## 0.6.1
+ - Better size hint testing and subsequent size hint bugfixes by @rkarp.
Fixes bugs in product, `interleave_shortest` size hints.
+ - New iterator method `.all_equal()` by @phimuemue
+## 0.6.0
+ - Deprecated names were removed in favour of their replacements
+ - `.flatten()` does not implement double ended iteration anymore
+ - `.fold_while()` uses `&mut self` and returns `FoldWhile<T>`, for
composability #168
+ - `.foreach()` and `.fold1()` use `self`, like `.fold()` does.
+ - `.combinations(0)` now produces a single empty vector. #174
+## 0.5.10
+ - Add itertools method `.kmerge_by()` (and corresponding free function)
+ - Relaxed trait requirement of `.kmerge()` and `.minmax()` to PartialOrd.
+## 0.5.9
+ - Add multipeek method `.reset_peek()`
+ - Add categories
+## 0.5.8
+ - Add iterator adaptor `.peeking_take_while()` and its trait `PeekingNext`.
+## 0.5.7
+ - Add iterator adaptor `.with_position()`
+ - Fix multipeek's performance for long peeks by using `VecDeque`.
+## 0.5.6
+ - Add `.map_results()`
+## 0.5.5
+ - Many more adaptors now implement `Debug`
+ - Add free function constructor `repeat_n`. `RepeatN::new` is now deprecated.
+## 0.5.4
+ - Add infinite generator function `iterate`, that takes a seed and a closure.
+## 0.5.3
+ - Special-cased `.fold()` for flatten and put back. `.foreach()` now uses
fold on the iterator, to pick up any iterator specific loop implementation.
+ - `.combinations(n)` asserts up front that `n != 0`, instead of running into
an error on the second iterator element.
+## 0.5.2
+ - Add `.tuples::<T>()` that iterates by two, three or four elements at a
time (where `T` is a tuple type).
+ - Add `.tuple_windows::<T>()` that iterates using a window of the two, three
or four most recent elements.
+ - Add `.next_tuple::<T>()` method, that picks the next two, three or four
elements in one go.
+ - `.interleave()` now has an accurate size hint.
+## 0.5.1
+ - Workaround module/function name clash that made racer crash on completing
itertools. Only internal changes needed.
+## 0.5.0
+ - [Release
announcement](https://bluss.github.io/rust/2016/09/26/itertools-0.5.0/)
+ - Renamed:
+ - `combinations` is now `tuple_combinations`
+ - `combinations_n` to `combinations`
+ - `group_by_lazy`, `chunks_lazy` to `group_by`, `chunks`
+ - `Unfold::new` to `unfold()`
+ - `RepeatCall::new` to `repeat_call()`
+ - `Zip::new` to `multizip`
+ - `PutBack::new`, `PutBackN::new` to `put_back`, `put_back_n`
+ - `PutBack::with_value` is now a builder setter, not a constructor
+ - `MultiPeek::new`, `.multipeek()` to `multipeek()`
+ - `format` to `format_with` and `format_default` to `format`
+ - `.into_rc()` to `rciter`
+ - `Partition` enum is now `Either`
+ - Module reorganization:
+ - All iterator structs are under `itertools::structs` but also reexported
to the top level, for backwards compatibility
+ - All free functions are reexported at the root, `itertools::free` will be
removed in the next version
+ - Removed:
+ - `ZipSlices`, use `.zip()` instead
+ - `.enumerate_from()`, `ZipTrusted`, due to being unstable
+ - `.mend_slices()`, moved to crate `odds`
+ - Stride, StrideMut, moved to crate `odds`
+ - `linspace()`, moved to crate `itertools-num`
+ - `.sort_by()`, use `.sorted_by()`
+ - `.is_empty_hint()`, use `.size_hint()`
+ - `.dropn()`, use `.dropping()`
+ - `.map_fn()`, use `.map()`
+ - `.slice()`, use `.take()` / `.skip()`
+ - helper traits in `misc`
+ - `new` constructors on iterator structs, use `Itertools` trait or free
functions instead
+ - `itertools::size_hint` is now private
+ - Behaviour changes:
+ - `format` and `format_with` helpers now panic if you try to format them
more than once.
+ - `repeat_call` is not double ended anymore
+ - New features:
+ - tuple flattening iterator is constructible with `cons_tuples`
+ - itertools reexports `Either` from the `either` crate. `Either<L, R>` is
an iterator when `L, R` are.
+ - `MinMaxResult` now implements `Copy` and `Clone`
+ - `tuple_combinations` supports 1-4 tuples of combinations (previously
just 2)
+## 0.4.19
+ - Add `.minmax_by()`
+ - Add `itertools::free::cloned`
+ - Add `itertools::free::rciter`
+ - Improve `.step(n)` slightly to take advantage of specialized Fuse better.
+## 0.4.18
+ - Only changes related to the "unstable" crate feature. This feature is more
or less deprecated.
+ - Use deprecated warnings when unstable is enabled. `.enumerate_from()`
will be removed imminently since it's using a deprecated libstd trait.
+## 0.4.17
+ - Fix bug in `.kmerge()` that caused it to often produce the wrong order #134
+## 0.4.16
+ - Improve precision of the `interleave_shortest` adaptor's size hint (it is
now computed exactly when possible).
+## 0.4.15
+ - Fixup on top of the workaround in 0.4.14. A function in `itertools::free`
was removed by mistake and now it is added back again.
+## 0.4.14
+ - Workaround an upstream regression in a Rust nightly build that broke
compilation of of `itertools::free::{interleave, merge}`
+## 0.4.13
+ - Add `.minmax()` and `.minmax_by_key()`, iterator methods for finding both
minimum and maximum in one scan.
+ - Add `.format_default()`, a simpler version of `.format()` (lazy formatting
for iterators).
+## 0.4.12
+ - Add `.zip_eq()`, an adaptor like `.zip()` except it ensures iterators of
inequal length don't pass silently (instead it panics).
+ - Add `.fold_while()`, an iterator method that is a fold that can
short-circuit.
+ - Add `.partition_map()`, an iterator method that can separate elements into
two collections.
+## 0.4.11
+ - Add `.get()` for `Stride{,Mut}` and `.get_mut()` for `StrideMut`
+## 0.4.10
+ - Improve performance of `.kmerge()`
+## 0.4.9
+ - Add k-ary merge adaptor `.kmerge()`
+ - Fix a bug in `.islice()` with ranges `a..b` where a `> b`.
+## 0.4.8
+ - Implement `Clone`, `Debug` for `Linspace`
+## 0.4.7
+ - Add function `diff_with()` that compares two iterators
+ - Add `.combinations_n()`, an n-ary combinations iterator
+ - Add methods `PutBack::with_value` and `PutBack::into_parts`.
+## 0.4.6
+ - Add method `.sorted()`
+ - Add module `itertools::free` with free function variants of common
iterator adaptors and methods. For example `enumerate(iterable)`,
`rev(iterable)`, and so on.
+## 0.4.5
+ - Add `.flatten()`
+## 0.4.4
+ - Allow composing `ZipSlices` with itself
+## 0.4.3
+ - Write `iproduct!()` as a single expression; this allows temporary values
in its arguments.
+## 0.4.2
+ - Add `.fold_options()`
+ - Require Rust 1.1 or later
+## 0.4.1
+ - Update `.dropping()` to take advantage of `.nth()`
+## 0.4.0
+ - `.merge()`, `.unique()` and `.dedup()` now perform better due to not using
function pointers
+ - Add free functions `enumerate()` and `rev()`
+ - Breaking changes:
+ - Return types of `.merge()` and `.merge_by()` renamed and changed
+ - Method `Merge::new` removed
+ - `.merge_by()` now takes a closure that returns bool.
+ - Return type of `.dedup()` changed
+ - Return type of `.mend_slices()` changed
+ - Return type of `.unique()` changed
+ - Removed function `times()`, struct `Times`: use a range instead
+ - Removed deprecated macro `icompr!()`
+ - Removed deprecated `FnMap` and method `.fn_map()`: use `.map_fn()`
+ - `.interleave_shortest()` is no longer guaranteed to act like fused
+## 0.3.25
+ - Rename `.sort_by()` to `.sorted_by()`. Old name is deprecated.
+ - Fix well-formedness warnings from RFC 1214, no user visible impact
+## 0.3.24
+ - Improve performance of `.merge()`'s ordering function slightly
+## 0.3.23
+ - Added `.chunks()`, similar to (and based on) `.group_by_lazy()`.
+ - Tweak linspace to match numpy.linspace and make it double ended.
+## 0.3.22
+ - Added `ZipSlices`, a fast zip for slices
+## 0.3.21
+ - Remove `Debug` impl for `Format`, it will have different use later
+## 0.3.20
+ - Optimize `.group_by_lazy()`
+## 0.3.19
+ - Added `.group_by_lazy()`, a possibly nonallocating group by
+ - Added `.format()`, a nonallocating formatting helper for iterators
+ - Remove uses of `RandomAccessIterator` since it has been deprecated in Rust.
+## 0.3.17
+ - Added (adopted) `Unfold` from Rust
+## 0.3.16
+ - Added adaptors `.unique()`, `.unique_by()`
+## 0.3.15
+ - Added method `.sort_by()`
+## 0.3.14
+ - Added adaptor `.while_some()`
+## 0.3.13
+ - Added adaptor `.interleave_shortest()`
+ - Added adaptor `.pad_using()`
+## 0.3.11
+ - Added `assert_equal` function
+## 0.3.10
+ - Bugfix `.combinations()` `size_hint`.
+## 0.3.8
+ - Added source `RepeatCall`
+## 0.3.7
+ - Added adaptor `PutBackN`
+ - Added adaptor `.combinations()`
+## 0.3.6
+ - Added `itertools::partition`, partition a sequence in place based on a
predicate.
+ - Deprecate `icompr!()` with no replacement.
+## 0.3.5
+ - `.map_fn()` replaces deprecated `.fn_map()`.
+## 0.3.4
+ - `.take_while_ref()` *by-ref adaptor*
+ - `.coalesce()` *adaptor*
+ - `.mend_slices()` *adaptor*
+## 0.3.3
+ - `.dropping_back()` *method*
+ - `.fold1()` *method*
+ - `.is_empty_hint()` *method*
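For reference, the 0.11.0 entry above changes `with_position` to yield `(Position, Item)` tuples; a minimal sketch (not part of the vendored files, assuming the itertools 0.11 API as vendored in this series) of the new shape:

use itertools::{Itertools, Position};

fn main() {
    // Since itertools 0.11, with_position yields (Position, item) tuples
    // instead of the old Position<Item> wrapper enum.
    for (pos, x) in [10, 20, 30].iter().with_position() {
        match pos {
            Position::First => println!("first: {}", x),
            Position::Middle => println!("middle: {}", x),
            Position::Last => println!("last: {}", x),
            Position::Only => println!("only: {}", x),
        }
    }
}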
diff --git a/rust/hw/char/pl011/vendor/itertools/Cargo.lock b/rust/hw/char/pl011/vendor/itertools/Cargo.lock
new file mode 100644
index 0000000000..76936c9eea
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/Cargo.lock
@@ -0,0 +1,681 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anes"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bumpalo"
+version = "3.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
+
+[[package]]
+name = "cast"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "ciborium"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f"
+dependencies = [
+ "ciborium-io",
+ "ciborium-ll",
+ "serde",
+]
+
+[[package]]
+name = "ciborium-io"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369"
+
+[[package]]
+name = "ciborium-ll"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b"
+dependencies = [
+ "ciborium-io",
+ "half",
+]
+
+[[package]]
+name = "clap"
+version = "3.2.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86447ad904c7fb335a790c9d7fe3d0d971dc523b8ccd1561a520de9a85302750"
+dependencies = [
+ "bitflags",
+ "clap_lex",
+ "indexmap",
+ "textwrap",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
+dependencies = [
+ "os_str_bytes",
+]
+
+[[package]]
+name = "criterion"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb"
+dependencies = [
+ "anes",
+ "atty",
+ "cast",
+ "ciborium",
+ "clap",
+ "criterion-plot",
+ "itertools 0.10.4",
+ "lazy_static",
+ "num-traits",
+ "oorandom",
+ "plotters",
+ "rayon",
+ "regex",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "tinytemplate",
+ "walkdir",
+]
+
+[[package]]
+name = "criterion-plot"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
+dependencies = [
+ "cast",
+ "itertools 0.10.4",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "crossbeam-utils",
+ "memoffset",
+ "once_cell",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
+[[package]]
+name = "either"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
+
+[[package]]
+name = "getrandom"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "half"
+version = "1.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
+
+[[package]]
+name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8bf247779e67a9082a4790b45e71ac7cfd1321331a5c856a74a9faebdab78d0"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itertools"
+version = "0.11.0"
+dependencies = [
+ "criterion",
+ "either",
+ "paste",
+ "permutohedron",
+ "quickcheck",
+ "rand",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+
+[[package]]
+name = "js-sys"
+version = "0.3.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0f80d65747a3e43d1596c7c5492d95d5edddaabd45a7fcdb02b95f644164966"
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
+
+[[package]]
+name = "oorandom"
+version = "11.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+
+[[package]]
+name = "os_str_bytes"
+version = "6.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff"
+
+[[package]]
+name = "paste"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
+
+[[package]]
+name = "permutohedron"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b687ff7b5da449d39e418ad391e5e08da53ec334903ddbb921db208908fc372c"
+
+[[package]]
+name = "plotters"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97"
+dependencies = [
+ "num-traits",
+ "plotters-backend",
+ "plotters-svg",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "plotters-backend"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142"
+
+[[package]]
+name = "plotters-svg"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f"
+dependencies = [
+ "plotters-backend",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quickcheck"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a44883e74aa97ad63db83c4bf8ca490f02b2fc02f92575e720c8551e843c945f"
+dependencies = [
+ "rand",
+ "rand_core",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
+dependencies = [
+ "getrandom",
+ "libc",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "rayon"
+version = "1.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "num_cpus",
+]
+
+[[package]]
+name = "regex"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+
+[[package]]
+name = "ryu"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "serde"
+version = "1.0.144"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.144"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.85"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52205623b1b0f064a4e71182c3b18ae902267282930c6d5462c91b859668426e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "949517c0cf1bf4ee812e2e07e08ab448e3ae0d23472aee8a06c985f0c8815b16"
+
+[[package]]
+name = "tinytemplate"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.9.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
+
+[[package]]
+name = "web-sys"
+version = "0.3.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/rust/hw/char/pl011/vendor/itertools/Cargo.toml b/rust/hw/char/pl011/vendor/itertools/Cargo.toml
new file mode 100644
index 0000000000..df3cbd8fd3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/Cargo.toml
@@ -0,0 +1,101 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+rust-version = "1.36.0"
+name = "itertools"
+version = "0.11.0"
+authors = ["bluss"]
+exclude = ["/bors.toml"]
+description = "Extra iterator adaptors, iterator methods, free functions, and macros."
+documentation = "https://docs.rs/itertools/"
+readme = "README.md"
+keywords = [
+ "iterator",
+ "data-structure",
+ "zip",
+ "product",
+ "group-by",
+]
+categories = [
+ "algorithms",
+ "rust-patterns",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-itertools/itertools"
+
+[profile.bench]
+debug = 2
+
+[lib]
+test = false
+bench = false
+
+[[bench]]
+name = "tuple_combinations"
+harness = false
+
+[[bench]]
+name = "tuples"
+harness = false
+
+[[bench]]
+name = "fold_specialization"
+harness = false
+
+[[bench]]
+name = "combinations_with_replacement"
+harness = false
+
+[[bench]]
+name = "tree_fold1"
+harness = false
+
+[[bench]]
+name = "bench1"
+harness = false
+
+[[bench]]
+name = "combinations"
+harness = false
+
+[[bench]]
+name = "powerset"
+harness = false
+
+[dependencies.either]
+version = "1.0"
+default-features = false
+
+[dev-dependencies.criterion]
+version = "0.4.0"
+
+[dev-dependencies.paste]
+version = "1.0.0"
+
+[dev-dependencies.permutohedron]
+version = "0.2"
+
+[dev-dependencies.quickcheck]
+version = "0.9"
+default_features = false
+
+[dev-dependencies.rand]
+version = "0.7"
+
+[features]
+default = ["use_std"]
+use_alloc = []
+use_std = [
+ "use_alloc",
+ "either/use_std",
+]
diff --git a/rust/hw/char/pl011/vendor/itertools/LICENSE-APACHE b/rust/hw/char/pl011/vendor/itertools/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/rust/hw/char/pl011/vendor/itertools/LICENSE-MIT b/rust/hw/char/pl011/vendor/itertools/LICENSE-MIT
new file mode 100644
index 0000000000..9203baa055
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2015
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/itertools/README.md b/rust/hw/char/pl011/vendor/itertools/README.md
new file mode 100644
index 0000000000..626d10d0d0
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/README.md
@@ -0,0 +1,44 @@
+# Itertools
+
+Extra iterator adaptors, functions and macros.
+
+Please read the [API documentation here](https://docs.rs/itertools/).
+
+[![build_status](https://github.com/rust-itertools/itertools/actions/workflows/ci.yml/badge.svg)](https://github.com/rust-itertools/itertools/actions)
+[![crates.io](https://img.shields.io/crates/v/itertools.svg)](https://crates.io/crates/itertools)
+
+How to use with Cargo:
+
+```toml
+[dependencies]
+itertools = "0.11.0"
+```
+
+How to use in your crate:
+
+```rust
+use itertools::Itertools;
+```
+
+## How to contribute
+
+- Fix a bug or implement a new thing
+- Include tests for your new feature, preferably a QuickCheck test
+- Make a Pull Request
+
+For new features, please first consider filing a PR to [rust-lang/rust](https://github.com/rust-lang/rust),
+adding your new feature to the `Iterator` trait of the standard library, if you believe it is reasonable.
+If it isn't accepted there, proposing it for inclusion in ``itertools`` is a good idea.
+The reason for doing is this is so that we avoid future breakage as with ``.flatten()``.
+However, if your feature involves heap allocation, such as storing elements in a ``Vec<T>``,
+then it can't be accepted into ``libcore``, and you should propose it for ``itertools`` directly instead.
+
+## License
+
+Dual-licensed to be compatible with the Rust project.
+
+Licensed under the Apache License, Version 2.0
+https://www.apache.org/licenses/LICENSE-2.0 or the MIT license
+https://opensource.org/licenses/MIT, at your
+option. This file may not be copied, modified, or distributed
+except according to those terms.
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/bench1.rs b/rust/hw/char/pl011/vendor/itertools/benches/bench1.rs
new file mode 100644
index 0000000000..71278d17b6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/bench1.rs
@@ -0,0 +1,877 @@
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use itertools::Itertools;
+use itertools::free::cloned;
+use itertools::iproduct;
+
+use std::iter::repeat;
+use std::cmp;
+use std::ops::{Add, Range};
+
+mod extra;
+
+use crate::extra::ZipSlices;
+
+fn slice_iter(c: &mut Criterion) {
+ let xs: Vec<_> = repeat(1i32).take(20).collect();
+
+ c.bench_function("slice iter", move |b| {
+ b.iter(|| for elt in xs.iter() {
+ black_box(elt);
+ })
+ });
+}
+
+fn slice_iter_rev(c: &mut Criterion) {
+ let xs: Vec<_> = repeat(1i32).take(20).collect();
+
+ c.bench_function("slice iter rev", move |b| {
+ b.iter(|| for elt in xs.iter().rev() {
+ black_box(elt);
+ })
+ });
+}
+
+fn zip_default_zip(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zip default zip", move |b| {
+ b.iter(|| {
+ for (&x, &y) in xs.iter().zip(&ys) {
+ black_box(x);
+ black_box(y);
+ }
+ })
+ });
+}
+
+fn zipdot_i32_default_zip(c: &mut Criterion) {
+ let xs = vec![2; 1024];
+ let ys = vec![2; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot i32 default zip", move |b| {
+ b.iter(|| {
+ let mut s = 0;
+ for (&x, &y) in xs.iter().zip(&ys) {
+ s += x * y;
+ }
+ s
+ })
+ });
+}
+
+fn zipdot_f32_default_zip(c: &mut Criterion) {
+ let xs = vec![2f32; 1024];
+ let ys = vec![2f32; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot f32 default zip", move |b| {
+ b.iter(|| {
+ let mut s = 0.;
+ for (&x, &y) in xs.iter().zip(&ys) {
+ s += x * y;
+ }
+ s
+ })
+ });
+}
+
+fn zip_default_zip3(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+ let zs = vec![0; 766];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+ let zs = black_box(zs);
+
+ c.bench_function("zip default zip3", move |b| {
+ b.iter(|| {
+ for ((&x, &y), &z) in xs.iter().zip(&ys).zip(&zs) {
+ black_box(x);
+ black_box(y);
+ black_box(z);
+ }
+ })
+ });
+}
+
+fn zip_slices_ziptuple(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+
+ c.bench_function("zip slices ziptuple", move |b| {
+ b.iter(|| {
+ let xs = black_box(&xs);
+ let ys = black_box(&ys);
+ for (&x, &y) in itertools::multizip((xs, ys)) {
+ black_box(x);
+ black_box(y);
+ }
+ })
+ });
+}
+
+fn zipslices(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipslices", move |b| {
+ b.iter(|| {
+ for (&x, &y) in ZipSlices::new(&xs, &ys) {
+ black_box(x);
+ black_box(y);
+ }
+ })
+ });
+}
+
+fn zipslices_mut(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+ let xs = black_box(xs);
+ let mut ys = black_box(ys);
+
+ c.bench_function("zipslices mut", move |b| {
+ b.iter(|| {
+ for (&x, &mut y) in ZipSlices::from_slices(&xs[..], &mut ys[..]) {
+ black_box(x);
+ black_box(y);
+ }
+ })
+ });
+}
+
+fn zipdot_i32_zipslices(c: &mut Criterion) {
+ let xs = vec![2; 1024];
+ let ys = vec![2; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot i32 zipslices", move |b| {
+ b.iter(|| {
+ let mut s = 0i32;
+ for (&x, &y) in ZipSlices::new(&xs, &ys) {
+ s += x * y;
+ }
+ s
+ })
+ });
+}
+
+fn zipdot_f32_zipslices(c: &mut Criterion) {
+ let xs = vec![2f32; 1024];
+ let ys = vec![2f32; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot f32 zipslices", move |b| {
+ b.iter(|| {
+ let mut s = 0.;
+ for (&x, &y) in ZipSlices::new(&xs, &ys) {
+ s += x * y;
+ }
+ s
+ })
+ });
+}
+
+fn zip_checked_counted_loop(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zip checked counted loop", move |b| {
+ b.iter(|| {
+            // Must slice to equal lengths, and then bounds checks are eliminated!
+ let len = cmp::min(xs.len(), ys.len());
+ let xs = &xs[..len];
+ let ys = &ys[..len];
+
+ for i in 0..len {
+ let x = xs[i];
+ let y = ys[i];
+ black_box(x);
+ black_box(y);
+ }
+ })
+ });
+}
+
+fn zipdot_i32_checked_counted_loop(c: &mut Criterion) {
+ let xs = vec![2; 1024];
+ let ys = vec![2; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot i32 checked counted loop", move |b| {
+ b.iter(|| {
+            // Must slice to equal lengths, and then bounds checks are eliminated!
+ let len = cmp::min(xs.len(), ys.len());
+ let xs = &xs[..len];
+ let ys = &ys[..len];
+
+ let mut s = 0i32;
+
+ for i in 0..len {
+ s += xs[i] * ys[i];
+ }
+ s
+ })
+ });
+}
+
+fn zipdot_f32_checked_counted_loop(c: &mut Criterion) {
+ let xs = vec![2f32; 1024];
+ let ys = vec![2f32; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot f32 checked counted loop", move |b| {
+ b.iter(|| {
+            // Must slice to equal lengths, and then bounds checks are eliminated!
+ let len = cmp::min(xs.len(), ys.len());
+ let xs = &xs[..len];
+ let ys = &ys[..len];
+
+ let mut s = 0.;
+
+ for i in 0..len {
+ s += xs[i] * ys[i];
+ }
+ s
+ })
+ });
+}
+
+fn zipdot_f32_checked_counted_unrolled_loop(c: &mut Criterion) {
+ let xs = vec![2f32; 1024];
+ let ys = vec![2f32; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot f32 checked counted unrolled loop", move |b| {
+ b.iter(|| {
+            // Must slice to equal lengths, and then bounds checks are eliminated!
+ let len = cmp::min(xs.len(), ys.len());
+ let mut xs = &xs[..len];
+ let mut ys = &ys[..len];
+
+ let mut s = 0.;
+            let (mut p0, mut p1, mut p2, mut p3, mut p4, mut p5, mut p6, mut p7) =
+ (0., 0., 0., 0., 0., 0., 0., 0.);
+
+ // how to unroll and have bounds checks eliminated (by cristicbz)
+ // split sum into eight parts to enable vectorization (by bluss)
+ while xs.len() >= 8 {
+ p0 += xs[0] * ys[0];
+ p1 += xs[1] * ys[1];
+ p2 += xs[2] * ys[2];
+ p3 += xs[3] * ys[3];
+ p4 += xs[4] * ys[4];
+ p5 += xs[5] * ys[5];
+ p6 += xs[6] * ys[6];
+ p7 += xs[7] * ys[7];
+
+ xs = &xs[8..];
+ ys = &ys[8..];
+ }
+ s += p0 + p4;
+ s += p1 + p5;
+ s += p2 + p6;
+ s += p3 + p7;
+
+ for i in 0..xs.len() {
+ s += xs[i] * ys[i];
+ }
+ s
+ })
+ });
+}
+
+fn zip_unchecked_counted_loop(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zip unchecked counted loop", move |b| {
+ b.iter(|| {
+ let len = cmp::min(xs.len(), ys.len());
+ for i in 0..len {
+ unsafe {
+ let x = *xs.get_unchecked(i);
+ let y = *ys.get_unchecked(i);
+ black_box(x);
+ black_box(y);
+ }
+ }
+ })
+ });
+}
+
+fn zipdot_i32_unchecked_counted_loop(c: &mut Criterion) {
+ let xs = vec![2; 1024];
+ let ys = vec![2; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot i32 unchecked counted loop", move |b| {
+ b.iter(|| {
+ let len = cmp::min(xs.len(), ys.len());
+ let mut s = 0i32;
+ for i in 0..len {
+ unsafe {
+ let x = *xs.get_unchecked(i);
+ let y = *ys.get_unchecked(i);
+ s += x * y;
+ }
+ }
+ s
+ })
+ });
+}
+
+fn zipdot_f32_unchecked_counted_loop(c: &mut Criterion) {
+ let xs = vec![2.; 1024];
+ let ys = vec![2.; 768];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+
+ c.bench_function("zipdot f32 unchecked counted loop", move |b| {
+ b.iter(|| {
+ let len = cmp::min(xs.len(), ys.len());
+ let mut s = 0f32;
+ for i in 0..len {
+ unsafe {
+ let x = *xs.get_unchecked(i);
+ let y = *ys.get_unchecked(i);
+ s += x * y;
+ }
+ }
+ s
+ })
+ });
+}
+
+fn zip_unchecked_counted_loop3(c: &mut Criterion) {
+ let xs = vec![0; 1024];
+ let ys = vec![0; 768];
+ let zs = vec![0; 766];
+ let xs = black_box(xs);
+ let ys = black_box(ys);
+ let zs = black_box(zs);
+
+ c.bench_function("zip unchecked counted loop3", move |b| {
+ b.iter(|| {
+ let len = cmp::min(xs.len(), cmp::min(ys.len(), zs.len()));
+ for i in 0..len {
+ unsafe {
+ let x = *xs.get_unchecked(i);
+ let y = *ys.get_unchecked(i);
+ let z = *zs.get_unchecked(i);
+ black_box(x);
+ black_box(y);
+ black_box(z);
+ }
+ }
+ })
+ });
+}
+
+fn group_by_lazy_1(c: &mut Criterion) {
+ let mut data = vec![0; 1024];
+ for (index, elt) in data.iter_mut().enumerate() {
+ *elt = index / 10;
+ }
+
+ let data = black_box(data);
+
+ c.bench_function("group by lazy 1", move |b| {
+ b.iter(|| {
+ for (_key, group) in &data.iter().group_by(|elt| **elt) {
+ for elt in group {
+ black_box(elt);
+ }
+ }
+ })
+ });
+}
+
+fn group_by_lazy_2(c: &mut Criterion) {
+ let mut data = vec![0; 1024];
+ for (index, elt) in data.iter_mut().enumerate() {
+ *elt = index / 2;
+ }
+
+ let data = black_box(data);
+
+ c.bench_function("group by lazy 2", move |b| {
+ b.iter(|| {
+ for (_key, group) in &data.iter().group_by(|elt| **elt) {
+ for elt in group {
+ black_box(elt);
+ }
+ }
+ })
+ });
+}
+
+fn slice_chunks(c: &mut Criterion) {
+ let data = vec![0; 1024];
+
+ let data = black_box(data);
+ let sz = black_box(10);
+
+ c.bench_function("slice chunks", move |b| {
+ b.iter(|| {
+ for group in data.chunks(sz) {
+ for elt in group {
+ black_box(elt);
+ }
+ }
+ })
+ });
+}
+
+fn chunks_lazy_1(c: &mut Criterion) {
+ let data = vec![0; 1024];
+
+ let data = black_box(data);
+ let sz = black_box(10);
+
+ c.bench_function("chunks lazy 1", move |b| {
+ b.iter(|| {
+ for group in &data.iter().chunks(sz) {
+ for elt in group {
+ black_box(elt);
+ }
+ }
+ })
+ });
+}
+
+fn equal(c: &mut Criterion) {
+ let data = vec![7; 1024];
+ let l = data.len();
+ let alpha = black_box(&data[1..]);
+ let beta = black_box(&data[..l - 1]);
+
+ c.bench_function("equal", move |b| {
+ b.iter(|| {
+ itertools::equal(alpha, beta)
+ })
+ });
+}
+
+fn merge_default(c: &mut Criterion) {
+ let mut data1 = vec![0; 1024];
+ let mut data2 = vec![0; 800];
+ let mut x = 0;
+ for (_, elt) in data1.iter_mut().enumerate() {
+ *elt = x;
+ x += 1;
+ }
+
+ let mut y = 0;
+ for (i, elt) in data2.iter_mut().enumerate() {
+ *elt += y;
+ if i % 3 == 0 {
+ y += 3;
+ } else {
+ y += 0;
+ }
+ }
+ let data1 = black_box(data1);
+ let data2 = black_box(data2);
+
+ c.bench_function("merge default", move |b| {
+ b.iter(|| {
+ data1.iter().merge(&data2).count()
+ })
+ });
+}
+
+fn merge_by_cmp(c: &mut Criterion) {
+ let mut data1 = vec![0; 1024];
+ let mut data2 = vec![0; 800];
+ let mut x = 0;
+ for (_, elt) in data1.iter_mut().enumerate() {
+ *elt = x;
+ x += 1;
+ }
+
+ let mut y = 0;
+ for (i, elt) in data2.iter_mut().enumerate() {
+ *elt += y;
+ if i % 3 == 0 {
+ y += 3;
+ } else {
+ y += 0;
+ }
+ }
+ let data1 = black_box(data1);
+ let data2 = black_box(data2);
+
+ c.bench_function("merge by cmp", move |b| {
+ b.iter(|| {
+ data1.iter().merge_by(&data2, PartialOrd::le).count()
+ })
+ });
+}
+
+fn merge_by_lt(c: &mut Criterion) {
+ let mut data1 = vec![0; 1024];
+ let mut data2 = vec![0; 800];
+ let mut x = 0;
+ for (_, elt) in data1.iter_mut().enumerate() {
+ *elt = x;
+ x += 1;
+ }
+
+ let mut y = 0;
+ for (i, elt) in data2.iter_mut().enumerate() {
+ *elt += y;
+ if i % 3 == 0 {
+ y += 3;
+ } else {
+ y += 0;
+ }
+ }
+ let data1 = black_box(data1);
+ let data2 = black_box(data2);
+
+ c.bench_function("merge by lt", move |b| {
+ b.iter(|| {
+ data1.iter().merge_by(&data2, |a, b| a <= b).count()
+ })
+ });
+}
+
+fn kmerge_default(c: &mut Criterion) {
+ let mut data1 = vec![0; 1024];
+ let mut data2 = vec![0; 800];
+ let mut x = 0;
+ for (_, elt) in data1.iter_mut().enumerate() {
+ *elt = x;
+ x += 1;
+ }
+
+ let mut y = 0;
+ for (i, elt) in data2.iter_mut().enumerate() {
+ *elt += y;
+ if i % 3 == 0 {
+ y += 3;
+ } else {
+ y += 0;
+ }
+ }
+ let data1 = black_box(data1);
+ let data2 = black_box(data2);
+ let its = &[data1.iter(), data2.iter()];
+
+ c.bench_function("kmerge default", move |b| {
+ b.iter(|| {
+ its.iter().cloned().kmerge().count()
+ })
+ });
+}
+
+fn kmerge_tenway(c: &mut Criterion) {
+ let mut data = vec![0; 10240];
+
+ let mut state = 1729u16;
+ fn rng(state: &mut u16) -> u16 {
+ let new = state.wrapping_mul(31421) + 6927;
+ *state = new;
+ new
+ }
+
+ for elt in &mut data {
+ *elt = rng(&mut state);
+ }
+
+ let mut chunks = Vec::new();
+ let mut rest = &mut data[..];
+ while rest.len() > 0 {
+ let chunk_len = 1 + rng(&mut state) % 512;
+ let chunk_len = cmp::min(rest.len(), chunk_len as usize);
+ let (fst, tail) = {rest}.split_at_mut(chunk_len);
+ fst.sort();
+ chunks.push(fst.iter().cloned());
+ rest = tail;
+ }
+
+    // println!("Chunk lengths: {}", chunks.iter().format_with(", ", |elt, f| f(&elt.len())));
+
+ c.bench_function("kmerge tenway", move |b| {
+ b.iter(|| {
+ chunks.iter().cloned().kmerge().count()
+ })
+ });
+}
+
+fn fast_integer_sum<I>(iter: I) -> I::Item
+ where I: IntoIterator,
+ I::Item: Default + Add<Output=I::Item>
+{
+ iter.into_iter().fold(<_>::default(), |x, y| x + y)
+}
+
+fn step_vec_2(c: &mut Criterion) {
+ let v = vec![0; 1024];
+
+ c.bench_function("step vec 2", move |b| {
+ b.iter(|| {
+ fast_integer_sum(cloned(v.iter().step_by(2)))
+ })
+ });
+}
+
+fn step_vec_10(c: &mut Criterion) {
+ let v = vec![0; 1024];
+
+ c.bench_function("step vec 10", move |b| {
+ b.iter(|| {
+ fast_integer_sum(cloned(v.iter().step_by(10)))
+ })
+ });
+}
+
+fn step_range_2(c: &mut Criterion) {
+ let v = black_box(0..1024);
+
+ c.bench_function("step range 2", move |b| {
+ b.iter(|| {
+ fast_integer_sum(v.clone().step_by(2))
+ })
+ });
+}
+
+fn step_range_10(c: &mut Criterion) {
+ let v = black_box(0..1024);
+
+ c.bench_function("step range 10", move |b| {
+ b.iter(|| {
+ fast_integer_sum(v.clone().step_by(10))
+ })
+ });
+}
+
+fn cartesian_product_iterator(c: &mut Criterion) {
+ let xs = vec![0; 16];
+
+ c.bench_function("cartesian product iterator", move |b| {
+ b.iter(|| {
+ let mut sum = 0;
+ for (&x, &y, &z) in iproduct!(&xs, &xs, &xs) {
+ sum += x;
+ sum += y;
+ sum += z;
+ }
+ sum
+ })
+ });
+}
+
+fn cartesian_product_fold(c: &mut Criterion) {
+ let xs = vec![0; 16];
+
+ c.bench_function("cartesian product fold", move |b| {
+ b.iter(|| {
+ let mut sum = 0;
+ iproduct!(&xs, &xs, &xs).fold((), |(), (&x, &y, &z)| {
+ sum += x;
+ sum += y;
+ sum += z;
+ });
+ sum
+ })
+ });
+}
+
+fn multi_cartesian_product_iterator(c: &mut Criterion) {
+ let xs = [vec![0; 16], vec![0; 16], vec![0; 16]];
+
+ c.bench_function("multi cartesian product iterator", move |b| {
+ b.iter(|| {
+ let mut sum = 0;
+ for x in xs.iter().multi_cartesian_product() {
+ sum += x[0];
+ sum += x[1];
+ sum += x[2];
+ }
+ sum
+ })
+ });
+}
+
+fn multi_cartesian_product_fold(c: &mut Criterion) {
+ let xs = [vec![0; 16], vec![0; 16], vec![0; 16]];
+
+ c.bench_function("multi cartesian product fold", move |b| {
+ b.iter(|| {
+ let mut sum = 0;
+ xs.iter().multi_cartesian_product().fold((), |(), x| {
+ sum += x[0];
+ sum += x[1];
+ sum += x[2];
+ });
+ sum
+ })
+ });
+}
+
+fn cartesian_product_nested_for(c: &mut Criterion) {
+ let xs = vec![0; 16];
+
+ c.bench_function("cartesian product nested for", move |b| {
+ b.iter(|| {
+ let mut sum = 0;
+ for &x in &xs {
+ for &y in &xs {
+ for &z in &xs {
+ sum += x;
+ sum += y;
+ sum += z;
+ }
+ }
+ }
+ sum
+ })
+ });
+}
+
+fn all_equal(c: &mut Criterion) {
+ let mut xs = vec![0; 5_000_000];
+ xs.extend(vec![1; 5_000_000]);
+
+ c.bench_function("all equal", move |b| {
+ b.iter(|| xs.iter().all_equal())
+ });
+}
+
+fn all_equal_for(c: &mut Criterion) {
+ let mut xs = vec![0; 5_000_000];
+ xs.extend(vec![1; 5_000_000]);
+
+ c.bench_function("all equal for", move |b| {
+ b.iter(|| {
+ for &x in &xs {
+ if x != xs[0] {
+ return false;
+ }
+ }
+ true
+ })
+ });
+}
+
+fn all_equal_default(c: &mut Criterion) {
+ let mut xs = vec![0; 5_000_000];
+ xs.extend(vec![1; 5_000_000]);
+
+ c.bench_function("all equal default", move |b| {
+ b.iter(|| xs.iter().dedup().nth(1).is_none())
+ });
+}
+
+const PERM_COUNT: usize = 6;
+
+fn permutations_iter(c: &mut Criterion) {
+ struct NewIterator(Range<usize>);
+
+ impl Iterator for NewIterator {
+ type Item = usize;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+ }
+
+ c.bench_function("permutations iter", move |b| {
+ b.iter(|| {
+ for _ in NewIterator(0..PERM_COUNT).permutations(PERM_COUNT) {
+
+ }
+ })
+ });
+}
+
+fn permutations_range(c: &mut Criterion) {
+ c.bench_function("permutations range", move |b| {
+ b.iter(|| {
+ for _ in (0..PERM_COUNT).permutations(PERM_COUNT) {
+
+ }
+ })
+ });
+}
+
+fn permutations_slice(c: &mut Criterion) {
+ let v = (0..PERM_COUNT).collect_vec();
+
+ c.bench_function("permutations slice", move |b| {
+ b.iter(|| {
+ for _ in v.as_slice().iter().permutations(PERM_COUNT) {
+
+ }
+ })
+ });
+}
+
+criterion_group!(
+ benches,
+ slice_iter,
+ slice_iter_rev,
+ zip_default_zip,
+ zipdot_i32_default_zip,
+ zipdot_f32_default_zip,
+ zip_default_zip3,
+ zip_slices_ziptuple,
+ zipslices,
+ zipslices_mut,
+ zipdot_i32_zipslices,
+ zipdot_f32_zipslices,
+ zip_checked_counted_loop,
+ zipdot_i32_checked_counted_loop,
+ zipdot_f32_checked_counted_loop,
+ zipdot_f32_checked_counted_unrolled_loop,
+ zip_unchecked_counted_loop,
+ zipdot_i32_unchecked_counted_loop,
+ zipdot_f32_unchecked_counted_loop,
+ zip_unchecked_counted_loop3,
+ group_by_lazy_1,
+ group_by_lazy_2,
+ slice_chunks,
+ chunks_lazy_1,
+ equal,
+ merge_default,
+ merge_by_cmp,
+ merge_by_lt,
+ kmerge_default,
+ kmerge_tenway,
+ step_vec_2,
+ step_vec_10,
+ step_range_2,
+ step_range_10,
+ cartesian_product_iterator,
+ cartesian_product_fold,
+ multi_cartesian_product_iterator,
+ multi_cartesian_product_fold,
+ cartesian_product_nested_for,
+ all_equal,
+ all_equal_for,
+ all_equal_default,
+ permutations_iter,
+ permutations_range,
+ permutations_slice,
+);
+criterion_main!(benches);
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/combinations.rs b/rust/hw/char/pl011/vendor/itertools/benches/combinations.rs
new file mode 100644
index 0000000000..e7433a4cb0
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/combinations.rs
@@ -0,0 +1,125 @@
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use itertools::Itertools;
+
+// approximate 100_000 iterations for each combination
+const N1: usize = 100_000;
+const N2: usize = 448;
+const N3: usize = 86;
+const N4: usize = 41;
+const N14: usize = 21;
+
+fn comb_for1(c: &mut Criterion) {
+ c.bench_function("comb for1", move |b| {
+ b.iter(|| {
+ for i in 0..N1 {
+ black_box(vec![i]);
+ }
+ })
+ });
+}
+
+fn comb_for2(c: &mut Criterion) {
+ c.bench_function("comb for2", move |b| {
+ b.iter(|| {
+ for i in 0..N2 {
+ for j in (i + 1)..N2 {
+ black_box(vec![i, j]);
+ }
+ }
+ })
+ });
+}
+
+fn comb_for3(c: &mut Criterion) {
+ c.bench_function("comb for3", move |b| {
+ b.iter(|| {
+ for i in 0..N3 {
+ for j in (i + 1)..N3 {
+ for k in (j + 1)..N3 {
+ black_box(vec![i, j, k]);
+ }
+ }
+ }
+ })
+ });
+}
+
+fn comb_for4(c: &mut Criterion) {
+ c.bench_function("comb for4", move |b| {
+ b.iter(|| {
+ for i in 0..N4 {
+ for j in (i + 1)..N4 {
+ for k in (j + 1)..N4 {
+ for l in (k + 1)..N4 {
+ black_box(vec![i, j, k, l]);
+ }
+ }
+ }
+ }
+ })
+ });
+}
+
+fn comb_c1(c: &mut Criterion) {
+ c.bench_function("comb c1", move |b| {
+ b.iter(|| {
+ for combo in (0..N1).combinations(1) {
+ black_box(combo);
+ }
+ })
+ });
+}
+
+fn comb_c2(c: &mut Criterion) {
+ c.bench_function("comb c2", move |b| {
+ b.iter(|| {
+ for combo in (0..N2).combinations(2) {
+ black_box(combo);
+ }
+ })
+ });
+}
+
+fn comb_c3(c: &mut Criterion) {
+ c.bench_function("comb c3", move |b| {
+ b.iter(|| {
+ for combo in (0..N3).combinations(3) {
+ black_box(combo);
+ }
+ })
+ });
+}
+
+fn comb_c4(c: &mut Criterion) {
+ c.bench_function("comb c4", move |b| {
+ b.iter(|| {
+ for combo in (0..N4).combinations(4) {
+ black_box(combo);
+ }
+ })
+ });
+}
+
+fn comb_c14(c: &mut Criterion) {
+ c.bench_function("comb c14", move |b| {
+ b.iter(|| {
+ for combo in (0..N14).combinations(14) {
+ black_box(combo);
+ }
+ })
+ });
+}
+
+criterion_group!(
+ benches,
+ comb_for1,
+ comb_for2,
+ comb_for3,
+ comb_for4,
+ comb_c1,
+ comb_c2,
+ comb_c3,
+ comb_c4,
+ comb_c14,
+);
+criterion_main!(benches);
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/combinations_with_replacement.rs b/rust/hw/char/pl011/vendor/itertools/benches/combinations_with_replacement.rs
new file mode 100644
index 0000000000..8e4fa3dc3b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/combinations_with_replacement.rs
@@ -0,0 +1,40 @@
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use itertools::Itertools;
+
+fn comb_replacement_n10_k5(c: &mut Criterion) {
+ c.bench_function("comb replacement n10k5", move |b| {
+ b.iter(|| {
+ for i in (0..10).combinations_with_replacement(5) {
+ black_box(i);
+ }
+ })
+ });
+}
+
+fn comb_replacement_n5_k10(c: &mut Criterion) {
+ c.bench_function("comb replacement n5 k10", move |b| {
+ b.iter(|| {
+ for i in (0..5).combinations_with_replacement(10) {
+ black_box(i);
+ }
+ })
+ });
+}
+
+fn comb_replacement_n10_k10(c: &mut Criterion) {
+ c.bench_function("comb replacement n10 k10", move |b| {
+ b.iter(|| {
+ for i in (0..10).combinations_with_replacement(10) {
+ black_box(i);
+ }
+ })
+ });
+}
+
+criterion_group!(
+ benches,
+ comb_replacement_n10_k5,
+ comb_replacement_n5_k10,
+ comb_replacement_n10_k10,
+);
+criterion_main!(benches);
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/extra/mod.rs b/rust/hw/char/pl011/vendor/itertools/benches/extra/mod.rs
new file mode 100644
index 0000000000..52fe5cc3fe
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/extra/mod.rs
@@ -0,0 +1,2 @@
+pub use self::zipslices::ZipSlices;
+mod zipslices;
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/extra/zipslices.rs b/rust/hw/char/pl011/vendor/itertools/benches/extra/zipslices.rs
new file mode 100644
index 0000000000..633be59068
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/extra/zipslices.rs
@@ -0,0 +1,188 @@
+use std::cmp;
+
+// Note: There are different ways to implement ZipSlices.
+// This version performed the best in benchmarks.
+//
+// I also implemented a version with three pointers (tptr, tend, uptr),
+// that mimiced slice::Iter and only checked bounds by using tptr == tend,
+// but that was inferior to this solution.
+
+/// An iterator which iterates two slices simultaneously.
+///
+/// `ZipSlices` acts like a double-ended `.zip()` iterator.
+///
+/// It was intended to be more efficient than `.zip()`, and it was, then
+/// rustc changed how it optimizes so it can not promise improved performance
+/// at this time.
+///
+/// Note that elements past the end of the shortest of the two slices are ignored.
+///
+/// Iterator element type for `ZipSlices<T, U>` is `(T::Item, U::Item)`. For example,
+/// for a `ZipSlices<&'a [A], &'b mut [B]>`, the element type is `(&'a A, &'b mut B)`.
+#[derive(Clone)]
+pub struct ZipSlices<T, U> {
+ t: T,
+ u: U,
+ len: usize,
+ index: usize,
+}
+
+impl<'a, 'b, A, B> ZipSlices<&'a [A], &'b [B]> {
+ /// Create a new `ZipSlices` from slices `a` and `b`.
+ ///
+ /// Act like a double-ended `.zip()` iterator, but more efficiently.
+ ///
+    /// Note that elements past the end of the shortest of the two slices are ignored.
+ #[inline(always)]
+ pub fn new(a: &'a [A], b: &'b [B]) -> Self {
+ let minl = cmp::min(a.len(), b.len());
+ ZipSlices {
+ t: a,
+ u: b,
+ len: minl,
+ index: 0,
+ }
+ }
+}
+
+impl<T, U> ZipSlices<T, U>
+ where T: Slice,
+ U: Slice
+{
+ /// Create a new `ZipSlices` from slices `a` and `b`.
+ ///
+ /// Act like a double-ended `.zip()` iterator, but more efficiently.
+ ///
+    /// Note that elements past the end of the shortest of the two slices are ignored.
+ #[inline(always)]
+ pub fn from_slices(a: T, b: U) -> Self {
+ let minl = cmp::min(a.len(), b.len());
+ ZipSlices {
+ t: a,
+ u: b,
+ len: minl,
+ index: 0,
+ }
+ }
+}
+
+impl<T, U> Iterator for ZipSlices<T, U>
+ where T: Slice,
+ U: Slice
+{
+ type Item = (T::Item, U::Item);
+
+ #[inline(always)]
+ fn next(&mut self) -> Option<Self::Item> {
+ unsafe {
+ if self.index >= self.len {
+ None
+ } else {
+ let i = self.index;
+ self.index += 1;
+ Some((
+ self.t.get_unchecked(i),
+ self.u.get_unchecked(i)))
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = self.len - self.index;
+ (len, Some(len))
+ }
+}
+
+impl<T, U> DoubleEndedIterator for ZipSlices<T, U>
+ where T: Slice,
+ U: Slice
+{
+ #[inline(always)]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ unsafe {
+ if self.index >= self.len {
+ None
+ } else {
+ self.len -= 1;
+ let i = self.len;
+ Some((
+ self.t.get_unchecked(i),
+ self.u.get_unchecked(i)))
+ }
+ }
+ }
+}
+
+impl<T, U> ExactSizeIterator for ZipSlices<T, U>
+ where T: Slice,
+ U: Slice
+{}
+
+unsafe impl<T, U> Slice for ZipSlices<T, U>
+ where T: Slice,
+ U: Slice
+{
+ type Item = (T::Item, U::Item);
+
+ fn len(&self) -> usize {
+ self.len - self.index
+ }
+
+ unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item {
+ (self.t.get_unchecked(i),
+ self.u.get_unchecked(i))
+ }
+}
+
+/// A helper trait to let `ZipSlices` accept both `&[T]` and `&mut [T]`.
+///
+/// Unsafe trait because:
+///
+/// - Implementors must guarantee that `get_unchecked` is valid for all indices `0..len()`.
+pub unsafe trait Slice {
+ /// The type of a reference to the slice's elements
+ type Item;
+ #[doc(hidden)]
+ fn len(&self) -> usize;
+ #[doc(hidden)]
+ unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item;
+}
+
+unsafe impl<'a, T> Slice for &'a [T] {
+ type Item = &'a T;
+ #[inline(always)]
+ fn len(&self) -> usize { (**self).len() }
+ #[inline(always)]
+ unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
+ debug_assert!(i < self.len());
+ (**self).get_unchecked(i)
+ }
+}
+
+unsafe impl<'a, T> Slice for &'a mut [T] {
+ type Item = &'a mut T;
+ #[inline(always)]
+ fn len(&self) -> usize { (**self).len() }
+ #[inline(always)]
+ unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
+ debug_assert!(i < self.len());
+ // override the lifetime constraints of &mut &'a mut [T]
+ (*(*self as *mut [T])).get_unchecked_mut(i)
+ }
+}
+
+#[test]
+fn zipslices() {
+
+ let xs = [1, 2, 3, 4, 5, 6];
+ let ys = [1, 2, 3, 7];
+ ::itertools::assert_equal(ZipSlices::new(&xs, &ys), xs.iter().zip(&ys));
+
+ let xs = [1, 2, 3, 4, 5, 6];
+ let mut ys = [0; 6];
+ for (x, y) in ZipSlices::from_slices(&xs[..], &mut ys[..]) {
+ *y = *x;
+ }
+ ::itertools::assert_equal(&xs, &ys);
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/fold_specialization.rs b/rust/hw/char/pl011/vendor/itertools/benches/fold_specialization.rs
new file mode 100644
index 0000000000..5de4671e98
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/fold_specialization.rs
@@ -0,0 +1,73 @@
+use criterion::{criterion_group, criterion_main, Criterion};
+use itertools::Itertools;
+
+struct Unspecialized<I>(I);
+
+impl<I> Iterator for Unspecialized<I>
+where I: Iterator
+{
+ type Item = I::Item;
+
+ #[inline(always)]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.0.size_hint()
+ }
+}
+
+mod specialization {
+ use super::*;
+
+ pub mod intersperse {
+ use super::*;
+
+ pub fn external(c: &mut Criterion)
+ {
+ let arr = [1; 1024];
+
+ c.bench_function("external", move |b| {
+ b.iter(|| {
+ let mut sum = 0;
+ for &x in arr.iter().intersperse(&0) {
+ sum += x;
+ }
+ sum
+ })
+ });
+ }
+
+ pub fn internal_specialized(c: &mut Criterion)
+ {
+ let arr = [1; 1024];
+
+ c.bench_function("internal specialized", move |b| {
+ b.iter(|| {
+ arr.iter().intersperse(&0).fold(0, |acc, x| acc + x)
+ })
+ });
+ }
+
+ pub fn internal_unspecialized(c: &mut Criterion)
+ {
+ let arr = [1; 1024];
+
+ c.bench_function("internal unspecialized", move |b| {
+ b.iter(|| {
+                    Unspecialized(arr.iter().intersperse(&0)).fold(0, |acc, x| acc + x)
+ })
+ });
+ }
+ }
+}
+
+criterion_group!(
+ benches,
+ specialization::intersperse::external,
+ specialization::intersperse::internal_specialized,
+ specialization::intersperse::internal_unspecialized,
+);
+criterion_main!(benches);
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/powerset.rs b/rust/hw/char/pl011/vendor/itertools/benches/powerset.rs
new file mode 100644
index 0000000000..074550bc44
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/powerset.rs
@@ -0,0 +1,44 @@
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use itertools::Itertools;
+
+// Keep aggregate generated elements the same, regardless of powerset length.
+const TOTAL_ELEMENTS: usize = 1 << 12;
+const fn calc_iters(n: usize) -> usize {
+ TOTAL_ELEMENTS / (1 << n)
+}
+
+fn powerset_n(c: &mut Criterion, n: usize) {
+ let id = format!("powerset {}", n);
+ c.bench_function(id.as_str(), move |b| {
+ b.iter(|| {
+ for _ in 0..calc_iters(n) {
+ for elt in (0..n).powerset() {
+ black_box(elt);
+ }
+ }
+ })
+ });
+}
+
+fn powerset_0(c: &mut Criterion) { powerset_n(c, 0); }
+
+fn powerset_1(c: &mut Criterion) { powerset_n(c, 1); }
+
+fn powerset_2(c: &mut Criterion) { powerset_n(c, 2); }
+
+fn powerset_4(c: &mut Criterion) { powerset_n(c, 4); }
+
+fn powerset_8(c: &mut Criterion) { powerset_n(c, 8); }
+
+fn powerset_12(c: &mut Criterion) { powerset_n(c, 12); }
+
+criterion_group!(
+ benches,
+ powerset_0,
+ powerset_1,
+ powerset_2,
+ powerset_4,
+ powerset_8,
+ powerset_12,
+);
+criterion_main!(benches);
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/tree_fold1.rs b/rust/hw/char/pl011/vendor/itertools/benches/tree_fold1.rs
new file mode 100644
index 0000000000..f12995db8e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/tree_fold1.rs
@@ -0,0 +1,144 @@
+use criterion::{criterion_group, criterion_main, Criterion};
+use itertools::{Itertools, cloned};
+
+trait IterEx : Iterator {
+ // Another efficient implementation against which to compare,
+ // but needs `std` so is less desirable.
+ fn tree_fold1_vec<F>(self, mut f: F) -> Option<Self::Item>
+ where F: FnMut(Self::Item, Self::Item) -> Self::Item,
+ Self: Sized,
+ {
+ let hint = self.size_hint().0;
+        let cap = std::mem::size_of::<usize>() * 8 - hint.leading_zeros() as usize;
+ let mut stack = Vec::with_capacity(cap);
+ self.enumerate().for_each(|(mut i, mut x)| {
+ while (i & 1) != 0 {
+ x = f(stack.pop().unwrap(), x);
+ i >>= 1;
+ }
+ stack.push(x);
+ });
+ stack.into_iter().fold1(f)
+ }
+}
+impl<T:Iterator> IterEx for T {}
+
+macro_rules! def_benchs {
+ ($N:expr,
+ $FUN:ident,
+ $BENCH_NAME:ident,
+ ) => (
+ mod $BENCH_NAME {
+ use super::*;
+
+ pub fn sum(c: &mut Criterion) {
+ let v: Vec<u32> = (0.. $N).collect();
+
+                c.bench_function(&(stringify!($BENCH_NAME).replace('_', " ") + " sum"), move |b| {
+ b.iter(|| {
+ cloned(&v).$FUN(|x, y| x + y)
+ })
+ });
+ }
+
+ pub fn complex_iter(c: &mut Criterion) {
+ let u = (3..).take($N / 2);
+ let v = (5..).take($N / 2);
+ let it = u.chain(v);
+
+                c.bench_function(&(stringify!($BENCH_NAME).replace('_', " ") + " complex iter"), move |b| {
+ b.iter(|| {
+ it.clone().map(|x| x as f32).$FUN(f32::atan2)
+ })
+ });
+ }
+
+ pub fn string_format(c: &mut Criterion) {
+ // This goes quadratic with linear `fold1`, so use a smaller
+ // size to not waste too much time in travis. The allocations
+ // in here are so expensive anyway that it'll still take
+ // way longer per iteration than the other two benchmarks.
+ let v: Vec<u32> = (0.. ($N/4)).collect();
+
+                c.bench_function(&(stringify!($BENCH_NAME).replace('_', " ") + " string format"), move |b| {
+ b.iter(|| {
+                    cloned(&v).map(|x| x.to_string()).$FUN(|x, y| format!("{} + {}", x, y))
+ })
+ });
+ }
+ }
+
+ criterion_group!(
+ $BENCH_NAME,
+ $BENCH_NAME::sum,
+ $BENCH_NAME::complex_iter,
+ $BENCH_NAME::string_format,
+ );
+ )
+}
+
+def_benchs!{
+ 10_000,
+ fold1,
+ fold1_10k,
+}
+
+def_benchs!{
+ 10_000,
+ tree_fold1,
+ tree_fold1_stack_10k,
+}
+
+def_benchs!{
+ 10_000,
+ tree_fold1_vec,
+ tree_fold1_vec_10k,
+}
+
+def_benchs!{
+ 100,
+ fold1,
+ fold1_100,
+}
+
+def_benchs!{
+ 100,
+ tree_fold1,
+ tree_fold1_stack_100,
+}
+
+def_benchs!{
+ 100,
+ tree_fold1_vec,
+ tree_fold1_vec_100,
+}
+
+def_benchs!{
+ 8,
+ fold1,
+ fold1_08,
+}
+
+def_benchs!{
+ 8,
+ tree_fold1,
+ tree_fold1_stack_08,
+}
+
+def_benchs!{
+ 8,
+ tree_fold1_vec,
+ tree_fold1_vec_08,
+}
+
+criterion_main!(
+ fold1_10k,
+ tree_fold1_stack_10k,
+ tree_fold1_vec_10k,
+ fold1_100,
+ tree_fold1_stack_100,
+ tree_fold1_vec_100,
+ fold1_08,
+ tree_fold1_stack_08,
+ tree_fold1_vec_08,
+);
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/tuple_combinations.rs b/rust/hw/char/pl011/vendor/itertools/benches/tuple_combinations.rs
new file mode 100644
index 0000000000..4e26b282e8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/tuple_combinations.rs
@@ -0,0 +1,113 @@
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use itertools::Itertools;
+
+// approximate 100_000 iterations for each combination
+const N1: usize = 100_000;
+const N2: usize = 448;
+const N3: usize = 86;
+const N4: usize = 41;
+
+fn tuple_comb_for1(c: &mut Criterion) {
+ c.bench_function("tuple comb for1", move |b| {
+ b.iter(|| {
+ for i in 0..N1 {
+ black_box(i);
+ }
+ })
+ });
+}
+
+fn tuple_comb_for2(c: &mut Criterion) {
+ c.bench_function("tuple comb for2", move |b| {
+ b.iter(|| {
+ for i in 0..N2 {
+ for j in (i + 1)..N2 {
+ black_box(i + j);
+ }
+ }
+ })
+ });
+}
+
+fn tuple_comb_for3(c: &mut Criterion) {
+ c.bench_function("tuple comb for3", move |b| {
+ b.iter(|| {
+ for i in 0..N3 {
+ for j in (i + 1)..N3 {
+ for k in (j + 1)..N3 {
+ black_box(i + j + k);
+ }
+ }
+ }
+ })
+ });
+}
+
+fn tuple_comb_for4(c: &mut Criterion) {
+ c.bench_function("tuple comb for4", move |b| {
+ b.iter(|| {
+ for i in 0..N4 {
+ for j in (i + 1)..N4 {
+ for k in (j + 1)..N4 {
+ for l in (k + 1)..N4 {
+ black_box(i + j + k + l);
+ }
+ }
+ }
+ }
+ })
+ });
+}
+
+fn tuple_comb_c1(c: &mut Criterion) {
+ c.bench_function("tuple comb c1", move |b| {
+ b.iter(|| {
+ for (i,) in (0..N1).tuple_combinations() {
+ black_box(i);
+ }
+ })
+ });
+}
+
+fn tuple_comb_c2(c: &mut Criterion) {
+ c.bench_function("tuple comb c2", move |b| {
+ b.iter(|| {
+ for (i, j) in (0..N2).tuple_combinations() {
+ black_box(i + j);
+ }
+ })
+ });
+}
+
+fn tuple_comb_c3(c: &mut Criterion) {
+ c.bench_function("tuple comb c3", move |b| {
+ b.iter(|| {
+ for (i, j, k) in (0..N3).tuple_combinations() {
+ black_box(i + j + k);
+ }
+ })
+ });
+}
+
+fn tuple_comb_c4(c: &mut Criterion) {
+ c.bench_function("tuple comb c4", move |b| {
+ b.iter(|| {
+ for (i, j, k, l) in (0..N4).tuple_combinations() {
+ black_box(i + j + k + l);
+ }
+ })
+ });
+}
+
+criterion_group!(
+ benches,
+ tuple_comb_for1,
+ tuple_comb_for2,
+ tuple_comb_for3,
+ tuple_comb_for4,
+ tuple_comb_c1,
+ tuple_comb_c2,
+ tuple_comb_c3,
+ tuple_comb_c4,
+);
+criterion_main!(benches);
diff --git a/rust/hw/char/pl011/vendor/itertools/benches/tuples.rs b/rust/hw/char/pl011/vendor/itertools/benches/tuples.rs
new file mode 100644
index 0000000000..ea50aaaee1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/benches/tuples.rs
@@ -0,0 +1,213 @@
+use criterion::{criterion_group, criterion_main, Criterion};
+use itertools::Itertools;
+
+fn s1(a: u32) -> u32 {
+ a
+}
+
+fn s2(a: u32, b: u32) -> u32 {
+ a + b
+}
+
+fn s3(a: u32, b: u32, c: u32) -> u32 {
+ a + b + c
+}
+
+fn s4(a: u32, b: u32, c: u32, d: u32) -> u32 {
+ a + b + c + d
+}
+
+fn sum_s1(s: &[u32]) -> u32 {
+ s1(s[0])
+}
+
+fn sum_s2(s: &[u32]) -> u32 {
+ s2(s[0], s[1])
+}
+
+fn sum_s3(s: &[u32]) -> u32 {
+ s3(s[0], s[1], s[2])
+}
+
+fn sum_s4(s: &[u32]) -> u32 {
+ s4(s[0], s[1], s[2], s[3])
+}
+
+fn sum_t1(s: &(&u32, )) -> u32 {
+ s1(*s.0)
+}
+
+fn sum_t2(s: &(&u32, &u32)) -> u32 {
+ s2(*s.0, *s.1)
+}
+
+fn sum_t3(s: &(&u32, &u32, &u32)) -> u32 {
+ s3(*s.0, *s.1, *s.2)
+}
+
+fn sum_t4(s: &(&u32, &u32, &u32, &u32)) -> u32 {
+ s4(*s.0, *s.1, *s.2, *s.3)
+}
+
+macro_rules! def_benchs {
+ ($N:expr;
+ $BENCH_GROUP:ident,
+ $TUPLE_FUN:ident,
+ $TUPLES:ident,
+ $TUPLE_WINDOWS:ident;
+ $SLICE_FUN:ident,
+ $CHUNKS:ident,
+ $WINDOWS:ident;
+ $FOR_CHUNKS:ident,
+ $FOR_WINDOWS:ident
+ ) => (
+ fn $FOR_CHUNKS(c: &mut Criterion) {
+ let v: Vec<u32> = (0.. $N * 1_000).collect();
+ let mut s = 0;
+            c.bench_function(&stringify!($FOR_CHUNKS).replace('_', " "), move |b| {
+ b.iter(|| {
+ let mut j = 0;
+ for _ in 0..1_000 {
+ s += $SLICE_FUN(&v[j..(j + $N)]);
+ j += $N;
+ }
+ s
+ })
+ });
+ }
+
+ fn $FOR_WINDOWS(c: &mut Criterion) {
+ let v: Vec<u32> = (0..1_000).collect();
+ let mut s = 0;
+            c.bench_function(&stringify!($FOR_WINDOWS).replace('_', " "), move |b| {
+ b.iter(|| {
+ for i in 0..(1_000 - $N) {
+ s += $SLICE_FUN(&v[i..(i + $N)]);
+ }
+ s
+ })
+ });
+ }
+
+ fn $TUPLES(c: &mut Criterion) {
+ let v: Vec<u32> = (0.. $N * 1_000).collect();
+ let mut s = 0;
+ c.bench_function(&stringify!($TUPLES).replace('_', " "), move |b| {
+ b.iter(|| {
+ for x in v.iter().tuples() {
+ s += $TUPLE_FUN(&x);
+ }
+ s
+ })
+ });
+ }
+
+ fn $CHUNKS(c: &mut Criterion) {
+ let v: Vec<u32> = (0.. $N * 1_000).collect();
+ let mut s = 0;
+ c.bench_function(&stringify!($CHUNKS).replace('_', " "), move |b| {
+ b.iter(|| {
+ for x in v.chunks($N) {
+ s += $SLICE_FUN(x);
+ }
+ s
+ })
+ });
+ }
+
+ fn $TUPLE_WINDOWS(c: &mut Criterion) {
+ let v: Vec<u32> = (0..1_000).collect();
+ let mut s = 0;
+            c.bench_function(&stringify!($TUPLE_WINDOWS).replace('_', " "), move |b| {
+ b.iter(|| {
+ for x in v.iter().tuple_windows() {
+ s += $TUPLE_FUN(&x);
+ }
+ s
+ })
+ });
+ }
+
+ fn $WINDOWS(c: &mut Criterion) {
+ let v: Vec<u32> = (0..1_000).collect();
+ let mut s = 0;
+            c.bench_function(&stringify!($WINDOWS).replace('_', " "), move |b| {
+ b.iter(|| {
+ for x in v.windows($N) {
+ s += $SLICE_FUN(x);
+ }
+ s
+ })
+ });
+ }
+
+ criterion_group!(
+ $BENCH_GROUP,
+ $FOR_CHUNKS,
+ $FOR_WINDOWS,
+ $TUPLES,
+ $CHUNKS,
+ $TUPLE_WINDOWS,
+ $WINDOWS,
+ );
+ )
+}
+
+def_benchs!{
+ 1;
+ benches_1,
+ sum_t1,
+ tuple_chunks_1,
+ tuple_windows_1;
+ sum_s1,
+ slice_chunks_1,
+ slice_windows_1;
+ for_chunks_1,
+ for_windows_1
+}
+
+def_benchs!{
+ 2;
+ benches_2,
+ sum_t2,
+ tuple_chunks_2,
+ tuple_windows_2;
+ sum_s2,
+ slice_chunks_2,
+ slice_windows_2;
+ for_chunks_2,
+ for_windows_2
+}
+
+def_benchs!{
+ 3;
+ benches_3,
+ sum_t3,
+ tuple_chunks_3,
+ tuple_windows_3;
+ sum_s3,
+ slice_chunks_3,
+ slice_windows_3;
+ for_chunks_3,
+ for_windows_3
+}
+
+def_benchs!{
+ 4;
+ benches_4,
+ sum_t4,
+ tuple_chunks_4,
+ tuple_windows_4;
+ sum_s4,
+ slice_chunks_4,
+ slice_windows_4;
+ for_chunks_4,
+ for_windows_4
+}
+
+criterion_main!(
+ benches_1,
+ benches_2,
+ benches_3,
+ benches_4,
+);
diff --git a/rust/hw/char/pl011/vendor/itertools/examples/iris.data b/rust/hw/char/pl011/vendor/itertools/examples/iris.data
new file mode 100644
index 0000000000..a3490e0e07
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/examples/iris.data
@@ -0,0 +1,150 @@
+5.1,3.5,1.4,0.2,Iris-setosa
+4.9,3.0,1.4,0.2,Iris-setosa
+4.7,3.2,1.3,0.2,Iris-setosa
+4.6,3.1,1.5,0.2,Iris-setosa
+5.0,3.6,1.4,0.2,Iris-setosa
+5.4,3.9,1.7,0.4,Iris-setosa
+4.6,3.4,1.4,0.3,Iris-setosa
+5.0,3.4,1.5,0.2,Iris-setosa
+4.4,2.9,1.4,0.2,Iris-setosa
+4.9,3.1,1.5,0.1,Iris-setosa
+5.4,3.7,1.5,0.2,Iris-setosa
+4.8,3.4,1.6,0.2,Iris-setosa
+4.8,3.0,1.4,0.1,Iris-setosa
+4.3,3.0,1.1,0.1,Iris-setosa
+5.8,4.0,1.2,0.2,Iris-setosa
+5.7,4.4,1.5,0.4,Iris-setosa
+5.4,3.9,1.3,0.4,Iris-setosa
+5.1,3.5,1.4,0.3,Iris-setosa
+5.7,3.8,1.7,0.3,Iris-setosa
+5.1,3.8,1.5,0.3,Iris-setosa
+5.4,3.4,1.7,0.2,Iris-setosa
+5.1,3.7,1.5,0.4,Iris-setosa
+4.6,3.6,1.0,0.2,Iris-setosa
+5.1,3.3,1.7,0.5,Iris-setosa
+4.8,3.4,1.9,0.2,Iris-setosa
+5.0,3.0,1.6,0.2,Iris-setosa
+5.0,3.4,1.6,0.4,Iris-setosa
+5.2,3.5,1.5,0.2,Iris-setosa
+5.2,3.4,1.4,0.2,Iris-setosa
+4.7,3.2,1.6,0.2,Iris-setosa
+4.8,3.1,1.6,0.2,Iris-setosa
+5.4,3.4,1.5,0.4,Iris-setosa
+5.2,4.1,1.5,0.1,Iris-setosa
+5.5,4.2,1.4,0.2,Iris-setosa
+4.9,3.1,1.5,0.1,Iris-setosa
+5.0,3.2,1.2,0.2,Iris-setosa
+5.5,3.5,1.3,0.2,Iris-setosa
+4.9,3.1,1.5,0.1,Iris-setosa
+4.4,3.0,1.3,0.2,Iris-setosa
+5.1,3.4,1.5,0.2,Iris-setosa
+5.0,3.5,1.3,0.3,Iris-setosa
+4.5,2.3,1.3,0.3,Iris-setosa
+4.4,3.2,1.3,0.2,Iris-setosa
+5.0,3.5,1.6,0.6,Iris-setosa
+5.1,3.8,1.9,0.4,Iris-setosa
+4.8,3.0,1.4,0.3,Iris-setosa
+5.1,3.8,1.6,0.2,Iris-setosa
+4.6,3.2,1.4,0.2,Iris-setosa
+5.3,3.7,1.5,0.2,Iris-setosa
+5.0,3.3,1.4,0.2,Iris-setosa
+7.0,3.2,4.7,1.4,Iris-versicolor
+6.4,3.2,4.5,1.5,Iris-versicolor
+6.9,3.1,4.9,1.5,Iris-versicolor
+5.5,2.3,4.0,1.3,Iris-versicolor
+6.5,2.8,4.6,1.5,Iris-versicolor
+5.7,2.8,4.5,1.3,Iris-versicolor
+6.3,3.3,4.7,1.6,Iris-versicolor
+4.9,2.4,3.3,1.0,Iris-versicolor
+6.6,2.9,4.6,1.3,Iris-versicolor
+5.2,2.7,3.9,1.4,Iris-versicolor
+5.0,2.0,3.5,1.0,Iris-versicolor
+5.9,3.0,4.2,1.5,Iris-versicolor
+6.0,2.2,4.0,1.0,Iris-versicolor
+6.1,2.9,4.7,1.4,Iris-versicolor
+5.6,2.9,3.6,1.3,Iris-versicolor
+6.7,3.1,4.4,1.4,Iris-versicolor
+5.6,3.0,4.5,1.5,Iris-versicolor
+5.8,2.7,4.1,1.0,Iris-versicolor
+6.2,2.2,4.5,1.5,Iris-versicolor
+5.6,2.5,3.9,1.1,Iris-versicolor
+5.9,3.2,4.8,1.8,Iris-versicolor
+6.1,2.8,4.0,1.3,Iris-versicolor
+6.3,2.5,4.9,1.5,Iris-versicolor
+6.1,2.8,4.7,1.2,Iris-versicolor
+6.4,2.9,4.3,1.3,Iris-versicolor
+6.6,3.0,4.4,1.4,Iris-versicolor
+6.8,2.8,4.8,1.4,Iris-versicolor
+6.7,3.0,5.0,1.7,Iris-versicolor
+6.0,2.9,4.5,1.5,Iris-versicolor
+5.7,2.6,3.5,1.0,Iris-versicolor
+5.5,2.4,3.8,1.1,Iris-versicolor
+5.5,2.4,3.7,1.0,Iris-versicolor
+5.8,2.7,3.9,1.2,Iris-versicolor
+6.0,2.7,5.1,1.6,Iris-versicolor
+5.4,3.0,4.5,1.5,Iris-versicolor
+6.0,3.4,4.5,1.6,Iris-versicolor
+6.7,3.1,4.7,1.5,Iris-versicolor
+6.3,2.3,4.4,1.3,Iris-versicolor
+5.6,3.0,4.1,1.3,Iris-versicolor
+5.5,2.5,4.0,1.3,Iris-versicolor
+5.5,2.6,4.4,1.2,Iris-versicolor
+6.1,3.0,4.6,1.4,Iris-versicolor
+5.8,2.6,4.0,1.2,Iris-versicolor
+5.0,2.3,3.3,1.0,Iris-versicolor
+5.6,2.7,4.2,1.3,Iris-versicolor
+5.7,3.0,4.2,1.2,Iris-versicolor
+5.7,2.9,4.2,1.3,Iris-versicolor
+6.2,2.9,4.3,1.3,Iris-versicolor
+5.1,2.5,3.0,1.1,Iris-versicolor
+5.7,2.8,4.1,1.3,Iris-versicolor
+6.3,3.3,6.0,2.5,Iris-virginica
+5.8,2.7,5.1,1.9,Iris-virginica
+7.1,3.0,5.9,2.1,Iris-virginica
+6.3,2.9,5.6,1.8,Iris-virginica
+6.5,3.0,5.8,2.2,Iris-virginica
+7.6,3.0,6.6,2.1,Iris-virginica
+4.9,2.5,4.5,1.7,Iris-virginica
+7.3,2.9,6.3,1.8,Iris-virginica
+6.7,2.5,5.8,1.8,Iris-virginica
+7.2,3.6,6.1,2.5,Iris-virginica
+6.5,3.2,5.1,2.0,Iris-virginica
+6.4,2.7,5.3,1.9,Iris-virginica
+6.8,3.0,5.5,2.1,Iris-virginica
+5.7,2.5,5.0,2.0,Iris-virginica
+5.8,2.8,5.1,2.4,Iris-virginica
+6.4,3.2,5.3,2.3,Iris-virginica
+6.5,3.0,5.5,1.8,Iris-virginica
+7.7,3.8,6.7,2.2,Iris-virginica
+7.7,2.6,6.9,2.3,Iris-virginica
+6.0,2.2,5.0,1.5,Iris-virginica
+6.9,3.2,5.7,2.3,Iris-virginica
+5.6,2.8,4.9,2.0,Iris-virginica
+7.7,2.8,6.7,2.0,Iris-virginica
+6.3,2.7,4.9,1.8,Iris-virginica
+6.7,3.3,5.7,2.1,Iris-virginica
+7.2,3.2,6.0,1.8,Iris-virginica
+6.2,2.8,4.8,1.8,Iris-virginica
+6.1,3.0,4.9,1.8,Iris-virginica
+6.4,2.8,5.6,2.1,Iris-virginica
+7.2,3.0,5.8,1.6,Iris-virginica
+7.4,2.8,6.1,1.9,Iris-virginica
+7.9,3.8,6.4,2.0,Iris-virginica
+6.4,2.8,5.6,2.2,Iris-virginica
+6.3,2.8,5.1,1.5,Iris-virginica
+6.1,2.6,5.6,1.4,Iris-virginica
+7.7,3.0,6.1,2.3,Iris-virginica
+6.3,3.4,5.6,2.4,Iris-virginica
+6.4,3.1,5.5,1.8,Iris-virginica
+6.0,3.0,4.8,1.8,Iris-virginica
+6.9,3.1,5.4,2.1,Iris-virginica
+6.7,3.1,5.6,2.4,Iris-virginica
+6.9,3.1,5.1,2.3,Iris-virginica
+5.8,2.7,5.1,1.9,Iris-virginica
+6.8,3.2,5.9,2.3,Iris-virginica
+6.7,3.3,5.7,2.5,Iris-virginica
+6.7,3.0,5.2,2.3,Iris-virginica
+6.3,2.5,5.0,1.9,Iris-virginica
+6.5,3.0,5.2,2.0,Iris-virginica
+6.2,3.4,5.4,2.3,Iris-virginica
+5.9,3.0,5.1,1.8,Iris-virginica
diff --git a/rust/hw/char/pl011/vendor/itertools/examples/iris.rs b/rust/hw/char/pl011/vendor/itertools/examples/iris.rs
new file mode 100644
index 0000000000..987d9e9cba
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/examples/iris.rs
@@ -0,0 +1,137 @@
+///
+/// This example parses, sorts and groups the iris dataset
+/// and does some simple manipulations.
+///
+/// Iterators and itertools functionality are used throughout.
+
+use itertools::Itertools;
+use std::collections::HashMap;
+use std::iter::repeat;
+use std::num::ParseFloatError;
+use std::str::FromStr;
+
+static DATA: &'static str = include_str!("iris.data");
+
+#[derive(Clone, Debug)]
+struct Iris {
+ name: String,
+ data: [f32; 4],
+}
+
+#[derive(Clone, Debug)]
+enum ParseError {
+ Numeric(ParseFloatError),
+ Other(&'static str),
+}
+
+impl From<ParseFloatError> for ParseError {
+ fn from(err: ParseFloatError) -> Self {
+ ParseError::Numeric(err)
+ }
+}
+
+/// Parse an Iris from a comma-separated line
+impl FromStr for Iris {
+ type Err = ParseError;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let mut iris = Iris { name: "".into(), data: [0.; 4] };
+ let mut parts = s.split(",").map(str::trim);
+
+ // using Iterator::by_ref()
+ for (index, part) in parts.by_ref().take(4).enumerate() {
+ iris.data[index] = part.parse::<f32>()?;
+ }
+ if let Some(name) = parts.next() {
+ iris.name = name.into();
+ } else {
+ return Err(ParseError::Other("Missing name"))
+ }
+ Ok(iris)
+ }
+}
+
+fn main() {
+    // using Itertools::fold_ok to create the result of parsing
+ let irises = DATA.lines()
+ .map(str::parse)
+ .fold_ok(Vec::new(), |mut v, iris: Iris| {
+ v.push(iris);
+ v
+ });
+ let mut irises = match irises {
+ Err(e) => {
+ println!("Error parsing: {:?}", e);
+ std::process::exit(1);
+ }
+ Ok(data) => data,
+ };
+
+ // Sort them and group them
+ irises.sort_by(|a, b| Ord::cmp(&a.name, &b.name));
+
+ // using Iterator::cycle()
+ let mut plot_symbols = "+ox".chars().cycle();
+ let mut symbolmap = HashMap::new();
+
+ // using Itertools::group_by
+    for (species, species_group) in &irises.iter().group_by(|iris| &iris.name) {
+ // assign a plot symbol
+ symbolmap.entry(species).or_insert_with(|| {
+ plot_symbols.next().unwrap()
+ });
+ println!("{} (symbol={})", species, symbolmap[species]);
+
+ for iris in species_group {
+ // using Itertools::format for lazy formatting
+ println!("{:>3.1}", iris.data.iter().format(", "));
+ }
+
+ }
+
+ // Look at all combinations of the four columns
+ //
+ // See https://en.wikipedia.org/wiki/Iris_flower_data_set
+ //
+ let n = 30; // plot size
+ let mut plot = vec![' '; n * n];
+
+ // using Itertools::tuple_combinations
+ for (a, b) in (0..4).tuple_combinations() {
+ println!("Column {} vs {}:", a, b);
+
+ // Clear plot
+ //
+ // using std::iter::repeat;
+ // using Itertools::set_from
+ plot.iter_mut().set_from(repeat(' '));
+
+ // using Itertools::minmax
+ let min_max = |data: &[Iris], col| {
+ data.iter()
+ .map(|iris| iris.data[col])
+ .minmax()
+ .into_option()
+ .expect("Can't find min/max of empty iterator")
+ };
+ let (min_x, max_x) = min_max(&irises, a);
+ let (min_y, max_y) = min_max(&irises, b);
+
+ // Plot the data points
+        let round_to_grid = |x, min, max| ((x - min) / (max - min) * ((n - 1) as f32)) as usize;
+ let flip = |ix| n - 1 - ix; // reverse axis direction
+
+ for iris in &irises {
+ let ix = round_to_grid(iris.data[a], min_x, max_x);
+ let iy = flip(round_to_grid(iris.data[b], min_y, max_y));
+ plot[n * iy + ix] = symbolmap[&iris.name];
+ }
+
+ // render plot
+ //
+ // using Itertools::join
+ for line in plot.chunks(n) {
+ println!("{}", line.iter().join(" "))
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/meson.build b/rust/hw/char/pl011/vendor/itertools/meson.build
new file mode 100644
index 0000000000..3fb976c06d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/meson.build
@@ -0,0 +1,18 @@
+_itertools_rs = static_library(
+ 'itertools',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2018',
+ '--cfg', 'feature="use_std"',
+ '--cfg', 'feature="use_alloc"',
+ ],
+ dependencies: [
+ dep_either,
+ ],
+)
+
+dep_itertools = declare_dependency(
+ link_with: _itertools_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/itertools/src/adaptors/coalesce.rs b/rust/hw/char/pl011/vendor/itertools/src/adaptors/coalesce.rs
new file mode 100644
index 0000000000..3df7cc5823
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/adaptors/coalesce.rs
@@ -0,0 +1,235 @@
+use std::fmt;
+use std::iter::FusedIterator;
+
+use crate::size_hint;
+
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct CoalesceBy<I, F, T>
+where
+ I: Iterator,
+{
+ iter: I,
+ last: Option<T>,
+ f: F,
+}
+
+impl<I: Clone, F: Clone, T: Clone> Clone for CoalesceBy<I, F, T>
+where
+ I: Iterator,
+{
+ clone_fields!(last, iter, f);
+}
+
+impl<I, F, T> fmt::Debug for CoalesceBy<I, F, T>
+where
+ I: Iterator + fmt::Debug,
+ T: fmt::Debug,
+{
+ debug_fmt_fields!(CoalesceBy, iter);
+}
+
+pub trait CoalescePredicate<Item, T> {
+ fn coalesce_pair(&mut self, t: T, item: Item) -> Result<T, (T, T)>;
+}
+
+impl<I, F, T> Iterator for CoalesceBy<I, F, T>
+where
+ I: Iterator,
+ F: CoalescePredicate<I::Item, T>,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ // this fuses the iterator
+ let last = self.last.take()?;
+
+ let self_last = &mut self.last;
+ let self_f = &mut self.f;
+ Some(
+ self.iter
+                .try_fold(last, |last, next| match self_f.coalesce_pair(last, next) {
+ Ok(joined) => Ok(joined),
+ Err((last_, next_)) => {
+ *self_last = Some(next_);
+ Err(last_)
+ }
+ })
+ .unwrap_or_else(|x| x),
+ )
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+        let (low, hi) = size_hint::add_scalar(self.iter.size_hint(), self.last.is_some() as usize);
+ ((low > 0) as usize, hi)
+ }
+
+ fn fold<Acc, FnAcc>(self, acc: Acc, mut fn_acc: FnAcc) -> Acc
+ where
+ FnAcc: FnMut(Acc, Self::Item) -> Acc,
+ {
+ if let Some(last) = self.last {
+ let mut f = self.f;
+ let (last, acc) = self.iter.fold((last, acc), |(last, acc), elt| {
+ match f.coalesce_pair(last, elt) {
+ Ok(joined) => (joined, acc),
+ Err((last_, next_)) => (next_, fn_acc(acc, last_)),
+ }
+ });
+ fn_acc(acc, last)
+ } else {
+ acc
+ }
+ }
+}
+
+impl<I: Iterator, F: CoalescePredicate<I::Item, T>, T> FusedIterator for CoalesceBy<I, F, T> {}
+
+/// An iterator adaptor that may join together adjacent elements.
+///
+/// See [`.coalesce()`](crate::Itertools::coalesce) for more information.
+pub type Coalesce<I, F> = CoalesceBy<I, F, <I as Iterator>::Item>;
+
+impl<F, Item, T> CoalescePredicate<Item, T> for F
+where
+ F: FnMut(T, Item) -> Result<T, (T, T)>,
+{
+ fn coalesce_pair(&mut self, t: T, item: Item) -> Result<T, (T, T)> {
+ self(t, item)
+ }
+}
+
+/// Create a new `Coalesce`.
+pub fn coalesce<I, F>(mut iter: I, f: F) -> Coalesce<I, F>
+where
+ I: Iterator,
+{
+ Coalesce {
+ last: iter.next(),
+ iter,
+ f,
+ }
+}
+
+/// An iterator adaptor that removes repeated duplicates, determining equality using a comparison function.
+///
+/// See [`.dedup_by()`](crate::Itertools::dedup_by) or [`.dedup()`](crate::Itertools::dedup) for more information.
+pub type DedupBy<I, Pred> = CoalesceBy<I, DedupPred2CoalescePred<Pred>, <I as Iterator>::Item>;
+
+#[derive(Clone)]
+pub struct DedupPred2CoalescePred<DP>(DP);
+
+impl<DP> fmt::Debug for DedupPred2CoalescePred<DP> {
+ debug_fmt_fields!(DedupPred2CoalescePred,);
+}
+
+pub trait DedupPredicate<T> {
+ // TODO replace by Fn(&T, &T)->bool once Rust supports it
+ fn dedup_pair(&mut self, a: &T, b: &T) -> bool;
+}
+
+impl<DP, T> CoalescePredicate<T, T> for DedupPred2CoalescePred<DP>
+where
+ DP: DedupPredicate<T>,
+{
+ fn coalesce_pair(&mut self, t: T, item: T) -> Result<T, (T, T)> {
+ if self.0.dedup_pair(&t, &item) {
+ Ok(t)
+ } else {
+ Err((t, item))
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct DedupEq;
+
+impl<T: PartialEq> DedupPredicate<T> for DedupEq {
+ fn dedup_pair(&mut self, a: &T, b: &T) -> bool {
+ a == b
+ }
+}
+
+impl<T, F: FnMut(&T, &T) -> bool> DedupPredicate<T> for F {
+ fn dedup_pair(&mut self, a: &T, b: &T) -> bool {
+ self(a, b)
+ }
+}
+
+/// Create a new `DedupBy`.
+pub fn dedup_by<I, Pred>(mut iter: I, dedup_pred: Pred) -> DedupBy<I, Pred>
+where
+ I: Iterator,
+{
+ DedupBy {
+ last: iter.next(),
+ iter,
+ f: DedupPred2CoalescePred(dedup_pred),
+ }
+}
+
+/// An iterator adaptor that removes repeated duplicates.
+///
+/// See [`.dedup()`](crate::Itertools::dedup) for more information.
+pub type Dedup<I> = DedupBy<I, DedupEq>;
+
+/// Create a new `Dedup`.
+pub fn dedup<I>(iter: I) -> Dedup<I>
+where
+ I: Iterator,
+{
+ dedup_by(iter, DedupEq)
+}
+
+/// An iterator adaptor that removes repeated duplicates, while keeping a count of how many
+/// repeated elements were present. This will determine equality using a comparison function.
+///
+/// See [`.dedup_by_with_count()`](crate::Itertools::dedup_by_with_count) or
+/// [`.dedup_with_count()`](crate::Itertools::dedup_with_count) for more information.
+pub type DedupByWithCount<I, Pred> =
+    CoalesceBy<I, DedupPredWithCount2CoalescePred<Pred>, (usize, <I as Iterator>::Item)>;
+
+#[derive(Clone, Debug)]
+pub struct DedupPredWithCount2CoalescePred<DP>(DP);
+
+impl<DP, T> CoalescePredicate<T, (usize, T)> for DedupPredWithCount2CoalescePred<DP>
+where
+ DP: DedupPredicate<T>,
+{
+ fn coalesce_pair(
+ &mut self,
+ (c, t): (usize, T),
+ item: T,
+ ) -> Result<(usize, T), ((usize, T), (usize, T))> {
+ if self.0.dedup_pair(&t, &item) {
+ Ok((c + 1, t))
+ } else {
+ Err(((c, t), (1, item)))
+ }
+ }
+}
+
+/// An iterator adaptor that removes repeated duplicates, while keeping a count of how many
+/// repeated elements were present.
+///
+/// See [`.dedup_with_count()`](crate::Itertools::dedup_with_count) for more information.
+pub type DedupWithCount<I> = DedupByWithCount<I, DedupEq>;
+
+/// Create a new `DedupByWithCount`.
+pub fn dedup_by_with_count<I, Pred>(mut iter: I, dedup_pred: Pred) -> DedupByWithCount<I, Pred>
+where
+ I: Iterator,
+{
+ DedupByWithCount {
+ last: iter.next().map(|v| (1, v)),
+ iter,
+ f: DedupPredWithCount2CoalescePred(dedup_pred),
+ }
+}
+
+/// Create a new `DedupWithCount`.
+pub fn dedup_with_count<I>(iter: I) -> DedupWithCount<I>
+where
+ I: Iterator,
+{
+ dedup_by_with_count(iter, DedupEq)
+}
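(Editor's note, not part of the vendored sources: a minimal sketch of how the adaptors defined in coalesce.rs are reached through the crate's public `Itertools` trait; the sample values are invented for illustration.)

    use itertools::Itertools;

    // coalesce() merges adjacent elements while the closure returns Ok; on Err
    // the left value is emitted and merging restarts from the right value.
    let runs: Vec<f64> = vec![-1.5, -0.5, 2.0, 3.0, -4.0]
        .into_iter()
        .coalesce(|a, b| if (a >= 0.0) == (b >= 0.0) { Ok(a + b) } else { Err((a, b)) })
        .collect();
    assert_eq!(runs, vec![-2.0, 5.0, -4.0]);

    // dedup() drops consecutive duplicates; dedup_with_count() also reports
    // the length of each run.
    let data = [1, 1, 2, 3, 3, 3];
    assert_eq!(data.iter().dedup().collect::<Vec<_>>(), vec![&1, &2, &3]);
    assert_eq!(data.iter().dedup_with_count().collect::<Vec<_>>(),
               vec![(2, &1), (1, &2), (3, &3)]);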
diff --git a/rust/hw/char/pl011/vendor/itertools/src/adaptors/map.rs b/rust/hw/char/pl011/vendor/itertools/src/adaptors/map.rs
new file mode 100644
index 0000000000..cf5e5a00d5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/adaptors/map.rs
@@ -0,0 +1,124 @@
+use std::iter::FromIterator;
+use std::marker::PhantomData;
+
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct MapSpecialCase<I, F> {
+ iter: I,
+ f: F,
+}
+
+pub trait MapSpecialCaseFn<T> {
+ type Out;
+ fn call(&mut self, t: T) -> Self::Out;
+}
+
+impl<I, R> Iterator for MapSpecialCase<I, R>
+where
+ I: Iterator,
+ R: MapSpecialCaseFn<I::Item>,
+{
+ type Item = R::Out;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.next().map(|i| self.f.call(i))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+
+ fn fold<Acc, Fold>(self, init: Acc, mut fold_f: Fold) -> Acc
+ where
+ Fold: FnMut(Acc, Self::Item) -> Acc,
+ {
+ let mut f = self.f;
+ self.iter.fold(init, move |acc, v| fold_f(acc, f.call(v)))
+ }
+
+ fn collect<C>(self) -> C
+ where
+ C: FromIterator<Self::Item>,
+ {
+ let mut f = self.f;
+ self.iter.map(move |v| f.call(v)).collect()
+ }
+}
+
+impl<I, R> DoubleEndedIterator for MapSpecialCase<I, R>
+where
+ I: DoubleEndedIterator,
+ R: MapSpecialCaseFn<I::Item>,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.iter.next_back().map(|i| self.f.call(i))
+ }
+}
+
+impl<I, R> ExactSizeIterator for MapSpecialCase<I, R>
+where
+ I: ExactSizeIterator,
+ R: MapSpecialCaseFn<I::Item>,
+{
+}
+
+/// An iterator adapter to apply a transformation within a nested `Result::Ok`.
+///
+/// See [`.map_ok()`](crate::Itertools::map_ok) for more information.
+pub type MapOk<I, F> = MapSpecialCase<I, MapSpecialCaseFnOk<F>>;
+
+/// See [`MapOk`].
+#[deprecated(note = "Use MapOk instead", since = "0.10.0")]
+pub type MapResults<I, F> = MapOk<I, F>;
+
+impl<F, T, U, E> MapSpecialCaseFn<Result<T, E>> for MapSpecialCaseFnOk<F>
+where
+ F: FnMut(T) -> U,
+{
+ type Out = Result<U, E>;
+ fn call(&mut self, t: Result<T, E>) -> Self::Out {
+ t.map(|v| self.0(v))
+ }
+}
+
+#[derive(Clone)]
+pub struct MapSpecialCaseFnOk<F>(F);
+
+impl<F> std::fmt::Debug for MapSpecialCaseFnOk<F> {
+ debug_fmt_fields!(MapSpecialCaseFnOk,);
+}
+
+/// Create a new `MapOk` iterator.
+pub fn map_ok<I, F, T, U, E>(iter: I, f: F) -> MapOk<I, F>
+where
+ I: Iterator<Item = Result<T, E>>,
+ F: FnMut(T) -> U,
+{
+ MapSpecialCase {
+ iter,
+ f: MapSpecialCaseFnOk(f),
+ }
+}
+
+/// An iterator adapter to apply `Into` conversion to each element.
+///
+/// See [`.map_into()`](crate::Itertools::map_into) for more information.
+pub type MapInto<I, R> = MapSpecialCase<I, MapSpecialCaseFnInto<R>>;
+
+impl<T: Into<U>, U> MapSpecialCaseFn<T> for MapSpecialCaseFnInto<U> {
+ type Out = U;
+ fn call(&mut self, t: T) -> Self::Out {
+ t.into()
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct MapSpecialCaseFnInto<U>(PhantomData<U>);
+
+/// Create a new [`MapInto`] iterator.
+pub fn map_into<I, R>(iter: I) -> MapInto<I, R> {
+ MapSpecialCase {
+ iter,
+ f: MapSpecialCaseFnInto(PhantomData),
+ }
+}
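(Not part of the vendored file: a short usage sketch of the `MapOk`/`MapInto` adapters above, written against the public `Itertools` methods they back.)

    use itertools::Itertools;

    // map_ok() transforms only the Ok variants and forwards Err untouched.
    let results = vec![Ok(1), Err("bad"), Ok(3)];
    let doubled: Vec<Result<i32, &str>> =
        results.into_iter().map_ok(|x| x * 2).collect();
    assert_eq!(doubled, vec![Ok(2), Err("bad"), Ok(6)]);

    // map_into() maps each element through its `Into` conversion.
    let as_f64: Vec<f64> = (1..4).map_into::<f64>().collect();
    assert_eq!(as_f64, vec![1.0, 2.0, 3.0]);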
diff --git a/rust/hw/char/pl011/vendor/itertools/src/adaptors/mod.rs b/rust/hw/char/pl011/vendor/itertools/src/adaptors/mod.rs
new file mode 100644
index 0000000000..1695bbd655
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/adaptors/mod.rs
@@ -0,0 +1,1151 @@
+//! Licensed under the Apache License, Version 2.0
+//! <https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+//! <https://opensource.org/licenses/MIT>, at your
+//! option. This file may not be copied, modified, or distributed
+//! except according to those terms.
+
+mod coalesce;
+mod map;
+mod multi_product;
+pub use self::coalesce::*;
+pub use self::map::{map_into, map_ok, MapInto, MapOk};
+#[allow(deprecated)]
+pub use self::map::MapResults;
+#[cfg(feature = "use_alloc")]
+pub use self::multi_product::*;
+
+use std::fmt;
+use std::iter::{Fuse, Peekable, FromIterator, FusedIterator};
+use std::marker::PhantomData;
+use crate::size_hint;
+
+/// An iterator adaptor that alternates elements from two iterators until both
+/// run out.
+///
+/// This iterator is *fused*.
+///
+/// See [`.interleave()`](crate::Itertools::interleave) for more information.
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Interleave<I, J> {
+ a: Fuse<I>,
+ b: Fuse<J>,
+ flag: bool,
+}
+
+/// Create an iterator that interleaves elements in `i` and `j`.
+///
+/// [`IntoIterator`] enabled version of `[Itertools::interleave]`.
+pub fn interleave<I, J>(i: I, j: J) -> Interleave<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
+ where I: IntoIterator,
+ J: IntoIterator<Item = I::Item>
+{
+ Interleave {
+ a: i.into_iter().fuse(),
+ b: j.into_iter().fuse(),
+ flag: false,
+ }
+}
+
+impl<I, J> Iterator for Interleave<I, J>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>
+{
+ type Item = I::Item;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.flag = !self.flag;
+ if self.flag {
+ match self.a.next() {
+ None => self.b.next(),
+ r => r,
+ }
+ } else {
+ match self.b.next() {
+ None => self.a.next(),
+ r => r,
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::add(self.a.size_hint(), self.b.size_hint())
+ }
+}
+
+impl<I, J> FusedIterator for Interleave<I, J>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>
+{}
+
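(Illustrative sketch, not from the patch: the `Interleave` adaptor above alternates the two inputs; once one runs out, the rest of the other is yielded as-is.)

    use itertools::Itertools;

    let v: Vec<i32> = (0..3).interleave(10..15).collect();
    assert_eq!(v, vec![0, 10, 1, 11, 2, 12, 13, 14]);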
+/// An iterator adaptor that alternates elements from the two iterators until
+/// one of them runs out.
+///
+/// This iterator is *fused*.
+///
+/// See [`.interleave_shortest()`](crate::Itertools::interleave_shortest)
+/// for more information.
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct InterleaveShortest<I, J>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>
+{
+ it0: I,
+ it1: J,
+ phase: bool, // false ==> it0, true ==> it1
+}
+
+/// Create a new `InterleaveShortest` iterator.
+pub fn interleave_shortest<I, J>(a: I, b: J) -> InterleaveShortest<I, J>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>
+{
+ InterleaveShortest {
+ it0: a,
+ it1: b,
+ phase: false,
+ }
+}
+
+impl<I, J> Iterator for InterleaveShortest<I, J>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>
+{
+ type Item = I::Item;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ let e = if self.phase { self.it1.next() } else { self.it0.next() };
+ if e.is_some() {
+ self.phase = !self.phase;
+ }
+ e
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (curr_hint, next_hint) = {
+ let it0_hint = self.it0.size_hint();
+ let it1_hint = self.it1.size_hint();
+ if self.phase {
+ (it1_hint, it0_hint)
+ } else {
+ (it0_hint, it1_hint)
+ }
+ };
+ let (curr_lower, curr_upper) = curr_hint;
+ let (next_lower, next_upper) = next_hint;
+ let (combined_lower, combined_upper) =
+ size_hint::mul_scalar(size_hint::min(curr_hint, next_hint), 2);
+ let lower =
+ if curr_lower > next_lower {
+ combined_lower + 1
+ } else {
+ combined_lower
+ };
+ let upper = {
+ let extra_elem = match (curr_upper, next_upper) {
+ (_, None) => false,
+ (None, Some(_)) => true,
+ (Some(curr_max), Some(next_max)) => curr_max > next_max,
+ };
+ if extra_elem {
+ combined_upper.and_then(|x| x.checked_add(1))
+ } else {
+ combined_upper
+ }
+ };
+ (lower, upper)
+ }
+}
+
+impl<I, J> FusedIterator for InterleaveShortest<I, J>
+ where I: FusedIterator,
+ J: FusedIterator<Item = I::Item>
+{}
+
+#[derive(Clone, Debug)]
+/// An iterator adaptor that allows putting back a single
+/// item to the front of the iterator.
+///
+/// Iterator element type is `I::Item`.
+pub struct PutBack<I>
+ where I: Iterator
+{
+ top: Option<I::Item>,
+ iter: I,
+}
+
+/// Create an iterator where you can put back a single item
+pub fn put_back<I>(iterable: I) -> PutBack<I::IntoIter>
+ where I: IntoIterator
+{
+ PutBack {
+ top: None,
+ iter: iterable.into_iter(),
+ }
+}
+
+impl<I> PutBack<I>
+ where I: Iterator
+{
+ /// put back value `value` (builder method)
+ pub fn with_value(mut self, value: I::Item) -> Self {
+ self.put_back(value);
+ self
+ }
+
+ /// Split the `PutBack` into its parts.
+ #[inline]
+ pub fn into_parts(self) -> (Option<I::Item>, I) {
+ let PutBack{top, iter} = self;
+ (top, iter)
+ }
+
+ /// Put back a single value to the front of the iterator.
+ ///
+ /// If a value is already in the put back slot, it is overwritten.
+ #[inline]
+ pub fn put_back(&mut self, x: I::Item) {
+ self.top = Some(x);
+ }
+}
+
+impl<I> Iterator for PutBack<I>
+ where I: Iterator
+{
+ type Item = I::Item;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.top {
+ None => self.iter.next(),
+ ref mut some => some.take(),
+ }
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // Not ExactSizeIterator because size may be larger than usize
+        size_hint::add_scalar(self.iter.size_hint(), self.top.is_some() as usize)
+ }
+
+ fn count(self) -> usize {
+ self.iter.count() + (self.top.is_some() as usize)
+ }
+
+ fn last(self) -> Option<Self::Item> {
+ self.iter.last().or(self.top)
+ }
+
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ match self.top {
+ None => self.iter.nth(n),
+ ref mut some => {
+ if n == 0 {
+ some.take()
+ } else {
+ *some = None;
+ self.iter.nth(n - 1)
+ }
+ }
+ }
+ }
+
+ fn all<G>(&mut self, mut f: G) -> bool
+ where G: FnMut(Self::Item) -> bool
+ {
+ if let Some(elt) = self.top.take() {
+ if !f(elt) {
+ return false;
+ }
+ }
+ self.iter.all(f)
+ }
+
+ fn fold<Acc, G>(mut self, init: Acc, mut f: G) -> Acc
+ where G: FnMut(Acc, Self::Item) -> Acc,
+ {
+ let mut accum = init;
+ if let Some(elt) = self.top.take() {
+ accum = f(accum, elt);
+ }
+ self.iter.fold(accum, f)
+ }
+}
+
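(Editor's note: a tiny sketch of `PutBack`, which is handy when a parser reads one element too far and wants to undo that read; the values are made up.)

    use itertools::put_back;

    let mut it = put_back(vec![2, 3, 4]);
    let first = it.next().unwrap(); // reads 2
    it.put_back(first);             // pushes it back in front
    assert_eq!(it.collect::<Vec<_>>(), vec![2, 3, 4]);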
+#[derive(Debug, Clone)]
+/// An iterator adaptor that iterates over the cartesian product of
+/// the element sets of two iterators `I` and `J`.
+///
+/// Iterator element type is `(I::Item, J::Item)`.
+///
+/// See [`.cartesian_product()`](crate::Itertools::cartesian_product) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Product<I, J>
+ where I: Iterator
+{
+ a: I,
+ a_cur: Option<I::Item>,
+ b: J,
+ b_orig: J,
+}
+
+/// Create a new cartesian product iterator
+///
+/// Iterator element type is `(I::Item, J::Item)`.
+pub fn cartesian_product<I, J>(mut i: I, j: J) -> Product<I, J>
+ where I: Iterator,
+ J: Clone + Iterator,
+ I::Item: Clone
+{
+ Product {
+ a_cur: i.next(),
+ a: i,
+ b: j.clone(),
+ b_orig: j,
+ }
+}
+
+impl<I, J> Iterator for Product<I, J>
+ where I: Iterator,
+ J: Clone + Iterator,
+ I::Item: Clone
+{
+ type Item = (I::Item, J::Item);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let elt_b = match self.b.next() {
+ None => {
+ self.b = self.b_orig.clone();
+ match self.b.next() {
+ None => return None,
+ Some(x) => {
+ self.a_cur = self.a.next();
+ x
+ }
+ }
+ }
+ Some(x) => x
+ };
+ self.a_cur.as_ref().map(|a| (a.clone(), elt_b))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let has_cur = self.a_cur.is_some() as usize;
+ // Not ExactSizeIterator because size may be larger than usize
+ let (b_min, b_max) = self.b.size_hint();
+
+ // Compute a * b_orig + b for both lower and upper bound
+ size_hint::add(
+ size_hint::mul(self.a.size_hint(), self.b_orig.size_hint()),
+ (b_min * has_cur, b_max.map(move |x| x * has_cur)))
+ }
+
+ fn fold<Acc, G>(mut self, mut accum: Acc, mut f: G) -> Acc
+ where G: FnMut(Acc, Self::Item) -> Acc,
+ {
+ // use a split loop to handle the loose a_cur as well as avoiding to
+ // clone b_orig at the end.
+ if let Some(mut a) = self.a_cur.take() {
+ let mut b = self.b;
+ loop {
+ accum = b.fold(accum, |acc, elt| f(acc, (a.clone(), elt)));
+
+ // we can only continue iterating a if we had a first element;
+ if let Some(next_a) = self.a.next() {
+ b = self.b_orig.clone();
+ a = next_a;
+ } else {
+ break;
+ }
+ }
+ }
+ accum
+ }
+}
+
+impl<I, J> FusedIterator for Product<I, J>
+ where I: FusedIterator,
+ J: Clone + FusedIterator,
+ I::Item: Clone
+{}
+
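(Not part of the vendored source: the `Product` adaptor above drives `cartesian_product()`; the right-hand iterator cycles fastest, which is why it must be `Clone`.)

    use itertools::Itertools;

    let cells: Vec<(u32, char)> = (1..3).cartesian_product("ab".chars()).collect();
    assert_eq!(cells, vec![(1, 'a'), (1, 'b'), (2, 'a'), (2, 'b')]);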
+/// A “meta iterator adaptor”. Its closure receives a reference to the iterator
+/// and may pick off as many elements as it likes, to produce the next iterator element.
+///
+/// Iterator element type is *X*, if the return type of `F` is *Option\<X\>*.
+///
+/// See [`.batching()`](crate::Itertools::batching) for more information.
+#[derive(Clone)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Batching<I, F> {
+ f: F,
+ iter: I,
+}
+
+impl<I, F> fmt::Debug for Batching<I, F> where I: fmt::Debug {
+ debug_fmt_fields!(Batching, iter);
+}
+
+/// Create a new Batching iterator.
+pub fn batching<I, F>(iter: I, f: F) -> Batching<I, F> {
+ Batching { f, iter }
+}
+
+impl<B, F, I> Iterator for Batching<I, F>
+ where I: Iterator,
+ F: FnMut(&mut I) -> Option<B>
+{
+ type Item = B;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ (self.f)(&mut self.iter)
+ }
+}
+
+/// An iterator adaptor that steps a number of elements in the base iterator
+/// for each iteration.
+///
+/// The iterator steps by yielding the next element from the base iterator,
+/// then skipping forward *n-1* elements.
+///
+/// See [`.step()`](crate::Itertools::step) for more information.
+#[deprecated(note="Use std .step_by() instead", since="0.8.0")]
+#[allow(deprecated)]
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Step<I> {
+ iter: Fuse<I>,
+ skip: usize,
+}
+
+/// Create a `Step` iterator.
+///
+/// **Panics** if the step is 0.
+#[allow(deprecated)]
+pub fn step<I>(iter: I, step: usize) -> Step<I>
+ where I: Iterator
+{
+ assert!(step != 0);
+ Step {
+ iter: iter.fuse(),
+ skip: step - 1,
+ }
+}
+
+#[allow(deprecated)]
+impl<I> Iterator for Step<I>
+ where I: Iterator
+{
+ type Item = I::Item;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ let elt = self.iter.next();
+ if self.skip > 0 {
+ self.iter.nth(self.skip - 1);
+ }
+ elt
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (low, high) = self.iter.size_hint();
+ let div = |x: usize| {
+ if x == 0 {
+ 0
+ } else {
+ 1 + (x - 1) / (self.skip + 1)
+ }
+ };
+ (div(low), high.map(div))
+ }
+}
+
+// known size
+#[allow(deprecated)]
+impl<I> ExactSizeIterator for Step<I>
+ where I: ExactSizeIterator
+{}
+
+pub trait MergePredicate<T> {
+ fn merge_pred(&mut self, a: &T, b: &T) -> bool;
+}
+
+#[derive(Clone, Debug)]
+pub struct MergeLte;
+
+impl<T: PartialOrd> MergePredicate<T> for MergeLte {
+ fn merge_pred(&mut self, a: &T, b: &T) -> bool {
+ a <= b
+ }
+}
+
+/// An iterator adaptor that merges the two base iterators in ascending order.
+/// If both base iterators are sorted (ascending), the result is sorted.
+///
+/// Iterator element type is `I::Item`.
+///
+/// See [`.merge()`](crate::Itertools::merge_by) for more information.
+pub type Merge<I, J> = MergeBy<I, J, MergeLte>;
+
+/// Create an iterator that merges elements in `i` and `j`.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::merge`](crate::Itertools::merge).
+///
+/// ```
+/// use itertools::merge;
+///
+/// for elt in merge(&[1, 2, 3], &[2, 3, 4]) {
+/// /* loop body */
+/// }
+/// ```
+pub fn merge<I, J>(i: I, j: J) -> Merge<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
+ where I: IntoIterator,
+ J: IntoIterator<Item = I::Item>,
+ I::Item: PartialOrd
+{
+ merge_by_new(i, j, MergeLte)
+}
+
+/// An iterator adaptor that merges the two base iterators in ascending order.
+/// If both base iterators are sorted (ascending), the result is sorted.
+///
+/// Iterator element type is `I::Item`.
+///
+/// See [`.merge_by()`](crate::Itertools::merge_by) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct MergeBy<I, J, F>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>
+{
+ a: Peekable<I>,
+ b: Peekable<J>,
+ fused: Option<bool>,
+ cmp: F,
+}
+
+impl<I, J, F> fmt::Debug for MergeBy<I, J, F>
+ where I: Iterator + fmt::Debug, J: Iterator<Item = I::Item> + fmt::Debug,
+ I::Item: fmt::Debug,
+{
+ debug_fmt_fields!(MergeBy, a, b);
+}
+
+impl<T, F: FnMut(&T, &T)->bool> MergePredicate<T> for F {
+ fn merge_pred(&mut self, a: &T, b: &T) -> bool {
+ self(a, b)
+ }
+}
+
+/// Create a `MergeBy` iterator.
+pub fn merge_by_new<I, J, F>(a: I, b: J, cmp: F) -> MergeBy<I::IntoIter, J::IntoIter, F>
+ where I: IntoIterator,
+ J: IntoIterator<Item = I::Item>,
+ F: MergePredicate<I::Item>,
+{
+ MergeBy {
+ a: a.into_iter().peekable(),
+ b: b.into_iter().peekable(),
+ fused: None,
+ cmp,
+ }
+}
+
+impl<I, J, F> Clone for MergeBy<I, J, F>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>,
+ Peekable<I>: Clone,
+ Peekable<J>: Clone,
+ F: Clone
+{
+ clone_fields!(a, b, fused, cmp);
+}
+
+impl<I, J, F> Iterator for MergeBy<I, J, F>
+ where I: Iterator,
+ J: Iterator<Item = I::Item>,
+ F: MergePredicate<I::Item>
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let less_than = match self.fused {
+ Some(lt) => lt,
+ None => match (self.a.peek(), self.b.peek()) {
+ (Some(a), Some(b)) => self.cmp.merge_pred(a, b),
+ (Some(_), None) => {
+ self.fused = Some(true);
+ true
+ }
+ (None, Some(_)) => {
+ self.fused = Some(false);
+ false
+ }
+ (None, None) => return None,
+ }
+ };
+ if less_than {
+ self.a.next()
+ } else {
+ self.b.next()
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // Not ExactSizeIterator because size may be larger than usize
+ size_hint::add(self.a.size_hint(), self.b.size_hint())
+ }
+}
+
+impl<I, J, F> FusedIterator for MergeBy<I, J, F>
+ where I: FusedIterator,
+ J: FusedIterator<Item = I::Item>,
+ F: MergePredicate<I::Item>
+{}
+
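(Editor's sketch, not part of the file: `merge()`/`MergeBy` keep the output sorted when both inputs are already ascending.)

    use itertools::Itertools;

    let merged: Vec<i32> = [1, 3, 5].iter().merge(&[2, 4, 6]).copied().collect();
    assert_eq!(merged, vec![1, 2, 3, 4, 5, 6]);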
+/// An iterator adaptor that borrows from a `Clone`-able iterator
+/// to only pick off elements while the predicate returns `true`.
+///
+/// See [`.take_while_ref()`](crate::Itertools::take_while_ref) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct TakeWhileRef<'a, I: 'a, F> {
+ iter: &'a mut I,
+ f: F,
+}
+
+impl<'a, I, F> fmt::Debug for TakeWhileRef<'a, I, F>
+ where I: Iterator + fmt::Debug,
+{
+ debug_fmt_fields!(TakeWhileRef, iter);
+}
+
+/// Create a new `TakeWhileRef` from a reference to a clonable iterator.
+pub fn take_while_ref<I, F>(iter: &mut I, f: F) -> TakeWhileRef<I, F>
+ where I: Iterator + Clone
+{
+ TakeWhileRef { iter, f }
+}
+
+impl<'a, I, F> Iterator for TakeWhileRef<'a, I, F>
+ where I: Iterator + Clone,
+ F: FnMut(&I::Item) -> bool
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let old = self.iter.clone();
+ match self.iter.next() {
+ None => None,
+ Some(elt) => {
+ if (self.f)(&elt) {
+ Some(elt)
+ } else {
+ *self.iter = old;
+ None
+ }
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.iter.size_hint().1)
+ }
+}
+
+/// An iterator adaptor that filters `Option<A>` iterator elements
+/// and produces `A`. Stops on the first `None` encountered.
+///
+/// See [`.while_some()`](crate::Itertools::while_some) for more information.
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct WhileSome<I> {
+ iter: I,
+}
+
+/// Create a new `WhileSome<I>`.
+pub fn while_some<I>(iter: I) -> WhileSome<I> {
+ WhileSome { iter }
+}
+
+impl<I, A> Iterator for WhileSome<I>
+ where I: Iterator<Item = Option<A>>
+{
+ type Item = A;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.iter.next() {
+ None | Some(None) => None,
+ Some(elt) => elt,
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.iter.size_hint().1)
+ }
+}
+
+/// An iterator to iterate through all combinations in a `Clone`-able iterator that produces tuples
+/// of a specific size.
+///
+/// See [`.tuple_combinations()`](crate::Itertools::tuple_combinations) for more
+/// information.
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct TupleCombinations<I, T>
+ where I: Iterator,
+ T: HasCombination<I>
+{
+ iter: T::Combination,
+ _mi: PhantomData<I>,
+}
+
+pub trait HasCombination<I>: Sized {
+ type Combination: From<I> + Iterator<Item = Self>;
+}
+
+/// Create a new `TupleCombinations` from a clonable iterator.
+pub fn tuple_combinations<T, I>(iter: I) -> TupleCombinations<I, T>
+ where I: Iterator + Clone,
+ I::Item: Clone,
+ T: HasCombination<I>,
+{
+ TupleCombinations {
+ iter: T::Combination::from(iter),
+ _mi: PhantomData,
+ }
+}
+
+impl<I, T> Iterator for TupleCombinations<I, T>
+ where I: Iterator,
+ T: HasCombination<I>,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.next()
+ }
+}
+
+impl<I, T> FusedIterator for TupleCombinations<I, T>
+ where I: FusedIterator,
+ T: HasCombination<I>,
+{}
+
+#[derive(Clone, Debug)]
+pub struct Tuple1Combination<I> {
+ iter: I,
+}
+
+impl<I> From<I> for Tuple1Combination<I> {
+ fn from(iter: I) -> Self {
+ Tuple1Combination { iter }
+ }
+}
+
+impl<I: Iterator> Iterator for Tuple1Combination<I> {
+ type Item = (I::Item,);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.next().map(|x| (x,))
+ }
+}
+
+impl<I: Iterator> HasCombination<I> for (I::Item,) {
+ type Combination = Tuple1Combination<I>;
+}
+
+macro_rules! impl_tuple_combination {
+ ($C:ident $P:ident ; $($X:ident)*) => (
+ #[derive(Clone, Debug)]
+ pub struct $C<I: Iterator> {
+ item: Option<I::Item>,
+ iter: I,
+ c: $P<I>,
+ }
+
+ impl<I: Iterator + Clone> From<I> for $C<I> {
+ fn from(mut iter: I) -> Self {
+ Self {
+ item: iter.next(),
+ iter: iter.clone(),
+ c: iter.into(),
+ }
+ }
+ }
+
+ impl<I: Iterator + Clone> From<I> for $C<Fuse<I>> {
+ fn from(iter: I) -> Self {
+ Self::from(iter.fuse())
+ }
+ }
+
+ impl<I, A> Iterator for $C<I>
+ where I: Iterator<Item = A> + Clone,
+ I::Item: Clone
+ {
+ type Item = (A, $(ignore_ident!($X, A)),*);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(($($X),*,)) = self.c.next() {
+ let z = self.item.clone().unwrap();
+ Some((z, $($X),*))
+ } else {
+ self.item = self.iter.next();
+ self.item.clone().and_then(|z| {
+ self.c = self.iter.clone().into();
+ self.c.next().map(|($($X),*,)| (z, $($X),*))
+ })
+ }
+ }
+ }
+
+ impl<I, A> HasCombination<I> for (A, $(ignore_ident!($X, A)),*)
+ where I: Iterator<Item = A> + Clone,
+ I::Item: Clone
+ {
+ type Combination = $C<Fuse<I>>;
+ }
+ )
+}
+
+// This snippet generates the twelve `impl_tuple_combination!` invocations:
+// use core::iter;
+// use itertools::Itertools;
+//
+// for i in 2..=12 {
+//     println!("impl_tuple_combination!(Tuple{arity}Combination Tuple{prev}Combination; {idents});",
+// arity = i,
+// prev = i - 1,
+// idents = ('a'..'z').take(i - 1).join(" "),
+// );
+// }
+// It could probably be replaced by a bit more macro cleverness.
+impl_tuple_combination!(Tuple2Combination Tuple1Combination; a);
+impl_tuple_combination!(Tuple3Combination Tuple2Combination; a b);
+impl_tuple_combination!(Tuple4Combination Tuple3Combination; a b c);
+impl_tuple_combination!(Tuple5Combination Tuple4Combination; a b c d);
+impl_tuple_combination!(Tuple6Combination Tuple5Combination; a b c d e);
+impl_tuple_combination!(Tuple7Combination Tuple6Combination; a b c d e f);
+impl_tuple_combination!(Tuple8Combination Tuple7Combination; a b c d e f g);
+impl_tuple_combination!(Tuple9Combination Tuple8Combination; a b c d e f g h);
+impl_tuple_combination!(Tuple10Combination Tuple9Combination; a b c d e f g h i);
+impl_tuple_combination!(Tuple11Combination Tuple10Combination; a b c d e f g h i j);
+impl_tuple_combination!(Tuple12Combination Tuple11Combination; a b c d e f g h i j k);
+
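(Editor's note: the generated TupleNCombination types back `tuple_combinations()`; the tuple arity is picked by the type the caller asks for, here a pair.)

    use itertools::Itertools;

    let pairs: Vec<(i32, i32)> = (1..4).tuple_combinations().collect();
    assert_eq!(pairs, vec![(1, 2), (1, 3), (2, 3)]);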
+/// An iterator adapter to filter values within a nested `Result::Ok`.
+///
+/// See [`.filter_ok()`](crate::Itertools::filter_ok) for more information.
+#[derive(Clone)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct FilterOk<I, F> {
+ iter: I,
+ f: F
+}
+
+impl<I, F> fmt::Debug for FilterOk<I, F>
+where
+ I: fmt::Debug,
+{
+ debug_fmt_fields!(FilterOk, iter);
+}
+
+/// Create a new `FilterOk` iterator.
+pub fn filter_ok<I, F, T, E>(iter: I, f: F) -> FilterOk<I, F>
+ where I: Iterator<Item = Result<T, E>>,
+ F: FnMut(&T) -> bool,
+{
+ FilterOk {
+ iter,
+ f,
+ }
+}
+
+impl<I, F, T, E> Iterator for FilterOk<I, F>
+ where I: Iterator<Item = Result<T, E>>,
+ F: FnMut(&T) -> bool,
+{
+ type Item = Result<T, E>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ match self.iter.next() {
+ Some(Ok(v)) => {
+ if (self.f)(&v) {
+ return Some(Ok(v));
+ }
+ },
+ Some(Err(e)) => return Some(Err(e)),
+ None => return None,
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.iter.size_hint().1)
+ }
+
+ fn fold<Acc, Fold>(self, init: Acc, fold_f: Fold) -> Acc
+ where Fold: FnMut(Acc, Self::Item) -> Acc,
+ {
+ let mut f = self.f;
+ self.iter.filter(|v| {
+ v.as_ref().map(&mut f).unwrap_or(true)
+ }).fold(init, fold_f)
+ }
+
+ fn collect<C>(self) -> C
+ where C: FromIterator<Self::Item>
+ {
+ let mut f = self.f;
+ self.iter.filter(|v| {
+ v.as_ref().map(&mut f).unwrap_or(true)
+ }).collect()
+ }
+}
+
+impl<I, F, T, E> FusedIterator for FilterOk<I, F>
+ where I: FusedIterator<Item = Result<T, E>>,
+ F: FnMut(&T) -> bool,
+{}
+
+/// An iterator adapter to filter and apply a transformation on values within a nested `Result::Ok`.
+///
+/// See [`.filter_map_ok()`](crate::Itertools::filter_map_ok) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct FilterMapOk<I, F> {
+ iter: I,
+ f: F
+}
+
+impl<I, F> fmt::Debug for FilterMapOk<I, F>
+where
+ I: fmt::Debug,
+{
+ debug_fmt_fields!(FilterMapOk, iter);
+}
+
+fn transpose_result<T, E>(result: Result<Option<T>, E>) -> Option<Result<T, E>> {
+ match result {
+ Ok(Some(v)) => Some(Ok(v)),
+ Ok(None) => None,
+ Err(e) => Some(Err(e)),
+ }
+}
+
+/// Create a new `FilterMapOk` iterator.
+pub fn filter_map_ok<I, F, T, U, E>(iter: I, f: F) -> FilterMapOk<I, F>
+ where I: Iterator<Item = Result<T, E>>,
+ F: FnMut(T) -> Option<U>,
+{
+ FilterMapOk {
+ iter,
+ f,
+ }
+}
+
+impl<I, F, T, U, E> Iterator for FilterMapOk<I, F>
+ where I: Iterator<Item = Result<T, E>>,
+ F: FnMut(T) -> Option<U>,
+{
+ type Item = Result<U, E>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ match self.iter.next() {
+ Some(Ok(v)) => {
+ if let Some(v) = (self.f)(v) {
+ return Some(Ok(v));
+ }
+ },
+ Some(Err(e)) => return Some(Err(e)),
+ None => return None,
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.iter.size_hint().1)
+ }
+
+ fn fold<Acc, Fold>(self, init: Acc, fold_f: Fold) -> Acc
+ where Fold: FnMut(Acc, Self::Item) -> Acc,
+ {
+ let mut f = self.f;
+ self.iter.filter_map(|v| {
+ transpose_result(v.map(&mut f))
+ }).fold(init, fold_f)
+ }
+
+ fn collect<C>(self) -> C
+ where C: FromIterator<Self::Item>
+ {
+ let mut f = self.f;
+ self.iter.filter_map(|v| {
+ transpose_result(v.map(&mut f))
+ }).collect()
+ }
+}
+
+impl<I, F, T, U, E> FusedIterator for FilterMapOk<I, F>
+ where I: FusedIterator<Item = Result<T, E>>,
+ F: FnMut(T) -> Option<U>,
+{}
+
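(Not from the vendored file: a sketch of `filter_map_ok()`; errors always pass through, only Ok values reach the closure.)

    use itertools::Itertools;

    let input = vec![Ok(1), Err("oops"), Ok(2), Ok(3)];
    let kept: Vec<Result<i32, &str>> = input
        .into_iter()
        .filter_map_ok(|x| if x > 1 { Some(x * 10) } else { None })
        .collect();
    assert_eq!(kept, vec![Err("oops"), Ok(20), Ok(30)]);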
+/// An iterator adapter to get the positions of each element that matches a predicate.
+///
+/// See [`.positions()`](crate::Itertools::positions) for more information.
+#[derive(Clone)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Positions<I, F> {
+ iter: I,
+ f: F,
+ count: usize,
+}
+
+impl<I, F> fmt::Debug for Positions<I, F>
+where
+ I: fmt::Debug,
+{
+ debug_fmt_fields!(Positions, iter, count);
+}
+
+/// Create a new `Positions` iterator.
+pub fn positions<I, F>(iter: I, f: F) -> Positions<I, F>
+ where I: Iterator,
+ F: FnMut(I::Item) -> bool,
+{
+ Positions {
+ iter,
+ f,
+ count: 0
+ }
+}
+
+impl<I, F> Iterator for Positions<I, F>
+ where I: Iterator,
+ F: FnMut(I::Item) -> bool,
+{
+ type Item = usize;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ while let Some(v) = self.iter.next() {
+ let i = self.count;
+ self.count = i + 1;
+ if (self.f)(v) {
+ return Some(i);
+ }
+ }
+ None
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.iter.size_hint().1)
+ }
+}
+
+impl<I, F> DoubleEndedIterator for Positions<I, F>
+ where I: DoubleEndedIterator + ExactSizeIterator,
+ F: FnMut(I::Item) -> bool,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ while let Some(v) = self.iter.next_back() {
+ if (self.f)(v) {
+ return Some(self.count + self.iter.len())
+ }
+ }
+ None
+ }
+}
+
+impl<I, F> FusedIterator for Positions<I, F>
+ where I: FusedIterator,
+ F: FnMut(I::Item) -> bool,
+{}
+
+/// An iterator adapter to apply a mutating function to each element before yielding it.
+///
+/// See [`.update()`](crate::Itertools::update) for more information.
+#[derive(Clone)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Update<I, F> {
+ iter: I,
+ f: F,
+}
+
+impl<I, F> fmt::Debug for Update<I, F>
+where
+ I: fmt::Debug,
+{
+ debug_fmt_fields!(Update, iter);
+}
+
+/// Create a new `Update` iterator.
+pub fn update<I, F>(iter: I, f: F) -> Update<I, F>
+where
+ I: Iterator,
+ F: FnMut(&mut I::Item),
+{
+ Update { iter, f }
+}
+
+impl<I, F> Iterator for Update<I, F>
+where
+ I: Iterator,
+ F: FnMut(&mut I::Item),
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(mut v) = self.iter.next() {
+ (self.f)(&mut v);
+ Some(v)
+ } else {
+ None
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+
+ fn fold<Acc, G>(self, init: Acc, mut g: G) -> Acc
+ where G: FnMut(Acc, Self::Item) -> Acc,
+ {
+ let mut f = self.f;
+ self.iter.fold(init, move |acc, mut v| { f(&mut v); g(acc, v) })
+ }
+
+ // if possible, re-use inner iterator specializations in collect
+ fn collect<C>(self) -> C
+ where C: FromIterator<Self::Item>
+ {
+ let mut f = self.f;
+ self.iter.map(move |mut v| { f(&mut v); v }).collect()
+ }
+}
+
+impl<I, F> ExactSizeIterator for Update<I, F>
+where
+ I: ExactSizeIterator,
+ F: FnMut(&mut I::Item),
+{}
+
+impl<I, F> DoubleEndedIterator for Update<I, F>
+where
+ I: DoubleEndedIterator,
+ F: FnMut(&mut I::Item),
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if let Some(mut v) = self.iter.next_back() {
+ (self.f)(&mut v);
+ Some(v)
+ } else {
+ None
+ }
+ }
+}
+
+impl<I, F> FusedIterator for Update<I, F>
+where
+ I: FusedIterator,
+ F: FnMut(&mut I::Item),
+{}
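(Editor's note, illustration only: closing examples for the last two adaptors in this file, `Positions` and `Update`.)

    use itertools::Itertools;

    // positions() yields the indices of elements matching the predicate.
    let idx: Vec<usize> = [1, 4, 2, 5].iter().positions(|&x| x % 2 == 0).collect();
    assert_eq!(idx, vec![1, 2]);

    // update() mutates each element in place before yielding it.
    let doubled: Vec<i32> = vec![1, 2, 3].into_iter().update(|x| *x *= 2).collect();
    assert_eq!(doubled, vec![2, 4, 6]);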
diff --git a/rust/hw/char/pl011/vendor/itertools/src/adaptors/multi_product.rs b/rust/hw/char/pl011/vendor/itertools/src/adaptors/multi_product.rs
new file mode 100644
index 0000000000..0b38406987
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/adaptors/multi_product.rs
@@ -0,0 +1,230 @@
+#![cfg(feature = "use_alloc")]
+
+use crate::size_hint;
+use crate::Itertools;
+
+use alloc::vec::Vec;
+
+#[derive(Clone)]
+/// An iterator adaptor that iterates over the cartesian product of
+/// multiple iterators of type `I`.
+///
+/// An iterator element type is `Vec<I>`.
+///
+/// See [`.multi_cartesian_product()`](crate::Itertools::multi_cartesian_product)
+/// for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct MultiProduct<I>(Vec<MultiProductIter<I>>)
+ where I: Iterator + Clone,
+ I::Item: Clone;
+
+impl<I> std::fmt::Debug for MultiProduct<I>
+where
+ I: Iterator + Clone + std::fmt::Debug,
+ I::Item: Clone + std::fmt::Debug,
+{
+    debug_fmt_fields!(MultiProduct, 0);
+}
+
+/// Create a new cartesian product iterator over an arbitrary number
+/// of iterators of the same type.
+///
+/// Iterator element is of type `Vec<H::Item::Item>`.
+pub fn multi_cartesian_product<H>(iters: H) -> MultiProduct<<H::Item as IntoIterator>::IntoIter>
+ where H: Iterator,
+ H::Item: IntoIterator,
+ <H::Item as IntoIterator>::IntoIter: Clone,
+ <H::Item as IntoIterator>::Item: Clone
+{
+ MultiProduct(iters.map(|i| MultiProductIter::new(i.into_iter())).collect())
+}
+
+#[derive(Clone, Debug)]
+/// Holds the state of a single iterator within a `MultiProduct`.
+struct MultiProductIter<I>
+ where I: Iterator + Clone,
+ I::Item: Clone
+{
+ cur: Option<I::Item>,
+ iter: I,
+ iter_orig: I,
+}
+
+/// Holds the current state during an iteration of a `MultiProduct`.
+#[derive(Debug)]
+enum MultiProductIterState {
+ StartOfIter,
+ MidIter { on_first_iter: bool },
+}
+
+impl<I> MultiProduct<I>
+ where I: Iterator + Clone,
+ I::Item: Clone
+{
+ /// Iterates the rightmost iterator, then recursively iterates iterators
+ /// to the left if necessary.
+ ///
+ /// Returns true if the iteration succeeded, else false.
+ fn iterate_last(
+ multi_iters: &mut [MultiProductIter<I>],
+ mut state: MultiProductIterState
+ ) -> bool {
+ use self::MultiProductIterState::*;
+
+ if let Some((last, rest)) = multi_iters.split_last_mut() {
+ let on_first_iter = match state {
+ StartOfIter => {
+ let on_first_iter = !last.in_progress();
+ state = MidIter { on_first_iter };
+ on_first_iter
+ },
+ MidIter { on_first_iter } => on_first_iter
+ };
+
+ if !on_first_iter {
+ last.iterate();
+ }
+
+ if last.in_progress() {
+ true
+ } else if MultiProduct::iterate_last(rest, state) {
+ last.reset();
+ last.iterate();
+ // If iterator is None twice consecutively, then iterator is
+ // empty; whole product is empty.
+ last.in_progress()
+ } else {
+ false
+ }
+ } else {
+ // Reached end of iterator list. On initialisation, return true.
+ // At end of iteration (final iterator finishes), finish.
+ match state {
+ StartOfIter => false,
+ MidIter { on_first_iter } => on_first_iter
+ }
+ }
+ }
+
+ /// Returns the unwrapped value of the next iteration.
+ fn curr_iterator(&self) -> Vec<I::Item> {
+ self.0.iter().map(|multi_iter| {
+ multi_iter.cur.clone().unwrap()
+ }).collect()
+ }
+
+ /// Returns true if iteration has started and has not yet finished; false
+ /// otherwise.
+ fn in_progress(&self) -> bool {
+ if let Some(last) = self.0.last() {
+ last.in_progress()
+ } else {
+ false
+ }
+ }
+}
+
+impl<I> MultiProductIter<I>
+ where I: Iterator + Clone,
+ I::Item: Clone
+{
+ fn new(iter: I) -> Self {
+ MultiProductIter {
+ cur: None,
+ iter: iter.clone(),
+ iter_orig: iter
+ }
+ }
+
+ /// Iterate the managed iterator.
+ fn iterate(&mut self) {
+ self.cur = self.iter.next();
+ }
+
+ /// Reset the managed iterator.
+ fn reset(&mut self) {
+ self.iter = self.iter_orig.clone();
+ }
+
+ /// Returns true if the current iterator has been started and has not yet
+ /// finished; false otherwise.
+ fn in_progress(&self) -> bool {
+ self.cur.is_some()
+ }
+}
+
+impl<I> Iterator for MultiProduct<I>
+ where I: Iterator + Clone,
+ I::Item: Clone
+{
+ type Item = Vec<I::Item>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if MultiProduct::iterate_last(
+ &mut self.0,
+ MultiProductIterState::StartOfIter
+ ) {
+ Some(self.curr_iterator())
+ } else {
+ None
+ }
+ }
+
+ fn count(self) -> usize {
+ if self.0.is_empty() {
+ return 0;
+ }
+
+ if !self.in_progress() {
+ return self.0.into_iter().fold(1, |acc, multi_iter| {
+ acc * multi_iter.iter.count()
+ });
+ }
+
+ self.0.into_iter().fold(
+ 0,
+ |acc, MultiProductIter { iter, iter_orig, cur: _ }| {
+ let total_count = iter_orig.count();
+ let cur_count = iter.count();
+ acc * total_count + cur_count
+ }
+ )
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // Not ExactSizeIterator because size may be larger than usize
+ if self.0.is_empty() {
+ return (0, Some(0));
+ }
+
+ if !self.in_progress() {
+ return self.0.iter().fold((1, Some(1)), |acc, multi_iter| {
+ size_hint::mul(acc, multi_iter.iter.size_hint())
+ });
+ }
+
+ self.0.iter().fold(
+ (0, Some(0)),
+ |acc, &MultiProductIter { ref iter, ref iter_orig, cur: _ }| {
+ let cur_size = iter.size_hint();
+ let total_size = iter_orig.size_hint();
+ size_hint::add(size_hint::mul(acc, total_size), cur_size)
+ }
+ )
+ }
+
+ fn last(self) -> Option<Self::Item> {
+ let iter_count = self.0.len();
+
+ let lasts: Self::Item = self.0.into_iter()
+ .map(|multi_iter| multi_iter.iter.last())
+ .while_some()
+ .collect();
+
+ if lasts.len() == iter_count {
+ Some(lasts)
+ } else {
+ None
+ }
+ }
+}
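(Not part of the vendored source: `multi_cartesian_product()` is the runtime-arity counterpart of `cartesian_product()`; each yielded item is a `Vec`.)

    use itertools::Itertools;

    let rows: Vec<Vec<u8>> = vec![0..2, 0..2]
        .into_iter()
        .multi_cartesian_product()
        .collect();
    assert_eq!(rows, vec![vec![0, 0], vec![0, 1], vec![1, 0], vec![1, 1]]);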
diff --git a/rust/hw/char/pl011/vendor/itertools/src/combinations.rs b/rust/hw/char/pl011/vendor/itertools/src/combinations.rs
new file mode 100644
index 0000000000..68a59c5e4d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/combinations.rs
@@ -0,0 +1,128 @@
+use std::fmt;
+use std::iter::FusedIterator;
+
+use super::lazy_buffer::LazyBuffer;
+use alloc::vec::Vec;
+
+/// An iterator to iterate through all the `k`-length combinations in an iterator.
+///
+/// See [`.combinations()`](crate::Itertools::combinations) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Combinations<I: Iterator> {
+ indices: Vec<usize>,
+ pool: LazyBuffer<I>,
+ first: bool,
+}
+
+impl<I> Clone for Combinations<I>
+ where I: Clone + Iterator,
+ I::Item: Clone,
+{
+ clone_fields!(indices, pool, first);
+}
+
+impl<I> fmt::Debug for Combinations<I>
+ where I: Iterator + fmt::Debug,
+ I::Item: fmt::Debug,
+{
+ debug_fmt_fields!(Combinations, indices, pool, first);
+}
+
+/// Create a new `Combinations` from a clonable iterator.
+pub fn combinations<I>(iter: I, k: usize) -> Combinations<I>
+ where I: Iterator
+{
+ let mut pool = LazyBuffer::new(iter);
+ pool.prefill(k);
+
+ Combinations {
+ indices: (0..k).collect(),
+ pool,
+ first: true,
+ }
+}
+
+impl<I: Iterator> Combinations<I> {
+ /// Returns the length of a combination produced by this iterator.
+ #[inline]
+ pub fn k(&self) -> usize { self.indices.len() }
+
+    /// Returns the (current) length of the pool from which combination elements are
+    /// selected. This value can change between invocations of [`next`](Combinations::next).
+ #[inline]
+ pub fn n(&self) -> usize { self.pool.len() }
+
+ /// Returns a reference to the source iterator.
+ #[inline]
+ pub(crate) fn src(&self) -> &I { &self.pool.it }
+
+    /// Resets this `Combinations` back to an initial state for combinations of length
+    /// `k` over the same pool data source. If `k` is larger than the current length
+    /// of the data pool an attempt is made to prefill the pool so that it holds `k`
+ /// elements.
+ pub(crate) fn reset(&mut self, k: usize) {
+ self.first = true;
+
+ if k < self.indices.len() {
+ self.indices.truncate(k);
+ for i in 0..k {
+ self.indices[i] = i;
+ }
+
+ } else {
+ for i in 0..self.indices.len() {
+ self.indices[i] = i;
+ }
+ self.indices.extend(self.indices.len()..k);
+ self.pool.prefill(k);
+ }
+ }
+}
+
+impl<I> Iterator for Combinations<I>
+ where I: Iterator,
+ I::Item: Clone
+{
+ type Item = Vec<I::Item>;
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.first {
+ if self.k() > self.n() {
+ return None;
+ }
+ self.first = false;
+ } else if self.indices.is_empty() {
+ return None;
+ } else {
+ // Scan from the end, looking for an index to increment
+ let mut i: usize = self.indices.len() - 1;
+
+ // Check if we need to consume more from the iterator
+ if self.indices[i] == self.pool.len() - 1 {
+ self.pool.get_next(); // may change pool size
+ }
+
+ while self.indices[i] == i + self.pool.len() - self.indices.len() {
+ if i > 0 {
+ i -= 1;
+ } else {
+ // Reached the last combination
+ return None;
+ }
+ }
+
+ // Increment index, and reset the ones to its right
+ self.indices[i] += 1;
+ for j in i+1..self.indices.len() {
+ self.indices[j] = self.indices[j - 1] + 1;
+ }
+ }
+
+ // Create result vector based on the indices
+ Some(self.indices.iter().map(|i| self.pool[*i].clone()).collect())
+ }
+}
+
+impl<I> FusedIterator for Combinations<I>
+ where I: Iterator,
+ I::Item: Clone
+{}
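(Editor's sketch, not from the patch: a minimal use of `combinations(k)`; elements are pulled from the source lazily through the `LazyBuffer` used above.)

    use itertools::Itertools;

    let combos: Vec<Vec<i32>> = (1..5).combinations(2).collect();
    assert_eq!(combos, vec![vec![1, 2], vec![1, 3], vec![1, 4],
                            vec![2, 3], vec![2, 4], vec![3, 4]]);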
diff --git a/rust/hw/char/pl011/vendor/itertools/src/combinations_with_replacement.rs b/rust/hw/char/pl011/vendor/itertools/src/combinations_with_replacement.rs
new file mode 100644
index 0000000000..0fec9671ac
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/combinations_with_replacement.rs
@@ -0,0 +1,109 @@
+use alloc::vec::Vec;
+use std::fmt;
+use std::iter::FusedIterator;
+
+use super::lazy_buffer::LazyBuffer;
+
+/// An iterator to iterate through all the `n`-length combinations in an iterator, with replacement.
+///
+/// See [`.combinations_with_replacement()`](crate::Itertools::combinations_with_replacement)
+/// for more information.
+#[derive(Clone)]
+pub struct CombinationsWithReplacement<I>
+where
+ I: Iterator,
+ I::Item: Clone,
+{
+ indices: Vec<usize>,
+ pool: LazyBuffer<I>,
+ first: bool,
+}
+
+impl<I> fmt::Debug for CombinationsWithReplacement<I>
+where
+ I: Iterator + fmt::Debug,
+ I::Item: fmt::Debug + Clone,
+{
+ debug_fmt_fields!(Combinations, indices, pool, first);
+}
+
+impl<I> CombinationsWithReplacement<I>
+where
+ I: Iterator,
+ I::Item: Clone,
+{
+ /// Map the current mask over the pool to get an output combination
+ fn current(&self) -> Vec<I::Item> {
+ self.indices.iter().map(|i| self.pool[*i].clone()).collect()
+ }
+}
+
+/// Create a new `CombinationsWithReplacement` from a clonable iterator.
+pub fn combinations_with_replacement<I>(iter: I, k: usize) -> CombinationsWithReplacement<I>
+where
+ I: Iterator,
+ I::Item: Clone,
+{
+ let indices: Vec<usize> = alloc::vec![0; k];
+ let pool: LazyBuffer<I> = LazyBuffer::new(iter);
+
+ CombinationsWithReplacement {
+ indices,
+ pool,
+ first: true,
+ }
+}
+
+impl<I> Iterator for CombinationsWithReplacement<I>
+where
+ I: Iterator,
+ I::Item: Clone,
+{
+ type Item = Vec<I::Item>;
+ fn next(&mut self) -> Option<Self::Item> {
+ // If this is the first iteration, return early
+ if self.first {
+ // In empty edge cases, stop iterating immediately
+ return if !(self.indices.is_empty() || self.pool.get_next()) {
+ None
+ // Otherwise, yield the initial state
+ } else {
+ self.first = false;
+ Some(self.current())
+ };
+ }
+
+ // Check if we need to consume more from the iterator
+ // This will run while we increment our first index digit
+ self.pool.get_next();
+
+ // Work out where we need to update our indices
+ let mut increment: Option<(usize, usize)> = None;
+ for (i, indices_int) in self.indices.iter().enumerate().rev() {
+ if *indices_int < self.pool.len()-1 {
+ increment = Some((i, indices_int + 1));
+ break;
+ }
+ }
+
+ match increment {
+ // If we can update the indices further
+ Some((increment_from, increment_value)) => {
+ // We need to update the rightmost non-max value
+ // and all those to the right
+ for indices_index in increment_from..self.indices.len() {
+ self.indices[indices_index] = increment_value;
+ }
+ Some(self.current())
+ }
+ // Otherwise, we're done
+ None => None,
+ }
+ }
+}
+
+impl<I> FusedIterator for CombinationsWithReplacement<I>
+where
+ I: Iterator,
+ I::Item: Clone,
+{}
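(Illustration only, not part of the vendored file: same idea as `combinations()`, but an element may be chosen more than once.)

    use itertools::Itertools;

    let combos: Vec<Vec<i32>> = (1..4).combinations_with_replacement(2).collect();
    assert_eq!(combos, vec![vec![1, 1], vec![1, 2], vec![1, 3],
                            vec![2, 2], vec![2, 3], vec![3, 3]]);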
diff --git a/rust/hw/char/pl011/vendor/itertools/src/concat_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/concat_impl.rs
new file mode 100644
index 0000000000..f022ec90af
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/concat_impl.rs
@@ -0,0 +1,23 @@
+use crate::Itertools;
+
+/// Combine all an iterator's elements into one element by using [`Extend`].
+///
+/// [`IntoIterator`]-enabled version of [`Itertools::concat`].
+///
+/// This combinator will extend the first item with each of the rest of the
+/// items of the iterator. If the iterator is empty, the default value of
+/// `I::Item` is returned.
+///
+/// ```rust
+/// use itertools::concat;
+///
+/// let input = vec![vec![1], vec![2, 3], vec![4, 5, 6]];
+/// assert_eq!(concat(input), vec![1, 2, 3, 4, 5, 6]);
+/// ```
+pub fn concat<I>(iterable: I) -> I::Item
+ where I: IntoIterator,
+          I::Item: Extend<<<I as IntoIterator>::Item as IntoIterator>::Item> + IntoIterator + Default
+{
+    #[allow(deprecated)] //TODO: once msrv hits 1.51. replace `fold1` with `reduce`
+    iterable.into_iter().fold1(|mut a, b| { a.extend(b); a }).unwrap_or_default()
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/cons_tuples_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/cons_tuples_impl.rs
new file mode 100644
index 0000000000..ae0f48f349
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/cons_tuples_impl.rs
@@ -0,0 +1,64 @@
+
+macro_rules! impl_cons_iter(
+ ($_A:ident, $_B:ident, ) => (); // stop
+
+ ($A:ident, $($B:ident,)*) => (
+ impl_cons_iter!($($B,)*);
+ #[allow(non_snake_case)]
+ impl<X, Iter, $($B),*> Iterator for ConsTuples<Iter, (($($B,)*), X)>
+ where Iter: Iterator<Item = (($($B,)*), X)>,
+ {
+ type Item = ($($B,)* X, );
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.next().map(|(($($B,)*), x)| ($($B,)* x, ))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+ fn fold<Acc, Fold>(self, accum: Acc, mut f: Fold) -> Acc
+ where Fold: FnMut(Acc, Self::Item) -> Acc,
+ {
+                self.iter.fold(accum, move |acc, (($($B,)*), x)| f(acc, ($($B,)* x, )))
+ }
+ }
+
+ #[allow(non_snake_case)]
+        impl<X, Iter, $($B),*> DoubleEndedIterator for ConsTuples<Iter, (($($B,)*), X)>
+ where Iter: DoubleEndedIterator<Item = (($($B,)*), X)>,
+ {
+ fn next_back(&mut self) -> Option<Self::Item> {
+                self.iter.next_back().map(|(($($B,)*), x)| ($($B,)* x, ))
+ }
+ }
+
+ );
+);
+
+impl_cons_iter!(A, B, C, D, E, F, G, H, I, J, K, L,);
+
+/// An iterator that maps an iterator of tuples like
+/// `((A, B), C)` to an iterator of `(A, B, C)`.
+///
+/// Used by the `iproduct!()` macro.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+#[derive(Debug)]
+pub struct ConsTuples<I, J>
+ where I: Iterator<Item=J>,
+{
+ iter: I,
+}
+
+impl<I, J> Clone for ConsTuples<I, J>
+ where I: Clone + Iterator<Item=J>,
+{
+ clone_fields!(iter);
+}
+
+/// Create an iterator that maps for example iterators of
+/// `((A, B), C)` to `(A, B, C)`.
+pub fn cons_tuples<I, J>(iterable: I) -> ConsTuples<I::IntoIter, J>
+ where I: IntoIterator<Item=J>
+{
+ ConsTuples { iter: iterable.into_iter() }
+}
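(Editor's note: `ConsTuples` is internal; users meet it indirectly through the `iproduct!` macro, which flattens the nested pairs produced by chained cartesian products into one flat tuple. A small sketch with invented values:)

    use itertools::iproduct;

    let triples: Vec<(i32, i32, char)> =
        iproduct!(0..2, 0..2, "ab".chars()).collect();
    assert_eq!(triples.len(), 8);
    assert_eq!(triples[0], (0, 0, 'a'));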
diff --git a/rust/hw/char/pl011/vendor/itertools/src/diff.rs b/rust/hw/char/pl011/vendor/itertools/src/diff.rs
new file mode 100644
index 0000000000..1731f0639f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/diff.rs
@@ -0,0 +1,61 @@
+//! "Diff"ing iterators for caching elements to sequential collections without
requiring the new
+//! elements' iterator to be `Clone`.
+//!
+//! - [`Diff`] (produced by the [`diff_with`] function)
+//!   describes the difference between two non-`Clone` iterators `I` and `J` after breaking ASAP from
+//! a lock-step comparison.
+
+use crate::free::put_back;
+use crate::structs::PutBack;
+
+/// A type returned by the [`diff_with`] function.
+///
+/// `Diff` represents the way in which the elements yielded by the iterator `I` differ to some
+/// iterator `J`.
+pub enum Diff<I, J>
+ where I: Iterator,
+ J: Iterator
+{
+ /// The index of the first non-matching element along with both iterators' remaining elements
+ /// starting with the first mis-match.
+ FirstMismatch(usize, PutBack<I>, PutBack<J>),
+ /// The total number of elements that were in `J` along with the remaining elements of `I`.
+ Shorter(usize, PutBack<I>),
+ /// The total number of elements that were in `I` along with the remaining elements of `J`.
+ Longer(usize, PutBack<J>),
+}
+
+/// Compares every element yielded by both `i` and `j` with the given function in lock-step and
+/// returns a [`Diff`] which describes how `j` differs from `i`.
+///
+/// If the number of elements yielded by `j` is less than the number of elements yielded by `i`,
+/// the number of `j` elements yielded will be returned along with `i`'s remaining elements as
+/// `Diff::Shorter`.
+///
+/// If the two elements of a step differ, the index of those elements along with the remaining
+/// elements of both `i` and `j` are returned as `Diff::FirstMismatch`.
+///
+/// If `i` becomes exhausted before `j` becomes exhausted, the number of elements in `i` along with
+/// the remaining `j` elements will be returned as `Diff::Longer`.
+pub fn diff_with<I, J, F>(i: I, j: J, is_equal: F)
+ -> Option<Diff<I::IntoIter, J::IntoIter>>
+ where I: IntoIterator,
+ J: IntoIterator,
+ F: Fn(&I::Item, &J::Item) -> bool
+{
+ let mut i = i.into_iter();
+ let mut j = j.into_iter();
+ let mut idx = 0;
+ while let Some(i_elem) = i.next() {
+ match j.next() {
+ None => return Some(Diff::Shorter(idx, put_back(i).with_value(i_elem))),
+ Some(j_elem) => if !is_equal(&i_elem, &j_elem) {
+ let remaining_i = put_back(i).with_value(i_elem);
+ let remaining_j = put_back(j).with_value(j_elem);
+ return Some(Diff::FirstMismatch(idx, remaining_i, remaining_j));
+ },
+ }
+ idx += 1;
+ }
+ j.next().map(|j_elem| Diff::Longer(idx, put_back(j).with_value(j_elem)))
+}
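As a quick orientation for review, a usage sketch of `diff_with` (assuming the crate's usual re-exports `itertools::diff_with` and `itertools::Diff`; the values are illustrative):

```rust
use itertools::{diff_with, Diff};

fn main() {
    let left = vec![1, 2, 3, 4];
    let right = vec![1, 2, 9, 4];
    match diff_with(left, right, |a, b| a == b) {
        // The first mismatch is at index 2; both put-back iterators
        // resume from the mismatching elements (3 and 9).
        Some(Diff::FirstMismatch(idx, mut rest_i, mut rest_j)) => {
            assert_eq!(idx, 2);
            assert_eq!(rest_i.next(), Some(3));
            assert_eq!(rest_j.next(), Some(9));
        }
        _ => unreachable!(),
    }
}
```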
diff --git a/rust/hw/char/pl011/vendor/itertools/src/duplicates_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/duplicates_impl.rs
new file mode 100644
index 0000000000..28eda44a97
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/duplicates_impl.rs
@@ -0,0 +1,216 @@
+use std::hash::Hash;
+
+mod private {
+ use std::collections::HashMap;
+ use std::hash::Hash;
+ use std::fmt;
+
+ #[derive(Clone)]
+ #[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+ pub struct DuplicatesBy<I: Iterator, Key, F> {
+ pub(crate) iter: I,
+ pub(crate) meta: Meta<Key, F>,
+ }
+
+ impl<I, V, F> fmt::Debug for DuplicatesBy<I, V, F>
+ where
+ I: Iterator + fmt::Debug,
+ V: fmt::Debug + Hash + Eq,
+ {
+ debug_fmt_fields!(DuplicatesBy, iter, meta.used);
+ }
+
+ impl<I: Iterator, Key: Eq + Hash, F> DuplicatesBy<I, Key, F> {
+ pub(crate) fn new(iter: I, key_method: F) -> Self {
+ DuplicatesBy {
+ iter,
+ meta: Meta {
+ used: HashMap::new(),
+ pending: 0,
+ key_method,
+ },
+ }
+ }
+ }
+
+ #[derive(Clone)]
+ pub struct Meta<Key, F> {
+ used: HashMap<Key, bool>,
+ pending: usize,
+ key_method: F,
+ }
+
+ impl<Key, F> Meta<Key, F>
+ where
+ Key: Eq + Hash,
+ {
+ /// Takes an item and returns it back to the caller if it's the second time we see it.
+ /// Otherwise the item is consumed and None is returned
+ #[inline(always)]
+ fn filter<I>(&mut self, item: I) -> Option<I>
+ where
+ F: KeyMethod<Key, I>,
+ {
+ let kv = self.key_method.make(item);
+ match self.used.get_mut(kv.key_ref()) {
+ None => {
+ self.used.insert(kv.key(), false);
+ self.pending += 1;
+ None
+ }
+ Some(true) => None,
+ Some(produced) => {
+ *produced = true;
+ self.pending -= 1;
+ Some(kv.value())
+ }
+ }
+ }
+ }
+
+ impl<I, Key, F> Iterator for DuplicatesBy<I, Key, F>
+ where
+ I: Iterator,
+ Key: Eq + Hash,
+ F: KeyMethod<Key, I::Item>,
+ {
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let DuplicatesBy { iter, meta } = self;
+ iter.find_map(|v| meta.filter(v))
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (_, hi) = self.iter.size_hint();
+ let hi = hi.map(|hi| {
+ if hi <= self.meta.pending {
+ // fewer or equally many iter-remaining elements than pending elements
+ // => at most, each iter-remaining element is matched
+ hi
+ } else {
+ // fewer pending elements than iter-remaining elements
+ // => at most:
+ // * each pending element is matched
+ // * the other iter-remaining elements come in pairs
+ self.meta.pending + (hi - self.meta.pending) / 2
+ }
+ });
+ // The lower bound is always 0 since we might only get unique items from now on
+ (0, hi)
+ }
+ }
+
+ impl<I, Key, F> DoubleEndedIterator for DuplicatesBy<I, Key, F>
+ where
+ I: DoubleEndedIterator,
+ Key: Eq + Hash,
+ F: KeyMethod<Key, I::Item>,
+ {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ let DuplicatesBy { iter, meta } = self;
+ iter.rev().find_map(|v| meta.filter(v))
+ }
+ }
+
+ /// A keying method for use with `DuplicatesBy`
+ pub trait KeyMethod<K, V> {
+ type Container: KeyXorValue<K, V>;
+
+ fn make(&mut self, value: V) -> Self::Container;
+ }
+
+ /// Apply the identity function to elements before checking them for equality.
+ #[derive(Debug)]
+ pub struct ById;
+ impl<V> KeyMethod<V, V> for ById {
+ type Container = JustValue<V>;
+
+ fn make(&mut self, v: V) -> Self::Container {
+ JustValue(v)
+ }
+ }
+
+ /// Apply a user-supplied function to elements before checking them for equality.
+ pub struct ByFn<F>(pub(crate) F);
+ impl<F> fmt::Debug for ByFn<F> {
+ debug_fmt_fields!(ByFn,);
+ }
+ impl<K, V, F> KeyMethod<K, V> for ByFn<F>
+ where
+ F: FnMut(&V) -> K,
+ {
+ type Container = KeyValue<K, V>;
+
+ fn make(&mut self, v: V) -> Self::Container {
+ KeyValue((self.0)(&v), v)
+ }
+ }
+
+ // Implementors of this trait can hold onto a key and a value but only give access to one of them
+ // at a time. This allows the key and the value to be the same value internally
+ pub trait KeyXorValue<K, V> {
+ fn key_ref(&self) -> &K;
+ fn key(self) -> K;
+ fn value(self) -> V;
+ }
+
+ #[derive(Debug)]
+ pub struct KeyValue<K, V>(K, V);
+ impl<K, V> KeyXorValue<K, V> for KeyValue<K, V> {
+ fn key_ref(&self) -> &K {
+ &self.0
+ }
+ fn key(self) -> K {
+ self.0
+ }
+ fn value(self) -> V {
+ self.1
+ }
+ }
+
+ #[derive(Debug)]
+ pub struct JustValue<V>(V);
+ impl<V> KeyXorValue<V, V> for JustValue<V> {
+ fn key_ref(&self) -> &V {
+ &self.0
+ }
+ fn key(self) -> V {
+ self.0
+ }
+ fn value(self) -> V {
+ self.0
+ }
+ }
+}
+
+/// An iterator adapter to filter for duplicate elements.
+///
+/// See [`.duplicates_by()`](crate::Itertools::duplicates_by) for more information.
+pub type DuplicatesBy<I, V, F> = private::DuplicatesBy<I, V, private::ByFn<F>>;
+
+/// Create a new `DuplicatesBy` iterator.
+pub fn duplicates_by<I, Key, F>(iter: I, f: F) -> DuplicatesBy<I, Key, F>
+where
+ Key: Eq + Hash,
+ F: FnMut(&I::Item) -> Key,
+ I: Iterator,
+{
+ DuplicatesBy::new(iter, private::ByFn(f))
+}
+
+/// An iterator adapter to filter out duplicate elements.
+///
+/// See [`.duplicates()`](crate::Itertools::duplicates) for more information.
+pub type Duplicates<I> = private::DuplicatesBy<I, <I as Iterator>::Item, private::ById>;
+
+/// Create a new `Duplicates` iterator.
+pub fn duplicates<I>(iter: I) -> Duplicates<I>
+where
+ I: Iterator,
+ I::Item: Eq + Hash,
+{
+ Duplicates::new(iter, private::ById)
+}
+
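For reference, these adaptors surface as `Itertools::duplicates` and `Itertools::duplicates_by`; a small usage sketch (illustrative only, not part of the vendored sources):

```rust
use itertools::Itertools;

fn main() {
    // An element is yielded the first time it is seen a *second* time.
    let dups: Vec<_> = [1, 2, 1, 3, 2, 1].iter().duplicates().collect();
    assert_eq!(dups, vec![&1, &2]);

    // `duplicates_by` derives the key to compare from each element.
    let words = ["apple", "ant", "bee", "bear"];
    let dup_initials: Vec<_> = words.iter().duplicates_by(|w| w.chars().next()).collect();
    assert_eq!(dup_initials, vec![&"ant", &"bear"]);
}
```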
diff --git a/rust/hw/char/pl011/vendor/itertools/src/either_or_both.rs b/rust/hw/char/pl011/vendor/itertools/src/either_or_both.rs
new file mode 100644
index 0000000000..cf65fe7885
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/either_or_both.rs
@@ -0,0 +1,495 @@
+use core::ops::{Deref, DerefMut};
+
+use crate::EitherOrBoth::*;
+
+use either::Either;
+
+/// Value that either holds a single A or B, or both.
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub enum EitherOrBoth<A, B> {
+ /// Both values are present.
+ Both(A, B),
+ /// Only the left value of type `A` is present.
+ Left(A),
+ /// Only the right value of type `B` is present.
+ Right(B),
+}
+
+impl<A, B> EitherOrBoth<A, B> {
+ /// If `Left`, or `Both`, return true. Otherwise, return false.
+ pub fn has_left(&self) -> bool {
+ self.as_ref().left().is_some()
+ }
+
+ /// If `Right`, or `Both`, return true, otherwise, return false.
+ pub fn has_right(&self) -> bool {
+ self.as_ref().right().is_some()
+ }
+
+ /// If `Left`, return true. Otherwise, return false.
+ /// Exclusive version of [`has_left`](EitherOrBoth::has_left).
+ pub fn is_left(&self) -> bool {
+ match *self {
+ Left(_) => true,
+ _ => false,
+ }
+ }
+
+ /// If `Right`, return true. Otherwise, return false.
+ /// Exclusive version of [`has_right`](EitherOrBoth::has_right).
+ pub fn is_right(&self) -> bool {
+ match *self {
+ Right(_) => true,
+ _ => false,
+ }
+ }
+
+ /// If `Both`, return true. Otherwise, return false.
+ pub fn is_both(&self) -> bool {
+ self.as_ref().both().is_some()
+ }
+
+ /// If `Left`, or `Both`, return `Some` with the left value. Otherwise, return `None`.
+ pub fn left(self) -> Option<A> {
+ match self {
+ Left(left) | Both(left, _) => Some(left),
+ _ => None,
+ }
+ }
+
+ /// If `Right`, or `Both`, return `Some` with the right value. Otherwise, return `None`.
+ pub fn right(self) -> Option<B> {
+ match self {
+ Right(right) | Both(_, right) => Some(right),
+ _ => None,
+ }
+ }
+
+ /// If `Left`, return `Some` with the left value. If `Right` or `Both`, return `None`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// // On the `Left` variant.
+ /// # use itertools::{EitherOrBoth, EitherOrBoth::{Left, Right, Both}};
+ /// let x: EitherOrBoth<_, ()> = Left("bonjour");
+ /// assert_eq!(x.just_left(), Some("bonjour"));
+ ///
+ /// // On the `Right` variant.
+ /// let x: EitherOrBoth<(), _> = Right("hola");
+ /// assert_eq!(x.just_left(), None);
+ ///
+ /// // On the `Both` variant.
+ /// let x = Both("bonjour", "hola");
+ /// assert_eq!(x.just_left(), None);
+ /// ```
+ pub fn just_left(self) -> Option<A> {
+ match self {
+ Left(left) => Some(left),
+ _ => None,
+ }
+ }
+
+ /// If `Right`, return `Some` with the right value. If `Left` or `Both`, return `None`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// // On the `Left` variant.
+ /// # use itertools::{EitherOrBoth::{Left, Right, Both}, EitherOrBoth};
+ /// let x: EitherOrBoth<_, ()> = Left("auf wiedersehen");
+ /// assert_eq!(x.just_right(), None);
+ ///
+ /// // On the `Right` variant.
+ /// let x: EitherOrBoth<(), _> = Right("adios");
+ /// assert_eq!(x.just_right(), Some("adios"));
+ ///
+ /// // On the `Both` variant.
+ /// let x = Both("auf wiedersehen", "adios");
+ /// assert_eq!(x.just_right(), None);
+ /// ```
+ pub fn just_right(self) -> Option<B> {
+ match self {
+ Right(right) => Some(right),
+ _ => None,
+ }
+ }
+
+ /// If `Both`, return `Some` containing the left and right values. Otherwise, return `None`.
+ pub fn both(self) -> Option<(A, B)> {
+ match self {
+ Both(a, b) => Some((a, b)),
+ _ => None,
+ }
+ }
+
+ /// If `Left` or `Both`, return the left value. Otherwise, convert the right value and return it.
+ pub fn into_left(self) -> A
+ where
+ B: Into<A>,
+ {
+ match self {
+ Left(a) | Both(a, _) => a,
+ Right(b) => b.into(),
+ }
+ }
+
+ /// If `Right` or `Both`, return the right value. Otherwise, convert the left value and return it.
+ pub fn into_right(self) -> B
+ where
+ A: Into<B>,
+ {
+ match self {
+ Right(b) | Both(_, b) => b,
+ Left(a) => a.into(),
+ }
+ }
+
+ /// Converts from `&EitherOrBoth<A, B>` to `EitherOrBoth<&A, &B>`.
+ pub fn as_ref(&self) -> EitherOrBoth<&A, &B> {
+ match *self {
+ Left(ref left) => Left(left),
+ Right(ref right) => Right(right),
+ Both(ref left, ref right) => Both(left, right),
+ }
+ }
+
+ /// Converts from `&mut EitherOrBoth<A, B>` to `EitherOrBoth<&mut A, &mut B>`.
+ pub fn as_mut(&mut self) -> EitherOrBoth<&mut A, &mut B> {
+ match *self {
+ Left(ref mut left) => Left(left),
+ Right(ref mut right) => Right(right),
+ Both(ref mut left, ref mut right) => Both(left, right),
+ }
+ }
+
+ /// Converts from `&EitherOrBoth<A, B>` to `EitherOrBoth<&_, &_>` using the [`Deref`] trait.
+ pub fn as_deref(&self) -> EitherOrBoth<&A::Target, &B::Target>
+ where
+ A: Deref,
+ B: Deref,
+ {
+ match *self {
+ Left(ref left) => Left(left),
+ Right(ref right) => Right(right),
+ Both(ref left, ref right) => Both(left, right),
+ }
+ }
+
+ /// Converts from `&mut EitherOrBoth<A, B>` to `EitherOrBoth<&mut _, &mut _>` using the [`DerefMut`] trait.
+ pub fn as_deref_mut(&mut self) -> EitherOrBoth<&mut A::Target, &mut B::Target>
+ where
+ A: DerefMut,
+ B: DerefMut,
+ {
+ match *self {
+ Left(ref mut left) => Left(left),
+ Right(ref mut right) => Right(right),
+ Both(ref mut left, ref mut right) => Both(left, right),
+ }
+ }
+
+ /// Convert `EitherOrBoth<A, B>` to `EitherOrBoth<B, A>`.
+ pub fn flip(self) -> EitherOrBoth<B, A> {
+ match self {
+ Left(a) => Right(a),
+ Right(b) => Left(b),
+ Both(a, b) => Both(b, a),
+ }
+ }
+
+ /// Apply the function `f` on the value `a` in `Left(a)` or `Both(a, b)` variants if it is
+ /// present, rewrapping the result in `self`'s original variant.
+ pub fn map_left<F, M>(self, f: F) -> EitherOrBoth<M, B>
+ where
+ F: FnOnce(A) -> M,
+ {
+ match self {
+ Both(a, b) => Both(f(a), b),
+ Left(a) => Left(f(a)),
+ Right(b) => Right(b),
+ }
+ }
+
+ /// Apply the function `f` on the value `b` in `Right(b)` or `Both(a, b)` variants if it is
+ /// present, rewrapping the result in `self`'s original variant.
+ pub fn map_right<F, M>(self, f: F) -> EitherOrBoth<A, M>
+ where
+ F: FnOnce(B) -> M,
+ {
+ match self {
+ Left(a) => Left(a),
+ Right(b) => Right(f(b)),
+ Both(a, b) => Both(a, f(b)),
+ }
+ }
+
+ /// Apply the functions `f` and `g` on the values `a` and `b`, respectively,
+ /// found in `Left(a)`, `Right(b)`, or `Both(a, b)` variants.
+ /// The result is rewrapped in `self`'s original variant.
+ pub fn map_any<F, L, G, R>(self, f: F, g: G) -> EitherOrBoth<L, R>
+ where
+ F: FnOnce(A) -> L,
+ G: FnOnce(B) -> R,
+ {
+ match self {
+ Left(a) => Left(f(a)),
+ Right(b) => Right(g(b)),
+ Both(a, b) => Both(f(a), g(b)),
+ }
+ }
+
+ /// Apply the function `f` on the value `a` in `Left(a)` or `Both(a, _)` variants if it is
+ /// present.
+ pub fn left_and_then<F, L>(self, f: F) -> EitherOrBoth<L, B>
+ where
+ F: FnOnce(A) -> EitherOrBoth<L, B>,
+ {
+ match self {
+ Left(a) | Both(a, _) => f(a),
+ Right(b) => Right(b),
+ }
+ }
+
+ /// Apply the function `f` on the value `b`
+ /// in `Right(b)` or `Both(_, b)` variants if it is present.
+ pub fn right_and_then<F, R>(self, f: F) -> EitherOrBoth<A, R>
+ where
+ F: FnOnce(B) -> EitherOrBoth<A, R>,
+ {
+ match self {
+ Left(a) => Left(a),
+ Right(b) | Both(_, b) => f(b),
+ }
+ }
+
+ /// Returns a tuple consisting of the `l` and `r` in `Both(l, r)`, if present.
+ /// Otherwise, returns the wrapped value for the present element, and the supplied
+ /// value for the other. The first (`l`) argument is used for a missing `Left`
+ /// value. The second (`r`) argument is used for a missing `Right` value.
+ ///
+ /// Arguments passed to `or` are eagerly evaluated; if you are passing
+ /// the result of a function call, it is recommended to use [`or_else`],
+ /// which is lazily evaluated.
+ ///
+ /// [`or_else`]: EitherOrBoth::or_else
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use itertools::EitherOrBoth;
+ /// assert_eq!(EitherOrBoth::Both("tree", 1).or("stone", 5), ("tree", 1));
+ /// assert_eq!(EitherOrBoth::Left("tree").or("stone", 5), ("tree", 5));
+ /// assert_eq!(EitherOrBoth::Right(1).or("stone", 5), ("stone", 1));
+ /// ```
+ pub fn or(self, l: A, r: B) -> (A, B) {
+ match self {
+ Left(inner_l) => (inner_l, r),
+ Right(inner_r) => (l, inner_r),
+ Both(inner_l, inner_r) => (inner_l, inner_r),
+ }
+ }
+
+ /// Returns a tuple consisting of the `l` and `r` in `Both(l, r)`, if present.
+ /// Otherwise, returns the wrapped value for the present element, and the [`default`](Default::default)
+ /// for the other.
+ pub fn or_default(self) -> (A, B)
+ where
+ A: Default,
+ B: Default,
+ {
+ match self {
+ EitherOrBoth::Left(l) => (l, B::default()),
+ EitherOrBoth::Right(r) => (A::default(), r),
+ EitherOrBoth::Both(l, r) => (l, r),
+ }
+ }
+
+ /// Returns a tuple consisting of the `l` and `r` in `Both(l, r)`, if present.
+ /// Otherwise, returns the wrapped value for the present element, and computes the
+ /// missing value with the supplied closure. The first argument (`l`) is used for a
+ /// missing `Left` value. The second argument (`r`) is used for a missing `Right` value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use itertools::EitherOrBoth;
+ /// let k = 10;
+ /// assert_eq!(EitherOrBoth::Both("tree", 1).or_else(|| "stone", || 2 *
k), ("tree", 1));
+ /// assert_eq!(EitherOrBoth::Left("tree").or_else(|| "stone", || 2 * k),
("tree", 20));
+ /// assert_eq!(EitherOrBoth::Right(1).or_else(|| "stone", || 2 * k),
("stone", 1));
+ /// ```
+ pub fn or_else<L: FnOnce() -> A, R: FnOnce() -> B>(self, l: L, r: R) ->
(A, B) {
+ match self {
+ Left(inner_l) => (inner_l, r()),
+ Right(inner_r) => (l(), inner_r),
+ Both(inner_l, inner_r) => (inner_l, inner_r),
+ }
+ }
+
+ /// Returns a mutable reference to the left value. If the left value is not present,
+ /// it is replaced with `val`.
+ pub fn left_or_insert(&mut self, val: A) -> &mut A {
+ self.left_or_insert_with(|| val)
+ }
+
+ /// Returns a mutable reference to the right value. If the right value is not present,
+ /// it is replaced with `val`.
+ pub fn right_or_insert(&mut self, val: B) -> &mut B {
+ self.right_or_insert_with(|| val)
+ }
+
+ /// If the left value is not present, replace it with the value computed by the closure `f`.
+ /// Returns a mutable reference to the now-present left value.
+ pub fn left_or_insert_with<F>(&mut self, f: F) -> &mut A
+ where
+ F: FnOnce() -> A,
+ {
+ match self {
+ Left(left) | Both(left, _) => left,
+ Right(_) => self.insert_left(f()),
+ }
+ }
+
+ /// If the right value is not present, replace it with the value computed by the closure `f`.
+ /// Returns a mutable reference to the now-present right value.
+ pub fn right_or_insert_with<F>(&mut self, f: F) -> &mut B
+ where
+ F: FnOnce() -> B,
+ {
+ match self {
+ Right(right) | Both(_, right) => right,
+ Left(_) => self.insert_right(f()),
+ }
+ }
+
+ /// Sets the `left` value of this instance, and returns a mutable reference to it.
+ /// Does not affect the `right` value.
+ ///
+ /// # Examples
+ /// ```
+ /// # use itertools::{EitherOrBoth, EitherOrBoth::{Left, Right, Both}};
+ ///
+ /// // Overwriting a pre-existing value.
+ /// let mut either: EitherOrBoth<_, ()> = Left(0_u32);
+ /// assert_eq!(*either.insert_left(69), 69);
+ ///
+ /// // Inserting a second value.
+ /// let mut either = Right("no");
+ /// assert_eq!(*either.insert_left("yes"), "yes");
+ /// assert_eq!(either, Both("yes", "no"));
+ /// ```
+ pub fn insert_left(&mut self, val: A) -> &mut A {
+ match self {
+ Left(left) | Both(left, _) => {
+ *left = val;
+ left
+ }
+ Right(right) => {
+ // This is like a map in place operation. We move out of the reference,
+ // change the value, and then move back into the reference.
+ unsafe {
+ // SAFETY: We know this pointer is valid for reading since we got it from a reference.
+ let right = std::ptr::read(right as *mut _);
+ // SAFETY: Again, we know the pointer is valid since we got it from a reference.
+ std::ptr::write(self as *mut _, Both(val, right));
+ }
+
+ if let Both(left, _) = self {
+ left
+ } else {
+ // SAFETY: The above pattern will always match, since we just
+ // set `self` equal to `Both`.
+ unsafe { std::hint::unreachable_unchecked() }
+ }
+ }
+ }
+ }
+
+ /// Sets the `right` value of this instance, and returns a mutable reference to it.
+ /// Does not affect the `left` value.
+ ///
+ /// # Examples
+ /// ```
+ /// # use itertools::{EitherOrBoth, EitherOrBoth::{Left, Both}};
+ /// // Overwriting a pre-existing value.
+ /// let mut either: EitherOrBoth<_, ()> = Left(0_u32);
+ /// assert_eq!(*either.insert_left(69), 69);
+ ///
+ /// // Inserting a second value.
+ /// let mut either = Left("what's");
+ /// assert_eq!(*either.insert_right(9 + 10), 21 - 2);
+ /// assert_eq!(either, Both("what's", 9+10));
+ /// ```
+ pub fn insert_right(&mut self, val: B) -> &mut B {
+ match self {
+ Right(right) | Both(_, right) => {
+ *right = val;
+ right
+ }
+ Left(left) => {
+ // This is like a map in place operation. We move out of the reference,
+ // change the value, and then move back into the reference.
+ unsafe {
+ // SAFETY: We know this pointer is valid for reading since we got it from a reference.
+ let left = std::ptr::read(left as *mut _);
+ // SAFETY: Again, we know the pointer is valid since we got it from a reference.
+ std::ptr::write(self as *mut _, Both(left, val));
+ }
+ if let Both(_, right) = self {
+ right
+ } else {
+ // SAFETY: The above pattern will always match, since we just
+ // set `self` equal to `Both`.
+ unsafe { std::hint::unreachable_unchecked() }
+ }
+ }
+ }
+ }
+
+ /// Set `self` to `Both(..)`, containing the specified left and right values,
+ /// and returns a mutable reference to those values.
+ pub fn insert_both(&mut self, left: A, right: B) -> (&mut A, &mut B) {
+ *self = Both(left, right);
+ if let Both(left, right) = self {
+ (left, right)
+ } else {
+ // SAFETY: The above pattern will always match, since we just
+ // set `self` equal to `Both`.
+ unsafe { std::hint::unreachable_unchecked() }
+ }
+ }
+}
+
+impl<T> EitherOrBoth<T, T> {
+ /// Return either value of left, right, or apply a function `f` to both values if both are present.
+ /// The input function has to return the same type as both Right and Left carry.
+ ///
+ /// # Examples
+ /// ```
+ /// # use itertools::EitherOrBoth;
+ /// assert_eq!(EitherOrBoth::Both(3, 7).reduce(u32::max), 7);
+ /// assert_eq!(EitherOrBoth::Left(3).reduce(u32::max), 3);
+ /// assert_eq!(EitherOrBoth::Right(7).reduce(u32::max), 7);
+ /// ```
+ pub fn reduce<F>(self, f: F) -> T
+ where
+ F: FnOnce(T, T) -> T,
+ {
+ match self {
+ Left(a) => a,
+ Right(b) => b,
+ Both(a, b) => f(a, b),
+ }
+ }
+}
+
+impl<A, B> Into<Option<Either<A, B>>> for EitherOrBoth<A, B> {
+ fn into(self) -> Option<Either<A, B>> {
+ match self {
+ EitherOrBoth::Left(l) => Some(Either::Left(l)),
+ EitherOrBoth::Right(r) => Some(Either::Right(r)),
+ _ => None,
+ }
+ }
+}
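`EitherOrBoth` is the item type yielded by `Itertools::zip_longest`; a short sketch of how the two interact (illustrative only, not part of the vendored sources):

```rust
use itertools::Itertools;
use itertools::EitherOrBoth::{Both, Left};

fn main() {
    let a = [1, 2, 3];
    let b = ["one", "two"];
    let zipped: Vec<_> = a.iter().zip_longest(b.iter()).collect();
    assert_eq!(zipped, vec![Both(&1, &"one"), Both(&2, &"two"), Left(&3)]);

    // `or` supplies defaults for whichever side is missing.
    let pairs: Vec<(i32, &str)> = a.iter()
        .zip_longest(b.iter())
        .map(|eob| eob.map_any(|x| *x, |s| *s).or(0, "<none>"))
        .collect();
    assert_eq!(pairs, vec![(1, "one"), (2, "two"), (3, "<none>")]);
}
```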
diff --git a/rust/hw/char/pl011/vendor/itertools/src/exactly_one_err.rs b/rust/hw/char/pl011/vendor/itertools/src/exactly_one_err.rs
new file mode 100644
index 0000000000..c54ae77ca9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/exactly_one_err.rs
@@ -0,0 +1,110 @@
+#[cfg(feature = "use_std")]
+use std::error::Error;
+use std::fmt::{Debug, Display, Formatter, Result as FmtResult};
+
+use std::iter::ExactSizeIterator;
+
+use either::Either;
+
+use crate::size_hint;
+
+/// Iterator returned for the error case of `Itertools::exactly_one()`.
+/// This iterator yields exactly the same elements as the input iterator.
+///
+/// During the execution of `exactly_one` the iterator must be mutated. This wrapper
+/// effectively "restores" the state of the input iterator when it's handed back.
+///
+/// This is very similar to `PutBackN` except this iterator only supports 0-2 elements and does not
+/// use a `Vec`.
+#[derive(Clone)]
+pub struct ExactlyOneError<I>
+where
+ I: Iterator,
+{
+ first_two: Option<Either<[I::Item; 2], I::Item>>,
+ inner: I,
+}
+
+impl<I> ExactlyOneError<I>
+where
+ I: Iterator,
+{
+ /// Creates a new `ExactlyOneErr` iterator.
+ pub(crate) fn new(first_two: Option<Either<[I::Item; 2], I::Item>>, inner: I) -> Self {
+ Self { first_two, inner }
+ }
+
+ fn additional_len(&self) -> usize {
+ match self.first_two {
+ Some(Either::Left(_)) => 2,
+ Some(Either::Right(_)) => 1,
+ None => 0,
+ }
+ }
+}
+
+impl<I> Iterator for ExactlyOneError<I>
+where
+ I: Iterator,
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.first_two.take() {
+ Some(Either::Left([first, second])) => {
+ self.first_two = Some(Either::Right(second));
+ Some(first)
+ },
+ Some(Either::Right(second)) => {
+ Some(second)
+ }
+ None => {
+ self.inner.next()
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::add_scalar(self.inner.size_hint(), self.additional_len())
+ }
+}
+
+
+impl<I> ExactSizeIterator for ExactlyOneError<I> where I: ExactSizeIterator {}
+
+impl<I> Display for ExactlyOneError<I>
+ where I: Iterator,
+{
+ fn fmt(&self, f: &mut Formatter) -> FmtResult {
+ let additional = self.additional_len();
+ if additional > 0 {
+ write!(f, "got at least 2 elements when exactly one was expected")
+ } else {
+ write!(f, "got zero elements when exactly one was expected")
+ }
+ }
+}
+
+impl<I> Debug for ExactlyOneError<I>
+ where I: Iterator + Debug,
+ I::Item: Debug,
+{
+ fn fmt(&self, f: &mut Formatter) -> FmtResult {
+ match &self.first_two {
+ Some(Either::Left([first, second])) => {
+ write!(f, "ExactlyOneError[First: {:?}, Second: {:?},
RemainingIter: {:?}]", first, second, self.inner)
+ },
+ Some(Either::Right(second)) => {
+ write!(f, "ExactlyOneError[Second: {:?}, RemainingIter:
{:?}]", second, self.inner)
+ }
+ None => {
+ write!(f, "ExactlyOneError[RemainingIter: {:?}]", self.inner)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "use_std")]
+impl<I> Error for ExactlyOneError<I> where I: Iterator + Debug, I::Item: Debug, {}
+
+
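A brief sketch of the `Itertools::exactly_one` behaviour this error type supports (illustrative only):

```rust
use itertools::Itertools;

fn main() {
    // Exactly one element: Ok with that element.
    assert_eq!(vec![42].into_iter().exactly_one().ok(), Some(42));

    // Zero or several elements: Err, and the error iterator replays
    // everything the input would have yielded.
    let err = vec![1, 2, 3].into_iter().exactly_one().unwrap_err();
    assert_eq!(err.collect::<Vec<_>>(), vec![1, 2, 3]);
}
```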
diff --git a/rust/hw/char/pl011/vendor/itertools/src/extrema_set.rs b/rust/hw/char/pl011/vendor/itertools/src/extrema_set.rs
new file mode 100644
index 0000000000..ae128364c5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/extrema_set.rs
@@ -0,0 +1,48 @@
+use std::cmp::Ordering;
+
+/// Implementation guts for `min_set`, `min_set_by`, and `min_set_by_key`.
+pub fn min_set_impl<I, K, F, Compare>(
+ mut it: I,
+ mut key_for: F,
+ mut compare: Compare,
+) -> Vec<I::Item>
+where
+ I: Iterator,
+ F: FnMut(&I::Item) -> K,
+ Compare: FnMut(&I::Item, &I::Item, &K, &K) -> Ordering,
+{
+ match it.next() {
+ None => Vec::new(),
+ Some(element) => {
+ let mut current_key = key_for(&element);
+ let mut result = vec![element];
+ it.for_each(|element| {
+ let key = key_for(&element);
+ match compare(&element, &result[0], &key, &current_key) {
+ Ordering::Less => {
+ result.clear();
+ result.push(element);
+ current_key = key;
+ }
+ Ordering::Equal => {
+ result.push(element);
+ }
+ Ordering::Greater => {}
+ }
+ });
+ result
+ }
+ }
+}
+
+/// Implementation guts for `max_set`, `max_set_by`, and `max_set_by_key`.
+pub fn max_set_impl<I, K, F, Compare>(it: I, key_for: F, mut compare: Compare) -> Vec<I::Item>
+where
+ I: Iterator,
+ F: FnMut(&I::Item) -> K,
+ Compare: FnMut(&I::Item, &I::Item, &K, &K) -> Ordering,
+{
+ min_set_impl(it, key_for, |it1, it2, key1, key2| {
+ compare(it2, it1, key2, key1)
+ })
+}
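These helpers back the `Itertools::min_set`/`max_set` family; a usage sketch (illustrative only, assuming the crate's default `use_std` feature and this itertools release providing the `min_set`/`max_set` methods):

```rust
use itertools::Itertools;

fn main() {
    let nums = [3, 1, 4, 1, 5];
    // All elements tied for the minimum / maximum are returned.
    assert_eq!(nums.iter().min_set(), vec![&1, &1]);
    assert_eq!(nums.iter().max_set(), vec![&5]);

    // `max_set_by_key` keeps every element whose key is maximal.
    let words = ["bye", "hi", "cat", "ox"];
    assert_eq!(words.iter().max_set_by_key(|w| w.len()), vec![&"bye", &"cat"]);
}
```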
diff --git a/rust/hw/char/pl011/vendor/itertools/src/flatten_ok.rs b/rust/hw/char/pl011/vendor/itertools/src/flatten_ok.rs
new file mode 100644
index 0000000000..21ae1f7223
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/flatten_ok.rs
@@ -0,0 +1,165 @@
+use crate::size_hint;
+use std::{
+ fmt,
+ iter::{DoubleEndedIterator, FusedIterator},
+};
+
+pub fn flatten_ok<I, T, E>(iter: I) -> FlattenOk<I, T, E>
+where
+ I: Iterator<Item = Result<T, E>>,
+ T: IntoIterator,
+{
+ FlattenOk {
+ iter,
+ inner_front: None,
+ inner_back: None,
+ }
+}
+
+/// An iterator adaptor that flattens `Result::Ok` values and
+/// allows `Result::Err` values through unchanged.
+///
+/// See [`.flatten_ok()`](crate::Itertools::flatten_ok) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct FlattenOk<I, T, E>
+where
+ I: Iterator<Item = Result<T, E>>,
+ T: IntoIterator,
+{
+ iter: I,
+ inner_front: Option<T::IntoIter>,
+ inner_back: Option<T::IntoIter>,
+}
+
+impl<I, T, E> Iterator for FlattenOk<I, T, E>
+where
+ I: Iterator<Item = Result<T, E>>,
+ T: IntoIterator,
+{
+ type Item = Result<T::Item, E>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ // Handle the front inner iterator.
+ if let Some(inner) = &mut self.inner_front {
+ if let Some(item) = inner.next() {
+ return Some(Ok(item));
+ }
+
+ // This is necessary for the iterator to implement `FusedIterator`
+ // with only the original iterator being fused.
+ self.inner_front = None;
+ }
+
+ match self.iter.next() {
+ Some(Ok(ok)) => self.inner_front = Some(ok.into_iter()),
+ Some(Err(e)) => return Some(Err(e)),
+ None => {
+ // Handle the back inner iterator.
+ if let Some(inner) = &mut self.inner_back {
+ if let Some(item) = inner.next() {
+ return Some(Ok(item));
+ }
+
+ // This is necessary for the iterator to implement `FusedIterator`
+ // with only the original iterator being fused.
+ self.inner_back = None;
+ } else {
+ return None;
+ }
+ }
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let inner_hint = |inner: &Option<T::IntoIter>| {
+ inner
+ .as_ref()
+ .map(Iterator::size_hint)
+ .unwrap_or((0, Some(0)))
+ };
+ let inner_front = inner_hint(&self.inner_front);
+ let inner_back = inner_hint(&self.inner_back);
+ // The outer iterator `Ok` case could be (0, None) as we don't know its size_hint yet.
+ let outer = match self.iter.size_hint() {
+ (0, Some(0)) => (0, Some(0)),
+ _ => (0, None),
+ };
+
+ size_hint::add(size_hint::add(inner_front, inner_back), outer)
+ }
+}
+
+impl<I, T, E> DoubleEndedIterator for FlattenOk<I, T, E>
+where
+ I: DoubleEndedIterator<Item = Result<T, E>>,
+ T: IntoIterator,
+ T::IntoIter: DoubleEndedIterator,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ loop {
+ // Handle the back inner iterator.
+ if let Some(inner) = &mut self.inner_back {
+ if let Some(item) = inner.next_back() {
+ return Some(Ok(item));
+ }
+
+ // This is necessary for the iterator to implement `FusedIterator`
+ // with only the original iterator being fused.
+ self.inner_back = None;
+ }
+
+ match self.iter.next_back() {
+ Some(Ok(ok)) => self.inner_back = Some(ok.into_iter()),
+ Some(Err(e)) => return Some(Err(e)),
+ None => {
+ // Handle the front inner iterator.
+ if let Some(inner) = &mut self.inner_front {
+ if let Some(item) = inner.next_back() {
+ return Some(Ok(item));
+ }
+
+ // This is necessary for the iterator to implement `FusedIterator`
+ // with only the original iterator being fused.
+ self.inner_front = None;
+ } else {
+ return None;
+ }
+ }
+ }
+ }
+ }
+}
+
+impl<I, T, E> Clone for FlattenOk<I, T, E>
+where
+ I: Iterator<Item = Result<T, E>> + Clone,
+ T: IntoIterator,
+ T::IntoIter: Clone,
+{
+ clone_fields!(iter, inner_front, inner_back);
+}
+
+impl<I, T, E> fmt::Debug for FlattenOk<I, T, E>
+where
+ I: Iterator<Item = Result<T, E>> + fmt::Debug,
+ T: IntoIterator,
+ T::IntoIter: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("FlattenOk")
+ .field("iter", &self.iter)
+ .field("inner_front", &self.inner_front)
+ .field("inner_back", &self.inner_back)
+ .finish()
+ }
+}
+
+/// Only the iterator being flattened needs to implement [`FusedIterator`].
+impl<I, T, E> FusedIterator for FlattenOk<I, T, E>
+where
+ I: FusedIterator<Item = Result<T, E>>,
+ T: IntoIterator,
+{
+}
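A usage sketch of the public `Itertools::flatten_ok` adaptor built on this type (illustrative only):

```rust
use itertools::Itertools;

fn main() {
    let results = vec![Ok(vec![1, 2]), Err("boom"), Ok(vec![3])];
    // Ok values are flattened element by element; Err values pass through.
    let flat: Vec<Result<i32, &str>> = results.into_iter().flatten_ok().collect();
    assert_eq!(flat, vec![Ok(1), Ok(2), Err("boom"), Ok(3)]);
}
```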
diff --git a/rust/hw/char/pl011/vendor/itertools/src/format.rs b/rust/hw/char/pl011/vendor/itertools/src/format.rs
new file mode 100644
index 0000000000..c4cb65dcb2
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/format.rs
@@ -0,0 +1,168 @@
+use std::cell::Cell;
+use std::fmt;
+
+/// Format all iterator elements lazily, separated by `sep`.
+///
+/// The format value can only be formatted once, after that the iterator is
+/// exhausted.
+///
+/// See [`.format_with()`](crate::Itertools::format_with) for more information.
+pub struct FormatWith<'a, I, F> {
+ sep: &'a str,
+ /// FormatWith uses interior mutability because Display::fmt takes &self.
+ inner: Cell<Option<(I, F)>>,
+}
+
+/// Format all iterator elements lazily, separated by `sep`.
+///
+/// The format value can only be formatted once, after that the iterator is
+/// exhausted.
+///
+/// See [`.format()`](crate::Itertools::format)
+/// for more information.
+pub struct Format<'a, I> {
+ sep: &'a str,
+ /// Format uses interior mutability because Display::fmt takes &self.
+ inner: Cell<Option<I>>,
+}
+
+pub fn new_format<I, F>(iter: I, separator: &str, f: F) -> FormatWith<'_, I, F>
+where
+ I: Iterator,
+ F: FnMut(I::Item, &mut dyn FnMut(&dyn fmt::Display) -> fmt::Result) -> fmt::Result,
+{
+ FormatWith {
+ sep: separator,
+ inner: Cell::new(Some((iter, f))),
+ }
+}
+
+pub fn new_format_default<I>(iter: I, separator: &str) -> Format<'_, I>
+where
+ I: Iterator,
+{
+ Format {
+ sep: separator,
+ inner: Cell::new(Some(iter)),
+ }
+}
+
+impl<'a, I, F> fmt::Display for FormatWith<'a, I, F>
+where
+ I: Iterator,
+ F: FnMut(I::Item, &mut dyn FnMut(&dyn fmt::Display) -> fmt::Result) -> fmt::Result,
+{
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let (mut iter, mut format) = match self.inner.take() {
+ Some(t) => t,
+ None => panic!("FormatWith: was already formatted once"),
+ };
+
+ if let Some(fst) = iter.next() {
+ format(fst, &mut |disp: &dyn fmt::Display| disp.fmt(f))?;
+ iter.try_for_each(|elt| {
+ if !self.sep.is_empty() {
+ f.write_str(self.sep)?;
+ }
+ format(elt, &mut |disp: &dyn fmt::Display| disp.fmt(f))
+ })?;
+ }
+ Ok(())
+ }
+}
+
+impl<'a, I> Format<'a, I>
+where
+ I: Iterator,
+{
+ fn format(
+ &self,
+ f: &mut fmt::Formatter,
+ cb: fn(&I::Item, &mut fmt::Formatter) -> fmt::Result,
+ ) -> fmt::Result {
+ let mut iter = match self.inner.take() {
+ Some(t) => t,
+ None => panic!("Format: was already formatted once"),
+ };
+
+ if let Some(fst) = iter.next() {
+ cb(&fst, f)?;
+ iter.try_for_each(|elt| {
+ if !self.sep.is_empty() {
+ f.write_str(self.sep)?;
+ }
+ cb(&elt, f)
+ })?;
+ }
+ Ok(())
+ }
+}
+
+macro_rules! impl_format {
+ ($($fmt_trait:ident)*) => {
+ $(
+ impl<'a, I> fmt::$fmt_trait for Format<'a, I>
+ where I: Iterator,
+ I::Item: fmt::$fmt_trait,
+ {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.format(f, fmt::$fmt_trait::fmt)
+ }
+ }
+ )*
+ }
+}
+
+impl_format! {Display Debug UpperExp LowerExp UpperHex LowerHex Octal Binary Pointer}
+
+impl<'a, I, F> Clone for FormatWith<'a, I, F>
+where
+ (I, F): Clone,
+{
+ fn clone(&self) -> Self {
+ struct PutBackOnDrop<'r, 'a, I, F> {
+ into: &'r FormatWith<'a, I, F>,
+ inner: Option<(I, F)>,
+ }
+ // This ensures we preserve the state of the original `FormatWith` if `Clone` panics
+ impl<'r, 'a, I, F> Drop for PutBackOnDrop<'r, 'a, I, F> {
+ fn drop(&mut self) {
+ self.into.inner.set(self.inner.take())
+ }
+ }
+ let pbod = PutBackOnDrop {
+ inner: self.inner.take(),
+ into: self,
+ };
+ Self {
+ inner: Cell::new(pbod.inner.clone()),
+ sep: self.sep,
+ }
+ }
+}
+
+impl<'a, I> Clone for Format<'a, I>
+where
+ I: Clone,
+{
+ fn clone(&self) -> Self {
+ struct PutBackOnDrop<'r, 'a, I> {
+ into: &'r Format<'a, I>,
+ inner: Option<I>,
+ }
+ // This ensures we preserve the state of the original `FormatWith` if `Clone` panics
+ impl<'r, 'a, I> Drop for PutBackOnDrop<'r, 'a, I> {
+ fn drop(&mut self) {
+ self.into.inner.set(self.inner.take())
+ }
+ }
+ let pbod = PutBackOnDrop {
+ inner: self.inner.take(),
+ into: self,
+ };
+ Self {
+ inner: Cell::new(pbod.inner.clone()),
+ sep: self.sep,
+ }
+ }
+}
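A usage sketch of the public `Itertools::format` and `format_with` entry points built on these types (illustrative only; note that each returned value may be formatted exactly once):

```rust
use itertools::Itertools;

fn main() {
    let data = [1.1, 2.72, -3.0];

    // `format` lazily forwards the surrounding format spec to every element.
    assert_eq!(format!("{:.1}", data.iter().format(", ")), "1.1, 2.7, -3.0");

    // `format_with` lets a closure decide how each element is rendered.
    let s = data
        .iter()
        .format_with(" | ", |elt, f| f(&format_args!("<{:.2}>", elt)))
        .to_string();
    assert_eq!(s, "<1.10> | <2.72> | <-3.00>");
}
```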
diff --git a/rust/hw/char/pl011/vendor/itertools/src/free.rs b/rust/hw/char/pl011/vendor/itertools/src/free.rs
new file mode 100644
index 0000000000..19e3e28694
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/free.rs
@@ -0,0 +1,286 @@
+//! Free functions that create iterator adaptors or call iterator methods.
+//!
+//! The benefit of free functions is that they accept any [`IntoIterator`] as
+//! argument, so the resulting code may be easier to read.
+
+#[cfg(feature = "use_alloc")]
+use std::fmt::Display;
+use std::iter::{self, Zip};
+#[cfg(feature = "use_alloc")]
+type VecIntoIter<T> = alloc::vec::IntoIter<T>;
+
+#[cfg(feature = "use_alloc")]
+use alloc::{
+ string::String,
+};
+
+use crate::Itertools;
+use crate::intersperse::{Intersperse, IntersperseWith};
+
+pub use crate::adaptors::{
+ interleave,
+ merge,
+ put_back,
+};
+#[cfg(feature = "use_alloc")]
+pub use crate::put_back_n_impl::put_back_n;
+#[cfg(feature = "use_alloc")]
+pub use crate::multipeek_impl::multipeek;
+#[cfg(feature = "use_alloc")]
+pub use crate::peek_nth::peek_nth;
+#[cfg(feature = "use_alloc")]
+pub use crate::kmerge_impl::kmerge;
+pub use crate::zip_eq_impl::zip_eq;
+pub use crate::merge_join::merge_join_by;
+#[cfg(feature = "use_alloc")]
+pub use crate::rciter_impl::rciter;
+
+/// Iterate `iterable` with a particular value inserted between each element.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::intersperse`].
+///
+/// ```
+/// use itertools::intersperse;
+///
+/// itertools::assert_equal(intersperse((0..3), 8), vec![0, 8, 1, 8, 2]);
+/// ```
+pub fn intersperse<I>(iterable: I, element: I::Item) -> Intersperse<I::IntoIter>
+ where I: IntoIterator,
+ <I as IntoIterator>::Item: Clone
+{
+ Itertools::intersperse(iterable.into_iter(), element)
+}
+
+/// Iterate `iterable` with a particular value created by a function inserted
+/// between each element.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::intersperse_with`].
+///
+/// ```
+/// use itertools::intersperse_with;
+///
+/// let mut i = 10;
+/// itertools::assert_equal(intersperse_with((0..3), || { i -= 1; i }), vec![0, 9, 1, 8, 2]);
+/// assert_eq!(i, 8);
+/// ```
+pub fn intersperse_with<I, F>(iterable: I, element: F) -> IntersperseWith<I::IntoIter, F>
+ where I: IntoIterator,
+ F: FnMut() -> I::Item
+{
+ Itertools::intersperse_with(iterable.into_iter(), element)
+}
+
+/// Iterate `iterable` with a running index.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::enumerate`].
+///
+/// ```
+/// use itertools::enumerate;
+///
+/// for (i, elt) in enumerate(&[1, 2, 3]) {
+/// /* loop body */
+/// }
+/// ```
+pub fn enumerate<I>(iterable: I) -> iter::Enumerate<I::IntoIter>
+ where I: IntoIterator
+{
+ iterable.into_iter().enumerate()
+}
+
+/// Iterate `iterable` in reverse.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::rev`].
+///
+/// ```
+/// use itertools::rev;
+///
+/// for elt in rev(&[1, 2, 3]) {
+/// /* loop body */
+/// }
+/// ```
+pub fn rev<I>(iterable: I) -> iter::Rev<I::IntoIter>
+ where I: IntoIterator,
+ I::IntoIter: DoubleEndedIterator
+{
+ iterable.into_iter().rev()
+}
+
+/// Converts the arguments to iterators and zips them.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::zip`].
+///
+/// ## Example
+///
+/// ```
+/// use itertools::zip;
+///
+/// let mut result: Vec<(i32, char)> = Vec::new();
+///
+/// for (a, b) in zip(&[1, 2, 3, 4, 5], &['a', 'b', 'c']) {
+/// result.push((*a, *b));
+/// }
+/// assert_eq!(result, vec![(1, 'a'),(2, 'b'),(3, 'c')]);
+/// ```
+#[deprecated(note="Use
[std::iter::zip](https://doc.rust-lang.org/std/iter/fn.zip.html) instead",
since="0.10.4")]
+pub fn zip<I, J>(i: I, j: J) -> Zip<I::IntoIter, J::IntoIter>
+ where I: IntoIterator,
+ J: IntoIterator
+{
+ i.into_iter().zip(j)
+}
+
+
+/// Takes two iterables and creates a new iterator over both in sequence.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::chain`].
+///
+/// ## Example
+/// ```
+/// use itertools::chain;
+///
+/// let mut result:Vec<i32> = Vec::new();
+///
+/// for element in chain(&[1, 2, 3], &[4]) {
+/// result.push(*element);
+/// }
+/// assert_eq!(result, vec![1, 2, 3, 4]);
+/// ```
+pub fn chain<I, J>(i: I, j: J) -> iter::Chain<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
+ where I: IntoIterator,
+ J: IntoIterator<Item = I::Item>
+{
+ i.into_iter().chain(j)
+}
+
+/// Create an iterator that clones each element from &T to T
+///
+/// [`IntoIterator`] enabled version of [`Iterator::cloned`].
+///
+/// ```
+/// use itertools::cloned;
+///
+/// assert_eq!(cloned(b"abc").next(), Some(b'a'));
+/// ```
+pub fn cloned<'a, I, T: 'a>(iterable: I) -> iter::Cloned<I::IntoIter>
+ where I: IntoIterator<Item=&'a T>,
+ T: Clone,
+{
+ iterable.into_iter().cloned()
+}
+
+/// Perform a fold operation over the iterable.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::fold`].
+///
+/// ```
+/// use itertools::fold;
+///
+/// assert_eq!(fold(&[1., 2., 3.], 0., |a, &b| f32::max(a, b)), 3.);
+/// ```
+pub fn fold<I, B, F>(iterable: I, init: B, f: F) -> B
+ where I: IntoIterator,
+ F: FnMut(B, I::Item) -> B
+{
+ iterable.into_iter().fold(init, f)
+}
+
+/// Test whether the predicate holds for all elements in the iterable.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::all`].
+///
+/// ```
+/// use itertools::all;
+///
+/// assert!(all(&[1, 2, 3], |elt| *elt > 0));
+/// ```
+pub fn all<I, F>(iterable: I, f: F) -> bool
+ where I: IntoIterator,
+ F: FnMut(I::Item) -> bool
+{
+ iterable.into_iter().all(f)
+}
+
+/// Test whether the predicate holds for any elements in the iterable.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::any`].
+///
+/// ```
+/// use itertools::any;
+///
+/// assert!(any(&[0, -1, 2], |elt| *elt > 0));
+/// ```
+pub fn any<I, F>(iterable: I, f: F) -> bool
+ where I: IntoIterator,
+ F: FnMut(I::Item) -> bool
+{
+ iterable.into_iter().any(f)
+}
+
+/// Return the maximum value of the iterable.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::max`].
+///
+/// ```
+/// use itertools::max;
+///
+/// assert_eq!(max(0..10), Some(9));
+/// ```
+pub fn max<I>(iterable: I) -> Option<I::Item>
+ where I: IntoIterator,
+ I::Item: Ord
+{
+ iterable.into_iter().max()
+}
+
+/// Return the minimum value of the iterable.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::min`].
+///
+/// ```
+/// use itertools::min;
+///
+/// assert_eq!(min(0..10), Some(0));
+/// ```
+pub fn min<I>(iterable: I) -> Option<I::Item>
+ where I: IntoIterator,
+ I::Item: Ord
+{
+ iterable.into_iter().min()
+}
+
+
+/// Combine all iterator elements into one String, separated by `sep`.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::join`].
+///
+/// ```
+/// use itertools::join;
+///
+/// assert_eq!(join(&[1, 2, 3], ", "), "1, 2, 3");
+/// ```
+#[cfg(feature = "use_alloc")]
+pub fn join<I>(iterable: I, sep: &str) -> String
+ where I: IntoIterator,
+ I::Item: Display
+{
+ iterable.into_iter().join(sep)
+}
+
+/// Sort all iterator elements into a new iterator in ascending order.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::sorted`].
+///
+/// ```
+/// use itertools::sorted;
+/// use itertools::assert_equal;
+///
+/// assert_equal(sorted("rust".chars()), "rstu".chars());
+/// ```
+#[cfg(feature = "use_alloc")]
+pub fn sorted<I>(iterable: I) -> VecIntoIter<I::Item>
+ where I: IntoIterator,
+ I::Item: Ord
+{
+ iterable.into_iter().sorted()
+}
+
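A combined usage sketch of a few of the free functions above (illustrative only, not part of the vendored sources):

```rust
use itertools::{chain, join, sorted};

fn main() {
    // Free functions take any IntoIterator, so slices work directly.
    let merged: Vec<i32> = chain(&[3, 1], &[2]).copied().collect();
    assert_eq!(join(sorted(merged), " < "), "1 < 2 < 3");
}
```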
diff --git a/rust/hw/char/pl011/vendor/itertools/src/group_map.rs b/rust/hw/char/pl011/vendor/itertools/src/group_map.rs
new file mode 100644
index 0000000000..a2d0ebb2ab
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/group_map.rs
@@ -0,0 +1,32 @@
+#![cfg(feature = "use_std")]
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::Iterator;
+
+/// Return a `HashMap` of keys mapped to a list of their corresponding values.
+///
+/// See [`.into_group_map()`](crate::Itertools::into_group_map)
+/// for more information.
+pub fn into_group_map<I, K, V>(iter: I) -> HashMap<K, Vec<V>>
+ where I: Iterator<Item=(K, V)>,
+ K: Hash + Eq,
+{
+ let mut lookup = HashMap::new();
+
+ iter.for_each(|(key, val)| {
+ lookup.entry(key).or_insert_with(Vec::new).push(val);
+ });
+
+ lookup
+}
+
+pub fn into_group_map_by<I, K, V>(iter: I, f: impl Fn(&V) -> K) -> HashMap<K, Vec<V>>
+ where
+ I: Iterator<Item=V>,
+ K: Hash + Eq,
+{
+ into_group_map(
+ iter.map(|v| (f(&v), v))
+ )
+}
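A usage sketch of the public `Itertools::into_group_map` and `into_group_map_by` methods backed by these helpers (illustrative only):

```rust
use itertools::Itertools;

fn main() {
    let data = vec![("fruit", "apple"), ("veg", "carrot"), ("fruit", "pear")];
    let grouped = data.into_iter().into_group_map();
    assert_eq!(grouped["fruit"], vec!["apple", "pear"]);
    assert_eq!(grouped["veg"], vec!["carrot"]);

    // `into_group_map_by` derives the key from each value.
    let by_parity = (0..6).into_group_map_by(|n| n % 2);
    assert_eq!(by_parity[&0], vec![0, 2, 4]);
    assert_eq!(by_parity[&1], vec![1, 3, 5]);
}
```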
diff --git a/rust/hw/char/pl011/vendor/itertools/src/groupbylazy.rs b/rust/hw/char/pl011/vendor/itertools/src/groupbylazy.rs
new file mode 100644
index 0000000000..80c6f09f32
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/groupbylazy.rs
@@ -0,0 +1,579 @@
+use std::cell::{Cell, RefCell};
+use alloc::vec::{self, Vec};
+
+/// A trait to unify `FnMut` for `GroupBy` with the chunk key in `IntoChunks`
+trait KeyFunction<A> {
+ type Key;
+ fn call_mut(&mut self, arg: A) -> Self::Key;
+}
+
+impl<A, K, F: ?Sized> KeyFunction<A> for F
+ where F: FnMut(A) -> K
+{
+ type Key = K;
+ #[inline]
+ fn call_mut(&mut self, arg: A) -> Self::Key {
+ (*self)(arg)
+ }
+}
+
+
+/// `ChunkIndex` acts like the grouping key function for `IntoChunks`
+#[derive(Debug, Clone)]
+struct ChunkIndex {
+ size: usize,
+ index: usize,
+ key: usize,
+}
+
+impl ChunkIndex {
+ #[inline(always)]
+ fn new(size: usize) -> Self {
+ ChunkIndex {
+ size,
+ index: 0,
+ key: 0,
+ }
+ }
+}
+
+impl<A> KeyFunction<A> for ChunkIndex {
+ type Key = usize;
+ #[inline(always)]
+ fn call_mut(&mut self, _arg: A) -> Self::Key {
+ if self.index == self.size {
+ self.key += 1;
+ self.index = 0;
+ }
+ self.index += 1;
+ self.key
+ }
+}
+
+#[derive(Clone)]
+struct GroupInner<K, I, F>
+ where I: Iterator
+{
+ key: F,
+ iter: I,
+ current_key: Option<K>,
+ current_elt: Option<I::Item>,
+ /// flag set if iterator is exhausted
+ done: bool,
+ /// Index of group we are currently buffering or visiting
+ top_group: usize,
+ /// Least index for which we still have elements buffered
+ oldest_buffered_group: usize,
+ /// Group index for `buffer[0]` -- the slots
+ /// bottom_group..oldest_buffered_group are unused and will be erased when
+ /// that range is large enough.
+ bottom_group: usize,
+ /// Buffered groups, from `bottom_group` (index 0) to `top_group`.
+ buffer: Vec<vec::IntoIter<I::Item>>,
+ /// index of last group iter that was dropped, usize::MAX == none
+ dropped_group: usize,
+}
+
+impl<K, I, F> GroupInner<K, I, F>
+ where I: Iterator,
+ F: for<'a> KeyFunction<&'a I::Item, Key=K>,
+ K: PartialEq,
+{
+ /// `client`: Index of group that requests next element
+ #[inline(always)]
+ fn step(&mut self, client: usize) -> Option<I::Item> {
+ /*
+ println!("client={}, bottom_group={}, oldest_buffered_group={},
top_group={}, buffers=[{}]",
+ client, self.bottom_group, self.oldest_buffered_group,
+ self.top_group,
+ self.buffer.iter().map(|elt| elt.len()).format(", "));
+ */
+ if client < self.oldest_buffered_group {
+ None
+ } else if client < self.top_group ||
+ (client == self.top_group &&
+ self.buffer.len() > self.top_group - self.bottom_group)
+ {
+ self.lookup_buffer(client)
+ } else if self.done {
+ None
+ } else if self.top_group == client {
+ self.step_current()
+ } else {
+ self.step_buffering(client)
+ }
+ }
+
+ #[inline(never)]
+ fn lookup_buffer(&mut self, client: usize) -> Option<I::Item> {
+ // if `bufidx` doesn't exist in self.buffer, it might be empty
+ let bufidx = client - self.bottom_group;
+ if client < self.oldest_buffered_group {
+ return None;
+ }
+ let elt = self.buffer.get_mut(bufidx).and_then(|queue| queue.next());
+ if elt.is_none() && client == self.oldest_buffered_group {
+ // FIXME: VecDeque is unfortunately not zero allocation when empty,
+ // so we do this job manually.
+ // `bottom_group..oldest_buffered_group` is unused, and if it's large enough, erase it.
+ self.oldest_buffered_group += 1;
+ // skip forward further empty queues too
+ while self.buffer.get(self.oldest_buffered_group - self.bottom_group)
+ .map_or(false, |buf| buf.len() == 0)
+ {
+ self.oldest_buffered_group += 1;
+ }
+
+ let nclear = self.oldest_buffered_group - self.bottom_group;
+ if nclear > 0 && nclear >= self.buffer.len() / 2 {
+ let mut i = 0;
+ self.buffer.retain(|buf| {
+ i += 1;
+ debug_assert!(buf.len() == 0 || i > nclear);
+ i > nclear
+ });
+ self.bottom_group = self.oldest_buffered_group;
+ }
+ }
+ elt
+ }
+
+ /// Take the next element from the iterator, and set the done
+ /// flag if exhausted. Must not be called after done.
+ #[inline(always)]
+ fn next_element(&mut self) -> Option<I::Item> {
+ debug_assert!(!self.done);
+ match self.iter.next() {
+ None => { self.done = true; None }
+ otherwise => otherwise,
+ }
+ }
+
+
+ #[inline(never)]
+ fn step_buffering(&mut self, client: usize) -> Option<I::Item> {
+ // requested a later group -- walk through the current group up to
+ // the requested group index, and buffer the elements (unless
+ // the group is marked as dropped).
+ // Because the `Groups` iterator is always the first to request
+ // each group index, client is the next index after top_group.
+ debug_assert!(self.top_group + 1 == client);
+ let mut group = Vec::new();
+
+ if let Some(elt) = self.current_elt.take() {
+ if self.top_group != self.dropped_group {
+ group.push(elt);
+ }
+ }
+ let mut first_elt = None; // first element of the next group
+
+ while let Some(elt) = self.next_element() {
+ let key = self.key.call_mut(&elt);
+ match self.current_key.take() {
+ None => {}
+ Some(old_key) => if old_key != key {
+ self.current_key = Some(key);
+ first_elt = Some(elt);
+ break;
+ },
+ }
+ self.current_key = Some(key);
+ if self.top_group != self.dropped_group {
+ group.push(elt);
+ }
+ }
+
+ if self.top_group != self.dropped_group {
+ self.push_next_group(group);
+ }
+ if first_elt.is_some() {
+ self.top_group += 1;
+ debug_assert!(self.top_group == client);
+ }
+ first_elt
+ }
+
+ fn push_next_group(&mut self, group: Vec<I::Item>) {
+ // When we add a new buffered group, fill up slots between oldest_buffered_group and top_group
+ while self.top_group - self.bottom_group > self.buffer.len() {
+ if self.buffer.is_empty() {
+ self.bottom_group += 1;
+ self.oldest_buffered_group += 1;
+ } else {
+ self.buffer.push(Vec::new().into_iter());
+ }
+ }
+ self.buffer.push(group.into_iter());
+ debug_assert!(self.top_group + 1 - self.bottom_group == self.buffer.len());
+ }
+
+ /// This is the immediate case, where we use no buffering
+ #[inline]
+ fn step_current(&mut self) -> Option<I::Item> {
+ debug_assert!(!self.done);
+ if let elt @ Some(..) = self.current_elt.take() {
+ return elt;
+ }
+ match self.next_element() {
+ None => None,
+ Some(elt) => {
+ let key = self.key.call_mut(&elt);
+ match self.current_key.take() {
+ None => {}
+ Some(old_key) => if old_key != key {
+ self.current_key = Some(key);
+ self.current_elt = Some(elt);
+ self.top_group += 1;
+ return None;
+ },
+ }
+ self.current_key = Some(key);
+ Some(elt)
+ }
+ }
+ }
+
+ /// Request the just started group's key.
+ ///
+ /// `client`: Index of group
+ ///
+ /// **Panics** if no group key is available.
+ fn group_key(&mut self, client: usize) -> K {
+ // This can only be called after we have just returned the first
+ // element of a group.
+ // Perform this by simply buffering one more element, grabbing the
+ // next key.
+ debug_assert!(!self.done);
+ debug_assert!(client == self.top_group);
+ debug_assert!(self.current_key.is_some());
+ debug_assert!(self.current_elt.is_none());
+ let old_key = self.current_key.take().unwrap();
+ if let Some(elt) = self.next_element() {
+ let key = self.key.call_mut(&elt);
+ if old_key != key {
+ self.top_group += 1;
+ }
+ self.current_key = Some(key);
+ self.current_elt = Some(elt);
+ }
+ old_key
+ }
+}
+
+impl<K, I, F> GroupInner<K, I, F>
+ where I: Iterator,
+{
+ /// Called when a group is dropped
+ fn drop_group(&mut self, client: usize) {
+ // It's only useful to track the maximal index
+ if self.dropped_group == !0 || client > self.dropped_group {
+ self.dropped_group = client;
+ }
+ }
+}
+
+/// `GroupBy` is the storage for the lazy grouping operation.
+///
+/// If the groups are consumed in their original order, or if each
+/// group is dropped without keeping it around, then `GroupBy` uses
+/// no allocations. It needs allocations only if several group iterators
+/// are alive at the same time.
+///
+/// This type implements [`IntoIterator`] (it is **not** an iterator
+/// itself), because the group iterators need to borrow from this
+/// value. It should be stored in a local variable or temporary and
+/// iterated.
+///
+/// See [`.group_by()`](crate::Itertools::group_by) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct GroupBy<K, I, F>
+ where I: Iterator,
+{
+ inner: RefCell<GroupInner<K, I, F>>,
+ // the group iterator's current index. Keep this in the main value
+ // so that simultaneous iterators all use the same state.
+ index: Cell<usize>,
+}
+
+/// Create a new `GroupBy`.
+pub fn new<K, J, F>(iter: J, f: F) -> GroupBy<K, J::IntoIter, F>
+ where J: IntoIterator,
+ F: FnMut(&J::Item) -> K,
+{
+ GroupBy {
+ inner: RefCell::new(GroupInner {
+ key: f,
+ iter: iter.into_iter(),
+ current_key: None,
+ current_elt: None,
+ done: false,
+ top_group: 0,
+ oldest_buffered_group: 0,
+ bottom_group: 0,
+ buffer: Vec::new(),
+ dropped_group: !0,
+ }),
+ index: Cell::new(0),
+ }
+}
+
+impl<K, I, F> GroupBy<K, I, F>
+ where I: Iterator,
+{
+ /// `client`: Index of group that requests next element
+ fn step(&self, client: usize) -> Option<I::Item>
+ where F: FnMut(&I::Item) -> K,
+ K: PartialEq,
+ {
+ self.inner.borrow_mut().step(client)
+ }
+
+ /// `client`: Index of group
+ fn drop_group(&self, client: usize) {
+ self.inner.borrow_mut().drop_group(client);
+ }
+}
+
+impl<'a, K, I, F> IntoIterator for &'a GroupBy<K, I, F>
+ where I: Iterator,
+ I::Item: 'a,
+ F: FnMut(&I::Item) -> K,
+ K: PartialEq
+{
+ type Item = (K, Group<'a, K, I, F>);
+ type IntoIter = Groups<'a, K, I, F>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ Groups { parent: self }
+ }
+}
+
+
+/// An iterator that yields the Group iterators.
+///
+/// Iterator element type is `(K, Group)`:
+/// the group's key `K` and the group's iterator.
+///
+/// See [`.group_by()`](crate::Itertools::group_by) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Groups<'a, K: 'a, I: 'a, F: 'a>
+ where I: Iterator,
+ I::Item: 'a
+{
+ parent: &'a GroupBy<K, I, F>,
+}
+
+impl<'a, K, I, F> Iterator for Groups<'a, K, I, F>
+ where I: Iterator,
+ I::Item: 'a,
+ F: FnMut(&I::Item) -> K,
+ K: PartialEq
+{
+ type Item = (K, Group<'a, K, I, F>);
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ let index = self.parent.index.get();
+ self.parent.index.set(index + 1);
+ let inner = &mut *self.parent.inner.borrow_mut();
+ inner.step(index).map(|elt| {
+ let key = inner.group_key(index);
+ (key, Group {
+ parent: self.parent,
+ index,
+ first: Some(elt),
+ })
+ })
+ }
+}
+
+/// An iterator for the elements in a single group.
+///
+/// Iterator element type is `I::Item`.
+pub struct Group<'a, K: 'a, I: 'a, F: 'a>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ parent: &'a GroupBy<K, I, F>,
+ index: usize,
+ first: Option<I::Item>,
+}
+
+impl<'a, K, I, F> Drop for Group<'a, K, I, F>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ fn drop(&mut self) {
+ self.parent.drop_group(self.index);
+ }
+}
+
+impl<'a, K, I, F> Iterator for Group<'a, K, I, F>
+ where I: Iterator,
+ I::Item: 'a,
+ F: FnMut(&I::Item) -> K,
+ K: PartialEq,
+{
+ type Item = I::Item;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ if let elt @ Some(..) = self.first.take() {
+ return elt;
+ }
+ self.parent.step(self.index)
+ }
+}
+
+///// IntoChunks /////
+
+/// Create a new `IntoChunks`.
+pub fn new_chunks<J>(iter: J, size: usize) -> IntoChunks<J::IntoIter>
+ where J: IntoIterator,
+{
+ IntoChunks {
+ inner: RefCell::new(GroupInner {
+ key: ChunkIndex::new(size),
+ iter: iter.into_iter(),
+ current_key: None,
+ current_elt: None,
+ done: false,
+ top_group: 0,
+ oldest_buffered_group: 0,
+ bottom_group: 0,
+ buffer: Vec::new(),
+ dropped_group: !0,
+ }),
+ index: Cell::new(0),
+ }
+}
+
+
+/// `ChunkLazy` is the storage for a lazy chunking operation.
+///
+/// `IntoChunks` behaves just like `GroupBy`: it is iterable, and
+/// it only buffers if several chunk iterators are alive at the same time.
+///
+/// This type implements [`IntoIterator`] (it is **not** an iterator
+/// itself), because the chunk iterators need to borrow from this
+/// value. It should be stored in a local variable or temporary and
+/// iterated.
+///
+/// Iterator element type is `Chunk`, each chunk's iterator.
+///
+/// See [`.chunks()`](crate::Itertools::chunks) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct IntoChunks<I>
+ where I: Iterator,
+{
+ inner: RefCell<GroupInner<usize, I, ChunkIndex>>,
+ // the chunk iterator's current index. Keep this in the main value
+ // so that simultaneous iterators all use the same state.
+ index: Cell<usize>,
+}
+
+impl<I> Clone for IntoChunks<I>
+ where I: Clone + Iterator,
+ I::Item: Clone,
+{
+ clone_fields!(inner, index);
+}
+
+
+impl<I> IntoChunks<I>
+ where I: Iterator,
+{
+ /// `client`: Index of chunk that requests next element
+ fn step(&self, client: usize) -> Option<I::Item> {
+ self.inner.borrow_mut().step(client)
+ }
+
+ /// `client`: Index of chunk
+ fn drop_group(&self, client: usize) {
+ self.inner.borrow_mut().drop_group(client);
+ }
+}
+
+impl<'a, I> IntoIterator for &'a IntoChunks<I>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ type Item = Chunk<'a, I>;
+ type IntoIter = Chunks<'a, I>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ Chunks {
+ parent: self,
+ }
+ }
+}
+
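+// Editor's note: illustrative addition, not part of the upstream source. The
+// `IntoIterator for &IntoChunks` impl above is what enables the by-reference
+// iteration pattern described in the `.chunks()` documentation:
+//
+//     use itertools::Itertools;
+//
+//     let chunks = (0..7).chunks(3);
+//     let sizes: Vec<usize> = (&chunks).into_iter().map(|c| c.count()).collect();
+//     assert_eq!(sizes, vec![3, 3, 1]);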
+
+/// An iterator that yields the Chunk iterators.
+///
+/// Iterator element type is `Chunk`.
+///
+/// See [`.chunks()`](crate::Itertools::chunks) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+#[derive(Clone)]
+pub struct Chunks<'a, I: 'a>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ parent: &'a IntoChunks<I>,
+}
+
+impl<'a, I> Iterator for Chunks<'a, I>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ type Item = Chunk<'a, I>;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ let index = self.parent.index.get();
+ self.parent.index.set(index + 1);
+ let inner = &mut *self.parent.inner.borrow_mut();
+ inner.step(index).map(|elt| {
+ Chunk {
+ parent: self.parent,
+ index,
+ first: Some(elt),
+ }
+ })
+ }
+}
+
+/// An iterator for the elements in a single chunk.
+///
+/// Iterator element type is `I::Item`.
+pub struct Chunk<'a, I: 'a>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ parent: &'a IntoChunks<I>,
+ index: usize,
+ first: Option<I::Item>,
+}
+
+impl<'a, I> Drop for Chunk<'a, I>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ fn drop(&mut self) {
+ self.parent.drop_group(self.index);
+ }
+}
+
+impl<'a, I> Iterator for Chunk<'a, I>
+ where I: Iterator,
+ I::Item: 'a,
+{
+ type Item = I::Item;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ if let elt @ Some(..) = self.first.take() {
+ return elt;
+ }
+ self.parent.step(self.index)
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/grouping_map.rs b/rust/hw/char/pl011/vendor/itertools/src/grouping_map.rs
new file mode 100644
index 0000000000..bb5b582c92
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/grouping_map.rs
@@ -0,0 +1,535 @@
+#![cfg(feature = "use_std")]
+
+use crate::MinMaxResult;
+use std::collections::HashMap;
+use std::cmp::Ordering;
+use std::hash::Hash;
+use std::iter::Iterator;
+use std::ops::{Add, Mul};
+
+/// A wrapper to allow for an easy [`into_grouping_map_by`](crate::Itertools::into_grouping_map_by)
+#[derive(Clone, Debug)]
+pub struct MapForGrouping<I, F>(I, F);
+
+impl<I, F> MapForGrouping<I, F> {
+ pub(crate) fn new(iter: I, key_mapper: F) -> Self {
+ Self(iter, key_mapper)
+ }
+}
+
+impl<K, V, I, F> Iterator for MapForGrouping<I, F>
+ where I: Iterator<Item = V>,
+ K: Hash + Eq,
+ F: FnMut(&V) -> K,
+{
+ type Item = (K, V);
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next().map(|val| ((self.1)(&val), val))
+ }
+}
+
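+// Editor's note: illustrative addition, not part of the upstream source.
+// `MapForGrouping` behaves like `iter.map(|v| (key_mapper(&v), v))`; it exists
+// as a named type so that `GroupingMapBy` below can be a plain type alias:
+//
+//     let mut it = MapForGrouping::new(vec![10, 21].into_iter(), |v| v % 2);
+//     assert_eq!(it.next(), Some((0, 10)));
+//     assert_eq!(it.next(), Some((1, 21)));
+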
+/// Creates a new `GroupingMap` from `iter`
+pub fn new<I, K, V>(iter: I) -> GroupingMap<I>
+ where I: Iterator<Item = (K, V)>,
+ K: Hash + Eq,
+{
+ GroupingMap { iter }
+}
+
+/// `GroupingMapBy` is an intermediate struct for efficient group-and-fold operations.
+///
+/// See [`GroupingMap`] for more information.
+pub type GroupingMapBy<I, F> = GroupingMap<MapForGrouping<I, F>>;
+
+/// `GroupingMap` is an intermediate struct for efficient group-and-fold operations.
+/// It groups elements by their key and at the same time fold each group
+/// using some aggregating operation.
+///
+/// No method on this struct performs temporary allocations.
+#[derive(Clone, Debug)]
+#[must_use = "GroupingMap is lazy and do nothing unless consumed"]
+pub struct GroupingMap<I> {
+ iter: I,
+}
+
+impl<I, K, V> GroupingMap<I>
+ where I: Iterator<Item = (K, V)>,
+ K: Hash + Eq,
+{
+ /// This is the generic way to perform any operation on a `GroupingMap`.
+ /// It's suggested to use this method only to implement custom operations
+ /// when the already provided ones are not enough.
+ ///
+ /// Groups elements from the `GroupingMap` source by key and applies `operation` to the elements
+ /// of each group sequentially, passing the previously accumulated value, a reference to the key
+ /// and the current element as arguments, and stores the results in a `HashMap`.
+ ///
+ /// The `operation` function is invoked on each element with the following parameters:
+ /// - the current value of the accumulator of the group if there is currently one;
+ /// - a reference to the key of the group this element belongs to;
+ /// - the element from the source being aggregated;
+ ///
+ /// If `operation` returns `Some(element)` then the accumulator is updated with `element`,
+ /// otherwise the previous accumulation is discarded.
+ ///
+ /// Return a `HashMap` associating the key of each group with the result of aggregation of
+ /// that group's elements. If the aggregation of the last element of a group discards the
+ /// accumulator then there won't be an entry associated to that group's key.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![2, 8, 5, 7, 9, 0, 4, 10];
+ /// let lookup = data.into_iter()
+ /// .into_grouping_map_by(|&n| n % 4)
+ /// .aggregate(|acc, _key, val| {
+ /// if val == 0 || val == 10 {
+ /// None
+ /// } else {
+ /// Some(acc.unwrap_or(0) + val)
+ /// }
+ /// });
+ ///
+ /// assert_eq!(lookup[&0], 4); // 0 resets the accumulator so only 4 is summed
+ /// assert_eq!(lookup[&1], 5 + 9);
+ /// assert_eq!(lookup.get(&2), None); // 10 resets the accumulator and nothing is summed afterward
+ /// assert_eq!(lookup[&3], 7);
+ /// assert_eq!(lookup.len(), 3); // The final keys are only 0, 1 and 2
+ /// ```
+ pub fn aggregate<FO, R>(self, mut operation: FO) -> HashMap<K, R>
+ where FO: FnMut(Option<R>, &K, V) -> Option<R>,
+ {
+ let mut destination_map = HashMap::new();
+
+ self.iter.for_each(|(key, val)| {
+ let acc = destination_map.remove(&key);
+ if let Some(op_res) = operation(acc, &key, val) {
+ destination_map.insert(key, op_res);
+ }
+ });
+
+ destination_map
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and applies `operation` to the elements
+ /// of each group sequentially, passing the previously accumulated value, a reference to the key
+ /// and the current element as arguments, and stores the results in a new map.
+ ///
+ /// `init` is the value from which the initial value of each group's accumulator is cloned.
+ ///
+ /// `operation` is a function that is invoked on each element with the following parameters:
+ /// - the current value of the accumulator of the group;
+ /// - a reference to the key of the group this element belongs to;
+ /// - the element from the source being accumulated.
+ ///
+ /// Return a `HashMap` associating the key of each group with the result of folding that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = (1..=7)
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .fold(0, |acc, _key, val| acc + val);
+ ///
+ /// assert_eq!(lookup[&0], 3 + 6);
+ /// assert_eq!(lookup[&1], 1 + 4 + 7);
+ /// assert_eq!(lookup[&2], 2 + 5);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn fold<FO, R>(self, init: R, mut operation: FO) -> HashMap<K, R>
+ where R: Clone,
+ FO: FnMut(R, &K, V) -> R,
+ {
+ self.aggregate(|acc, key, val| {
+ let acc = acc.unwrap_or_else(|| init.clone());
+ Some(operation(acc, key, val))
+ })
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and applies `operation` to the elements
+ /// of each group sequentially, passing the previously accumulated value, a reference to the key
+ /// and the current element as arguments, and stores the results in a new map.
+ ///
+ /// This is similar to [`fold`] but the initial value of the accumulator is the first element of the group.
+ ///
+ /// `operation` is a function that is invoked on each element with the following parameters:
+ /// - the current value of the accumulator of the group;
+ /// - a reference to the key of the group this element belongs to;
+ /// - the element from the source being accumulated.
+ ///
+ /// Return a `HashMap` associating the key of each group with the result of folding that group's elements.
+ ///
+ /// [`fold`]: GroupingMap::fold
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = (1..=7)
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .fold_first(|acc, _key, val| acc + val);
+ ///
+ /// assert_eq!(lookup[&0], 3 + 6);
+ /// assert_eq!(lookup[&1], 1 + 4 + 7);
+ /// assert_eq!(lookup[&2], 2 + 5);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn fold_first<FO>(self, mut operation: FO) -> HashMap<K, V>
+ where FO: FnMut(V, &K, V) -> V,
+ {
+ self.aggregate(|acc, key, val| {
+ Some(match acc {
+ Some(acc) => operation(acc, key, val),
+ None => val,
+ })
+ })
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and collects the elements of each group in
+ /// an instance of `C`. The iteration order is preserved when inserting elements.
+ ///
+ /// Return a `HashMap` associating the key of each group with the collection containing that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use std::collections::HashSet;
+ ///
+ /// let lookup = vec![0, 1, 2, 3, 4, 5, 6, 2, 3, 6].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .collect::<HashSet<_>>();
+ ///
+ /// assert_eq!(lookup[&0], vec![0, 3, 6].into_iter().collect::<HashSet<_>>());
+ /// assert_eq!(lookup[&1], vec![1, 4].into_iter().collect::<HashSet<_>>());
+ /// assert_eq!(lookup[&2], vec![2, 5].into_iter().collect::<HashSet<_>>());
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn collect<C>(self) -> HashMap<K, C>
+ where C: Default + Extend<V>,
+ {
+ let mut destination_map = HashMap::new();
+
+ self.iter.for_each(|(key, val)| {
+ destination_map.entry(key).or_insert_with(C::default).extend(Some(val));
+ });
+
+ destination_map
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the maximum of each group.
+ ///
+ /// If several elements are equally maximum, the last element is picked.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the maximum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .max();
+ ///
+ /// assert_eq!(lookup[&0], 12);
+ /// assert_eq!(lookup[&1], 7);
+ /// assert_eq!(lookup[&2], 8);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn max(self) -> HashMap<K, V>
+ where V: Ord,
+ {
+ self.max_by(|_, v1, v2| V::cmp(v1, v2))
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the maximum of each group
+ /// with respect to the specified comparison function.
+ ///
+ /// If several elements are equally maximum, the last element is picked.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the maximum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .max_by(|_key, x, y| y.cmp(x));
+ ///
+ /// assert_eq!(lookup[&0], 3);
+ /// assert_eq!(lookup[&1], 1);
+ /// assert_eq!(lookup[&2], 5);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn max_by<F>(self, mut compare: F) -> HashMap<K, V>
+ where F: FnMut(&K, &V, &V) -> Ordering,
+ {
+ self.fold_first(|acc, key, val| match compare(key, &acc, &val) {
+ Ordering::Less | Ordering::Equal => val,
+ Ordering::Greater => acc
+ })
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the element of each group
+ /// that gives the maximum from the specified function.
+ ///
+ /// If several elements are equally maximum, the last element is picked.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the maximum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .max_by_key(|_key, &val| val % 4);
+ ///
+ /// assert_eq!(lookup[&0], 3);
+ /// assert_eq!(lookup[&1], 7);
+ /// assert_eq!(lookup[&2], 5);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn max_by_key<F, CK>(self, mut f: F) -> HashMap<K, V>
+ where F: FnMut(&K, &V) -> CK,
+ CK: Ord,
+ {
+ self.max_by(|key, v1, v2| f(key, v1).cmp(&f(key, v2)))
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the minimum of each group.
+ ///
+ /// If several elements are equally minimum, the first element is picked.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the minimum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .min();
+ ///
+ /// assert_eq!(lookup[&0], 3);
+ /// assert_eq!(lookup[&1], 1);
+ /// assert_eq!(lookup[&2], 5);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn min(self) -> HashMap<K, V>
+ where V: Ord,
+ {
+ self.min_by(|_, v1, v2| V::cmp(v1, v2))
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the minimum of each group
+ /// with respect to the specified comparison function.
+ ///
+ /// If several elements are equally minimum, the first element is picked.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the minimum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .min_by(|_key, x, y| y.cmp(x));
+ ///
+ /// assert_eq!(lookup[&0], 12);
+ /// assert_eq!(lookup[&1], 7);
+ /// assert_eq!(lookup[&2], 8);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn min_by<F>(self, mut compare: F) -> HashMap<K, V>
+ where F: FnMut(&K, &V, &V) -> Ordering,
+ {
+ self.fold_first(|acc, key, val| match compare(key, &acc, &val) {
+ Ordering::Less | Ordering::Equal => acc,
+ Ordering::Greater => val
+ })
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the element of each group
+ /// that gives the minimum from the specified function.
+ ///
+ /// If several elements are equally minimum, the first element is picked.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the minimum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .min_by_key(|_key, &val| val % 4);
+ ///
+ /// assert_eq!(lookup[&0], 12);
+ /// assert_eq!(lookup[&1], 4);
+ /// assert_eq!(lookup[&2], 8);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn min_by_key<F, CK>(self, mut f: F) -> HashMap<K, V>
+ where F: FnMut(&K, &V) -> CK,
+ CK: Ord,
+ {
+ self.min_by(|key, v1, v2| f(key, v1).cmp(&f(key, v2)))
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the maximum and minimum of
+ /// each group.
+ ///
+ /// If several elements are equally maximum, the last element is picked.
+ /// If several elements are equally minimum, the first element is picked.
+ ///
+ /// See [.minmax()](crate::Itertools::minmax) for the non-grouping version.
+ ///
+ /// Differences from the non-grouping version:
+ /// - It never produces a `MinMaxResult::NoElements`
+ /// - It doesn't have any speedup
+ ///
+ /// Returns a `HashMap` associating the key of each group with the minimum and maximum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::MinMaxResult::{OneElement, MinMax};
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .minmax();
+ ///
+ /// assert_eq!(lookup[&0], MinMax(3, 12));
+ /// assert_eq!(lookup[&1], MinMax(1, 7));
+ /// assert_eq!(lookup[&2], OneElement(5));
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn minmax(self) -> HashMap<K, MinMaxResult<V>>
+ where V: Ord,
+ {
+ self.minmax_by(|_, v1, v2| V::cmp(v1, v2))
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the maximum and minimum of
+ /// each group with respect to the specified comparison function.
+ ///
+ /// If several elements are equally maximum, the last element is picked.
+ /// If several elements are equally minimum, the first element is picked.
+ ///
+ /// It has the same differences from the non-grouping version as `minmax`.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the minimum and maximum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::MinMaxResult::{OneElement, MinMax};
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .minmax_by(|_key, x, y| y.cmp(x));
+ ///
+ /// assert_eq!(lookup[&0], MinMax(12, 3));
+ /// assert_eq!(lookup[&1], MinMax(7, 1));
+ /// assert_eq!(lookup[&2], OneElement(5));
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn minmax_by<F>(self, mut compare: F) -> HashMap<K, MinMaxResult<V>>
+ where F: FnMut(&K, &V, &V) -> Ordering,
+ {
+ self.aggregate(|acc, key, val| {
+ Some(match acc {
+ Some(MinMaxResult::OneElement(e)) => {
+ if compare(key, &val, &e) == Ordering::Less {
+ MinMaxResult::MinMax(val, e)
+ } else {
+ MinMaxResult::MinMax(e, val)
+ }
+ }
+ Some(MinMaxResult::MinMax(min, max)) => {
+ if compare(key, &val, &min) == Ordering::Less {
+ MinMaxResult::MinMax(val, max)
+ } else if compare(key, &val, &max) != Ordering::Less {
+ MinMaxResult::MinMax(min, val)
+ } else {
+ MinMaxResult::MinMax(min, max)
+ }
+ }
+ None => MinMaxResult::OneElement(val),
+ Some(MinMaxResult::NoElements) => unreachable!(),
+ })
+ })
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and finds the elements of each group
+ /// that give the minimum and maximum from the specified function.
+ ///
+ /// If several elements are equally maximum, the last element is picked.
+ /// If several elements are equally minimum, the first element is picked.
+ ///
+ /// It has the same differences from the non-grouping version as `minmax`.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the minimum and maximum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::MinMaxResult::{OneElement, MinMax};
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .minmax_by_key(|_key, &val| val % 4);
+ ///
+ /// assert_eq!(lookup[&0], MinMax(12, 3));
+ /// assert_eq!(lookup[&1], MinMax(4, 7));
+ /// assert_eq!(lookup[&2], OneElement(5));
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn minmax_by_key<F, CK>(self, mut f: F) -> HashMap<K, MinMaxResult<V>>
+ where F: FnMut(&K, &V) -> CK,
+ CK: Ord,
+ {
+ self.minmax_by(|key, v1, v2| f(key, v1).cmp(&f(key, v2)))
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and sums them.
+ ///
+ /// This is just a shorthand for `self.fold_first(|acc, _, val| acc + val)`.
+ /// It is more limited than `Iterator::sum` since it doesn't use the `Sum` trait.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the sum of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .sum();
+ ///
+ /// assert_eq!(lookup[&0], 3 + 9 + 12);
+ /// assert_eq!(lookup[&1], 1 + 4 + 7);
+ /// assert_eq!(lookup[&2], 5 + 8);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn sum(self) -> HashMap<K, V>
+ where V: Add<V, Output = V>
+ {
+ self.fold_first(|acc, _, val| acc + val)
+ }
+
+ /// Groups elements from the `GroupingMap` source by key and multiplies them.
+ ///
+ /// This is just a shorthand for `self.fold_first(|acc, _, val| acc * val)`.
+ /// It is more limited than `Iterator::product` since it doesn't use the `Product` trait.
+ ///
+ /// Returns a `HashMap` associating the key of each group with the product of that group's elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let lookup = vec![1, 3, 4, 5, 7, 8, 9, 12].into_iter()
+ /// .into_grouping_map_by(|&n| n % 3)
+ /// .product();
+ ///
+ /// assert_eq!(lookup[&0], 3 * 9 * 12);
+ /// assert_eq!(lookup[&1], 1 * 4 * 7);
+ /// assert_eq!(lookup[&2], 5 * 8);
+ /// assert_eq!(lookup.len(), 3);
+ /// ```
+ pub fn product(self) -> HashMap<K, V>
+ where V: Mul<V, Output = V>,
+ {
+ self.fold_first(|acc, _, val| acc * val)
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/impl_macros.rs b/rust/hw/char/pl011/vendor/itertools/src/impl_macros.rs
new file mode 100644
index 0000000000..a029843b05
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/impl_macros.rs
@@ -0,0 +1,29 @@
+//!
+//! Implementation's internal macros
+
+macro_rules! debug_fmt_fields {
+ ($tyname:ident, $($($field:tt/*TODO ideally we would accept ident or tuple element here*/).+),*) => {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
+ f.debug_struct(stringify!($tyname))
+ $(
+ .field(stringify!($($field).+), &self.$($field).+)
+ )*
+ .finish()
+ }
+ }
+}
+
+macro_rules! clone_fields {
+ ($($field:ident),*) => {
+ #[inline] // TODO is this sensible?
+ fn clone(&self) -> Self {
+ Self {
+ $($field: self.$field.clone(),)*
+ }
+ }
+ }
+}
+
+macro_rules! ignore_ident{
+ ($id:ident, $($t:tt)*) => {$($t)*};
+}
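+
+// Editor's note: illustrative addition, not part of the upstream source.
+// Roughly what the macros above expand to: inside an `impl fmt::Debug for Foo`,
+// `debug_fmt_fields!(Foo, a, b)` produces
+//
+//     fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
+//         f.debug_struct("Foo").field("a", &self.a).field("b", &self.b).finish()
+//     }
+//
+// and, inside an `impl Clone`, `clone_fields!(a, b)` produces the field-by-field
+//
+//     fn clone(&self) -> Self { Self { a: self.a.clone(), b: self.b.clone() } }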
diff --git a/rust/hw/char/pl011/vendor/itertools/src/intersperse.rs b/rust/hw/char/pl011/vendor/itertools/src/intersperse.rs
new file mode 100644
index 0000000000..10a3a5389c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/intersperse.rs
@@ -0,0 +1,118 @@
+use std::iter::{Fuse, FusedIterator};
+use super::size_hint;
+
+pub trait IntersperseElement<Item> {
+ fn generate(&mut self) -> Item;
+}
+
+#[derive(Debug, Clone)]
+pub struct IntersperseElementSimple<Item>(Item);
+
+impl<Item: Clone> IntersperseElement<Item> for IntersperseElementSimple<Item> {
+ fn generate(&mut self) -> Item {
+ self.0.clone()
+ }
+}
+
+/// An iterator adaptor to insert a particular value
+/// between each element of the adapted iterator.
+///
+/// Iterator element type is `I::Item`
+///
+/// This iterator is *fused*.
+///
+/// See [`.intersperse()`](crate::Itertools::intersperse) for more information.
+pub type Intersperse<I> = IntersperseWith<I, IntersperseElementSimple<<I as Iterator>::Item>>;
+
+/// Create a new Intersperse iterator
+pub fn intersperse<I>(iter: I, elt: I::Item) -> Intersperse<I>
+ where I: Iterator,
+{
+ intersperse_with(iter, IntersperseElementSimple(elt))
+}
+
+impl<Item, F: FnMut()->Item> IntersperseElement<Item> for F {
+ fn generate(&mut self) -> Item {
+ self()
+ }
+}
+
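+// Editor's note: illustrative addition, not part of the upstream source. The
+// blanket `impl IntersperseElement<Item> for FnMut() -> Item` above makes the
+// two adaptors interchangeable whenever the separator is `Clone`:
+//
+//     use itertools::Itertools;
+//
+//     let a: Vec<_> = (0..3).intersperse(8).collect();
+//     let b: Vec<_> = (0..3).intersperse_with(|| 8).collect();
+//     assert_eq!(a, b); // both yield [0, 8, 1, 8, 2]
+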
+/// An iterator adaptor to insert a particular value created by a function
+/// between each element of the adapted iterator.
+///
+/// Iterator element type is `I::Item`
+///
+/// This iterator is *fused*.
+///
+/// See [`.intersperse_with()`](crate::Itertools::intersperse_with) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+#[derive(Clone, Debug)]
+pub struct IntersperseWith<I, ElemF>
+ where I: Iterator,
+{
+ element: ElemF,
+ iter: Fuse<I>,
+ peek: Option<I::Item>,
+}
+
+/// Create a new `IntersperseWith` iterator
+pub fn intersperse_with<I, ElemF>(iter: I, elt: ElemF) -> IntersperseWith<I, ElemF>
+ where I: Iterator,
+{
+ let mut iter = iter.fuse();
+ IntersperseWith {
+ peek: iter.next(),
+ iter,
+ element: elt,
+ }
+}
+
+impl<I, ElemF> Iterator for IntersperseWith<I, ElemF>
+ where I: Iterator,
+ ElemF: IntersperseElement<I::Item>
+{
+ type Item = I::Item;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.peek.is_some() {
+ self.peek.take()
+ } else {
+ self.peek = self.iter.next();
+ if self.peek.is_some() {
+ Some(self.element.generate())
+ } else {
+ None
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // 2 * SH + { 1 or 0 }
+ let has_peek = self.peek.is_some() as usize;
+ let sh = self.iter.size_hint();
+ size_hint::add_scalar(size_hint::add(sh, sh), has_peek)
+ }
+
+ fn fold<B, F>(mut self, init: B, mut f: F) -> B where
+ Self: Sized, F: FnMut(B, Self::Item) -> B,
+ {
+ let mut accum = init;
+
+ if let Some(x) = self.peek.take() {
+ accum = f(accum, x);
+ }
+
+ let element = &mut self.element;
+
+ self.iter.fold(accum,
+ |accum, x| {
+ let accum = f(accum, element.generate());
+ f(accum, x)
+ })
+ }
+}
+
+impl<I, ElemF> FusedIterator for IntersperseWith<I, ElemF>
+ where I: Iterator,
+ ElemF: IntersperseElement<I::Item>
+{}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/k_smallest.rs b/rust/hw/char/pl011/vendor/itertools/src/k_smallest.rs
new file mode 100644
index 0000000000..acaea5941c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/k_smallest.rs
@@ -0,0 +1,20 @@
+use alloc::collections::BinaryHeap;
+use core::cmp::Ord;
+
+pub(crate) fn k_smallest<T: Ord, I: Iterator<Item = T>>(mut iter: I, k: usize) -> BinaryHeap<T> {
+ if k == 0 { return BinaryHeap::new(); }
+
+ let mut heap = iter.by_ref().take(k).collect::<BinaryHeap<_>>();
+
+ iter.for_each(|i| {
+ debug_assert_eq!(heap.len(), k);
+ // Equivalent to heap.push(min(i, heap.pop())) but more efficient.
+ // This should be done with a single `.peek_mut().unwrap()` but
+ // `PeekMut` sifts-down unconditionally on Rust 1.46.0 and prior.
+ if *heap.peek().unwrap() > i {
+ *heap.peek_mut().unwrap() = i;
+ }
+ });
+
+ heap
+}
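+
+// Editor's note: illustrative addition, not part of the upstream source. The
+// returned `BinaryHeap` is a max-heap holding the `k` smallest items seen, so a
+// caller wanting ascending order would typically finish with `into_sorted_vec`:
+//
+//     let heap = k_smallest(vec![5, 1, 4, 2, 3].into_iter(), 3);
+//     assert_eq!(heap.into_sorted_vec(), vec![1, 2, 3]);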
diff --git a/rust/hw/char/pl011/vendor/itertools/src/kmerge_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/kmerge_impl.rs
new file mode 100644
index 0000000000..509d5fc6a3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/kmerge_impl.rs
@@ -0,0 +1,227 @@
+use crate::size_hint;
+use crate::Itertools;
+
+use alloc::vec::Vec;
+use std::iter::FusedIterator;
+use std::mem::replace;
+use std::fmt;
+
+/// Head element and Tail iterator pair
+///
+/// `PartialEq`, `Eq`, `PartialOrd` and `Ord` are implemented by comparing sequences based on
+/// first items (which are guaranteed to exist).
+///
+/// The meanings of `PartialOrd` and `Ord` are reversed so as to turn the heap used in
+/// `KMerge` into a min-heap.
+#[derive(Debug)]
+struct HeadTail<I>
+ where I: Iterator
+{
+ head: I::Item,
+ tail: I,
+}
+
+impl<I> HeadTail<I>
+ where I: Iterator
+{
+ /// Constructs a `HeadTail` from an `Iterator`. Returns `None` if the `Iterator` is empty.
+ fn new(mut it: I) -> Option<HeadTail<I>> {
+ let head = it.next();
+ head.map(|h| {
+ HeadTail {
+ head: h,
+ tail: it,
+ }
+ })
+ }
+
+ /// Get the next element and update `head`, returning the old head in `Some`.
+ ///
+ /// Returns `None` when the tail is exhausted (only `head` then remains).
+ fn next(&mut self) -> Option<I::Item> {
+ if let Some(next) = self.tail.next() {
+ Some(replace(&mut self.head, next))
+ } else {
+ None
+ }
+ }
+
+ /// Hints at the size of the sequence, same as the `Iterator` method.
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::add_scalar(self.tail.size_hint(), 1)
+ }
+}
+
+impl<I> Clone for HeadTail<I>
+ where I: Iterator + Clone,
+ I::Item: Clone
+{
+ clone_fields!(head, tail);
+}
+
+/// Make `data` a heap (min-heap w.r.t the sorting).
+fn heapify<T, S>(data: &mut [T], mut less_than: S)
+ where S: FnMut(&T, &T) -> bool
+{
+ for i in (0..data.len() / 2).rev() {
+ sift_down(data, i, &mut less_than);
+ }
+}
+
+/// Sift down element at `index` (`heap` is a min-heap wrt the ordering)
+fn sift_down<T, S>(heap: &mut [T], index: usize, mut less_than: S)
+ where S: FnMut(&T, &T) -> bool
+{
+ debug_assert!(index <= heap.len());
+ let mut pos = index;
+ let mut child = 2 * pos + 1;
+ // Require the right child to be present
+ // This allows finding the index of the smallest child without a branch
+ // that wouldn't be predicted if present
+ while child + 1 < heap.len() {
+ // pick the smaller of the two children
+ // use arithmetic to avoid an unpredictable branch
+ child += less_than(&heap[child+1], &heap[child]) as usize;
+
+ // sift down is done if we are already in order
+ if !less_than(&heap[child], &heap[pos]) {
+ return;
+ }
+ heap.swap(pos, child);
+ pos = child;
+ child = 2 * pos + 1;
+ }
+ // Check if the last (left) child was an only child
+ // if it is then it has to be compared with the parent
+ if child + 1 == heap.len() && less_than(&heap[child], &heap[pos]) {
+ heap.swap(pos, child);
+ }
+}
+
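+// Editor's note: illustrative addition, not part of the upstream source. A
+// short trace of the helpers above with `less_than = |a, b| a < b`:
+// `heapify(&mut [3, 1, 2], ..)` sifts index 0 down, swapping 3 with its smaller
+// child 1 to give [1, 3, 2]. Every parent is then <= its children, which is the
+// min-heap invariant that `KMergeBy::next` restores by calling
+// `sift_down(.., 0, ..)` after replacing the root.
+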
+/// An iterator adaptor that merges an arbitrary number of base iterators in ascending order.
+/// If all base iterators are sorted (ascending), the result is sorted.
+///
+/// Iterator element type is `I::Item`.
+///
+/// See [`.kmerge()`](crate::Itertools::kmerge) for more information.
+pub type KMerge<I> = KMergeBy<I, KMergeByLt>;
+
+pub trait KMergePredicate<T> {
+ fn kmerge_pred(&mut self, a: &T, b: &T) -> bool;
+}
+
+#[derive(Clone, Debug)]
+pub struct KMergeByLt;
+
+impl<T: PartialOrd> KMergePredicate<T> for KMergeByLt {
+ fn kmerge_pred(&mut self, a: &T, b: &T) -> bool {
+ a < b
+ }
+}
+
+impl<T, F: FnMut(&T, &T)->bool> KMergePredicate<T> for F {
+ fn kmerge_pred(&mut self, a: &T, b: &T) -> bool {
+ self(a, b)
+ }
+}
+
+/// Create an iterator that merges elements of the contained iterators using
+/// the ordering function.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::kmerge`].
+///
+/// ```
+/// use itertools::kmerge;
+///
+/// for elt in kmerge(vec![vec![0, 2, 4], vec![1, 3, 5], vec![6, 7]]) {
+/// /* loop body */
+/// }
+/// ```
+pub fn kmerge<I>(iterable: I) -> KMerge<<I::Item as IntoIterator>::IntoIter>
+ where I: IntoIterator,
+ I::Item: IntoIterator,
+ <<I as IntoIterator>::Item as IntoIterator>::Item: PartialOrd
+{
+ kmerge_by(iterable, KMergeByLt)
+}
+
+/// An iterator adaptor that merges an arbitrary number of base iterators
+/// according to an ordering function.
+///
+/// Iterator element type is `I::Item`.
+///
+/// See [`.kmerge_by()`](crate::Itertools::kmerge_by) for more
+/// information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct KMergeBy<I, F>
+ where I: Iterator,
+{
+ heap: Vec<HeadTail<I>>,
+ less_than: F,
+}
+
+impl<I, F> fmt::Debug for KMergeBy<I, F>
+ where I: Iterator + fmt::Debug,
+ I::Item: fmt::Debug,
+{
+ debug_fmt_fields!(KMergeBy, heap);
+}
+
+/// Create an iterator that merges elements of the contained iterators.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::kmerge_by`].
+pub fn kmerge_by<I, F>(iterable: I, mut less_than: F)
+ -> KMergeBy<<I::Item as IntoIterator>::IntoIter, F>
+ where I: IntoIterator,
+ I::Item: IntoIterator,
+ F: KMergePredicate<<<I as IntoIterator>::Item as IntoIterator>::Item>,
+{
+ let iter = iterable.into_iter();
+ let (lower, _) = iter.size_hint();
+ let mut heap: Vec<_> = Vec::with_capacity(lower);
+ heap.extend(iter.filter_map(|it| HeadTail::new(it.into_iter())));
+ heapify(&mut heap, |a, b| less_than.kmerge_pred(&a.head, &b.head));
+ KMergeBy { heap, less_than }
+}
+
+impl<I, F> Clone for KMergeBy<I, F>
+ where I: Iterator + Clone,
+ I::Item: Clone,
+ F: Clone,
+{
+ clone_fields!(heap, less_than);
+}
+
+impl<I, F> Iterator for KMergeBy<I, F>
+ where I: Iterator,
+ F: KMergePredicate<I::Item>
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.heap.is_empty() {
+ return None;
+ }
+ let result = if let Some(next) = self.heap[0].next() {
+ next
+ } else {
+ self.heap.swap_remove(0).head
+ };
+ let less_than = &mut self.less_than;
+ sift_down(&mut self.heap, 0, |a, b| less_than.kmerge_pred(&a.head, &b.head));
+ Some(result)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ #[allow(deprecated)] //TODO: once msrv hits 1.51. replace `fold1` with `reduce`
+ self.heap.iter()
+ .map(|i| i.size_hint())
+ .fold1(size_hint::add)
+ .unwrap_or((0, Some(0)))
+ }
+}
+
+impl<I, F> FusedIterator for KMergeBy<I, F>
+ where I: Iterator,
+ F: KMergePredicate<I::Item>
+{}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/lazy_buffer.rs b/rust/hw/char/pl011/vendor/itertools/src/lazy_buffer.rs
new file mode 100644
index 0000000000..ca24062aab
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/lazy_buffer.rs
@@ -0,0 +1,63 @@
+use std::ops::Index;
+use alloc::vec::Vec;
+
+#[derive(Debug, Clone)]
+pub struct LazyBuffer<I: Iterator> {
+ pub it: I,
+ done: bool,
+ buffer: Vec<I::Item>,
+}
+
+impl<I> LazyBuffer<I>
+where
+ I: Iterator,
+{
+ pub fn new(it: I) -> LazyBuffer<I> {
+ LazyBuffer {
+ it,
+ done: false,
+ buffer: Vec::new(),
+ }
+ }
+
+ pub fn len(&self) -> usize {
+ self.buffer.len()
+ }
+
+ pub fn get_next(&mut self) -> bool {
+ if self.done {
+ return false;
+ }
+ if let Some(x) = self.it.next() {
+ self.buffer.push(x);
+ true
+ } else {
+ self.done = true;
+ false
+ }
+ }
+
+ pub fn prefill(&mut self, len: usize) {
+ let buffer_len = self.buffer.len();
+
+ if !self.done && len > buffer_len {
+ let delta = len - buffer_len;
+
+ self.buffer.extend(self.it.by_ref().take(delta));
+ self.done = self.buffer.len() < len;
+ }
+ }
+}
+
+impl<I, J> Index<J> for LazyBuffer<I>
+where
+ I: Iterator,
+ I::Item: Sized,
+ Vec<I::Item>: Index<J>
+{
+ type Output = <Vec<I::Item> as Index<J>>::Output;
+
+ fn index(&self, index: J) -> &Self::Output {
+ self.buffer.index(index)
+ }
+}
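+
+// Editor's note: illustrative addition, not part of the upstream source.
+// `LazyBuffer` pulls items from the wrapped iterator on demand and keeps them
+// indexable; the combinations/permutations adaptors in this crate build on it.
+// A small usage sketch:
+//
+//     let mut buf = LazyBuffer::new(1..=3);
+//     buf.prefill(2);              // buffer now holds [1, 2]
+//     assert_eq!(buf.len(), 2);
+//     assert_eq!(buf[0], 1);
+//     assert!(buf.get_next());     // pulls 3 into the buffer
+//     assert!(!buf.get_next());    // the inner iterator is exhausted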
diff --git a/rust/hw/char/pl011/vendor/itertools/src/lib.rs b/rust/hw/char/pl011/vendor/itertools/src/lib.rs
new file mode 100644
index 0000000000..c23a65db5c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/lib.rs
@@ -0,0 +1,3967 @@
+#![warn(missing_docs)]
+#![crate_name="itertools"]
+#![cfg_attr(not(feature = "use_std"), no_std)]
+
+//! Extra iterator adaptors, functions and macros.
+//!
+//! To extend [`Iterator`] with methods in this crate, import
+//! the [`Itertools`] trait:
+//!
+//! ```
+//! use itertools::Itertools;
+//! ```
+//!
+//! Now, new methods like [`interleave`](Itertools::interleave)
+//! are available on all iterators:
+//!
+//! ```
+//! use itertools::Itertools;
+//!
+//! let it = (1..3).interleave(vec![-1, -2]);
+//! itertools::assert_equal(it, vec![1, -1, 2, -2]);
+//! ```
+//!
+//! Most iterator methods are also provided as functions (with the benefit
+//! that they convert parameters using [`IntoIterator`]):
+//!
+//! ```
+//! use itertools::interleave;
+//!
+//! for elt in interleave(&[1, 2, 3], &[2, 3, 4]) {
+//! /* loop body */
+//! }
+//! ```
+//!
+//! ## Crate Features
+//!
+//! - `use_std`
+//! - Enabled by default.
+//! - Disable to compile itertools using `#![no_std]`. This disables
+//! any items that depend on collections (like `group_by`, `unique`,
+//! `kmerge`, `join` and many more).
+//!
+//! ## Rust Version
+//!
+//! This version of itertools requires Rust 1.32 or later.
+#![doc(html_root_url="https://docs.rs/itertools/0.8/")]
+
+#[cfg(not(feature = "use_std"))]
+extern crate core as std;
+
+#[cfg(feature = "use_alloc")]
+extern crate alloc;
+
+#[cfg(feature = "use_alloc")]
+use alloc::{
+ string::String,
+ vec::Vec,
+};
+
+pub use either::Either;
+
+use core::borrow::Borrow;
+#[cfg(feature = "use_std")]
+use std::collections::HashMap;
+use std::iter::{IntoIterator, once};
+use std::cmp::Ordering;
+use std::fmt;
+#[cfg(feature = "use_std")]
+use std::collections::HashSet;
+#[cfg(feature = "use_std")]
+use std::hash::Hash;
+#[cfg(feature = "use_alloc")]
+use std::fmt::Write;
+#[cfg(feature = "use_alloc")]
+type VecIntoIter<T> = alloc::vec::IntoIter<T>;
+#[cfg(feature = "use_alloc")]
+use std::iter::FromIterator;
+
+#[macro_use]
+mod impl_macros;
+
+// for compatibility with no std and macros
+#[doc(hidden)]
+pub use std::iter as __std_iter;
+
+/// The concrete iterator types.
+pub mod structs {
+ pub use crate::adaptors::{
+ Dedup,
+ DedupBy,
+ DedupWithCount,
+ DedupByWithCount,
+ Interleave,
+ InterleaveShortest,
+ FilterMapOk,
+ FilterOk,
+ Product,
+ PutBack,
+ Batching,
+ MapInto,
+ MapOk,
+ Merge,
+ MergeBy,
+ TakeWhileRef,
+ WhileSome,
+ Coalesce,
+ TupleCombinations,
+ Positions,
+ Update,
+ };
+ #[allow(deprecated)]
+ pub use crate::adaptors::{MapResults, Step};
+ #[cfg(feature = "use_alloc")]
+ pub use crate::adaptors::MultiProduct;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::combinations::Combinations;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::combinations_with_replacement::CombinationsWithReplacement;
+ pub use crate::cons_tuples_impl::ConsTuples;
+ pub use crate::exactly_one_err::ExactlyOneError;
+ pub use crate::format::{Format, FormatWith};
+ pub use crate::flatten_ok::FlattenOk;
+ #[cfg(feature = "use_std")]
+ pub use crate::grouping_map::{GroupingMap, GroupingMapBy};
+ #[cfg(feature = "use_alloc")]
+ pub use crate::groupbylazy::{IntoChunks, Chunk, Chunks, GroupBy, Group, Groups};
+ pub use crate::intersperse::{Intersperse, IntersperseWith};
+ #[cfg(feature = "use_alloc")]
+ pub use crate::kmerge_impl::{KMerge, KMergeBy};
+ pub use crate::merge_join::MergeJoinBy;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::multipeek_impl::MultiPeek;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::peek_nth::PeekNth;
+ pub use crate::pad_tail::PadUsing;
+ pub use crate::peeking_take_while::PeekingTakeWhile;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::permutations::Permutations;
+ pub use crate::process_results_impl::ProcessResults;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::powerset::Powerset;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::put_back_n_impl::PutBackN;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::rciter_impl::RcIter;
+ pub use crate::repeatn::RepeatN;
+ #[allow(deprecated)]
+ pub use crate::sources::{RepeatCall, Unfold, Iterate};
+ pub use crate::take_while_inclusive::TakeWhileInclusive;
+ #[cfg(feature = "use_alloc")]
+ pub use crate::tee::Tee;
+ pub use crate::tuple_impl::{TupleBuffer, TupleWindows, CircularTupleWindows, Tuples};
+ #[cfg(feature = "use_std")]
+ pub use crate::duplicates_impl::{Duplicates, DuplicatesBy};
+ #[cfg(feature = "use_std")]
+ pub use crate::unique_impl::{Unique, UniqueBy};
+ pub use crate::with_position::WithPosition;
+ pub use crate::zip_eq_impl::ZipEq;
+ pub use crate::zip_longest::ZipLongest;
+ pub use crate::ziptuple::Zip;
+}
+
+/// Traits helpful for using certain `Itertools` methods in generic contexts.
+pub mod traits {
+ pub use crate::tuple_impl::HomogeneousTuple;
+}
+
+#[allow(deprecated)]
+pub use crate::structs::*;
+pub use crate::concat_impl::concat;
+pub use crate::cons_tuples_impl::cons_tuples;
+pub use crate::diff::diff_with;
+pub use crate::diff::Diff;
+#[cfg(feature = "use_alloc")]
+pub use crate::kmerge_impl::{kmerge_by};
+pub use crate::minmax::MinMaxResult;
+pub use crate::peeking_take_while::PeekingNext;
+pub use crate::process_results_impl::process_results;
+pub use crate::repeatn::repeat_n;
+#[allow(deprecated)]
+pub use crate::sources::{repeat_call, unfold, iterate};
+pub use crate::with_position::Position;
+pub use crate::unziptuple::{multiunzip, MultiUnzip};
+pub use crate::ziptuple::multizip;
+mod adaptors;
+mod either_or_both;
+pub use crate::either_or_both::EitherOrBoth;
+#[doc(hidden)]
+pub mod free;
+#[doc(inline)]
+pub use crate::free::*;
+mod concat_impl;
+mod cons_tuples_impl;
+#[cfg(feature = "use_alloc")]
+mod combinations;
+#[cfg(feature = "use_alloc")]
+mod combinations_with_replacement;
+mod exactly_one_err;
+mod diff;
+mod flatten_ok;
+#[cfg(feature = "use_std")]
+mod extrema_set;
+mod format;
+#[cfg(feature = "use_std")]
+mod grouping_map;
+#[cfg(feature = "use_alloc")]
+mod group_map;
+#[cfg(feature = "use_alloc")]
+mod groupbylazy;
+mod intersperse;
+#[cfg(feature = "use_alloc")]
+mod k_smallest;
+#[cfg(feature = "use_alloc")]
+mod kmerge_impl;
+#[cfg(feature = "use_alloc")]
+mod lazy_buffer;
+mod merge_join;
+mod minmax;
+#[cfg(feature = "use_alloc")]
+mod multipeek_impl;
+mod pad_tail;
+#[cfg(feature = "use_alloc")]
+mod peek_nth;
+mod peeking_take_while;
+#[cfg(feature = "use_alloc")]
+mod permutations;
+#[cfg(feature = "use_alloc")]
+mod powerset;
+mod process_results_impl;
+#[cfg(feature = "use_alloc")]
+mod put_back_n_impl;
+#[cfg(feature = "use_alloc")]
+mod rciter_impl;
+mod repeatn;
+mod size_hint;
+mod sources;
+mod take_while_inclusive;
+#[cfg(feature = "use_alloc")]
+mod tee;
+mod tuple_impl;
+#[cfg(feature = "use_std")]
+mod duplicates_impl;
+#[cfg(feature = "use_std")]
+mod unique_impl;
+mod unziptuple;
+mod with_position;
+mod zip_eq_impl;
+mod zip_longest;
+mod ziptuple;
+
+#[macro_export]
+/// Create an iterator over the “cartesian product” of iterators.
+///
+/// Iterator element type is like `(A, B, ..., E)` if formed
+/// from iterators `(I, J, ..., M)` with element types `I::Item = A`, `J::Item = B`, etc.
+///
+/// ```
+/// # use itertools::iproduct;
+/// #
+/// # fn main() {
+/// // Iterate over the coordinates of a 4 x 4 x 4 grid
+/// // from (0, 0, 0), (0, 0, 1), .., (0, 1, 0), (0, 1, 1), .. etc until (3, 3, 3)
+/// for (i, j, k) in iproduct!(0..4, 0..4, 0..4) {
+/// // ..
+/// }
+/// # }
+/// ```
+macro_rules! iproduct {
+ (@flatten $I:expr,) => (
+ $I
+ );
+ (@flatten $I:expr, $J:expr, $($K:expr,)*) => (
+ $crate::iproduct!(@flatten $crate::cons_tuples($crate::iproduct!($I, $J)), $($K,)*)
+ );
+ ($I:expr) => (
+ $crate::__std_iter::IntoIterator::into_iter($I)
+ );
+ ($I:expr, $J:expr) => (
+ $crate::Itertools::cartesian_product($crate::iproduct!($I), $crate::iproduct!($J))
+ );
+ ($I:expr, $J:expr, $($K:expr),+) => (
+ $crate::iproduct!(@flatten $crate::iproduct!($I, $J), $($K,)+)
+ );
+}
+
+#[macro_export]
+/// Create an iterator running multiple iterators in lockstep.
+///
+/// The `izip!` iterator yields elements until any subiterator
+/// returns `None`.
+///
+/// This is a version of the standard ``.zip()`` that's supporting more than
+/// two iterators. The iterator element type is a tuple with one element
+/// from each of the input iterators. Just like ``.zip()``, the iteration stops
+/// when the shortest of the inputs reaches its end.
+///
+/// **Note:** The result of this macro is in the general case an iterator
+/// composed of repeated `.zip()` and a `.map()`; it has an anonymous type.
+/// The special cases of one and two arguments produce the equivalent of
+/// `$a.into_iter()` and `$a.into_iter().zip($b)` respectively.
+///
+/// Prefer this macro `izip!()` over [`multizip`] for the performance benefits
+/// of using the standard library `.zip()`.
+///
+/// ```
+/// # use itertools::izip;
+/// #
+/// # fn main() {
+///
+/// // iterate over three sequences side-by-side
+/// let mut results = [0, 0, 0, 0];
+/// let inputs = [3, 7, 9, 6];
+///
+/// for (r, index, input) in izip!(&mut results, 0..10, &inputs) {
+/// *r = index * 10 + input;
+/// }
+///
+/// assert_eq!(results, [0 + 3, 10 + 7, 29, 36]);
+/// # }
+/// ```
+macro_rules! izip {
+ // @closure creates a tuple-flattening closure for .map() call. usage:
+ // @closure partial_pattern => partial_tuple , rest , of , iterators
+ // eg. izip!( @closure ((a, b), c) => (a, b, c) , dd , ee )
+ ( @closure $p:pat => $tup:expr ) => {
+ |$p| $tup
+ };
+
+ // The "b" identifier is a different identifier on each recursion level thanks to hygiene.
+ ( @closure $p:pat => ( $($tup:tt)* ) , $_iter:expr $( , $tail:expr )* ) => {
+ $crate::izip!(@closure ($p, b) => ( $($tup)*, b ) $( , $tail )*)
+ };
+
+ // unary
+ ($first:expr $(,)*) => {
+ $crate::__std_iter::IntoIterator::into_iter($first)
+ };
+
+ // binary
+ ($first:expr, $second:expr $(,)*) => {
+ $crate::izip!($first)
+ .zip($second)
+ };
+
+ // n-ary where n > 2
+ ( $first:expr $( , $rest:expr )* $(,)* ) => {
+ $crate::izip!($first)
+ $(
+ .zip($rest)
+ )*
+ .map(
+ $crate::izip!(@closure a => (a) $( , $rest )*)
+ )
+ };
+}
+
+#[macro_export]
+/// [Chain][`chain`] zero or more iterators together into one sequence.
+///
+/// The comma-separated arguments must implement [`IntoIterator`].
+/// The final argument may be followed by a trailing comma.
+///
+/// [`chain`]: Iterator::chain
+///
+/// # Examples
+///
+/// Empty invocations of `chain!` expand to an invocation of [`std::iter::empty`]:
+/// ```
+/// use std::iter;
+/// use itertools::chain;
+///
+/// let _: iter::Empty<()> = chain!();
+/// let _: iter::Empty<i8> = chain!();
+/// ```
+///
+/// Invocations of `chain!` with one argument expand to [`arg.into_iter()`](IntoIterator):
+/// ```
+/// use std::{ops::Range, slice};
+/// use itertools::chain;
+/// let _: <Range<_> as IntoIterator>::IntoIter = chain!((2..6),); // trailing comma optional!
+/// let _: <&[_] as IntoIterator>::IntoIter = chain!(&[2, 3, 4]);
+/// ```
+///
+/// Invocations of `chain!` with multiple arguments [`.into_iter()`](IntoIterator) each
+/// argument, and then [`chain`] them together:
+/// ```
+/// use std::{iter::*, ops::Range, slice};
+/// use itertools::{assert_equal, chain};
+///
+/// // e.g., this:
+/// let with_macro: Chain<Chain<Once<_>, Take<Repeat<_>>>, slice::Iter<_>> =
+/// chain![once(&0), repeat(&1).take(2), &[2, 3, 5],];
+///
+/// // ...is equivalent to this:
+/// let with_method: Chain<Chain<Once<_>, Take<Repeat<_>>>, slice::Iter<_>> =
+/// once(&0)
+/// .chain(repeat(&1).take(2))
+/// .chain(&[2, 3, 5]);
+///
+/// assert_equal(with_macro, with_method);
+/// ```
+macro_rules! chain {
+ () => {
+ core::iter::empty()
+ };
+ ($first:expr $(, $rest:expr )* $(,)?) => {
+ {
+ let iter = core::iter::IntoIterator::into_iter($first);
+ $(
+ let iter =
+ core::iter::Iterator::chain(
+ iter,
+ core::iter::IntoIterator::into_iter($rest));
+ )*
+ iter
+ }
+ };
+}
+
+/// An [`Iterator`] blanket implementation that provides extra adaptors and
+/// methods.
+///
+/// This trait defines a number of methods. They are divided into two groups:
+///
+/// * *Adaptors* take an iterator and parameter as input, and return
+/// a new iterator value. These are listed first in the trait. An example
+/// of an adaptor is [`.interleave()`](Itertools::interleave)
+///
+/// * *Regular methods* are those that don't return iterators and instead
+/// return a regular value of some other kind.
+/// [`.next_tuple()`](Itertools::next_tuple) is an example and the first regular
+/// method in the list.
+pub trait Itertools : Iterator {
+ // adaptors
+
+ /// Alternate elements from two iterators until both have run out.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = (1..7).interleave(vec![-1, -2]);
+ /// itertools::assert_equal(it, vec![1, -1, 2, -2, 3, 4, 5, 6]);
+ /// ```
+ fn interleave<J>(self, other: J) -> Interleave<Self, J::IntoIter>
+ where J: IntoIterator<Item = Self::Item>,
+ Self: Sized
+ {
+ interleave(self, other)
+ }
+
+ /// Alternate elements from two iterators until at least one of them has run
+ /// out.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = (1..7).interleave_shortest(vec![-1, -2]);
+ /// itertools::assert_equal(it, vec![1, -1, 2, -2, 3]);
+ /// ```
+ fn interleave_shortest<J>(self, other: J) -> InterleaveShortest<Self, J::IntoIter>
+ where J: IntoIterator<Item = Self::Item>,
+ Self: Sized
+ {
+ adaptors::interleave_shortest(self, other.into_iter())
+ }
+
+ /// An iterator adaptor to insert a particular value
+ /// between each element of the adapted iterator.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// itertools::assert_equal((0..3).intersperse(8), vec![0, 8, 1, 8, 2]);
+ /// ```
+ fn intersperse(self, element: Self::Item) -> Intersperse<Self>
+ where Self: Sized,
+ Self::Item: Clone
+ {
+ intersperse::intersperse(self, element)
+ }
+
+ /// An iterator adaptor to insert a particular value created by a function
+ /// between each element of the adapted iterator.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut i = 10;
+ /// itertools::assert_equal((0..3).intersperse_with(|| { i -= 1; i }), vec![0, 9, 1, 8, 2]);
+ /// assert_eq!(i, 8);
+ /// ```
+ fn intersperse_with<F>(self, element: F) -> IntersperseWith<Self, F>
+ where Self: Sized,
+ F: FnMut() -> Self::Item
+ {
+ intersperse::intersperse_with(self, element)
+ }
+
+ /// Create an iterator which iterates over both this and the specified
+ /// iterator simultaneously, yielding pairs of two optional elements.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// As long as neither input iterator is exhausted yet, it yields two values
+ /// via `EitherOrBoth::Both`.
+ ///
+ /// When the parameter iterator is exhausted, it only yields a value from the
+ /// `self` iterator via `EitherOrBoth::Left`.
+ ///
+ /// When the `self` iterator is exhausted, it only yields a value from the
+ /// parameter iterator via `EitherOrBoth::Right`.
+ ///
+ /// When both iterators return `None`, all further invocations of `.next()`
+ /// will return `None`.
+ ///
+ /// Iterator element type is
+ /// [`EitherOrBoth<Self::Item, J::Item>`](EitherOrBoth).
+ ///
+ /// ```rust
+ /// use itertools::EitherOrBoth::{Both, Right};
+ /// use itertools::Itertools;
+ /// let it = (0..1).zip_longest(1..3);
+ /// itertools::assert_equal(it, vec![Both(0, 1), Right(2)]);
+ /// ```
+ #[inline]
+ fn zip_longest<J>(self, other: J) -> ZipLongest<Self, J::IntoIter>
+ where J: IntoIterator,
+ Self: Sized
+ {
+ zip_longest::zip_longest(self, other.into_iter())
+ }
+
+ /// Create an iterator which iterates over both this and the specified
+ /// iterator simultaneously, yielding pairs of elements.
+ ///
+ /// **Panics** if the iterators reach an end and they are not of equal
+ /// lengths.
+ #[inline]
+ fn zip_eq<J>(self, other: J) -> ZipEq<Self, J::IntoIter>
+ where J: IntoIterator,
+ Self: Sized
+ {
+ zip_eq(self, other)
+ }
+
+ /// A “meta iterator adaptor”. Its closure receives a reference to the
+ /// iterator and may pick off as many elements as it likes, to produce the
+ /// next iterator element.
+ ///
+ /// Iterator element type is `B`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // An adaptor that gathers elements in pairs
+ /// let pit = (0..4).batching(|it| {
+ /// match it.next() {
+ /// None => None,
+ /// Some(x) => match it.next() {
+ /// None => None,
+ /// Some(y) => Some((x, y)),
+ /// }
+ /// }
+ /// });
+ ///
+ /// itertools::assert_equal(pit, vec![(0, 1), (2, 3)]);
+ /// ```
+ ///
+ fn batching<B, F>(self, f: F) -> Batching<Self, F>
+ where F: FnMut(&mut Self) -> Option<B>,
+ Self: Sized
+ {
+ adaptors::batching(self, f)
+ }
+
+ /// Return an *iterable* that can group iterator elements.
+ /// Consecutive elements that map to the same key (“runs”), are assigned
+ /// to the same group.
+ ///
+ /// `GroupBy` is the storage for the lazy grouping operation.
+ ///
+ /// If the groups are consumed in order, or if each group's iterator is
+ /// dropped without keeping it around, then `GroupBy` uses no
+ /// allocations. It needs allocations only if several group iterators
+ /// are alive at the same time.
+ ///
+ /// This type implements [`IntoIterator`] (it is **not** an iterator
+ /// itself), because the group iterators need to borrow from this
+ /// value. It should be stored in a local variable or temporary and
+ /// iterated.
+ ///
+ /// Iterator element type is `(K, Group)`: the group's key and the
+ /// group iterator.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // group data into runs of larger than zero or not.
+ /// let data = vec![1, 3, -2, -2, 1, 0, 1, 2];
+ /// // groups: |---->|------>|--------->|
+ ///
+ /// // Note: The `&` is significant here, `GroupBy` is iterable
+ /// // only by reference. You can also call `.into_iter()` explicitly.
+ /// let mut data_grouped = Vec::new();
+ /// for (key, group) in &data.into_iter().group_by(|elt| *elt >= 0) {
+ /// data_grouped.push((key, group.collect()));
+ /// }
+ /// assert_eq!(data_grouped, vec![(true, vec![1, 3]), (false, vec![-2, -2]), (true, vec![1, 0, 1, 2])]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn group_by<K, F>(self, key: F) -> GroupBy<K, Self, F>
+ where Self: Sized,
+ F: FnMut(&Self::Item) -> K,
+ K: PartialEq,
+ {
+ groupbylazy::new(self, key)
+ }
+
+ /// Return an *iterable* that can chunk the iterator.
+ ///
+ /// Yield subiterators (chunks) that each yield a fixed number of elements,
+ /// determined by `size`. The last chunk will be shorter if there aren't
+ /// enough elements.
+ ///
+ /// `IntoChunks` is based on `GroupBy`: it is iterable (implements
+ /// `IntoIterator`, **not** `Iterator`), and it only buffers if several
+ /// chunk iterators are alive at the same time.
+ ///
+ /// Iterator element type is `Chunk`, each chunk's iterator.
+ ///
+ /// **Panics** if `size` is 0.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![1, 1, 2, -2, 6, 0, 3, 1];
+ /// //chunk size=3 |------->|-------->|--->|
+ ///
+ /// // Note: The `&` is significant here, `IntoChunks` is iterable
+ /// // only by reference. You can also call `.into_iter()` explicitly.
+ /// for chunk in &data.into_iter().chunks(3) {
+ /// // Check that the sum of each chunk is 4.
+ /// assert_eq!(4, chunk.sum());
+ /// }
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn chunks(self, size: usize) -> IntoChunks<Self>
+ where Self: Sized,
+ {
+ assert!(size != 0);
+ groupbylazy::new_chunks(self, size)
+ }
+
+ /// Return an iterator over all contiguous windows producing tuples of
+ /// a specific size (up to 12).
+ ///
+ /// `tuple_windows` clones the iterator elements so that they can be
+ /// part of successive windows, this makes it most suited for iterators
+ /// of references and other values that are cheap to copy.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// let mut v = Vec::new();
+ ///
+ /// // pairwise iteration
+ /// for (a, b) in (1..5).tuple_windows() {
+ /// v.push((a, b));
+ /// }
+ /// assert_eq!(v, vec![(1, 2), (2, 3), (3, 4)]);
+ ///
+ /// let mut it = (1..5).tuple_windows();
+ /// assert_eq!(Some((1, 2, 3)), it.next());
+ /// assert_eq!(Some((2, 3, 4)), it.next());
+ /// assert_eq!(None, it.next());
+ ///
+ /// // this requires a type hint
+ /// let it = (1..5).tuple_windows::<(_, _, _)>();
+ /// itertools::assert_equal(it, vec![(1, 2, 3), (2, 3, 4)]);
+ ///
+ /// // you can also specify the complete type
+ /// use itertools::TupleWindows;
+ /// use std::ops::Range;
+ ///
+ /// let it: TupleWindows<Range<u32>, (u32, u32, u32)> = (1..5).tuple_windows();
+ /// itertools::assert_equal(it, vec![(1, 2, 3), (2, 3, 4)]);
+ /// ```
+ fn tuple_windows<T>(self) -> TupleWindows<Self, T>
+ where Self: Sized + Iterator<Item = T::Item>,
+ T: traits::HomogeneousTuple,
+ T::Item: Clone
+ {
+ tuple_impl::tuple_windows(self)
+ }
+
+ /// Return an iterator over all windows, wrapping back to the first
+ /// elements when the window would otherwise exceed the length of the
+ /// iterator, producing tuples of a specific size (up to 12).
+ ///
+ /// `circular_tuple_windows` clones the iterator elements so that they can be
+ /// part of successive windows, this makes it most suited for iterators
+ /// of references and other values that are cheap to copy.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// let mut v = Vec::new();
+ /// for (a, b) in (1..5).circular_tuple_windows() {
+ /// v.push((a, b));
+ /// }
+ /// assert_eq!(v, vec![(1, 2), (2, 3), (3, 4), (4, 1)]);
+ ///
+ /// let mut it = (1..5).circular_tuple_windows();
+ /// assert_eq!(Some((1, 2, 3)), it.next());
+ /// assert_eq!(Some((2, 3, 4)), it.next());
+ /// assert_eq!(Some((3, 4, 1)), it.next());
+ /// assert_eq!(Some((4, 1, 2)), it.next());
+ /// assert_eq!(None, it.next());
+ ///
+ /// // this requires a type hint
+ /// let it = (1..5).circular_tuple_windows::<(_, _, _)>();
+ /// itertools::assert_equal(it, vec![(1, 2, 3), (2, 3, 4), (3, 4, 1), (4, 1, 2)]);
+ /// ```
+ fn circular_tuple_windows<T>(self) -> CircularTupleWindows<Self, T>
+ where Self: Sized + Clone + Iterator<Item = T::Item> + ExactSizeIterator,
+ T: tuple_impl::TupleCollect + Clone,
+ T::Item: Clone
+ {
+ tuple_impl::circular_tuple_windows(self)
+ }
+ /// Return an iterator that groups the items in tuples of a specific size
+ /// (up to 12).
+ ///
+ /// See also the method [`.next_tuple()`](Itertools::next_tuple).
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// let mut v = Vec::new();
+ /// for (a, b) in (1..5).tuples() {
+ /// v.push((a, b));
+ /// }
+ /// assert_eq!(v, vec![(1, 2), (3, 4)]);
+ ///
+ /// let mut it = (1..7).tuples();
+ /// assert_eq!(Some((1, 2, 3)), it.next());
+ /// assert_eq!(Some((4, 5, 6)), it.next());
+ /// assert_eq!(None, it.next());
+ ///
+ /// // this requires a type hint
+ /// let it = (1..7).tuples::<(_, _, _)>();
+ /// itertools::assert_equal(it, vec![(1, 2, 3), (4, 5, 6)]);
+ ///
+ /// // you can also specify the complete type
+ /// use itertools::Tuples;
+ /// use std::ops::Range;
+ ///
+ /// let it: Tuples<Range<u32>, (u32, u32, u32)> = (1..7).tuples();
+ /// itertools::assert_equal(it, vec![(1, 2, 3), (4, 5, 6)]);
+ /// ```
+ ///
+ /// See also [`Tuples::into_buffer`].
+ fn tuples<T>(self) -> Tuples<Self, T>
+ where Self: Sized + Iterator<Item = T::Item>,
+ T: traits::HomogeneousTuple
+ {
+ tuple_impl::tuples(self)
+ }
+
+ /// Split into an iterator pair that both yield all elements from
+ /// the original iterator.
+ ///
+ /// **Note:** If the iterator is clonable, prefer using that instead
+ /// of using this method. Cloning is likely to be more efficient.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// let xs = vec![0, 1, 2, 3];
+ ///
+ /// let (mut t1, t2) = xs.into_iter().tee();
+ /// itertools::assert_equal(t1.next(), Some(0));
+ /// itertools::assert_equal(t2, 0..4);
+ /// itertools::assert_equal(t1, 1..4);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn tee(self) -> (Tee<Self>, Tee<Self>)
+ where Self: Sized,
+ Self::Item: Clone
+ {
+ tee::new(self)
+ }
+
+ /// Return an iterator adaptor that steps `n` elements in the base iterator
+ /// for each iteration.
+ ///
+ /// The iterator steps by yielding the next element from the base iterator,
+ /// then skipping forward `n - 1` elements.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// **Panics** if the step is 0.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = (0..8).step(3);
+ /// itertools::assert_equal(it, vec![0, 3, 6]);
+ /// ```
+ #[deprecated(note="Use std .step_by() instead", since="0.8.0")]
+ #[allow(deprecated)]
+ fn step(self, n: usize) -> Step<Self>
+ where Self: Sized
+ {
+ adaptors::step(self, n)
+ }
+
+ /// Convert each item of the iterator using the [`Into`] trait.
+ ///
+ /// ```rust
+ /// use itertools::Itertools;
+ ///
+ /// (1i32..42i32).map_into::<f64>().collect_vec();
+ /// ```
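+ ///
+ /// A minimal sketch of the converted values, assuming the standard
+ /// library's `i32` to `f64` conversion:
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let floats = (1i32..4i32).map_into::<f64>().collect_vec();
+ /// assert_eq!(floats, vec![1.0, 2.0, 3.0]);
+ /// ```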
+ fn map_into<R>(self) -> MapInto<Self, R>
+ where Self: Sized,
+ Self::Item: Into<R>,
+ {
+ adaptors::map_into(self)
+ }
+
+ /// See [`.map_ok()`](Itertools::map_ok).
+ #[deprecated(note="Use .map_ok() instead", since="0.10.0")]
+ fn map_results<F, T, U, E>(self, f: F) -> MapOk<Self, F>
+ where Self: Iterator<Item = Result<T, E>> + Sized,
+ F: FnMut(T) -> U,
+ {
+ self.map_ok(f)
+ }
+
+ /// Return an iterator adaptor that applies the provided closure
+ /// to every `Result::Ok` value. `Result::Err` values are
+ /// unchanged.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let input = vec![Ok(41), Err(false), Ok(11)];
+ /// let it = input.into_iter().map_ok(|i| i + 1);
+ /// itertools::assert_equal(it, vec![Ok(42), Err(false), Ok(12)]);
+ /// ```
+ fn map_ok<F, T, U, E>(self, f: F) -> MapOk<Self, F>
+ where Self: Iterator<Item = Result<T, E>> + Sized,
+ F: FnMut(T) -> U,
+ {
+ adaptors::map_ok(self, f)
+ }
+
+ /// Return an iterator adaptor that filters every `Result::Ok`
+ /// value with the provided closure. `Result::Err` values are
+ /// unchanged.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let input = vec![Ok(22), Err(false), Ok(11)];
+ /// let it = input.into_iter().filter_ok(|&i| i > 20);
+ /// itertools::assert_equal(it, vec![Ok(22), Err(false)]);
+ /// ```
+ fn filter_ok<F, T, E>(self, f: F) -> FilterOk<Self, F>
+ where Self: Iterator<Item = Result<T, E>> + Sized,
+ F: FnMut(&T) -> bool,
+ {
+ adaptors::filter_ok(self, f)
+ }
+
+ /// Return an iterator adaptor that filters and transforms every
+ /// `Result::Ok` value with the provided closure. `Result::Err`
+ /// values are unchanged.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let input = vec![Ok(22), Err(false), Ok(11)];
+ /// let it = input.into_iter().filter_map_ok(|i| if i > 20 { Some(i * 2) } else { None });
+ /// itertools::assert_equal(it, vec![Ok(44), Err(false)]);
+ /// ```
+ fn filter_map_ok<F, T, U, E>(self, f: F) -> FilterMapOk<Self, F>
+ where Self: Iterator<Item = Result<T, E>> + Sized,
+ F: FnMut(T) -> Option<U>,
+ {
+ adaptors::filter_map_ok(self, f)
+ }
+
+ /// Return an iterator adaptor that flattens every `Result::Ok` value into
+ /// a series of `Result::Ok` values. `Result::Err` values are unchanged.
+ ///
+ /// This is useful when you have some common error type for your crate and
+ /// need to propagate it upwards, but the `Result::Ok` case needs to be flattened.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let input = vec![Ok(0..2), Err(false), Ok(2..4)];
+ /// let it = input.iter().cloned().flatten_ok();
+ /// itertools::assert_equal(it.clone(), vec![Ok(0), Ok(1), Err(false), Ok(2), Ok(3)]);
+ ///
+ /// // This can also be used to propagate errors when collecting.
+ /// let output_result: Result<Vec<i32>, bool> = it.collect();
+ /// assert_eq!(output_result, Err(false));
+ /// ```
+ fn flatten_ok<T, E>(self) -> FlattenOk<Self, T, E>
+ where Self: Iterator<Item = Result<T, E>> + Sized,
+ T: IntoIterator
+ {
+ flatten_ok::flatten_ok(self)
+ }
+
+ /// “Lift” a function of the values of the current iterator so as to process
+ /// an iterator of `Result` values instead.
+ ///
+ /// `processor` is a closure that receives an adapted version of the iterator
+ /// as the only argument — the adapted iterator produces elements of type `T`,
+ /// as long as the original iterator produces `Ok` values.
+ ///
+ /// If the original iterable produces an error at any point, the adapted
+ /// iterator ends and it will return the error itself.
+ ///
+ /// Otherwise, the return value from the closure is returned wrapped
+ /// inside `Ok`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// type Item = Result<i32, &'static str>;
+ ///
+ /// let first_values: Vec<Item> = vec![Ok(1), Ok(0), Ok(3)];
+ /// let second_values: Vec<Item> = vec![Ok(2), Ok(1), Err("overflow")];
+ ///
+ /// // “Lift” the iterator .max() method to work on the Ok-values.
+ /// let first_max = first_values.into_iter().process_results(|iter| iter.max().unwrap_or(0));
+ /// let second_max = second_values.into_iter().process_results(|iter| iter.max().unwrap_or(0));
+ ///
+ /// assert_eq!(first_max, Ok(3));
+ /// assert!(second_max.is_err());
+ /// ```
+ fn process_results<F, T, E, R>(self, processor: F) -> Result<R, E>
+ where Self: Iterator<Item = Result<T, E>> + Sized,
+ F: FnOnce(ProcessResults<Self, E>) -> R
+ {
+ process_results(self, processor)
+ }
+
+ /// Return an iterator adaptor that merges the two base iterators in
+ /// ascending order. If both base iterators are sorted (ascending), the
+ /// result is sorted.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a = (0..11).step_by(3);
+ /// let b = (0..11).step_by(5);
+ /// let it = a.merge(b);
+ /// itertools::assert_equal(it, vec![0, 0, 3, 5, 6, 9, 10]);
+ /// ```
+ fn merge<J>(self, other: J) -> Merge<Self, J::IntoIter>
+ where Self: Sized,
+ Self::Item: PartialOrd,
+ J: IntoIterator<Item = Self::Item>
+ {
+ merge(self, other)
+ }
+
+ /// Return an iterator adaptor that merges the two base iterators in order.
+ /// This is much like [`.merge()`](Itertools::merge) but allows for a custom ordering.
+ ///
+ /// This can be especially useful for sequences of tuples.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a = (0..).zip("bc".chars());
+ /// let b = (0..).zip("ad".chars());
+ /// let it = a.merge_by(b, |x, y| x.1 <= y.1);
+ /// itertools::assert_equal(it, vec![(0, 'a'), (0, 'b'), (1, 'c'), (1, 'd')]);
+ /// ```
+
+ fn merge_by<J, F>(self, other: J, is_first: F) -> MergeBy<Self, J::IntoIter, F>
+ where Self: Sized,
+ J: IntoIterator<Item = Self::Item>,
+ F: FnMut(&Self::Item, &Self::Item) -> bool
+ {
+ adaptors::merge_by_new(self, other.into_iter(), is_first)
+ }
+
+ /// Create an iterator that merges items from both this and the specified
+ /// iterator in ascending order.
+ ///
+ /// The function can either return an `Ordering` variant or a boolean.
+ ///
+ /// If `cmp_fn` returns `Ordering`,
+ /// it chooses whether to pair elements based on the `Ordering` returned by the
+ /// specified compare function. At any point, inspecting the tip of the
+ /// iterators `I` and `J` as items `i` of type `I::Item` and `j` of type
+ /// `J::Item` respectively, the resulting iterator will:
+ ///
+ /// - Emit `EitherOrBoth::Left(i)` when `i < j`,
+ /// and remove `i` from its source iterator
+ /// - Emit `EitherOrBoth::Right(j)` when `i > j`,
+ /// and remove `j` from its source iterator
+ /// - Emit `EitherOrBoth::Both(i, j)` when `i == j`,
+ /// and remove both `i` and `j` from their respective source iterators
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::EitherOrBoth::{Left, Right, Both};
+ ///
+ /// let a = vec![0, 2, 4, 6, 1].into_iter();
+ /// let b = (0..10).step_by(3);
+ ///
+ /// itertools::assert_equal(
+ /// a.merge_join_by(b, |i, j| i.cmp(j)),
+ /// vec![Both(0, 0), Left(2), Right(3), Left(4), Both(6, 6), Left(1), Right(9)]
+ /// );
+ /// ```
+ ///
+ /// If `cmp_fn` returns `bool`,
+ /// it chooses whether to pair elements based on the boolean returned by the
+ /// specified function. At any point, inspecting the tip of the
+ /// iterators `I` and `J` as items `i` of type `I::Item` and `j` of type
+ /// `J::Item` respectively, the resulting iterator will:
+ ///
+ /// - Emit `Either::Left(i)` when `true`,
+ /// and remove `i` from its source iterator
+ /// - Emit `Either::Right(j)` when `false`,
+ /// and remove `j` from its source iterator
+ ///
+ /// It is similar to the `Ordering` case if the first argument is considered
+ /// "less" than the second argument.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::Either::{Left, Right};
+ ///
+ /// let a = vec![0, 2, 4, 6, 1].into_iter();
+ /// let b = (0..10).step_by(3);
+ ///
+ /// itertools::assert_equal(
+ /// a.merge_join_by(b, |i, j| i <= j),
+ /// vec![Left(0), Right(0), Left(2), Right(3), Left(4), Left(6), Left(1), Right(6), Right(9)]
+ /// );
+ /// ```
+ #[inline]
+ fn merge_join_by<J, F, T>(self, other: J, cmp_fn: F) -> MergeJoinBy<Self, J::IntoIter, F>
+ where J: IntoIterator,
+ F: FnMut(&Self::Item, &J::Item) -> T,
+ T: merge_join::OrderingOrBool<Self::Item, J::Item>,
+ Self: Sized
+ {
+ merge_join_by(self, other, cmp_fn)
+ }
+
+ /// Return an iterator adaptor that flattens an iterator of iterators by
+ /// merging them in ascending order.
+ ///
+ /// If all base iterators are sorted (ascending), the result is sorted.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a = (0..6).step_by(3);
+ /// let b = (1..6).step_by(3);
+ /// let c = (2..6).step_by(3);
+ /// let it = vec![a, b, c].into_iter().kmerge();
+ /// itertools::assert_equal(it, vec![0, 1, 2, 3, 4, 5]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn kmerge(self) -> KMerge<<Self::Item as IntoIterator>::IntoIter>
+ where Self: Sized,
+ Self::Item: IntoIterator,
+ <Self::Item as IntoIterator>::Item: PartialOrd,
+ {
+ kmerge(self)
+ }
+
+ /// Return an iterator adaptor that flattens an iterator of iterators by
+ /// merging them according to the given closure.
+ ///
+ /// The closure `first` is called with two elements *a*, *b* and should
+ /// return `true` if *a* is ordered before *b*.
+ ///
+ /// If all base iterators are sorted according to `first`, the result is
+ /// sorted.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a = vec![-1f64, 2., 3., -5., 6., -7.];
+ /// let b = vec![0., 2., -4.];
+ /// let mut it = vec![a, b].into_iter().kmerge_by(|a, b| a.abs() < b.abs());
+ /// assert_eq!(it.next(), Some(0.));
+ /// assert_eq!(it.last(), Some(-7.));
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn kmerge_by<F>(self, first: F)
+ -> KMergeBy<<Self::Item as IntoIterator>::IntoIter, F>
+ where Self: Sized,
+ Self::Item: IntoIterator,
+ F: FnMut(&<Self::Item as IntoIterator>::Item,
+ &<Self::Item as IntoIterator>::Item) -> bool
+ {
+ kmerge_by(self, first)
+ }
+
+ /// Return an iterator adaptor that iterates over the cartesian product of
+ /// the element sets of two iterators `self` and `J`.
+ ///
+ /// Iterator element type is `(Self::Item, J::Item)`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = (0..2).cartesian_product("αβ".chars());
+ /// itertools::assert_equal(it, vec![(0, 'α'), (0, 'β'), (1, 'α'), (1, 'β')]);
+ /// ```
+ fn cartesian_product<J>(self, other: J) -> Product<Self, J::IntoIter>
+ where Self: Sized,
+ Self::Item: Clone,
+ J: IntoIterator,
+ J::IntoIter: Clone
+ {
+ adaptors::cartesian_product(self, other.into_iter())
+ }
+
+ /// Return an iterator adaptor that iterates over the cartesian product of
+ /// all subiterators returned by meta-iterator `self`.
+ ///
+ /// All provided iterators must yield the same `Item` type. To generate
+ /// the product of iterators yielding multiple types, use the
+ /// [`iproduct`] macro instead.
+ ///
+ ///
+ /// The iterator element type is `Vec<T>`, where `T` is the iterator element
+ /// of the subiterators.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// let mut multi_prod = (0..3).map(|i| (i * 2)..(i * 2 + 2))
+ /// .multi_cartesian_product();
+ /// assert_eq!(multi_prod.next(), Some(vec![0, 2, 4]));
+ /// assert_eq!(multi_prod.next(), Some(vec![0, 2, 5]));
+ /// assert_eq!(multi_prod.next(), Some(vec![0, 3, 4]));
+ /// assert_eq!(multi_prod.next(), Some(vec![0, 3, 5]));
+ /// assert_eq!(multi_prod.next(), Some(vec![1, 2, 4]));
+ /// assert_eq!(multi_prod.next(), Some(vec![1, 2, 5]));
+ /// assert_eq!(multi_prod.next(), Some(vec![1, 3, 4]));
+ /// assert_eq!(multi_prod.next(), Some(vec![1, 3, 5]));
+ /// assert_eq!(multi_prod.next(), None);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn multi_cartesian_product(self) -> MultiProduct<<Self::Item as IntoIterator>::IntoIter>
+ where Self: Sized,
+ Self::Item: IntoIterator,
+ <Self::Item as IntoIterator>::IntoIter: Clone,
+ <Self::Item as IntoIterator>::Item: Clone
+ {
+ adaptors::multi_cartesian_product(self)
+ }
+
+ /// Return an iterator adaptor that uses the passed-in closure to
+ /// optionally merge together consecutive elements.
+ ///
+ /// The closure `f` is passed two elements, `previous` and `current` and may
+ /// return either (1) `Ok(combined)` to merge the two values or
+ /// (2) `Err((previous', current'))` to indicate they can't be merged.
+ /// In (2), the value `previous'` is emitted by the iterator.
+ /// Either (1) `combined` or (2) `current'` becomes the previous value
+ /// when coalesce continues with the next pair of elements to merge. The
+ /// value that remains at the end is also emitted by the iterator.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sum same-sign runs together
+ /// let data = vec![-1., -2., -3., 3., 1., 0., -1.];
+ /// itertools::assert_equal(data.into_iter().coalesce(|x, y|
+ /// if (x >= 0.) == (y >= 0.) {
+ /// Ok(x + y)
+ /// } else {
+ /// Err((x, y))
+ /// }),
+ /// vec![-6., 4., -1.]);
+ /// ```
+ fn coalesce<F>(self, f: F) -> Coalesce<Self, F>
+ where Self: Sized,
+ F: FnMut(Self::Item, Self::Item)
+ -> Result<Self::Item, (Self::Item, Self::Item)>
+ {
+ adaptors::coalesce(self, f)
+ }
+
+ /// Remove duplicates from sections of consecutive identical elements.
+ /// If the iterator is sorted, all elements will be unique.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![1., 1., 2., 3., 3., 2., 2.];
+ /// itertools::assert_equal(data.into_iter().dedup(),
+ /// vec![1., 2., 3., 2.]);
+ /// ```
+ fn dedup(self) -> Dedup<Self>
+ where Self: Sized,
+ Self::Item: PartialEq,
+ {
+ adaptors::dedup(self)
+ }
+
+ /// Remove duplicates from sections of consecutive identical elements,
+ /// determining equality using a comparison function.
+ /// If the iterator is sorted, all elements will be unique.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![(0, 1.), (1, 1.), (0, 2.), (0, 3.), (1, 3.), (1, 2.), (2, 2.)];
+ /// itertools::assert_equal(data.into_iter().dedup_by(|x, y| x.1 == y.1),
+ /// vec![(0, 1.), (0, 2.), (0, 3.), (1, 2.)]);
+ /// ```
+ fn dedup_by<Cmp>(self, cmp: Cmp) -> DedupBy<Self, Cmp>
+ where Self: Sized,
+ Cmp: FnMut(&Self::Item, &Self::Item)->bool,
+ {
+ adaptors::dedup_by(self, cmp)
+ }
+
+ /// Remove duplicates from sections of consecutive identical elements, while keeping a count of
+ /// how many repeated elements were present.
+ /// If the iterator is sorted, all elements will be unique.
+ ///
+ /// Iterator element type is `(usize, Self::Item)`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec!['a', 'a', 'b', 'c', 'c', 'b', 'b'];
+ /// itertools::assert_equal(data.into_iter().dedup_with_count(),
+ /// vec![(2, 'a'), (1, 'b'), (2, 'c'), (2, 'b')]);
+ /// ```
+ fn dedup_with_count(self) -> DedupWithCount<Self>
+ where
+ Self: Sized,
+ {
+ adaptors::dedup_with_count(self)
+ }
+
+ /// Remove duplicates from sections of consecutive identical elements, while keeping a count of
+ /// how many repeated elements were present.
+ /// This will determine equality using a comparison function.
+ /// If the iterator is sorted, all elements will be unique.
+ ///
+ /// Iterator element type is `(usize, Self::Item)`.
+ ///
+ /// This iterator is *fused*.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![(0, 'a'), (1, 'a'), (0, 'b'), (0, 'c'), (1, 'c'), (1, 'b'), (2, 'b')];
+ /// itertools::assert_equal(data.into_iter().dedup_by_with_count(|x, y| x.1 == y.1),
+ ///                         vec![(2, (0, 'a')), (1, (0, 'b')), (2, (0, 'c')), (2, (1, 'b'))]);
+ /// ```
+ fn dedup_by_with_count<Cmp>(self, cmp: Cmp) -> DedupByWithCount<Self, Cmp>
+ where
+ Self: Sized,
+ Cmp: FnMut(&Self::Item, &Self::Item) -> bool,
+ {
+ adaptors::dedup_by_with_count(self, cmp)
+ }
+
+ /// Return an iterator adaptor that produces elements that appear more than once during the
+ /// iteration. Duplicates are detected using hash and equality.
+ ///
+ /// The iterator is stable, returning the duplicate items in the order in which they occur in
+ /// the adapted iterator. Each duplicate item is returned exactly once. If an item appears more
+ /// than twice, the second item is the item retained and the rest are discarded.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![10, 20, 30, 20, 40, 10, 50];
+ /// itertools::assert_equal(data.into_iter().duplicates(),
+ /// vec![20, 10]);
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn duplicates(self) -> Duplicates<Self>
+ where Self: Sized,
+ Self::Item: Eq + Hash
+ {
+ duplicates_impl::duplicates(self)
+ }
+
+ /// Return an iterator adaptor that produces elements that appear more than once during the
+ /// iteration. Duplicates are detected using hash and equality.
+ ///
+ /// Duplicates are detected by comparing the key they map to with the keying function `f` by
+ /// hash and equality. The keys are stored in a hash map in the iterator.
+ ///
+ /// The iterator is stable, returning the duplicate items in the order in which they occur in
+ /// the adapted iterator. Each duplicate item is returned exactly once. If an item appears more
+ /// than twice, the second item is the item retained and the rest are discarded.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec!["a", "bb", "aa", "c", "ccc"];
+ /// itertools::assert_equal(data.into_iter().duplicates_by(|s| s.len()),
+ /// vec!["aa", "c"]);
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn duplicates_by<V, F>(self, f: F) -> DuplicatesBy<Self, V, F>
+ where Self: Sized,
+ V: Eq + Hash,
+ F: FnMut(&Self::Item) -> V
+ {
+ duplicates_impl::duplicates_by(self, f)
+ }
+
+ /// Return an iterator adaptor that filters out elements that have
+ /// already been produced once during the iteration. Duplicates
+ /// are detected using hash and equality.
+ ///
+ /// Clones of visited elements are stored in a hash set in the
+ /// iterator.
+ ///
+ /// The iterator is stable, returning the non-duplicate items in the order
+ /// in which they occur in the adapted iterator. In a set of duplicate
+ /// items, the first item encountered is the item retained.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![10, 20, 30, 20, 40, 10, 50];
+ /// itertools::assert_equal(data.into_iter().unique(),
+ /// vec![10, 20, 30, 40, 50]);
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn unique(self) -> Unique<Self>
+ where Self: Sized,
+ Self::Item: Clone + Eq + Hash
+ {
+ unique_impl::unique(self)
+ }
+
+ /// Return an iterator adaptor that filters out elements that have
+ /// already been produced once during the iteration.
+ ///
+ /// Duplicates are detected by comparing the key they map to
+ /// with the keying function `f` by hash and equality.
+ /// The keys are stored in a hash set in the iterator.
+ ///
+ /// The iterator is stable, returning the non-duplicate items in the order
+ /// in which they occur in the adapted iterator. In a set of duplicate
+ /// items, the first item encountered is the item retained.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec!["a", "bb", "aa", "c", "ccc"];
+ /// itertools::assert_equal(data.into_iter().unique_by(|s| s.len()),
+ /// vec!["a", "bb", "ccc"]);
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn unique_by<V, F>(self, f: F) -> UniqueBy<Self, V, F>
+ where Self: Sized,
+ V: Eq + Hash,
+ F: FnMut(&Self::Item) -> V
+ {
+ unique_impl::unique_by(self, f)
+ }
+
+ /// Return an iterator adaptor that borrows from this iterator and
+ /// takes items while the closure `accept` returns `true`.
+ ///
+ /// This adaptor can only be used on iterators that implement `PeekingNext`
+ /// like `.peekable()`, `put_back` and a few other collection iterators.
+ ///
+ /// The last and rejected element (first `false`) is still available when
+ /// `peeking_take_while` is done.
+ ///
+ ///
+ /// See also [`.take_while_ref()`](Itertools::take_while_ref)
+ /// which is a similar adaptor.
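+ ///
+ /// A minimal sketch, assuming a `Peekable` source (which implements
+ /// `PeekingNext`); the first rejected element remains available afterwards:
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut it = (1..10).peekable();
+ /// let small: Vec<_> = it.peeking_take_while(|&x| x <= 5).collect();
+ /// assert_eq!(small, vec![1, 2, 3, 4, 5]);
+ /// assert_eq!(it.next(), Some(6)); // 6 was only peeked, not consumed
+ /// ```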
+ fn peeking_take_while<F>(&mut self, accept: F) -> PeekingTakeWhile<Self, F>
+ where Self: Sized + PeekingNext,
+ F: FnMut(&Self::Item) -> bool,
+ {
+ peeking_take_while::peeking_take_while(self, accept)
+ }
+
+ /// Return an iterator adaptor that borrows from a `Clone`-able iterator
+ /// to only pick off elements while the predicate `accept` returns `true`.
+ ///
+ /// It uses the `Clone` trait to restore the original iterator so that the
+ /// last and rejected element (first `false`) is still available when
+ /// `take_while_ref` is done.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut hexadecimals = "0123456789abcdef".chars();
+ ///
+ /// let decimals = hexadecimals.take_while_ref(|c| c.is_numeric())
+ /// .collect::<String>();
+ /// assert_eq!(decimals, "0123456789");
+ /// assert_eq!(hexadecimals.next(), Some('a'));
+ ///
+ /// ```
+ fn take_while_ref<F>(&mut self, accept: F) -> TakeWhileRef<Self, F>
+ where Self: Clone,
+ F: FnMut(&Self::Item) -> bool
+ {
+ adaptors::take_while_ref(self, accept)
+ }
+
+ /// Returns an iterator adaptor that consumes elements while the given
+ /// predicate is `true`, *including* the element for which the predicate
+ /// first returned `false`.
+ ///
+ /// The [`.take_while()`][std::iter::Iterator::take_while] adaptor is useful
+ /// when you want items satisfying a predicate, but to know when to stop
+ /// taking elements, we have to consume that first element that doesn't
+ /// satisfy the predicate. This adaptor includes that element where
+ /// [`.take_while()`][std::iter::Iterator::take_while] would drop it.
+ ///
+ /// The [`.take_while_ref()`][crate::Itertools::take_while_ref] adaptor
+ /// serves a similar purpose, but this adaptor doesn't require [`Clone`]ing
+ /// the underlying elements.
+ ///
+ /// ```rust
+ /// # use itertools::Itertools;
+ /// let items = vec![1, 2, 3, 4, 5];
+ /// let filtered: Vec<_> = items
+ /// .into_iter()
+ /// .take_while_inclusive(|&n| n % 3 != 0)
+ /// .collect();
+ ///
+ /// assert_eq!(filtered, vec![1, 2, 3]);
+ /// ```
+ ///
+ /// ```rust
+ /// # use itertools::Itertools;
+ /// let items = vec![1, 2, 3, 4, 5];
+ ///
+ /// let take_while_inclusive_result: Vec<_> = items
+ /// .iter()
+ /// .copied()
+ /// .take_while_inclusive(|&n| n % 3 != 0)
+ /// .collect();
+ /// let take_while_result: Vec<_> = items
+ /// .into_iter()
+ /// .take_while(|&n| n % 3 != 0)
+ /// .collect();
+ ///
+ /// assert_eq!(take_while_inclusive_result, vec![1, 2, 3]);
+ /// assert_eq!(take_while_result, vec![1, 2]);
+ /// // both iterators have the same items remaining at this point---the 3
+ /// // is lost from the `take_while` vec
+ /// ```
+ ///
+ /// ```rust
+ /// # use itertools::Itertools;
+ /// #[derive(Debug, PartialEq)]
+ /// struct NoCloneImpl(i32);
+ ///
+ /// let non_clonable_items: Vec<_> = vec![1, 2, 3, 4, 5]
+ /// .into_iter()
+ /// .map(NoCloneImpl)
+ /// .collect();
+ /// let filtered: Vec<_> = non_clonable_items
+ /// .into_iter()
+ /// .take_while_inclusive(|n| n.0 % 3 != 0)
+ /// .collect();
+ /// let expected: Vec<_> = vec![1, 2, 3].into_iter().map(NoCloneImpl).collect();
+ /// assert_eq!(filtered, expected);
+ /// ```
+ fn take_while_inclusive<F>(&mut self, accept: F) -> TakeWhileInclusive<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(&Self::Item) -> bool,
+ {
+ take_while_inclusive::TakeWhileInclusive::new(self, accept)
+ }
+
+ /// Return an iterator adaptor that filters `Option<A>` iterator elements
+ /// and produces `A`. Stops on the first `None` encountered.
+ ///
+ /// Iterator element type is `A`, the unwrapped element.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // List all hexadecimal digits
+ /// itertools::assert_equal(
+ /// (0..).map(|i| std::char::from_digit(i, 16)).while_some(),
+ /// "0123456789abcdef".chars());
+ ///
+ /// ```
+ fn while_some<A>(self) -> WhileSome<Self>
+ where Self: Sized + Iterator<Item = Option<A>>
+ {
+ adaptors::while_some(self)
+ }
+
+ /// Return an iterator adaptor that iterates over the combinations of the
+ /// elements from an iterator.
+ ///
+ /// Iterator element can be any homogeneous tuple of type `Self::Item` with
+ /// size up to 12.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut v = Vec::new();
+ /// for (a, b) in (1..5).tuple_combinations() {
+ /// v.push((a, b));
+ /// }
+ /// assert_eq!(v, vec![(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]);
+ ///
+ /// let mut it = (1..5).tuple_combinations();
+ /// assert_eq!(Some((1, 2, 3)), it.next());
+ /// assert_eq!(Some((1, 2, 4)), it.next());
+ /// assert_eq!(Some((1, 3, 4)), it.next());
+ /// assert_eq!(Some((2, 3, 4)), it.next());
+ /// assert_eq!(None, it.next());
+ ///
+ /// // this requires a type hint
+ /// let it = (1..5).tuple_combinations::<(_, _, _)>();
+ /// itertools::assert_equal(it, vec![(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]);
+ ///
+ /// // you can also specify the complete type
+ /// use itertools::TupleCombinations;
+ /// use std::ops::Range;
+ ///
+ /// let it: TupleCombinations<Range<u32>, (u32, u32, u32)> = (1..5).tuple_combinations();
+ /// itertools::assert_equal(it, vec![(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]);
+ /// ```
+ fn tuple_combinations<T>(self) -> TupleCombinations<Self, T>
+ where Self: Sized + Clone,
+ Self::Item: Clone,
+ T: adaptors::HasCombination<Self>,
+ {
+ adaptors::tuple_combinations(self)
+ }
+
+ /// Return an iterator adaptor that iterates over the `k`-length combinations of
+ /// the elements from an iterator.
+ ///
+ /// Iterator element type is `Vec<Self::Item>`. The iterator produces a new Vec per iteration,
+ /// and clones the iterator elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = (1..5).combinations(3);
+ /// itertools::assert_equal(it, vec![
+ /// vec![1, 2, 3],
+ /// vec![1, 2, 4],
+ /// vec![1, 3, 4],
+ /// vec![2, 3, 4],
+ /// ]);
+ /// ```
+ ///
+ /// Note: Combinations does not take into account the equality of the iterated values.
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = vec![1, 2, 2].into_iter().combinations(2);
+ /// itertools::assert_equal(it, vec![
+ /// vec![1, 2], // Note: these are the same
+ /// vec![1, 2], // Note: these are the same
+ /// vec![2, 2],
+ /// ]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn combinations(self, k: usize) -> Combinations<Self>
+ where Self: Sized,
+ Self::Item: Clone
+ {
+ combinations::combinations(self, k)
+ }
+
+ /// Return an iterator that iterates over the `k`-length combinations of
+ /// the elements from an iterator, with replacement.
+ ///
+ /// Iterator element type is `Vec<Self::Item>`. The iterator produces a new Vec per iteration,
+ /// and clones the iterator elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = (1..4).combinations_with_replacement(2);
+ /// itertools::assert_equal(it, vec![
+ /// vec![1, 1],
+ /// vec![1, 2],
+ /// vec![1, 3],
+ /// vec![2, 2],
+ /// vec![2, 3],
+ /// vec![3, 3],
+ /// ]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn combinations_with_replacement(self, k: usize) -> CombinationsWithReplacement<Self>
+ where
+ Self: Sized,
+ Self::Item: Clone,
+ {
+ combinations_with_replacement::combinations_with_replacement(self, k)
+ }
+
+ /// Return an iterator adaptor that iterates over all k-permutations of the
+ /// elements from an iterator.
+ ///
+ /// Iterator element type is `Vec<Self::Item>` with length `k`. The iterator
+ /// produces a new Vec per iteration, and clones the iterator elements.
+ ///
+ /// If `k` is greater than the length of the input iterator, the resultant
+ /// iterator adaptor will be empty.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let perms = (5..8).permutations(2);
+ /// itertools::assert_equal(perms, vec![
+ /// vec![5, 6],
+ /// vec![5, 7],
+ /// vec![6, 5],
+ /// vec![6, 7],
+ /// vec![7, 5],
+ /// vec![7, 6],
+ /// ]);
+ /// ```
+ ///
+ /// Note: Permutations does not take into account the equality of the iterated values.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = vec![2, 2].into_iter().permutations(2);
+ /// itertools::assert_equal(it, vec![
+ /// vec![2, 2], // Note: these are the same
+ /// vec![2, 2], // Note: these are the same
+ /// ]);
+ /// ```
+ ///
+ /// Note: The source iterator is collected lazily, and will not be
+ /// re-iterated if the permutations adaptor is completed and re-iterated.
+ #[cfg(feature = "use_alloc")]
+ fn permutations(self, k: usize) -> Permutations<Self>
+ where Self: Sized,
+ Self::Item: Clone
+ {
+ permutations::permutations(self, k)
+ }
+
+ /// Return an iterator that iterates through the powerset of the elements from an
+ /// iterator.
+ ///
+ /// Iterator element type is `Vec<Self::Item>`. The iterator produces a new `Vec`
+ /// per iteration, and clones the iterator elements.
+ ///
+ /// The powerset of a set contains all subsets including the empty set and the full
+ /// input set. A powerset has length _2^n_ where _n_ is the length of the input
+ /// set.
+ ///
+ /// Each `Vec` produced by this iterator represents a subset of the elements
+ /// produced by the source iterator.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let sets = (1..4).powerset().collect::<Vec<_>>();
+ /// itertools::assert_equal(sets, vec![
+ /// vec![],
+ /// vec![1],
+ /// vec![2],
+ /// vec![3],
+ /// vec![1, 2],
+ /// vec![1, 3],
+ /// vec![2, 3],
+ /// vec![1, 2, 3],
+ /// ]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn powerset(self) -> Powerset<Self>
+ where Self: Sized,
+ Self::Item: Clone,
+ {
+ powerset::powerset(self)
+ }
+
+ /// Return an iterator adaptor that pads the sequence to a minimum length of
+ /// `min` by filling missing elements using a closure `f`.
+ ///
+ /// Iterator element type is `Self::Item`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let it = (0..5).pad_using(10, |i| 2*i);
+ /// itertools::assert_equal(it, vec![0, 1, 2, 3, 4, 10, 12, 14, 16, 18]);
+ ///
+ /// let it = (0..10).pad_using(5, |i| 2*i);
+ /// itertools::assert_equal(it, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+ ///
+ /// let it = (0..5).pad_using(10, |i| 2*i).rev();
+ /// itertools::assert_equal(it, vec![18, 16, 14, 12, 10, 4, 3, 2, 1, 0]);
+ /// ```
+ fn pad_using<F>(self, min: usize, f: F) -> PadUsing<Self, F>
+ where Self: Sized,
+ F: FnMut(usize) -> Self::Item
+ {
+ pad_tail::pad_using(self, min, f)
+ }
+
+ /// Return an iterator adaptor that combines each element with a `Position` to
+ /// ease special-case handling of the first or last elements.
+ ///
+ /// Iterator element type is
+ /// [`(Position, Self::Item)`](Position)
+ ///
+ /// ```
+ /// use itertools::{Itertools, Position};
+ ///
+ /// let it = (0..4).with_position();
+ /// itertools::assert_equal(it,
+ /// vec![(Position::First, 0),
+ /// (Position::Middle, 1),
+ /// (Position::Middle, 2),
+ /// (Position::Last, 3)]);
+ ///
+ /// let it = (0..1).with_position();
+ /// itertools::assert_equal(it, vec![(Position::Only, 0)]);
+ /// ```
+ fn with_position(self) -> WithPosition<Self>
+ where Self: Sized,
+ {
+ with_position::with_position(self)
+ }
+
+ /// Return an iterator adaptor that yields the indices of all elements
+ /// satisfying a predicate, counted from the start of the iterator.
+ ///
+ /// Equivalent to `iter.enumerate().filter(|(_, v)| predicate(v)).map(|(i, _)| i)`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![1, 2, 3, 3, 4, 6, 7, 9];
+ /// itertools::assert_equal(data.iter().positions(|v| v % 2 == 0), vec![1, 4, 5]);
+ ///
+ /// itertools::assert_equal(data.iter().positions(|v| v % 2 == 1).rev(), vec![7, 6, 3, 2, 0]);
+ /// ```
+ fn positions<P>(self, predicate: P) -> Positions<Self, P>
+ where Self: Sized,
+ P: FnMut(Self::Item) -> bool,
+ {
+ adaptors::positions(self, predicate)
+ }
+
+ /// Return an iterator adaptor that applies a mutating function
+ /// to each element before yielding it.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let input = vec![vec![1], vec![3, 2, 1]];
+ /// let it = input.into_iter().update(|mut v| v.push(0));
+ /// itertools::assert_equal(it, vec![vec![1, 0], vec![3, 2, 1, 0]]);
+ /// ```
+ fn update<F>(self, updater: F) -> Update<Self, F>
+ where Self: Sized,
+ F: FnMut(&mut Self::Item),
+ {
+ adaptors::update(self, updater)
+ }
+
+ // non-adaptor methods
+ /// Advances the iterator and returns the next items grouped in a tuple of
+ /// a specific size (up to 12).
+ ///
+ /// If there are enough elements to be grouped in a tuple, then the tuple is
+ /// returned inside `Some`, otherwise `None` is returned.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut iter = 1..5;
+ ///
+ /// assert_eq!(Some((1, 2)), iter.next_tuple());
+ /// ```
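+ ///
+ /// For illustration, a sketch of the case where too few items remain:
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut iter = 1..3;
+ /// // only two items are left, so a 3-tuple cannot be formed
+ /// assert_eq!(iter.next_tuple::<(_, _, _)>(), None);
+ /// ```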
+ fn next_tuple<T>(&mut self) -> Option<T>
+ where Self: Sized + Iterator<Item = T::Item>,
+ T: traits::HomogeneousTuple
+ {
+ T::collect_from_iter_no_buf(self)
+ }
+
+ /// Collects all items from the iterator into a tuple of a specific size
+ /// (up to 12).
+ ///
+ /// If the number of elements inside the iterator is **exactly** equal to
+ /// the tuple size, then the tuple is returned inside `Some`, otherwise
+ /// `None` is returned.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let iter = 1..3;
+ ///
+ /// if let Some((x, y)) = iter.collect_tuple() {
+ /// assert_eq!((x, y), (1, 2))
+ /// } else {
+ /// panic!("Expected two elements")
+ /// }
+ /// ```
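+ ///
+ /// For illustration, a sketch of the two mismatch cases, both yielding `None`:
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // too few elements
+ /// assert_eq!((1..2).collect_tuple::<(_, _)>(), None);
+ /// // too many elements
+ /// assert_eq!((1..4).collect_tuple::<(_, _)>(), None);
+ /// ```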
+ fn collect_tuple<T>(mut self) -> Option<T>
+ where Self: Sized + Iterator<Item = T::Item>,
+ T: traits::HomogeneousTuple
+ {
+ match self.next_tuple() {
+ elt @ Some(_) => match self.next() {
+ Some(_) => None,
+ None => elt,
+ },
+ _ => None
+ }
+ }
+
+
+ /// Find the position and value of the first element satisfying a predicate.
+ ///
+ /// The iterator is not advanced past the first element found.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let text = "Hα";
+ /// assert_eq!(text.chars().find_position(|ch| ch.is_lowercase()), Some((1, 'α')));
+ /// ```
+ fn find_position<P>(&mut self, mut pred: P) -> Option<(usize, Self::Item)>
+ where P: FnMut(&Self::Item) -> bool
+ {
+ for (index, elt) in self.enumerate() {
+ if pred(&elt) {
+ return Some((index, elt));
+ }
+ }
+ None
+ }
+ /// Find the value of the first element satisfying a predicate or return the last element, if any.
+ ///
+ /// The iterator is not advanced past the first element found.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let numbers = [1, 2, 3, 4];
+ /// assert_eq!(numbers.iter().find_or_last(|&&x| x > 5), Some(&4));
+ /// assert_eq!(numbers.iter().find_or_last(|&&x| x > 2), Some(&3));
+ /// assert_eq!(std::iter::empty::<i32>().find_or_last(|&x| x > 5), None);
+ /// ```
+ fn find_or_last<P>(mut self, mut predicate: P) -> Option<Self::Item>
+ where Self: Sized,
+ P: FnMut(&Self::Item) -> bool,
+ {
+ let mut prev = None;
+ self.find_map(|x| if predicate(&x) { Some(x) } else { prev = Some(x); None })
+ .or(prev)
+ }
+ /// Find the value of the first element satisfying a predicate or return the first element, if any.
+ ///
+ /// The iterator is not advanced past the first element found.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let numbers = [1, 2, 3, 4];
+ /// assert_eq!(numbers.iter().find_or_first(|&&x| x > 5), Some(&1));
+ /// assert_eq!(numbers.iter().find_or_first(|&&x| x > 2), Some(&3));
+ /// assert_eq!(std::iter::empty::<i32>().find_or_first(|&x| x > 5), None);
+ /// ```
+ fn find_or_first<P>(mut self, mut predicate: P) -> Option<Self::Item>
+ where Self: Sized,
+ P: FnMut(&Self::Item) -> bool,
+ {
+ let first = self.next()?;
+ Some(if predicate(&first) {
+ first
+ } else {
+ self.find(|x| predicate(x)).unwrap_or(first)
+ })
+ }
+ /// Returns `true` if the given item is present in this iterator.
+ ///
+ /// This method is short-circuiting. If the given item is present in this
+ /// iterator, this method will consume the iterator up-to-and-including
+ /// the item. If the given item is not present in this iterator, the
+ /// iterator will be exhausted.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// #[derive(PartialEq, Debug)]
+ /// enum Enum { A, B, C, D, E, }
+ ///
+ /// let mut iter = vec![Enum::A, Enum::B, Enum::C, Enum::D].into_iter();
+ ///
+ /// // search `iter` for `B`
+ /// assert_eq!(iter.contains(&Enum::B), true);
+ /// // `B` was found, so the iterator now rests at the item after `B` (i.e., `C`).
+ /// assert_eq!(iter.next(), Some(Enum::C));
+ ///
+ /// // search `iter` for `E`
+ /// assert_eq!(iter.contains(&Enum::E), false);
+ /// // `E` wasn't found, so `iter` is now exhausted
+ /// assert_eq!(iter.next(), None);
+ /// ```
+ fn contains<Q>(&mut self, query: &Q) -> bool
+ where
+ Self: Sized,
+ Self::Item: Borrow<Q>,
+ Q: PartialEq,
+ {
+ self.any(|x| x.borrow() == query)
+ }
+
+ /// Check whether all elements compare equal.
+ ///
+ /// Empty iterators are considered to have equal elements:
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![1, 1, 1, 2, 2, 3, 3, 3, 4, 5, 5];
+ /// assert!(!data.iter().all_equal());
+ /// assert!(data[0..3].iter().all_equal());
+ /// assert!(data[3..5].iter().all_equal());
+ /// assert!(data[5..8].iter().all_equal());
+ ///
+ /// let data : Option<usize> = None;
+ /// assert!(data.into_iter().all_equal());
+ /// ```
+ fn all_equal(&mut self) -> bool
+ where Self: Sized,
+ Self::Item: PartialEq,
+ {
+ match self.next() {
+ None => true,
+ Some(a) => self.all(|x| a == x),
+ }
+ }
+
+ /// If there are elements and they are all equal, return a single copy of that element.
+ /// If there are no elements, return an Error containing None.
+ /// If there are elements and they are not all equal, return a tuple containing the first
+ /// two non-equal elements found.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![1, 1, 1, 2, 2, 3, 3, 3, 4, 5, 5];
+ /// assert_eq!(data.iter().all_equal_value(), Err(Some((&1, &2))));
+ /// assert_eq!(data[0..3].iter().all_equal_value(), Ok(&1));
+ /// assert_eq!(data[3..5].iter().all_equal_value(), Ok(&2));
+ /// assert_eq!(data[5..8].iter().all_equal_value(), Ok(&3));
+ ///
+ /// let data : Option<usize> = None;
+ /// assert_eq!(data.into_iter().all_equal_value(), Err(None));
+ /// ```
+ fn all_equal_value(&mut self) -> Result<Self::Item, Option<(Self::Item, Self::Item)>>
+ where
+ Self: Sized,
+ Self::Item: PartialEq
+ {
+ let first = self.next().ok_or(None)?;
+ let other = self.find(|x| x != &first);
+ if let Some(other) = other {
+ Err(Some((first, other)))
+ } else {
+ Ok(first)
+ }
+ }
+
+ /// Check whether all elements are unique (non equal).
+ ///
+ /// Empty iterators are considered to have unique elements:
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![1, 2, 3, 4, 1, 5];
+ /// assert!(!data.iter().all_unique());
+ /// assert!(data[0..4].iter().all_unique());
+ /// assert!(data[1..6].iter().all_unique());
+ ///
+ /// let data : Option<usize> = None;
+ /// assert!(data.into_iter().all_unique());
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn all_unique(&mut self) -> bool
+ where Self: Sized,
+ Self::Item: Eq + Hash
+ {
+ let mut used = HashSet::new();
+ self.all(move |elt| used.insert(elt))
+ }
+
+ /// Consume the first `n` elements from the iterator eagerly,
+ /// and return the same iterator again.
+ ///
+ /// It works similarly to *.skip(* `n` *)* except it is eager and
+ /// preserves the iterator type.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut iter = "αβγ".chars().dropping(2);
+ /// itertools::assert_equal(iter, "γ".chars());
+ /// ```
+ ///
+ /// *Fusing notes: if the iterator is exhausted by dropping,
+ /// the result of calling `.next()` again depends on the iterator implementation.*
+ fn dropping(mut self, n: usize) -> Self
+ where Self: Sized
+ {
+ if n > 0 {
+ self.nth(n - 1);
+ }
+ self
+ }
+
+ /// Consume the last `n` elements from the iterator eagerly,
+ /// and return the same iterator again.
+ ///
+ /// This is only possible on double ended iterators. `n` may be
+ /// larger than the number of elements.
+ ///
+ /// Note: This method is eager, dropping the back elements immediately and
+ /// preserves the iterator type.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let init = vec![0, 3, 6, 9].into_iter().dropping_back(1);
+ /// itertools::assert_equal(init, vec![0, 3, 6]);
+ /// ```
+ fn dropping_back(mut self, n: usize) -> Self
+ where Self: Sized,
+ Self: DoubleEndedIterator
+ {
+ if n > 0 {
+ (&mut self).rev().nth(n - 1);
+ }
+ self
+ }
+
+ /// Run the closure `f` eagerly on each element of the iterator.
+ ///
+ /// Consumes the iterator until its end.
+ ///
+ /// ```
+ /// use std::sync::mpsc::channel;
+ /// use itertools::Itertools;
+ ///
+ /// let (tx, rx) = channel();
+ ///
+ /// // use .foreach() to apply a function to each value -- sending it
+ /// (0..5).map(|x| x * 2 + 1).foreach(|x| { tx.send(x).unwrap(); } );
+ ///
+ /// drop(tx);
+ ///
+ /// itertools::assert_equal(rx.iter(), vec![1, 3, 5, 7, 9]);
+ /// ```
+ #[deprecated(note="Use .for_each() instead", since="0.8.0")]
+ fn foreach<F>(self, f: F)
+ where F: FnMut(Self::Item),
+ Self: Sized,
+ {
+ self.for_each(f);
+ }
+
+ /// Combine all an iterator's elements into one element by using [`Extend`].
+ ///
+ /// This combinator will extend the first item with each of the rest of the
+ /// items of the iterator. If the iterator is empty, the default value of
+ /// `I::Item` is returned.
+ ///
+ /// ```rust
+ /// use itertools::Itertools;
+ ///
+ /// let input = vec![vec![1], vec![2, 3], vec![4, 5, 6]];
+ /// assert_eq!(input.into_iter().concat(),
+ /// vec![1, 2, 3, 4, 5, 6]);
+ /// ```
+ fn concat(self) -> Self::Item
+ where Self: Sized,
+ Self::Item: Extend<<<Self as Iterator>::Item as IntoIterator>::Item> + IntoIterator + Default
+ {
+ concat(self)
+ }
+
+ /// `.collect_vec()` is simply a type specialization of [`Iterator::collect`],
+ /// for convenience.
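+ ///
+ /// A minimal illustrative sketch:
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let doubled = (1..4).map(|x| x * 2).collect_vec();
+ /// assert_eq!(doubled, vec![2, 4, 6]);
+ /// ```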
+ #[cfg(feature = "use_alloc")]
+ fn collect_vec(self) -> Vec<Self::Item>
+ where Self: Sized
+ {
+ self.collect()
+ }
+
+ /// `.try_collect()` is a more convenient way of writing
+ /// `.collect::<Result<_, _>>()`
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use std::{fs, io};
+ /// use itertools::Itertools;
+ ///
+ /// fn process_dir_entries(entries: &[fs::DirEntry]) {
+ /// // ...
+ /// }
+ ///
+ /// fn do_stuff() -> std::io::Result<()> {
+ /// let entries: Vec<_> = fs::read_dir(".")?.try_collect()?;
+ /// process_dir_entries(&entries);
+ ///
+ /// Ok(())
+ /// }
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn try_collect<T, U, E>(self) -> Result<U, E>
+ where
+ Self: Sized + Iterator<Item = Result<T, E>>,
+ Result<U, E>: FromIterator<Result<T, E>>,
+ {
+ self.collect()
+ }
+
+ /// Assign to each reference in `self` from the `from` iterator,
+ /// stopping at the shortest of the two iterators.
+ ///
+ /// The `from` iterator is queried for its next element before the `self`
+ /// iterator, and if either is exhausted the method is done.
+ ///
+ /// Return the number of elements written.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut xs = [0; 4];
+ /// xs.iter_mut().set_from(1..);
+ /// assert_eq!(xs, [1, 2, 3, 4]);
+ /// ```
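+ ///
+ /// A sketch of the returned count when `self` is the shorter side
+ /// (illustrative values):
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut ys = [0; 3];
+ /// let written = ys.iter_mut().set_from(10..20);
+ /// assert_eq!(written, 3);
+ /// assert_eq!(ys, [10, 11, 12]);
+ /// ```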
+ #[inline]
+ fn set_from<'a, A: 'a, J>(&mut self, from: J) -> usize
+ where Self: Iterator<Item = &'a mut A>,
+ J: IntoIterator<Item = A>
+ {
+ let mut count = 0;
+ for elt in from {
+ match self.next() {
+ None => break,
+ Some(ptr) => *ptr = elt,
+ }
+ count += 1;
+ }
+ count
+ }
+
+ /// Combine all iterator elements into one String, separated by `sep`.
+ ///
+ /// Use the `Display` implementation of each element.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// assert_eq!(["a", "b", "c"].iter().join(", "), "a, b, c");
+ /// assert_eq!([1, 2, 3].iter().join(", "), "1, 2, 3");
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn join(&mut self, sep: &str) -> String
+ where Self::Item: std::fmt::Display
+ {
+ match self.next() {
+ None => String::new(),
+ Some(first_elt) => {
+ // estimate lower bound of capacity needed
+ let (lower, _) = self.size_hint();
+ let mut result = String::with_capacity(sep.len() * lower);
+ write!(&mut result, "{}", first_elt).unwrap();
+ self.for_each(|elt| {
+ result.push_str(sep);
+ write!(&mut result, "{}", elt).unwrap();
+ });
+ result
+ }
+ }
+ }
+
+ /// Format all iterator elements, separated by `sep`.
+ ///
+ /// All elements are formatted (any formatting trait)
+ /// with `sep` inserted between each element.
+ ///
+ /// **Panics** if the formatter helper is formatted more than once.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = [1.1, 2.71828, -3.];
+ /// assert_eq!(
+ /// format!("{:.2}", data.iter().format(", ")),
+ /// "1.10, 2.72, -3.00");
+ /// ```
+ fn format(self, sep: &str) -> Format<Self>
+ where Self: Sized,
+ {
+ format::new_format_default(self, sep)
+ }
+
+ /// Format all iterator elements, separated by `sep`.
+ ///
+ /// This is a customizable version of [`.format()`](Itertools::format).
+ ///
+ /// The supplied closure `format` is called once per iterator element,
+ /// with two arguments: the element and a callback that takes a
+ /// `&Display` value, i.e. any reference to type that implements `Display`.
+ ///
+ /// Using `&format_args!(...)` is the most versatile way to apply custom
+ /// element formatting. The callback can be called multiple times if needed.
+ ///
+ /// **Panics** if the formatter helper is formatted more than once.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = [1.1, 2.71828, -3.];
+ /// let data_formatter = data.iter().format_with(", ", |elt, f| f(&format_args!("{:.2}", elt)));
+ /// assert_eq!(format!("{}", data_formatter),
+ /// "1.10, 2.72, -3.00");
+ ///
+ /// // .format_with() is recursively composable
+ /// let matrix = [[1., 2., 3.],
+ /// [4., 5., 6.]];
+ /// let matrix_formatter = matrix.iter().format_with("\n", |row, f| {
+ /// f(&row.iter().format_with(", ", |elt, g| g(&elt)))
+ /// });
+ /// assert_eq!(format!("{}", matrix_formatter),
+ /// "1, 2, 3\n4, 5, 6");
+ ///
+ ///
+ /// ```
+ fn format_with<F>(self, sep: &str, format: F) -> FormatWith<Self, F>
+ where Self: Sized,
+ F: FnMut(Self::Item, &mut dyn FnMut(&dyn fmt::Display) -> fmt::Result) -> fmt::Result,
+ {
+ format::new_format(self, sep, format)
+ }
+
+ /// See [`.fold_ok()`](Itertools::fold_ok).
+ #[deprecated(note="Use .fold_ok() instead", since="0.10.0")]
+ fn fold_results<A, E, B, F>(&mut self, start: B, f: F) -> Result<B, E>
+ where Self: Iterator<Item = Result<A, E>>,
+ F: FnMut(B, A) -> B
+ {
+ self.fold_ok(start, f)
+ }
+
+ /// Fold `Result` values from an iterator.
+ ///
+ /// Only `Ok` values are folded. If no error is encountered, the folded
+ /// value is returned inside `Ok`. Otherwise, the operation terminates
+ /// and returns the first `Err` value it encounters. No iterator elements are
+ /// consumed after the first error.
+ ///
+ /// The first accumulator value is the `start` parameter.
+ /// Each iteration passes the accumulator value and the next value inside `Ok`
+ /// to the fold function `f` and its return value becomes the new accumulator value.
+ ///
+ /// For example the sequence *Ok(1), Ok(2), Ok(3)* will result in a
+ /// computation like this:
+ ///
+ /// ```ignore
+ /// let mut accum = start;
+ /// accum = f(accum, 1);
+ /// accum = f(accum, 2);
+ /// accum = f(accum, 3);
+ /// ```
+ ///
+ /// With a `start` value of 0 and an addition as folding function,
+ /// this effectively results in *((0 + 1) + 2) + 3*
+ ///
+ /// ```
+ /// use std::ops::Add;
+ /// use itertools::Itertools;
+ ///
+ /// let values = [1, 2, -2, -1, 2, 1];
+ /// assert_eq!(
+ /// values.iter()
+ /// .map(Ok::<_, ()>)
+ /// .fold_ok(0, Add::add),
+ /// Ok(3)
+ /// );
+ /// assert!(
+ /// values.iter()
+ /// .map(|&x| if x >= 0 { Ok(x) } else { Err("Negative number") })
+ /// .fold_ok(0, Add::add)
+ /// .is_err()
+ /// );
+ /// ```
+ fn fold_ok<A, E, B, F>(&mut self, mut start: B, mut f: F) -> Result<B, E>
+ where Self: Iterator<Item = Result<A, E>>,
+ F: FnMut(B, A) -> B
+ {
+ for elt in self {
+ match elt {
+ Ok(v) => start = f(start, v),
+ Err(u) => return Err(u),
+ }
+ }
+ Ok(start)
+ }
+
+ /// Fold `Option` values from an iterator.
+ ///
+ /// Only `Some` values are folded. If no `None` is encountered, the folded
+ /// value is returned inside `Some`. Otherwise, the operation terminates
+ /// and returns `None`. No iterator elements are consumed after the `None`.
+ ///
+ /// This is the `Option` equivalent to [`fold_ok`](Itertools::fold_ok).
+ ///
+ /// ```
+ /// use std::ops::Add;
+ /// use itertools::Itertools;
+ ///
+ /// let mut values = vec![Some(1), Some(2), Some(-2)].into_iter();
+ /// assert_eq!(values.fold_options(5, Add::add), Some(5 + 1 + 2 - 2));
+ ///
+ /// let mut more_values = vec![Some(2), None, Some(0)].into_iter();
+ /// assert!(more_values.fold_options(0, Add::add).is_none());
+ /// assert_eq!(more_values.next().unwrap(), Some(0));
+ /// ```
+ fn fold_options<A, B, F>(&mut self, mut start: B, mut f: F) -> Option<B>
+ where Self: Iterator<Item = Option<A>>,
+ F: FnMut(B, A) -> B
+ {
+ for elt in self {
+ match elt {
+ Some(v) => start = f(start, v),
+ None => return None,
+ }
+ }
+ Some(start)
+ }
+
+ /// Accumulator of the elements in the iterator.
+ ///
+ /// Like `.fold()`, without a base case. If the iterator is
+ /// empty, return `None`. With just one element, return it.
+ /// Otherwise elements are accumulated in sequence using the closure `f`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// assert_eq!((0..10).fold1(|x, y| x + y).unwrap_or(0), 45);
+ /// assert_eq!((0..0).fold1(|x, y| x * y), None);
+ /// ```
+ #[deprecated(since = "0.10.2", note = "Use `Iterator::reduce` instead")]
+ fn fold1<F>(mut self, f: F) -> Option<Self::Item>
+ where F: FnMut(Self::Item, Self::Item) -> Self::Item,
+ Self: Sized,
+ {
+ self.next().map(move |x| self.fold(x, f))
+ }
+
+ /// Accumulate the elements in the iterator in a tree-like manner.
+ ///
+ /// You can think of it as, while there's more than one item, repeatedly
+ /// combining adjacent items. It does so in bottom-up-merge-sort order,
+ /// however, so that it needs only logarithmic stack space.
+ ///
+ /// This produces a call tree like the following (where the calls under
+ /// an item are done after reading that item):
+ ///
+ /// ```text
+ /// 1 2 3 4 5 6 7
+ /// │ │ │ │ │ │ │
+ /// └─f └─f └─f │
+ /// │ │ │ │
+ /// └───f └─f
+ /// │ │
+ /// └─────f
+ /// ```
+ ///
+ /// Which, for non-associative functions, will typically produce a different
+ /// result than the linear call tree used by [`Iterator::reduce`]:
+ ///
+ /// ```text
+ /// 1 2 3 4 5 6 7
+ /// │ │ │ │ │ │ │
+ /// └─f─f─f─f─f─f
+ /// ```
+ ///
+ /// If `f` is associative, prefer the normal [`Iterator::reduce`] instead.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // The same tree as above
+ /// let num_strings = (1..8).map(|x| x.to_string());
+ /// assert_eq!(num_strings.tree_fold1(|x, y| format!("f({}, {})", x, y)),
+ /// Some(String::from("f(f(f(1, 2), f(3, 4)), f(f(5, 6), 7))")));
+ ///
+ /// // Like fold1, an empty iterator produces None
+ /// assert_eq!((0..0).tree_fold1(|x, y| x * y), None);
+ ///
+ /// // tree_fold1 matches fold1 for associative operations...
+ /// assert_eq!((0..10).tree_fold1(|x, y| x + y),
+ /// (0..10).fold1(|x, y| x + y));
+ /// // ...but not for non-associative ones
+ /// assert_ne!((0..10).tree_fold1(|x, y| x - y),
+ /// (0..10).fold1(|x, y| x - y));
+ /// ```
+ fn tree_fold1<F>(mut self, mut f: F) -> Option<Self::Item>
+ where F: FnMut(Self::Item, Self::Item) -> Self::Item,
+ Self: Sized,
+ {
+ type State<T> = Result<T, Option<T>>;
+
+ fn inner0<T, II, FF>(it: &mut II, f: &mut FF) -> State<T>
+ where
+ II: Iterator<Item = T>,
+ FF: FnMut(T, T) -> T
+ {
+ // This function could be replaced with `it.next().ok_or(None)`,
+ // but half the useful tree_fold1 work is combining adjacent items,
+ // so put that in a form that LLVM is more likely to optimize well.
+
+ let a =
+ if let Some(v) = it.next() { v }
+ else { return Err(None) };
+ let b =
+ if let Some(v) = it.next() { v }
+ else { return Err(Some(a)) };
+ Ok(f(a, b))
+ }
+
+ fn inner<T, II, FF>(stop: usize, it: &mut II, f: &mut FF) -> State<T>
+ where
+ II: Iterator<Item = T>,
+ FF: FnMut(T, T) -> T
+ {
+ let mut x = inner0(it, f)?;
+ for height in 0..stop {
+ // Try to get another tree the same size with which to combine it,
+ // creating a new tree that's twice as big for next time around.
+ let next =
+ if height == 0 {
+ inner0(it, f)
+ } else {
+ inner(height, it, f)
+ };
+ match next {
+ Ok(y) => x = f(x, y),
+
+ // If we ran out of items, combine whatever we did manage
+ // to get. It's better combined with the current value
+ // than something in a parent frame, because the tree in
+ // the parent is always as least as big as this one.
+ Err(None) => return Err(Some(x)),
+ Err(Some(y)) => return Err(Some(f(x, y))),
+ }
+ }
+ Ok(x)
+ }
+
+ match inner(usize::max_value(), &mut self, &mut f) {
+ Err(x) => x,
+ _ => unreachable!(),
+ }
+ }
+
+ /// An iterator method that applies a function, producing a single, final value.
+ ///
+ /// `fold_while()` is basically equivalent to [`Iterator::fold`] but with additional support for
+ /// early exit via short-circuiting.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::FoldWhile::{Continue, Done};
+ ///
+ /// let numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+ ///
+ /// let mut result = 0;
+ ///
+ /// // for loop:
+ /// for i in &numbers {
+ /// if *i > 5 {
+ /// break;
+ /// }
+ /// result = result + i;
+ /// }
+ ///
+ /// // fold:
+ /// let result2 = numbers.iter().fold(0, |acc, x| {
+ /// if *x > 5 { acc } else { acc + x }
+ /// });
+ ///
+ /// // fold_while:
+ /// let result3 = numbers.iter().fold_while(0, |acc, x| {
+ /// if *x > 5 { Done(acc) } else { Continue(acc + x) }
+ /// }).into_inner();
+ ///
+ /// // they're the same
+ /// assert_eq!(result, result2);
+ /// assert_eq!(result2, result3);
+ /// ```
+ ///
+ /// The big difference between the computations of `result2` and `result3` is that while
+ /// `fold()` called the provided closure for every item of the callee iterator,
+ /// `fold_while()` actually stopped iterating as soon as it encountered `Fold::Done(_)`.
+ fn fold_while<B, F>(&mut self, init: B, mut f: F) -> FoldWhile<B>
+ where Self: Sized,
+ F: FnMut(B, Self::Item) -> FoldWhile<B>
+ {
+ use Result::{
+ Ok as Continue,
+ Err as Break,
+ };
+
+ let result = self.try_fold(init, #[inline(always)] |acc, v|
+ match f(acc, v) {
+ FoldWhile::Continue(acc) => Continue(acc),
+ FoldWhile::Done(acc) => Break(acc),
+ }
+ );
+
+ match result {
+ Continue(acc) => FoldWhile::Continue(acc),
+ Break(acc) => FoldWhile::Done(acc),
+ }
+ }
+
+ /// Iterate over the entire iterator and add all the elements.
+ ///
+ /// An empty iterator returns `None`, otherwise `Some(sum)`.
+ ///
+ /// # Panics
+ ///
+ /// When calling `sum1()` and a primitive integer type is being returned, this
+ /// method will panic if the computation overflows and debug assertions are
+ /// enabled.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let empty_sum = (1..1).sum1::<i32>();
+ /// assert_eq!(empty_sum, None);
+ ///
+ /// let nonempty_sum = (1..11).sum1::<i32>();
+ /// assert_eq!(nonempty_sum, Some(55));
+ /// ```
+ fn sum1<S>(mut self) -> Option<S>
+ where Self: Sized,
+ S: std::iter::Sum<Self::Item>,
+ {
+ self.next()
+ .map(|first| once(first).chain(self).sum())
+ }
+
+ /// Iterate over the entire iterator and multiply all the elements.
+ ///
+ /// An empty iterator returns `None`, otherwise `Some(product)`.
+ ///
+ /// # Panics
+ ///
+ /// When calling `product1()` and a primitive integer type is being returned, this
+ /// method will panic if the computation overflows and debug assertions are
+ /// enabled.
+ ///
+ /// # Examples
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let empty_product = (1..1).product1::<i32>();
+ /// assert_eq!(empty_product, None);
+ ///
+ /// let nonempty_product = (1..11).product1::<i32>();
+ /// assert_eq!(nonempty_product, Some(3628800));
+ /// ```
+ fn product1<P>(mut self) -> Option<P>
+ where Self: Sized,
+ P: std::iter::Product<Self::Item>,
+ {
+ self.next()
+ .map(|first| once(first).chain(self).product())
+ }
+
+ /// Sort all iterator elements into a new iterator in ascending order.
+ ///
+ /// **Note:** This consumes the entire iterator, uses the
+ /// [`slice::sort_unstable`] method and returns the result as a new
+ /// iterator that owns its elements.
+ ///
+ /// This sort is unstable (i.e., may reorder equal elements).
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sort the letters of the text in ascending order
+ /// let text = "bdacfe";
+ /// itertools::assert_equal(text.chars().sorted_unstable(),
+ /// "abcdef".chars());
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn sorted_unstable(self) -> VecIntoIter<Self::Item>
+ where Self: Sized,
+ Self::Item: Ord
+ {
+ // Use .sort_unstable() directly since it is not quite identical with
+ // .sort_by(Ord::cmp)
+ let mut v = Vec::from_iter(self);
+ v.sort_unstable();
+ v.into_iter()
+ }
+
+ /// Sort all iterator elements into a new iterator in ascending order.
+ ///
+ /// **Note:** This consumes the entire iterator, uses the
+ /// [`slice::sort_unstable_by`] method and returns the result as a new
+ /// iterator that owns its elements.
+ ///
+ /// This sort is unstable (i.e., may reorder equal elements).
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sort people in descending order by age
+ /// let people = vec![("Jane", 20), ("John", 18), ("Jill", 30), ("Jack", 27)];
+ ///
+ /// let oldest_people_first = people
+ /// .into_iter()
+ /// .sorted_unstable_by(|a, b| Ord::cmp(&b.1, &a.1))
+ /// .map(|(person, _age)| person);
+ ///
+ /// itertools::assert_equal(oldest_people_first,
+ /// vec!["Jill", "Jack", "Jane", "John"]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn sorted_unstable_by<F>(self, cmp: F) -> VecIntoIter<Self::Item>
+ where Self: Sized,
+ F: FnMut(&Self::Item, &Self::Item) -> Ordering,
+ {
+ let mut v = Vec::from_iter(self);
+ v.sort_unstable_by(cmp);
+ v.into_iter()
+ }
+
+ /// Sort all iterator elements into a new iterator in ascending order.
+ ///
+ /// **Note:** This consumes the entire iterator, uses the
+ /// [`slice::sort_unstable_by_key`] method and returns the result as a new
+ /// iterator that owns its elements.
+ ///
+ /// This sort is unstable (i.e., may reorder equal elements).
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sort people in descending order by age
+ /// let people = vec![("Jane", 20), ("John", 18), ("Jill", 30), ("Jack", 27)];
+ ///
+ /// let oldest_people_first = people
+ /// .into_iter()
+ /// .sorted_unstable_by_key(|x| -x.1)
+ /// .map(|(person, _age)| person);
+ ///
+ /// itertools::assert_equal(oldest_people_first,
+ /// vec!["Jill", "Jack", "Jane", "John"]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn sorted_unstable_by_key<K, F>(self, f: F) -> VecIntoIter<Self::Item>
+ where Self: Sized,
+ K: Ord,
+ F: FnMut(&Self::Item) -> K,
+ {
+ let mut v = Vec::from_iter(self);
+ v.sort_unstable_by_key(f);
+ v.into_iter()
+ }
+
+ /// Sort all iterator elements into a new iterator in ascending order.
+ ///
+ /// **Note:** This consumes the entire iterator, uses the
+ /// [`slice::sort`] method and returns the result as a new
+ /// iterator that owns its elements.
+ ///
+ /// This sort is stable (i.e., does not reorder equal elements).
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sort the letters of the text in ascending order
+ /// let text = "bdacfe";
+ /// itertools::assert_equal(text.chars().sorted(),
+ /// "abcdef".chars());
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn sorted(self) -> VecIntoIter<Self::Item>
+ where Self: Sized,
+ Self::Item: Ord
+ {
+ // Use .sort() directly since it is not quite identical with
+ // .sort_by(Ord::cmp)
+ let mut v = Vec::from_iter(self);
+ v.sort();
+ v.into_iter()
+ }
+
+ /// Sort all iterator elements into a new iterator in ascending order.
+ ///
+ /// **Note:** This consumes the entire iterator, uses the
+ /// [`slice::sort_by`] method and returns the result as a new
+ /// iterator that owns its elements.
+ ///
+ /// This sort is stable (i.e., does not reorder equal elements).
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sort people in descending order by age
+ /// let people = vec![("Jane", 20), ("John", 18), ("Jill", 30), ("Jack", 30)];
+ ///
+ /// let oldest_people_first = people
+ /// .into_iter()
+ /// .sorted_by(|a, b| Ord::cmp(&b.1, &a.1))
+ /// .map(|(person, _age)| person);
+ ///
+ /// itertools::assert_equal(oldest_people_first,
+ /// vec!["Jill", "Jack", "Jane", "John"]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn sorted_by<F>(self, cmp: F) -> VecIntoIter<Self::Item>
+ where Self: Sized,
+ F: FnMut(&Self::Item, &Self::Item) -> Ordering,
+ {
+ let mut v = Vec::from_iter(self);
+ v.sort_by(cmp);
+ v.into_iter()
+ }
+
+ /// Sort all iterator elements into a new iterator in ascending order.
+ ///
+ /// **Note:** This consumes the entire iterator, uses the
+ /// [`slice::sort_by_key`] method and returns the result as a new
+ /// iterator that owns its elements.
+ ///
+ /// This sort is stable (i.e., does not reorder equal elements).
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sort people in descending order by age
+ /// let people = vec![("Jane", 20), ("John", 18), ("Jill", 30), ("Jack", 30)];
+ ///
+ /// let oldest_people_first = people
+ /// .into_iter()
+ /// .sorted_by_key(|x| -x.1)
+ /// .map(|(person, _age)| person);
+ ///
+ /// itertools::assert_equal(oldest_people_first,
+ /// vec!["Jill", "Jack", "Jane", "John"]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn sorted_by_key<K, F>(self, f: F) -> VecIntoIter<Self::Item>
+ where Self: Sized,
+ K: Ord,
+ F: FnMut(&Self::Item) -> K,
+ {
+ let mut v = Vec::from_iter(self);
+ v.sort_by_key(f);
+ v.into_iter()
+ }
+
+ /// Sort all iterator elements into a new iterator in ascending order. The key function is
+ /// called exactly once per key.
+ ///
+ /// **Note:** This consumes the entire iterator, uses the
+ /// [`slice::sort_by_cached_key`] method and returns the result as a new
+ /// iterator that owns its elements.
+ ///
+ /// This sort is stable (i.e., does not reorder equal elements).
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // sort people in descending order by age
+ /// let people = vec![("Jane", 20), ("John", 18), ("Jill", 30), ("Jack", 30)];
+ ///
+ /// let oldest_people_first = people
+ /// .into_iter()
+ /// .sorted_by_cached_key(|x| -x.1)
+ /// .map(|(person, _age)| person);
+ ///
+ /// itertools::assert_equal(oldest_people_first,
+ /// vec!["Jill", "Jack", "Jane", "John"]);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn sorted_by_cached_key<K, F>(self, f: F) -> VecIntoIter<Self::Item>
+ where
+ Self: Sized,
+ K: Ord,
+ F: FnMut(&Self::Item) -> K,
+ {
+ let mut v = Vec::from_iter(self);
+ v.sort_by_cached_key(f);
+ v.into_iter()
+ }
+
+ /// Sort the k smallest elements into a new iterator, in ascending order.
+ ///
+ /// **Note:** This consumes the entire iterator, and returns the result
+ /// as a new iterator that owns its elements. If the input contains
+ /// less than k elements, the result is equivalent to `self.sorted()`.
+ ///
+ /// This is guaranteed to use `k * sizeof(Self::Item) + O(1)` memory
+ /// and `O(n log k)` time, with `n` the number of elements in the input.
+ ///
+ /// The sorted iterator, if directly collected to a `Vec`, is converted
+ /// without any extra copying or allocation cost.
+ ///
+ /// **Note:** This is functionally-equivalent to `self.sorted().take(k)`
+ /// but much more efficient.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// // A random permutation of 0..15
+ /// let numbers = vec![6, 9, 1, 14, 0, 4, 8, 7, 11, 2, 10, 3, 13, 12, 5];
+ ///
+ /// let five_smallest = numbers
+ /// .into_iter()
+ /// .k_smallest(5);
+ ///
+ /// itertools::assert_equal(five_smallest, 0..5);
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn k_smallest(self, k: usize) -> VecIntoIter<Self::Item>
+ where Self: Sized,
+ Self::Item: Ord
+ {
+ crate::k_smallest::k_smallest(self, k)
+ .into_sorted_vec()
+ .into_iter()
+ }
+
+ /// Collect all iterator elements into one of two
+ /// partitions. Unlike [`Iterator::partition`], each partition may
+ /// have a distinct type.
+ ///
+ /// ```
+ /// use itertools::{Itertools, Either};
+ ///
+ /// let successes_and_failures = vec![Ok(1), Err(false), Err(true), Ok(2)];
+ ///
+ /// let (successes, failures): (Vec<_>, Vec<_>) = successes_and_failures
+ /// .into_iter()
+ /// .partition_map(|r| {
+ /// match r {
+ /// Ok(v) => Either::Left(v),
+ /// Err(v) => Either::Right(v),
+ /// }
+ /// });
+ ///
+ /// assert_eq!(successes, [1, 2]);
+ /// assert_eq!(failures, [false, true]);
+ /// ```
+ fn partition_map<A, B, F, L, R>(self, mut predicate: F) -> (A, B)
+ where Self: Sized,
+ F: FnMut(Self::Item) -> Either<L, R>,
+ A: Default + Extend<L>,
+ B: Default + Extend<R>,
+ {
+ let mut left = A::default();
+ let mut right = B::default();
+
+ self.for_each(|val| match predicate(val) {
+ Either::Left(v) => left.extend(Some(v)),
+ Either::Right(v) => right.extend(Some(v)),
+ });
+
+ (left, right)
+ }
+
+ /// Partition a sequence of `Result`s into one list of all the `Ok` elements
+ /// and another list of all the `Err` elements.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let successes_and_failures = vec![Ok(1), Err(false), Err(true), Ok(2)];
+ ///
+ /// let (successes, failures): (Vec<_>, Vec<_>) = successes_and_failures
+ /// .into_iter()
+ /// .partition_result();
+ ///
+ /// assert_eq!(successes, [1, 2]);
+ /// assert_eq!(failures, [false, true]);
+ /// ```
+ fn partition_result<A, B, T, E>(self) -> (A, B)
+ where
+ Self: Iterator<Item = Result<T, E>> + Sized,
+ A: Default + Extend<T>,
+ B: Default + Extend<E>,
+ {
+ self.partition_map(|r| match r {
+ Ok(v) => Either::Left(v),
+ Err(v) => Either::Right(v),
+ })
+ }
+
+ /// Return a `HashMap` of keys mapped to `Vec`s of values. Keys and values
+ /// are taken from `(Key, Value)` tuple pairs yielded by the input iterator.
+ ///
+ /// Essentially a shorthand for `.into_grouping_map().collect::<Vec<_>>()`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let data = vec![(0, 10), (2, 12), (3, 13), (0, 20), (3, 33), (2, 42)];
+ /// let lookup = data.into_iter().into_group_map();
+ ///
+ /// assert_eq!(lookup[&0], vec![10, 20]);
+ /// assert_eq!(lookup.get(&1), None);
+ /// assert_eq!(lookup[&2], vec![12, 42]);
+ /// assert_eq!(lookup[&3], vec![13, 33]);
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn into_group_map<K, V>(self) -> HashMap<K, Vec<V>>
+ where Self: Iterator<Item=(K, V)> + Sized,
+ K: Hash + Eq,
+ {
+ group_map::into_group_map(self)
+ }
+
+ /// Return an `Iterator` on a `HashMap`. Keys mapped to `Vec`s of values. The key is specified
+ /// in the closure.
+ ///
+ /// Essentially a shorthand for `.into_grouping_map_by(f).collect::<Vec<_>>()`.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use std::collections::HashMap;
+ ///
+ /// let data = vec![(0, 10), (2, 12), (3, 13), (0, 20), (3, 33), (2, 42)];
+ /// let lookup: HashMap<u32,Vec<(u32, u32)>> =
+ /// data.clone().into_iter().into_group_map_by(|a| a.0);
+ ///
+ /// assert_eq!(lookup[&0], vec![(0,10),(0,20)]);
+ /// assert_eq!(lookup.get(&1), None);
+ /// assert_eq!(lookup[&2], vec![(2,12), (2,42)]);
+ /// assert_eq!(lookup[&3], vec![(3,13), (3,33)]);
+ ///
+ /// assert_eq!(
+ /// data.into_iter()
+ /// .into_group_map_by(|x| x.0)
+ /// .into_iter()
+ /// .map(|(key, values)| (key, values.into_iter().fold(0,|acc, (_,v)| acc + v )))
+ /// .collect::<HashMap<u32,u32>>()[&0],
+ /// 30,
+ /// );
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn into_group_map_by<K, V, F>(self, f: F) -> HashMap<K, Vec<V>>
+ where
+ Self: Iterator<Item=V> + Sized,
+ K: Hash + Eq,
+ F: Fn(&V) -> K,
+ {
+ group_map::into_group_map_by(self, f)
+ }
+
+ /// Constructs a `GroupingMap` to be used later with one of the efficient
+ /// group-and-fold operations it allows you to perform.
+ ///
+ /// The input iterator must yield items in the form of `(K, V)` where the
+ /// value of type `K` will be used as the key to identify the groups and the
+ /// value of type `V` as the value for the folding operation.
+ ///
+ /// See [`GroupingMap`] for more information
+ /// on what operations are available.
+ #[cfg(feature = "use_std")]
+ fn into_grouping_map<K, V>(self) -> GroupingMap<Self>
+ where Self: Iterator<Item=(K, V)> + Sized,
+ K: Hash + Eq,
+ {
+ grouping_map::new(self)
+ }
+
+ /// Constructs a `GroupingMap` to be used later with one of the efficient
+ /// group-and-fold operations it allows you to perform.
+ ///
+ /// The values from this iterator will be used as values for the folding operation
+ /// while the keys will be obtained from the values by calling `key_mapper`.
+ ///
+ /// See [`GroupingMap`] for more information
+ /// on what operations are available.
+ #[cfg(feature = "use_std")]
+ fn into_grouping_map_by<K, V, F>(self, key_mapper: F) -> GroupingMapBy<Self, F>
+ where Self: Iterator<Item=V> + Sized,
+ K: Hash + Eq,
+ F: FnMut(&V) -> K
+ {
+ grouping_map::new(grouping_map::MapForGrouping::new(self, key_mapper))
+ }
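A short illustrative sketch of the two constructors above; `sum` and `max` are existing `GroupingMap` operations, and the full set is documented on `GroupingMap` itself:

```
use itertools::Itertools;

// Group (key, value) pairs and sum each group without building Vecs first.
let data = vec![(0, 10), (2, 12), (3, 13), (0, 20), (3, 33), (2, 42)];
let sums = data.into_iter().into_grouping_map().sum();
assert_eq!(sums[&0], 30);

// Derive the key from the value itself (here: parity).
let max_by_parity = (1..=10).into_grouping_map_by(|n| n % 2).max();
assert_eq!(max_by_parity[&0], 10);
assert_eq!(max_by_parity[&1], 9);
```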
+
+ /// Return all minimum elements of an iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().min_set(), Vec::<&i32>::new());
+ ///
+ /// let a = [1];
+ /// assert_eq!(a.iter().min_set(), vec![&1]);
+ ///
+ /// let a = [1, 2, 3, 4, 5];
+ /// assert_eq!(a.iter().min_set(), vec![&1]);
+ ///
+ /// let a = [1, 1, 1, 1];
+ /// assert_eq!(a.iter().min_set(), vec![&1, &1, &1, &1]);
+ /// ```
+ ///
+ /// The elements can be floats but no particular result is guaranteed
+ /// if an element is NaN.
+ #[cfg(feature = "use_std")]
+ fn min_set(self) -> Vec<Self::Item>
+ where Self: Sized, Self::Item: Ord
+ {
+ extrema_set::min_set_impl(self, |_| (), |x, y, _, _| x.cmp(y))
+ }
+
+ /// Return all minimum elements of an iterator, as determined by
+ /// the specified function.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::cmp::Ordering;
+ /// use itertools::Itertools;
+ ///
+ /// let a: [(i32, i32); 0] = [];
+ /// assert_eq!(a.iter().min_set_by(|_, _| Ordering::Equal), Vec::<&(i32, i32)>::new());
+ ///
+ /// let a = [(1, 2)];
+ /// assert_eq!(a.iter().min_set_by(|&&(k1,_), &&(k2, _)| k1.cmp(&k2)), vec![&(1, 2)]);
+ ///
+ /// let a = [(1, 2), (2, 2), (3, 9), (4, 8), (5, 9)];
+ /// assert_eq!(a.iter().min_set_by(|&&(_,k1), &&(_,k2)| k1.cmp(&k2)), vec![&(1, 2), &(2, 2)]);
+ ///
+ /// let a = [(1, 2), (1, 3), (1, 4), (1, 5)];
+ /// assert_eq!(a.iter().min_set_by(|&&(k1,_), &&(k2, _)| k1.cmp(&k2)), vec![&(1, 2), &(1, 3), &(1, 4), &(1, 5)]);
+ /// ```
+ ///
+ /// The elements can be floats but no particular result is guaranteed
+ /// if an element is NaN.
+ #[cfg(feature = "use_std")]
+ fn min_set_by<F>(self, mut compare: F) -> Vec<Self::Item>
+ where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering
+ {
+ extrema_set::min_set_impl(
+ self,
+ |_| (),
+ |x, y, _, _| compare(x, y)
+ )
+ }
+
+ /// Return all minimum elements of an iterator, as determined by
+ /// the specified function.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [(i32, i32); 0] = [];
+ /// assert_eq!(a.iter().min_set_by_key(|_| ()), Vec::<&(i32, i32)>::new());
+ ///
+ /// let a = [(1, 2)];
+ /// assert_eq!(a.iter().min_set_by_key(|&&(k,_)| k), vec![&(1, 2)]);
+ ///
+ /// let a = [(1, 2), (2, 2), (3, 9), (4, 8), (5, 9)];
+ /// assert_eq!(a.iter().min_set_by_key(|&&(_, k)| k), vec![&(1, 2), &(2, 2)]);
+ ///
+ /// let a = [(1, 2), (1, 3), (1, 4), (1, 5)];
+ /// assert_eq!(a.iter().min_set_by_key(|&&(k, _)| k), vec![&(1, 2), &(1, 3), &(1, 4), &(1, 5)]);
+ /// ```
+ ///
+ /// The elements can be floats but no particular result is guaranteed
+ /// if an element is NaN.
+ #[cfg(feature = "use_std")]
+ fn min_set_by_key<K, F>(self, key: F) -> Vec<Self::Item>
+ where Self: Sized, K: Ord, F: FnMut(&Self::Item) -> K
+ {
+ extrema_set::min_set_impl(self, key, |_, _, kx, ky| kx.cmp(ky))
+ }
+
+ /// Return all maximum elements of an iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().max_set(), Vec::<&i32>::new());
+ ///
+ /// let a = [1];
+ /// assert_eq!(a.iter().max_set(), vec![&1]);
+ ///
+ /// let a = [1, 2, 3, 4, 5];
+ /// assert_eq!(a.iter().max_set(), vec![&5]);
+ ///
+ /// let a = [1, 1, 1, 1];
+ /// assert_eq!(a.iter().max_set(), vec![&1, &1, &1, &1]);
+ /// ```
+ ///
+ /// The elements can be floats but no particular result is guaranteed
+ /// if an element is NaN.
+ #[cfg(feature = "use_std")]
+ fn max_set(self) -> Vec<Self::Item>
+ where Self: Sized, Self::Item: Ord
+ {
+ extrema_set::max_set_impl(self, |_| (), |x, y, _, _| x.cmp(y))
+ }
+
+ /// Return all maximum elements of an iterator, as determined by
+ /// the specified function.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::cmp::Ordering;
+ /// use itertools::Itertools;
+ ///
+ /// let a: [(i32, i32); 0] = [];
+ /// assert_eq!(a.iter().max_set_by(|_, _| Ordering::Equal), Vec::<&(i32, i32)>::new());
+ ///
+ /// let a = [(1, 2)];
+ /// assert_eq!(a.iter().max_set_by(|&&(k1,_), &&(k2, _)| k1.cmp(&k2)), vec![&(1, 2)]);
+ ///
+ /// let a = [(1, 2), (2, 2), (3, 9), (4, 8), (5, 9)];
+ /// assert_eq!(a.iter().max_set_by(|&&(_,k1), &&(_,k2)| k1.cmp(&k2)), vec![&(3, 9), &(5, 9)]);
+ ///
+ /// let a = [(1, 2), (1, 3), (1, 4), (1, 5)];
+ /// assert_eq!(a.iter().max_set_by(|&&(k1,_), &&(k2, _)| k1.cmp(&k2)), vec![&(1, 2), &(1, 3), &(1, 4), &(1, 5)]);
+ /// ```
+ ///
+ /// The elements can be floats but no particular result is guaranteed
+ /// if an element is NaN.
+ #[cfg(feature = "use_std")]
+ fn max_set_by<F>(self, mut compare: F) -> Vec<Self::Item>
+ where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering
+ {
+ extrema_set::max_set_impl(
+ self,
+ |_| (),
+ |x, y, _, _| compare(x, y)
+ )
+ }
+
+ /// Return all maximum elements of an iterator, as determined by
+ /// the specified function.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [(i32, i32); 0] = [];
+ /// assert_eq!(a.iter().max_set_by_key(|_| ()), Vec::<&(i32, i32)>::new());
+ ///
+ /// let a = [(1, 2)];
+ /// assert_eq!(a.iter().max_set_by_key(|&&(k,_)| k), vec![&(1, 2)]);
+ ///
+ /// let a = [(1, 2), (2, 2), (3, 9), (4, 8), (5, 9)];
+ /// assert_eq!(a.iter().max_set_by_key(|&&(_, k)| k), vec![&(3, 9), &(5, 9)]);
+ ///
+ /// let a = [(1, 2), (1, 3), (1, 4), (1, 5)];
+ /// assert_eq!(a.iter().max_set_by_key(|&&(k, _)| k), vec![&(1, 2), &(1, 3), &(1, 4), &(1, 5)]);
+ /// ```
+ ///
+ /// The elements can be floats but no particular result is guaranteed
+ /// if an element is NaN.
+ #[cfg(feature = "use_std")]
+ fn max_set_by_key<K, F>(self, key: F) -> Vec<Self::Item>
+ where Self: Sized, K: Ord, F: FnMut(&Self::Item) -> K
+ {
+ extrema_set::max_set_impl(self, key, |_, _, kx, ky| kx.cmp(ky))
+ }
+
+ /// Return the minimum and maximum elements in the iterator.
+ ///
+ /// The return type `MinMaxResult` is an enum of three variants:
+ ///
+ /// - `NoElements` if the iterator is empty.
+ /// - `OneElement(x)` if the iterator has exactly one element.
+ /// - `MinMax(x, y)` is returned otherwise, where `x <= y`. Two
+ /// values are equal if and only if there is more than one
+ /// element in the iterator and all elements are equal.
+ ///
+ /// On an iterator of length `n`, `minmax` does `1.5 * n` comparisons,
+ /// and so is faster than calling `min` and `max` separately which does
+ /// `2 * n` comparisons.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::MinMaxResult::{NoElements, OneElement, MinMax};
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().minmax(), NoElements);
+ ///
+ /// let a = [1];
+ /// assert_eq!(a.iter().minmax(), OneElement(&1));
+ ///
+ /// let a = [1, 2, 3, 4, 5];
+ /// assert_eq!(a.iter().minmax(), MinMax(&1, &5));
+ ///
+ /// let a = [1, 1, 1, 1];
+ /// assert_eq!(a.iter().minmax(), MinMax(&1, &1));
+ /// ```
+ ///
+ /// The elements can be floats but no particular result is guaranteed
+ /// if an element is NaN.
+ fn minmax(self) -> MinMaxResult<Self::Item>
+ where Self: Sized, Self::Item: PartialOrd
+ {
+ minmax::minmax_impl(self, |_| (), |x, y, _, _| x < y)
+ }
+
+ /// Return the minimum and maximum element of an iterator, as determined by
+ /// the specified function.
+ ///
+ /// The return value is a variant of [`MinMaxResult`] like for [`.minmax()`](Itertools::minmax).
+ ///
+ /// For the minimum, the first minimal element is returned. For the maximum,
+ /// the last maximal element wins. This matches the behavior of the standard
+ /// [`Iterator::min`] and [`Iterator::max`] methods.
+ ///
+ /// The keys can be floats but no particular result is guaranteed
+ /// if a key is NaN.
+ fn minmax_by_key<K, F>(self, key: F) -> MinMaxResult<Self::Item>
+ where Self: Sized, K: PartialOrd, F: FnMut(&Self::Item) -> K
+ {
+ minmax::minmax_impl(self, key, |_, _, xk, yk| xk < yk)
+ }
+
+ /// Return the minimum and maximum element of an iterator, as determined by
+ /// the specified comparison function.
+ ///
+ /// The return value is a variant of [`MinMaxResult`] like for [`.minmax()`](Itertools::minmax).
+ ///
+ /// For the minimum, the first minimal element is returned. For the maximum,
+ /// the last maximal element wins. This matches the behavior of the standard
+ /// [`Iterator::min`] and [`Iterator::max`] methods.
+ fn minmax_by<F>(self, mut compare: F) -> MinMaxResult<Self::Item>
+ where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering
+ {
+ minmax::minmax_impl(
+ self,
+ |_| (),
+ |x, y, _, _| Ordering::Less == compare(x, y)
+ )
+ }
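Neither method above carries a doctest; a small illustrative sketch of the tie-breaking rules they describe:

```
use itertools::Itertools;
use itertools::MinMaxResult::MinMax;

let words = ["one", "three", "to", "seven"];
// First minimal and last maximal element win on ties ("three" vs "seven").
assert_eq!(words.iter().minmax_by_key(|w| w.len()), MinMax(&"to", &"seven"));
assert_eq!(
    words.iter().minmax_by(|a, b| a.len().cmp(&b.len())),
    MinMax(&"to", &"seven")
);
```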
+
+ /// Return the position of the maximum element in the iterator.
+ ///
+ /// If several elements are equally maximum, the position of the
+ /// last of them is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_max(), None);
+ ///
+ /// let a = [-3, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_max(), Some(3));
+ ///
+ /// let a = [1, 1, -1, -1];
+ /// assert_eq!(a.iter().position_max(), Some(1));
+ /// ```
+ fn position_max(self) -> Option<usize>
+ where Self: Sized, Self::Item: Ord
+ {
+ self.enumerate()
+ .max_by(|x, y| Ord::cmp(&x.1, &y.1))
+ .map(|x| x.0)
+ }
+
+ /// Return the position of the maximum element in the iterator, as
+ /// determined by the specified function.
+ ///
+ /// If several elements are equally maximum, the position of the
+ /// last of them is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_max_by_key(|x| x.abs()), None);
+ ///
+ /// let a = [-3_i32, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_max_by_key(|x| x.abs()), Some(4));
+ ///
+ /// let a = [1_i32, 1, -1, -1];
+ /// assert_eq!(a.iter().position_max_by_key(|x| x.abs()), Some(3));
+ /// ```
+ fn position_max_by_key<K, F>(self, mut key: F) -> Option<usize>
+ where Self: Sized, K: Ord, F: FnMut(&Self::Item) -> K
+ {
+ self.enumerate()
+ .max_by(|x, y| Ord::cmp(&key(&x.1), &key(&y.1)))
+ .map(|x| x.0)
+ }
+
+ /// Return the position of the maximum element in the iterator, as
+ /// determined by the specified comparison function.
+ ///
+ /// If several elements are equally maximum, the position of the
+ /// last of them is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_max_by(|x, y| x.cmp(y)), None);
+ ///
+ /// let a = [-3_i32, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_max_by(|x, y| x.cmp(y)), Some(3));
+ ///
+ /// let a = [1_i32, 1, -1, -1];
+ /// assert_eq!(a.iter().position_max_by(|x, y| x.cmp(y)), Some(1));
+ /// ```
+ fn position_max_by<F>(self, mut compare: F) -> Option<usize>
+ where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering
+ {
+ self.enumerate()
+ .max_by(|x, y| compare(&x.1, &y.1))
+ .map(|x| x.0)
+ }
+
+ /// Return the position of the minimum element in the iterator.
+ ///
+ /// If several elements are equally minimum, the position of the
+ /// first of them is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_min(), None);
+ ///
+ /// let a = [-3, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_min(), Some(4));
+ ///
+ /// let a = [1, 1, -1, -1];
+ /// assert_eq!(a.iter().position_min(), Some(2));
+ /// ```
+ fn position_min(self) -> Option<usize>
+ where Self: Sized, Self::Item: Ord
+ {
+ self.enumerate()
+ .min_by(|x, y| Ord::cmp(&x.1, &y.1))
+ .map(|x| x.0)
+ }
+
+ /// Return the position of the minimum element in the iterator, as
+ /// determined by the specified function.
+ ///
+ /// If several elements are equally minimum, the position of the
+ /// first of them is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_min_by_key(|x| x.abs()), None);
+ ///
+ /// let a = [-3_i32, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_min_by_key(|x| x.abs()), Some(1));
+ ///
+ /// let a = [1_i32, 1, -1, -1];
+ /// assert_eq!(a.iter().position_min_by_key(|x| x.abs()), Some(0));
+ /// ```
+ fn position_min_by_key<K, F>(self, mut key: F) -> Option<usize>
+ where Self: Sized, K: Ord, F: FnMut(&Self::Item) -> K
+ {
+ self.enumerate()
+ .min_by(|x, y| Ord::cmp(&key(&x.1), &key(&y.1)))
+ .map(|x| x.0)
+ }
+
+ /// Return the position of the minimum element in the iterator, as
+ /// determined by the specified comparison function.
+ ///
+ /// If several elements are equally minimum, the position of the
+ /// first of them is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_min_by(|x, y| x.cmp(y)), None);
+ ///
+ /// let a = [-3_i32, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_min_by(|x, y| x.cmp(y)), Some(4));
+ ///
+ /// let a = [1_i32, 1, -1, -1];
+ /// assert_eq!(a.iter().position_min_by(|x, y| x.cmp(y)), Some(2));
+ /// ```
+ fn position_min_by<F>(self, mut compare: F) -> Option<usize>
+ where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering
+ {
+ self.enumerate()
+ .min_by(|x, y| compare(&x.1, &y.1))
+ .map(|x| x.0)
+ }
+
+ /// Return the positions of the minimum and maximum elements in
+ /// the iterator.
+ ///
+ /// The return type [`MinMaxResult`] is an enum of three variants:
+ ///
+ /// - `NoElements` if the iterator is empty.
+ /// - `OneElement(xpos)` if the iterator has exactly one element.
+ /// - `MinMax(xpos, ypos)` is returned otherwise, where the
+ /// element at `xpos` ≤ the element at `ypos`. While the
+ /// referenced elements themselves may be equal, `xpos` cannot
+ /// be equal to `ypos`.
+ ///
+ /// On an iterator of length `n`, `position_minmax` does `1.5 * n`
+ /// comparisons, and so is faster than calling `position_min` and
+ /// `position_max` separately which does `2 * n` comparisons.
+ ///
+ /// For the minimum, if several elements are equally minimum, the
+ /// position of the first of them is returned. For the maximum, if
+ /// several elements are equally maximum, the position of the last
+ /// of them is returned.
+ ///
+ /// The elements can be floats but no particular result is
+ /// guaranteed if an element is NaN.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::MinMaxResult::{NoElements, OneElement, MinMax};
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_minmax(), NoElements);
+ ///
+ /// let a = [10];
+ /// assert_eq!(a.iter().position_minmax(), OneElement(0));
+ ///
+ /// let a = [-3, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_minmax(), MinMax(4, 3));
+ ///
+ /// let a = [1, 1, -1, -1];
+ /// assert_eq!(a.iter().position_minmax(), MinMax(2, 1));
+ /// ```
+ fn position_minmax(self) -> MinMaxResult<usize>
+ where Self: Sized, Self::Item: PartialOrd
+ {
+ use crate::MinMaxResult::{NoElements, OneElement, MinMax};
+ match minmax::minmax_impl(self.enumerate(), |_| (), |x, y, _, _| x.1 < y.1) {
+ NoElements => NoElements,
+ OneElement(x) => OneElement(x.0),
+ MinMax(x, y) => MinMax(x.0, y.0),
+ }
+ }
+
+ /// Return the positions of the minimum and maximum elements of an
+ /// iterator, as determined by the specified function.
+ ///
+ /// The return value is a variant of [`MinMaxResult`] like for
+ /// [`position_minmax`].
+ ///
+ /// For the minimum, if several elements are equally minimum, the
+ /// position of the first of them is returned. For the maximum, if
+ /// several elements are equally maximum, the position of the last
+ /// of them is returned.
+ ///
+ /// The keys can be floats but no particular result is guaranteed
+ /// if a key is NaN.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::MinMaxResult::{NoElements, OneElement, MinMax};
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_minmax_by_key(|x| x.abs()), NoElements);
+ ///
+ /// let a = [10_i32];
+ /// assert_eq!(a.iter().position_minmax_by_key(|x| x.abs()), OneElement(0));
+ ///
+ /// let a = [-3_i32, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_minmax_by_key(|x| x.abs()), MinMax(1, 4));
+ ///
+ /// let a = [1_i32, 1, -1, -1];
+ /// assert_eq!(a.iter().position_minmax_by_key(|x| x.abs()), MinMax(0, 3));
+ /// ```
+ ///
+ /// [`position_minmax`]: Self::position_minmax
+ fn position_minmax_by_key<K, F>(self, mut key: F) -> MinMaxResult<usize>
+ where Self: Sized, K: PartialOrd, F: FnMut(&Self::Item) -> K
+ {
+ use crate::MinMaxResult::{NoElements, OneElement, MinMax};
+ match self.enumerate().minmax_by_key(|e| key(&e.1)) {
+ NoElements => NoElements,
+ OneElement(x) => OneElement(x.0),
+ MinMax(x, y) => MinMax(x.0, y.0),
+ }
+ }
+
+ /// Return the positions of the minimum and maximum elements of an
+ /// iterator, as determined by the specified comparison function.
+ ///
+ /// The return value is a variant of [`MinMaxResult`] like for
+ /// [`position_minmax`].
+ ///
+ /// For the minimum, if several elements are equally minimum, the
+ /// position of the first of them is returned. For the maximum, if
+ /// several elements are equally maximum, the position of the last
+ /// of them is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ /// use itertools::MinMaxResult::{NoElements, OneElement, MinMax};
+ ///
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.iter().position_minmax_by(|x, y| x.cmp(y)), NoElements);
+ ///
+ /// let a = [10_i32];
+ /// assert_eq!(a.iter().position_minmax_by(|x, y| x.cmp(y)), OneElement(0));
+ ///
+ /// let a = [-3_i32, 0, 1, 5, -10];
+ /// assert_eq!(a.iter().position_minmax_by(|x, y| x.cmp(y)), MinMax(4, 3));
+ ///
+ /// let a = [1_i32, 1, -1, -1];
+ /// assert_eq!(a.iter().position_minmax_by(|x, y| x.cmp(y)), MinMax(2, 1));
+ /// ```
+ ///
+ /// [`position_minmax`]: Self::position_minmax
+ fn position_minmax_by<F>(self, mut compare: F) -> MinMaxResult<usize>
+ where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering
+ {
+ use crate::MinMaxResult::{NoElements, OneElement, MinMax};
+ match self.enumerate().minmax_by(|x, y| compare(&x.1, &y.1)) {
+ NoElements => NoElements,
+ OneElement(x) => OneElement(x.0),
+ MinMax(x, y) => MinMax(x.0, y.0),
+ }
+ }
+
+ /// If the iterator yields exactly one element, that element will be returned, otherwise
+ /// an error will be returned containing an iterator that has the same output as the input
+ /// iterator.
+ ///
+ /// This provides an additional layer of validation over just calling `Iterator::next()`.
+ /// If your assumption that there should only be one element yielded is false, this provides
+ /// the opportunity to detect and handle that, preventing errors at a distance.
+ ///
+ /// # Examples
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// assert_eq!((0..10).filter(|&x| x == 2).exactly_one().unwrap(), 2);
+ /// assert!((0..10).filter(|&x| x > 1 && x < 4).exactly_one().unwrap_err().eq(2..4));
+ /// assert!((0..10).filter(|&x| x > 1 && x < 5).exactly_one().unwrap_err().eq(2..5));
+ /// assert!((0..10).filter(|&_| false).exactly_one().unwrap_err().eq(0..0));
+ /// ```
+ fn exactly_one(mut self) -> Result<Self::Item, ExactlyOneError<Self>>
+ where
+ Self: Sized,
+ {
+ match self.next() {
+ Some(first) => {
+ match self.next() {
+ Some(second) => {
+ Err(ExactlyOneError::new(Some(Either::Left([first, second])), self))
+ }
+ None => {
+ Ok(first)
+ }
+ }
+ }
+ None => Err(ExactlyOneError::new(None, self)),
+ }
+ }
+
+ /// If the iterator yields no elements, `Ok(None)` will be returned. If the iterator yields
+ /// exactly one element, that element will be returned, otherwise an error will be returned
+ /// containing an iterator that has the same output as the input iterator.
+ ///
+ /// This provides an additional layer of validation over just calling `Iterator::next()`.
+ /// If your assumption that there should be at most one element yielded is false, this provides
+ /// the opportunity to detect and handle that, preventing errors at a distance.
+ ///
+ /// # Examples
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// assert_eq!((0..10).filter(|&x| x == 2).at_most_one().unwrap(), Some(2));
+ /// assert!((0..10).filter(|&x| x > 1 && x < 4).at_most_one().unwrap_err().eq(2..4));
+ /// assert!((0..10).filter(|&x| x > 1 && x < 5).at_most_one().unwrap_err().eq(2..5));
+ /// assert_eq!((0..10).filter(|&_| false).at_most_one().unwrap(), None);
+ /// ```
+ fn at_most_one(mut self) -> Result<Option<Self::Item>, ExactlyOneError<Self>>
+ where
+ Self: Sized,
+ {
+ match self.next() {
+ Some(first) => {
+ match self.next() {
+ Some(second) => {
+ Err(ExactlyOneError::new(Some(Either::Left([first, second])), self))
+ }
+ None => {
+ Ok(Some(first))
+ }
+ }
+ }
+ None => Ok(None),
+ }
+ }
+
+ /// An iterator adaptor that allows the user to peek at multiple `.next()`
+ /// values without advancing the base iterator.
+ ///
+ /// # Examples
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut iter = (0..10).multipeek();
+ /// assert_eq!(iter.peek(), Some(&0));
+ /// assert_eq!(iter.peek(), Some(&1));
+ /// assert_eq!(iter.peek(), Some(&2));
+ /// assert_eq!(iter.next(), Some(0));
+ /// assert_eq!(iter.peek(), Some(&1));
+ /// ```
+ #[cfg(feature = "use_alloc")]
+ fn multipeek(self) -> MultiPeek<Self>
+ where
+ Self: Sized,
+ {
+ multipeek_impl::multipeek(self)
+ }
+
+ /// Collect the items in this iterator and return a `HashMap` which
+ /// contains each item that appears in the iterator and the number
+ /// of times it appears.
+ ///
+ /// # Examples
+ /// ```
+ /// # use itertools::Itertools;
+ /// let counts = [1, 1, 1, 3, 3, 5].into_iter().counts();
+ /// assert_eq!(counts[&1], 3);
+ /// assert_eq!(counts[&3], 2);
+ /// assert_eq!(counts[&5], 1);
+ /// assert_eq!(counts.get(&0), None);
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn counts(self) -> HashMap<Self::Item, usize>
+ where
+ Self: Sized,
+ Self::Item: Eq + Hash,
+ {
+ let mut counts = HashMap::new();
+ self.for_each(|item| *counts.entry(item).or_default() += 1);
+ counts
+ }
+
+ /// Collect the items in this iterator and return a `HashMap` which
+ /// contains each item that appears in the iterator and the number
+ /// of times it appears,
+ /// determining identity using a keying function.
+ ///
+ /// ```
+ /// # use itertools::Itertools;
+ /// struct Character {
+ /// first_name: &'static str,
+ /// last_name: &'static str,
+ /// }
+ ///
+ /// let characters =
+ /// vec![
+ /// Character { first_name: "Amy", last_name: "Pond" },
+ /// Character { first_name: "Amy", last_name: "Wong" },
+ /// Character { first_name: "Amy", last_name: "Santiago" },
+ /// Character { first_name: "James", last_name: "Bond" },
+ /// Character { first_name: "James", last_name: "Sullivan" },
+ /// Character { first_name: "James", last_name: "Norington" },
+ /// Character { first_name: "James", last_name: "Kirk" },
+ /// ];
+ ///
+ /// let first_name_frequency =
+ /// characters
+ /// .into_iter()
+ /// .counts_by(|c| c.first_name);
+ ///
+ /// assert_eq!(first_name_frequency["Amy"], 3);
+ /// assert_eq!(first_name_frequency["James"], 4);
+ /// assert_eq!(first_name_frequency.contains_key("Asha"), false);
+ /// ```
+ #[cfg(feature = "use_std")]
+ fn counts_by<K, F>(self, f: F) -> HashMap<K, usize>
+ where
+ Self: Sized,
+ K: Eq + Hash,
+ F: FnMut(Self::Item) -> K,
+ {
+ self.map(f).counts()
+ }
+
+ /// Converts an iterator of tuples into a tuple of containers.
+ ///
+ /// `multiunzip()` consumes an entire iterator of n-ary tuples, producing `n` collections, one for each
+ /// column.
+ ///
+ /// This function is, in some sense, the opposite of [`multizip`].
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let inputs = vec![(1, 2, 3), (4, 5, 6), (7, 8, 9)];
+ ///
+ /// let (a, b, c): (Vec<_>, Vec<_>, Vec<_>) = inputs
+ /// .into_iter()
+ /// .multiunzip();
+ ///
+ /// assert_eq!(a, vec![1, 4, 7]);
+ /// assert_eq!(b, vec![2, 5, 8]);
+ /// assert_eq!(c, vec![3, 6, 9]);
+ /// ```
+ fn multiunzip<FromI>(self) -> FromI
+ where
+ Self: Sized + MultiUnzip<FromI>,
+ {
+ MultiUnzip::multiunzip(self)
+ }
+}
+
+impl<T: ?Sized> Itertools for T where T: Iterator { }
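This blanket impl is what makes every `Iterator` pick up the methods above; bringing the trait into scope is all a caller needs (illustrative sketch):

```
use itertools::Itertools; // blanket impl: any Iterator is an Itertools

let joined = ["a", "b", "c"].iter().join("-");
assert_eq!(joined, "a-b-c");
```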
+
+/// Return `true` if both iterables produce equal sequences
+/// (elements pairwise equal and sequences of the same length),
+/// `false` otherwise.
+///
+/// [`IntoIterator`] enabled version of [`Iterator::eq`].
+///
+/// ```
+/// assert!(itertools::equal(vec![1, 2, 3], 1..4));
+/// assert!(!itertools::equal(&[0, 0], &[0, 0, 0]));
+/// ```
+pub fn equal<I, J>(a: I, b: J) -> bool
+ where I: IntoIterator,
+ J: IntoIterator,
+ I::Item: PartialEq<J::Item>
+{
+ a.into_iter().eq(b)
+}
+
+/// Assert that two iterables produce equal sequences, with the same
+/// semantics as [`equal(a, b)`](equal).
+///
+/// **Panics** on assertion failure with a message that shows the
+/// two iteration elements.
+///
+/// ```ignore
+/// assert_equal("exceed".split('c'), "excess".split('c'));
+/// // ^PANIC: panicked at 'Failed assertion Some("eed") == Some("ess") for iteration 1',
+/// ```
+pub fn assert_equal<I, J>(a: I, b: J)
+ where I: IntoIterator,
+ J: IntoIterator,
+ I::Item: fmt::Debug + PartialEq<J::Item>,
+ J::Item: fmt::Debug,
+{
+ let mut ia = a.into_iter();
+ let mut ib = b.into_iter();
+ let mut i = 0;
+ loop {
+ match (ia.next(), ib.next()) {
+ (None, None) => return,
+ (a, b) => {
+ let equal = match (&a, &b) {
+ (&Some(ref a), &Some(ref b)) => a == b,
+ _ => false,
+ };
+ assert!(equal, "Failed assertion {a:?} == {b:?} for iteration {i}",
+ i=i, a=a, b=b);
+ i += 1;
+ }
+ }
+ }
+}
+
+/// Partition a sequence using predicate `pred` so that elements
+/// that map to `true` are placed before elements which map to `false`.
+///
+/// The order within the partitions is arbitrary.
+///
+/// Return the index of the split point.
+///
+/// ```
+/// use itertools::partition;
+///
+/// # // use repeated numbers to not promise any ordering
+/// let mut data = [7, 1, 1, 7, 1, 1, 7];
+/// let split_index = partition(&mut data, |elt| *elt >= 3);
+///
+/// assert_eq!(data, [7, 7, 7, 1, 1, 1, 1]);
+/// assert_eq!(split_index, 3);
+/// ```
+pub fn partition<'a, A: 'a, I, F>(iter: I, mut pred: F) -> usize
+ where I: IntoIterator<Item = &'a mut A>,
+ I::IntoIter: DoubleEndedIterator,
+ F: FnMut(&A) -> bool
+{
+ let mut split_index = 0;
+ let mut iter = iter.into_iter();
+ 'main: while let Some(front) = iter.next() {
+ if !pred(front) {
+ loop {
+ match iter.next_back() {
+ Some(back) => if pred(back) {
+ std::mem::swap(front, back);
+ break;
+ },
+ None => break 'main,
+ }
+ }
+ }
+ split_index += 1;
+ }
+ split_index
+}
+
+/// An enum used for controlling the execution of `fold_while`.
+///
+/// See [`.fold_while()`](Itertools::fold_while) for more information.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum FoldWhile<T> {
+ /// Continue folding with this value
+ Continue(T),
+ /// Fold is complete and will return this value
+ Done(T),
+}
+
+impl<T> FoldWhile<T> {
+ /// Return the value in the continue or done.
+ pub fn into_inner(self) -> T {
+ match self {
+ FoldWhile::Continue(x) | FoldWhile::Done(x) => x,
+ }
+ }
+
+ /// Return true if `self` is `Done`, false if it is `Continue`.
+ pub fn is_done(&self) -> bool {
+ match *self {
+ FoldWhile::Continue(_) => false,
+ FoldWhile::Done(_) => true,
+ }
+ }
+}
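An illustrative sketch of how `into_inner` and `is_done` are typically combined with `fold_while`:

```
use itertools::Itertools;
use itertools::FoldWhile::{Continue, Done};

// Stop summing once the running total would exceed 100.
let result = (1..100).fold_while(0, |acc, x| {
    if acc + x > 100 { Done(acc) } else { Continue(acc + x) }
});
assert!(result.is_done());
assert_eq!(result.into_inner(), 91); // 1 + 2 + ... + 13
```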
diff --git a/rust/hw/char/pl011/vendor/itertools/src/merge_join.rs b/rust/hw/char/pl011/vendor/itertools/src/merge_join.rs
new file mode 100644
index 0000000000..84f7d03338
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/merge_join.rs
@@ -0,0 +1,220 @@
+use std::cmp::Ordering;
+use std::iter::Fuse;
+use std::fmt;
+
+use either::Either;
+
+use super::adaptors::{PutBack, put_back};
+use crate::either_or_both::EitherOrBoth;
+use crate::size_hint::{self, SizeHint};
+#[cfg(doc)]
+use crate::Itertools;
+
+/// Return an iterator adaptor that merge-joins items from the two base iterators in ascending order.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::merge_join_by`].
+pub fn merge_join_by<I, J, F, T>(left: I, right: J, cmp_fn: F)
+ -> MergeJoinBy<I::IntoIter, J::IntoIter, F>
+ where I: IntoIterator,
+ J: IntoIterator,
+ F: FnMut(&I::Item, &J::Item) -> T,
+ T: OrderingOrBool<I::Item, J::Item>,
+{
+ MergeJoinBy {
+ left: put_back(left.into_iter().fuse()),
+ right: put_back(right.into_iter().fuse()),
+ cmp_fn,
+ }
+}
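An illustrative sketch of this free-function form; with an `Ordering`-returning closure the items come out as `EitherOrBoth`:

```
use itertools::merge_join_by;
use itertools::EitherOrBoth::{Both, Left, Right};

let joined: Vec<_> =
    merge_join_by(vec![1, 3, 4], vec![2, 3, 5], |a, b| a.cmp(b)).collect();
assert_eq!(joined, vec![Left(1), Right(2), Both(3, 3), Left(4), Right(5)]);
```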
+
+/// An iterator adaptor that merge-joins items from the two base iterators in ascending order.
+///
+/// See [`.merge_join_by()`](crate::Itertools::merge_join_by) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct MergeJoinBy<I: Iterator, J: Iterator, F> {
+ left: PutBack<Fuse<I>>,
+ right: PutBack<Fuse<J>>,
+ cmp_fn: F,
+}
+
+pub trait OrderingOrBool<L, R> {
+ type MergeResult;
+ fn left(left: L) -> Self::MergeResult;
+ fn right(right: R) -> Self::MergeResult;
+ // "merge" never returns (Some(...), Some(...), ...) so Option<Either<I::Item, J::Item>>
+ // is appealing but it is always followed by two put_backs, so we think the compiler is
+ // smart enough to optimize it. Or we could move put_backs into "merge".
+ fn merge(self, left: L, right: R) -> (Option<L>, Option<R>, Self::MergeResult);
+ fn size_hint(left: SizeHint, right: SizeHint) -> SizeHint;
+}
+
+impl<L, R> OrderingOrBool<L, R> for Ordering {
+ type MergeResult = EitherOrBoth<L, R>;
+ fn left(left: L) -> Self::MergeResult {
+ EitherOrBoth::Left(left)
+ }
+ fn right(right: R) -> Self::MergeResult {
+ EitherOrBoth::Right(right)
+ }
+ fn merge(self, left: L, right: R) -> (Option<L>, Option<R>, Self::MergeResult) {
+ match self {
+ Ordering::Equal => (None, None, EitherOrBoth::Both(left, right)),
+ Ordering::Less => (None, Some(right), EitherOrBoth::Left(left)),
+ Ordering::Greater => (Some(left), None, EitherOrBoth::Right(right)),
+ }
+ }
+ fn size_hint(left: SizeHint, right: SizeHint) -> SizeHint {
+ let (a_lower, a_upper) = left;
+ let (b_lower, b_upper) = right;
+ let lower = ::std::cmp::max(a_lower, b_lower);
+ let upper = match (a_upper, b_upper) {
+ (Some(x), Some(y)) => x.checked_add(y),
+ _ => None,
+ };
+ (lower, upper)
+ }
+}
+
+impl<L, R> OrderingOrBool<L, R> for bool {
+ type MergeResult = Either<L, R>;
+ fn left(left: L) -> Self::MergeResult {
+ Either::Left(left)
+ }
+ fn right(right: R) -> Self::MergeResult {
+ Either::Right(right)
+ }
+ fn merge(self, left: L, right: R) -> (Option<L>, Option<R>, Self::MergeResult) {
+ if self {
+ (None, Some(right), Either::Left(left))
+ } else {
+ (Some(left), None, Either::Right(right))
+ }
+ }
+ fn size_hint(left: SizeHint, right: SizeHint) -> SizeHint {
+ // Not ExactSizeIterator because size may be larger than usize
+ size_hint::add(left, right)
+ }
+}
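With the `bool` implementation above, the adaptor degenerates into a plain two-way merge that yields `Either`; an illustrative sketch:

```
use itertools::{merge_join_by, Either};

let merged: Vec<i32> = merge_join_by(vec![1, 4], vec![2, 3], |a, b| a <= b)
    .map(Either::into_inner)
    .collect();
assert_eq!(merged, vec![1, 2, 3, 4]);
```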
+
+impl<I, J, F> Clone for MergeJoinBy<I, J, F>
+ where I: Iterator,
+ J: Iterator,
+ PutBack<Fuse<I>>: Clone,
+ PutBack<Fuse<J>>: Clone,
+ F: Clone,
+{
+ clone_fields!(left, right, cmp_fn);
+}
+
+impl<I, J, F> fmt::Debug for MergeJoinBy<I, J, F>
+ where I: Iterator + fmt::Debug,
+ I::Item: fmt::Debug,
+ J: Iterator + fmt::Debug,
+ J::Item: fmt::Debug,
+{
+ debug_fmt_fields!(MergeJoinBy, left, right);
+}
+
+impl<I, J, F, T> Iterator for MergeJoinBy<I, J, F>
+ where I: Iterator,
+ J: Iterator,
+ F: FnMut(&I::Item, &J::Item) -> T,
+ T: OrderingOrBool<I::Item, J::Item>,
+{
+ type Item = T::MergeResult;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match (self.left.next(), self.right.next()) {
+ (None, None) => None,
+ (Some(left), None) => Some(T::left(left)),
+ (None, Some(right)) => Some(T::right(right)),
+ (Some(left), Some(right)) => {
+ let (left, right, next) = (self.cmp_fn)(&left, &right).merge(left, right);
+ if let Some(left) = left {
+ self.left.put_back(left);
+ }
+ if let Some(right) = right {
+ self.right.put_back(right);
+ }
+ Some(next)
+ }
+ }
+ }
+
+ fn size_hint(&self) -> SizeHint {
+ T::size_hint(self.left.size_hint(), self.right.size_hint())
+ }
+
+ fn count(mut self) -> usize {
+ let mut count = 0;
+ loop {
+ match (self.left.next(), self.right.next()) {
+ (None, None) => break count,
+ (Some(_left), None) => break count + 1 + self.left.into_parts().1.count(),
+ (None, Some(_right)) => break count + 1 + self.right.into_parts().1.count(),
+ (Some(left), Some(right)) => {
+ count += 1;
+ let (left, right, _) = (self.cmp_fn)(&left, &right).merge(left, right);
+ if let Some(left) = left {
+ self.left.put_back(left);
+ }
+ if let Some(right) = right {
+ self.right.put_back(right);
+ }
+ }
+ }
+ }
+ }
+
+ fn last(mut self) -> Option<Self::Item> {
+ let mut previous_element = None;
+ loop {
+ match (self.left.next(), self.right.next()) {
+ (None, None) => break previous_element,
+ (Some(left), None) => {
+ break Some(T::left(
+ self.left.into_parts().1.last().unwrap_or(left),
+ ))
+ }
+ (None, Some(right)) => {
+ break Some(T::right(
+ self.right.into_parts().1.last().unwrap_or(right),
+ ))
+ }
+ (Some(left), Some(right)) => {
+ let (left, right, elem) = (self.cmp_fn)(&left, &right).merge(left, right);
+ if let Some(left) = left {
+ self.left.put_back(left);
+ }
+ if let Some(right) = right {
+ self.right.put_back(right);
+ }
+ previous_element = Some(elem);
+ }
+ }
+ }
+ }
+
+ fn nth(&mut self, mut n: usize) -> Option<Self::Item> {
+ loop {
+ if n == 0 {
+ break self.next();
+ }
+ n -= 1;
+ match (self.left.next(), self.right.next()) {
+ (None, None) => break None,
+ (Some(_left), None) => break self.left.nth(n).map(T::left),
+ (None, Some(_right)) => break self.right.nth(n).map(T::right),
+ (Some(left), Some(right)) => {
+ let (left, right, _) = (self.cmp_fn)(&left, &right).merge(left, right);
+ if let Some(left) = left {
+ self.left.put_back(left);
+ }
+ if let Some(right) = right {
+ self.right.put_back(right);
+ }
+ }
+ }
+ }
+ }
+}
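The specialized `count`, `last` and `nth` above are meant to agree with plain iteration; a small illustrative consistency check:

```
use itertools::merge_join_by;

let make = || merge_join_by(vec![1, 3, 4], vec![2, 3], |a, b| a.cmp(b));
assert_eq!(make().count(), 4); // Left(1), Right(2), Both(3, 3), Left(4)
assert_eq!(make().last(), make().nth(3));
```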
diff --git a/rust/hw/char/pl011/vendor/itertools/src/minmax.rs b/rust/hw/char/pl011/vendor/itertools/src/minmax.rs
new file mode 100644
index 0000000000..52b2f115dd
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/minmax.rs
@@ -0,0 +1,115 @@
+
+/// `MinMaxResult` is an enum returned by `minmax`.
+///
+/// See [`.minmax()`](crate::Itertools::minmax) for more detail.
+#[derive(Copy, Clone, PartialEq, Debug)]
+pub enum MinMaxResult<T> {
+ /// Empty iterator
+ NoElements,
+
+ /// Iterator with one element, so the minimum and maximum are the same
+ OneElement(T),
+
+ /// More than one element in the iterator, the first element is not larger
+ /// than the second
+ MinMax(T, T)
+}
+
+impl<T: Clone> MinMaxResult<T> {
+ /// `into_option` creates an `Option` of type `(T, T)`. The returned `Option`
+ /// has variant `None` if and only if the `MinMaxResult` has variant
+ /// `NoElements`. Otherwise `Some((x, y))` is returned where `x <= y`.
+ /// If the `MinMaxResult` has variant `OneElement(x)`, performing this
+ /// operation will make one clone of `x`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use itertools::MinMaxResult::{self, NoElements, OneElement, MinMax};
+ ///
+ /// let r: MinMaxResult<i32> = NoElements;
+ /// assert_eq!(r.into_option(), None);
+ ///
+ /// let r = OneElement(1);
+ /// assert_eq!(r.into_option(), Some((1, 1)));
+ ///
+ /// let r = MinMax(1, 2);
+ /// assert_eq!(r.into_option(), Some((1, 2)));
+ /// ```
+ pub fn into_option(self) -> Option<(T,T)> {
+ match self {
+ MinMaxResult::NoElements => None,
+ MinMaxResult::OneElement(x) => Some((x.clone(), x)),
+ MinMaxResult::MinMax(x, y) => Some((x, y))
+ }
+ }
+}
+
+/// Implementation guts for `minmax` and `minmax_by_key`.
+pub fn minmax_impl<I, K, F, L>(mut it: I, mut key_for: F,
+ mut lt: L) -> MinMaxResult<I::Item>
+ where I: Iterator,
+ F: FnMut(&I::Item) -> K,
+ L: FnMut(&I::Item, &I::Item, &K, &K) -> bool,
+{
+ let (mut min, mut max, mut min_key, mut max_key) = match it.next() {
+ None => return MinMaxResult::NoElements,
+ Some(x) => {
+ match it.next() {
+ None => return MinMaxResult::OneElement(x),
+ Some(y) => {
+ let xk = key_for(&x);
+ let yk = key_for(&y);
+ if !lt(&y, &x, &yk, &xk) {(x, y, xk, yk)} else {(y, x, yk, xk)}
+ }
+ }
+ }
+ };
+
+ loop {
+ // `first` and `second` are the two next elements we want to look
+ // at. We first compare `first` and `second` (#1). The smaller one
+ // is then compared to current minimum (#2). The larger one is
+ // compared to current maximum (#3). This way we do 3 comparisons
+ // for 2 elements.
+ let first = match it.next() {
+ None => break,
+ Some(x) => x
+ };
+ let second = match it.next() {
+ None => {
+ let first_key = key_for(&first);
+ if lt(&first, &min, &first_key, &min_key) {
+ min = first;
+ } else if !lt(&first, &max, &first_key, &max_key) {
+ max = first;
+ }
+ break;
+ }
+ Some(x) => x
+ };
+ let first_key = key_for(&first);
+ let second_key = key_for(&second);
+ if !lt(&second, &first, &second_key, &first_key) {
+ if lt(&first, &min, &first_key, &min_key) {
+ min = first;
+ min_key = first_key;
+ }
+ if !lt(&second, &max, &second_key, &max_key) {
+ max = second;
+ max_key = second_key;
+ }
+ } else {
+ if lt(&second, &min, &second_key, &min_key) {
+ min = second;
+ min_key = second_key;
+ }
+ if !lt(&first, &max, &first_key, &max_key) {
+ max = first;
+ max_key = first_key;
+ }
+ }
+ }
+
+ MinMaxResult::MinMax(min, max)
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/multipeek_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/multipeek_impl.rs
new file mode 100644
index 0000000000..8b49c695eb
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/multipeek_impl.rs
@@ -0,0 +1,101 @@
+use std::iter::Fuse;
+use alloc::collections::VecDeque;
+use crate::size_hint;
+use crate::PeekingNext;
+#[cfg(doc)]
+use crate::Itertools;
+
+/// See [`multipeek()`] for more information.
+#[derive(Clone, Debug)]
+pub struct MultiPeek<I>
+ where I: Iterator
+{
+ iter: Fuse<I>,
+ buf: VecDeque<I::Item>,
+ index: usize,
+}
+
+/// An iterator adaptor that allows the user to peek at multiple `.next()`
+/// values without advancing the base iterator.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::multipeek`].
+pub fn multipeek<I>(iterable: I) -> MultiPeek<I::IntoIter>
+ where I: IntoIterator
+{
+ MultiPeek {
+ iter: iterable.into_iter().fuse(),
+ buf: VecDeque::new(),
+ index: 0,
+ }
+}
+
+impl<I> MultiPeek<I>
+ where I: Iterator
+{
+ /// Reset the peeking “cursor”
+ pub fn reset_peek(&mut self) {
+ self.index = 0;
+ }
+}
+
+impl<I: Iterator> MultiPeek<I> {
+ /// Works exactly like `.next()` with the only difference that it doesn't
+ /// advance itself. `.peek()` can be called multiple times, to peek
+ /// further ahead.
+ /// When `.next()` is called, reset the peeking “cursor”.
+ pub fn peek(&mut self) -> Option<&I::Item> {
+ let ret = if self.index < self.buf.len() {
+ Some(&self.buf[self.index])
+ } else {
+ match self.iter.next() {
+ Some(x) => {
+ self.buf.push_back(x);
+ Some(&self.buf[self.index])
+ }
+ None => return None,
+ }
+ };
+
+ self.index += 1;
+ ret
+ }
+}
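An illustrative sketch of the peeking cursor and `reset_peek`:

```
use itertools::multipeek;

let mut it = multipeek(1..4);
assert_eq!(it.peek(), Some(&1));
assert_eq!(it.peek(), Some(&2)); // the cursor advances with each peek
it.reset_peek();
assert_eq!(it.peek(), Some(&1)); // back to the front of the buffer
assert_eq!(it.next(), Some(1));  // next() also resets the cursor
```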
+
+impl<I> PeekingNext for MultiPeek<I>
+ where I: Iterator,
+{
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where F: FnOnce(&Self::Item) -> bool
+ {
+ if self.buf.is_empty() {
+ if let Some(r) = self.peek() {
+ if !accept(r) { return None }
+ }
+ } else if let Some(r) = self.buf.get(0) {
+ if !accept(r) { return None }
+ }
+ self.next()
+ }
+}
+
+impl<I> Iterator for MultiPeek<I>
+ where I: Iterator
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.index = 0;
+ self.buf.pop_front().or_else(|| self.iter.next())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::add_scalar(self.iter.size_hint(), self.buf.len())
+ }
+}
+
+// Same size
+impl<I> ExactSizeIterator for MultiPeek<I>
+ where I: ExactSizeIterator
+{}
+
+
diff --git a/rust/hw/char/pl011/vendor/itertools/src/pad_tail.rs b/rust/hw/char/pl011/vendor/itertools/src/pad_tail.rs
new file mode 100644
index 0000000000..248a432436
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/pad_tail.rs
@@ -0,0 +1,96 @@
+use std::iter::{Fuse, FusedIterator};
+use crate::size_hint;
+
+/// An iterator adaptor that pads a sequence to a minimum length by filling
+/// missing elements using a closure.
+///
+/// Iterator element type is `I::Item`.
+///
+/// See [`.pad_using()`](crate::Itertools::pad_using) for more information.
+#[derive(Clone)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct PadUsing<I, F> {
+ iter: Fuse<I>,
+ min: usize,
+ pos: usize,
+ filler: F,
+}
+
+impl<I, F> std::fmt::Debug for PadUsing<I, F>
+where
+ I: std::fmt::Debug,
+{
+ debug_fmt_fields!(PadUsing, iter, min, pos);
+}
+
+/// Create a new `PadUsing` iterator.
+pub fn pad_using<I, F>(iter: I, min: usize, filler: F) -> PadUsing<I, F>
+ where I: Iterator,
+ F: FnMut(usize) -> I::Item
+{
+ PadUsing {
+ iter: iter.fuse(),
+ min,
+ pos: 0,
+ filler,
+ }
+}
+
+impl<I, F> Iterator for PadUsing<I, F>
+ where I: Iterator,
+ F: FnMut(usize) -> I::Item
+{
+ type Item = I::Item;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.iter.next() {
+ None => {
+ if self.pos < self.min {
+ let e = Some((self.filler)(self.pos));
+ self.pos += 1;
+ e
+ } else {
+ None
+ }
+ },
+ e => {
+ self.pos += 1;
+ e
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let tail = self.min.saturating_sub(self.pos);
+ size_hint::max(self.iter.size_hint(), (tail, Some(tail)))
+ }
+}
+
+impl<I, F> DoubleEndedIterator for PadUsing<I, F>
+ where I: DoubleEndedIterator + ExactSizeIterator,
+ F: FnMut(usize) -> I::Item
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.min == 0 {
+ self.iter.next_back()
+ } else if self.iter.len() >= self.min {
+ self.min -= 1;
+ self.iter.next_back()
+ } else {
+ self.min -= 1;
+ Some((self.filler)(self.min))
+ }
+ }
+}
+
+impl<I, F> ExactSizeIterator for PadUsing<I, F>
+ where I: ExactSizeIterator,
+ F: FnMut(usize) -> I::Item
+{}
+
+
+impl<I, F> FusedIterator for PadUsing<I, F>
+ where I: FusedIterator,
+ F: FnMut(usize) -> I::Item
+{}
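A small sketch of the `PadUsing` adaptor above, assuming the usual `Itertools::pad_using` method on the crate's extension trait; the filler closure receives the position being filled:

```rust
use itertools::Itertools;

fn main() {
    // Pad a short iterator up to length 5; positions 3 and 4 are filled.
    let padded: Vec<usize> = (0..3usize).pad_using(5, |pos| pos * 2).collect();
    assert_eq!(padded, vec![0, 1, 2, 6, 8]);

    // An iterator already at least `min` long is passed through unchanged.
    let unchanged: Vec<usize> = (0..6usize).pad_using(3, |pos| pos).collect();
    assert_eq!(unchanged, vec![0, 1, 2, 3, 4, 5]);
}
```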
diff --git a/rust/hw/char/pl011/vendor/itertools/src/peek_nth.rs b/rust/hw/char/pl011/vendor/itertools/src/peek_nth.rs
new file mode 100644
index 0000000000..bcca45838e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/peek_nth.rs
@@ -0,0 +1,102 @@
+use crate::size_hint;
+use crate::PeekingNext;
+use alloc::collections::VecDeque;
+use std::iter::Fuse;
+
+/// See [`peek_nth()`] for more information.
+#[derive(Clone, Debug)]
+pub struct PeekNth<I>
+where
+ I: Iterator,
+{
+ iter: Fuse<I>,
+ buf: VecDeque<I::Item>,
+}
+
+/// A drop-in replacement for [`std::iter::Peekable`] which adds a `peek_nth`
+/// method allowing the user to `peek` at a value several iterations forward
+/// without advancing the base iterator.
+///
+/// This differs from `multipeek` in that subsequent calls to `peek` or
+/// `peek_nth` will always return the same value until `next` is called
+/// (making `reset_peek` unnecessary).
+pub fn peek_nth<I>(iterable: I) -> PeekNth<I::IntoIter>
+where
+ I: IntoIterator,
+{
+ PeekNth {
+ iter: iterable.into_iter().fuse(),
+ buf: VecDeque::new(),
+ }
+}
+
+impl<I> PeekNth<I>
+where
+ I: Iterator,
+{
+ /// Works exactly like the `peek` method in `std::iter::Peekable`
+ pub fn peek(&mut self) -> Option<&I::Item> {
+ self.peek_nth(0)
+ }
+
+ /// Returns a reference to the `nth` value without advancing the iterator.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```rust
+ /// use itertools::peek_nth;
+ ///
+ /// let xs = vec![1,2,3];
+ /// let mut iter = peek_nth(xs.iter());
+ ///
+ /// assert_eq!(iter.peek_nth(0), Some(&&1));
+ /// assert_eq!(iter.next(), Some(&1));
+ ///
+    /// // The iterator does not advance even if we call `peek_nth` multiple times
+ /// assert_eq!(iter.peek_nth(0), Some(&&2));
+ /// assert_eq!(iter.peek_nth(1), Some(&&3));
+ /// assert_eq!(iter.next(), Some(&2));
+ ///
+ /// // Calling `peek_nth` past the end of the iterator will return `None`
+ /// assert_eq!(iter.peek_nth(1), None);
+ /// ```
+ pub fn peek_nth(&mut self, n: usize) -> Option<&I::Item> {
+ let unbuffered_items = (n + 1).saturating_sub(self.buf.len());
+
+ self.buf.extend(self.iter.by_ref().take(unbuffered_items));
+
+ self.buf.get(n)
+ }
+}
+
+impl<I> Iterator for PeekNth<I>
+where
+ I: Iterator,
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.buf.pop_front().or_else(|| self.iter.next())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::add_scalar(self.iter.size_hint(), self.buf.len())
+ }
+}
+
+impl<I> ExactSizeIterator for PeekNth<I> where I: ExactSizeIterator {}
+
+impl<I> PeekingNext for PeekNth<I>
+where
+ I: Iterator,
+{
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where
+ F: FnOnce(&Self::Item) -> bool,
+ {
+ self.peek().filter(|item| accept(item))?;
+ self.next()
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/peeking_take_while.rs b/rust/hw/char/pl011/vendor/itertools/src/peeking_take_while.rs
new file mode 100644
index 0000000000..3a37228122
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/peeking_take_while.rs
@@ -0,0 +1,177 @@
+use std::iter::Peekable;
+use crate::PutBack;
+#[cfg(feature = "use_alloc")]
+use crate::PutBackN;
+
+/// An iterator that allows peeking at an element before deciding to accept it.
+///
+/// See [`.peeking_take_while()`](crate::Itertools::peeking_take_while)
+/// for more information.
+///
+/// This is implemented by peeking adaptors like peekable and put back,
+/// but also by a few iterators that can be peeked natively, like the slice’s
+/// by reference iterator (`std::slice::Iter`).
+pub trait PeekingNext : Iterator {
+ /// Pass a reference to the next iterator element to the closure `accept`;
+ /// if `accept` returns true, return it as the next element,
+ /// else None.
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where Self: Sized,
+ F: FnOnce(&Self::Item) -> bool;
+}
+
+impl<'a, I> PeekingNext for &'a mut I
+ where I: PeekingNext,
+{
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where F: FnOnce(&Self::Item) -> bool
+ {
+ (*self).peeking_next(accept)
+ }
+}
+
+impl<I> PeekingNext for Peekable<I>
+ where I: Iterator,
+{
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where F: FnOnce(&Self::Item) -> bool
+ {
+ if let Some(r) = self.peek() {
+ if !accept(r) {
+ return None;
+ }
+ }
+ self.next()
+ }
+}
+
+impl<I> PeekingNext for PutBack<I>
+ where I: Iterator,
+{
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where F: FnOnce(&Self::Item) -> bool
+ {
+ if let Some(r) = self.next() {
+ if !accept(&r) {
+ self.put_back(r);
+ return None;
+ }
+ Some(r)
+ } else {
+ None
+ }
+ }
+}
+
+#[cfg(feature = "use_alloc")]
+impl<I> PeekingNext for PutBackN<I>
+ where I: Iterator,
+{
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where F: FnOnce(&Self::Item) -> bool
+ {
+ if let Some(r) = self.next() {
+ if !accept(&r) {
+ self.put_back(r);
+ return None;
+ }
+ Some(r)
+ } else {
+ None
+ }
+ }
+}
+
+/// An iterator adaptor that takes items while a closure returns `true`.
+///
+/// See [`.peeking_take_while()`](crate::Itertools::peeking_take_while)
+/// for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct PeekingTakeWhile<'a, I: 'a, F>
+ where I: Iterator,
+{
+ iter: &'a mut I,
+ f: F,
+}
+
+impl<'a, I: 'a, F> std::fmt::Debug for PeekingTakeWhile<'a, I, F>
+where
+ I: Iterator + std::fmt::Debug,
+{
+ debug_fmt_fields!(PeekingTakeWhile, iter);
+}
+
+/// Create a `PeekingTakeWhile`
+pub fn peeking_take_while<I, F>(iter: &mut I, f: F) -> PeekingTakeWhile<I, F>
+ where I: Iterator,
+{
+ PeekingTakeWhile {
+ iter,
+ f,
+ }
+}
+
+impl<'a, I, F> Iterator for PeekingTakeWhile<'a, I, F>
+ where I: PeekingNext,
+ F: FnMut(&I::Item) -> bool,
+
+{
+ type Item = I::Item;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.peeking_next(&mut self.f)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.iter.size_hint().1)
+ }
+}
+
+impl<'a, I, F> PeekingNext for PeekingTakeWhile<'a, I, F>
+ where I: PeekingNext,
+ F: FnMut(&I::Item) -> bool,
+{
+ fn peeking_next<G>(&mut self, g: G) -> Option<Self::Item>
+ where G: FnOnce(&Self::Item) -> bool,
+ {
+ let f = &mut self.f;
+ self.iter.peeking_next(|r| f(r) && g(r))
+ }
+}
+
+// Some iterators are so lightweight we can simply clone them to save their
+// state and use that for peeking.
+macro_rules! peeking_next_by_clone {
+ ([$($typarm:tt)*] $type_:ty) => {
+ impl<$($typarm)*> PeekingNext for $type_ {
+ fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+ where F: FnOnce(&Self::Item) -> bool
+ {
+ let saved_state = self.clone();
+ if let Some(r) = self.next() {
+ if !accept(&r) {
+ *self = saved_state;
+ } else {
+ return Some(r)
+ }
+ }
+ None
+ }
+ }
+ }
+}
+
+peeking_next_by_clone! { ['a, T] ::std::slice::Iter<'a, T> }
+peeking_next_by_clone! { ['a] ::std::str::Chars<'a> }
+peeking_next_by_clone! { ['a] ::std::str::CharIndices<'a> }
+peeking_next_by_clone! { ['a] ::std::str::Bytes<'a> }
+peeking_next_by_clone! { ['a, T] ::std::option::Iter<'a, T> }
+peeking_next_by_clone! { ['a, T] ::std::result::Iter<'a, T> }
+peeking_next_by_clone! { [T] ::std::iter::Empty<T> }
+#[cfg(feature = "use_alloc")]
+peeking_next_by_clone! { ['a, T] alloc::collections::linked_list::Iter<'a, T> }
+#[cfg(feature = "use_alloc")]
+peeking_next_by_clone! { ['a, T] alloc::collections::vec_deque::Iter<'a, T> }
+
+// cloning a Rev has no extra overhead; peekable and put backs are never DEI.
+peeking_next_by_clone! { [I: Clone + PeekingNext + DoubleEndedIterator]
+ ::std::iter::Rev<I> }
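A sketch of `peeking_take_while` on top of the `PeekingNext` impls above (here the `Peekable` impl), assuming the usual `Itertools` extension trait; unlike `take_while`, the first rejected element is not consumed:

```rust
use itertools::Itertools;

fn main() {
    let mut chars = "0123abc".chars().peekable();
    // Take the leading digits; the 'a' that stops the loop stays in `chars`.
    let digits: String = chars.peeking_take_while(|c| c.is_ascii_digit()).collect();
    assert_eq!(digits, "0123");
    assert_eq!(chars.next(), Some('a'));
}
```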
diff --git a/rust/hw/char/pl011/vendor/itertools/src/permutations.rs b/rust/hw/char/pl011/vendor/itertools/src/permutations.rs
new file mode 100644
index 0000000000..d03b852626
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/permutations.rs
@@ -0,0 +1,277 @@
+use alloc::vec::Vec;
+use std::fmt;
+use std::iter::once;
+
+use super::lazy_buffer::LazyBuffer;
+
+/// An iterator adaptor that iterates through all the `k`-permutations of the
+/// elements from an iterator.
+///
+/// See [`.permutations()`](crate::Itertools::permutations) for
+/// more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Permutations<I: Iterator> {
+ vals: LazyBuffer<I>,
+ state: PermutationState,
+}
+
+impl<I> Clone for Permutations<I>
+ where I: Clone + Iterator,
+ I::Item: Clone,
+{
+ clone_fields!(vals, state);
+}
+
+#[derive(Clone, Debug)]
+enum PermutationState {
+ StartUnknownLen {
+ k: usize,
+ },
+ OngoingUnknownLen {
+ k: usize,
+ min_n: usize,
+ },
+ Complete(CompleteState),
+ Empty,
+}
+
+#[derive(Clone, Debug)]
+enum CompleteState {
+ Start {
+ n: usize,
+ k: usize,
+ },
+ Ongoing {
+ indices: Vec<usize>,
+ cycles: Vec<usize>,
+ }
+}
+
+enum CompleteStateRemaining {
+ Known(usize),
+ Overflow,
+}
+
+impl<I> fmt::Debug for Permutations<I>
+ where I: Iterator + fmt::Debug,
+ I::Item: fmt::Debug,
+{
+ debug_fmt_fields!(Permutations, vals, state);
+}
+
+pub fn permutations<I: Iterator>(iter: I, k: usize) -> Permutations<I> {
+ let mut vals = LazyBuffer::new(iter);
+
+ if k == 0 {
+ // Special case, yields single empty vec; `n` is irrelevant
+        let state = PermutationState::Complete(CompleteState::Start { n: 0, k: 0 });
+
+ return Permutations {
+ vals,
+ state
+ };
+ }
+
+ let mut enough_vals = true;
+
+ while vals.len() < k {
+ if !vals.get_next() {
+ enough_vals = false;
+ break;
+ }
+ }
+
+ let state = if enough_vals {
+ PermutationState::StartUnknownLen { k }
+ } else {
+ PermutationState::Empty
+ };
+
+ Permutations {
+ vals,
+ state
+ }
+}
+
+impl<I> Iterator for Permutations<I>
+where
+ I: Iterator,
+ I::Item: Clone
+{
+ type Item = Vec<I::Item>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.advance();
+
+ let &mut Permutations { ref vals, ref state } = self;
+
+ match *state {
+            PermutationState::StartUnknownLen { .. } => panic!("unexpected iterator state"),
+ PermutationState::OngoingUnknownLen { k, min_n } => {
+ let latest_idx = min_n - 1;
+ let indices = (0..(k - 1)).chain(once(latest_idx));
+
+ Some(indices.map(|i| vals[i].clone()).collect())
+ }
+            PermutationState::Complete(CompleteState::Ongoing { ref indices, ref cycles }) => {
+ let k = cycles.len();
+ Some(indices[0..k].iter().map(|&i| vals[i].clone()).collect())
+ },
+            PermutationState::Complete(CompleteState::Start { .. }) | PermutationState::Empty => None
+ }
+ }
+
+ fn count(self) -> usize {
+ fn from_complete(complete_state: CompleteState) -> usize {
+ match complete_state.remaining() {
+ CompleteStateRemaining::Known(count) => count,
+ CompleteStateRemaining::Overflow => {
+ panic!("Iterator count greater than usize::MAX");
+ }
+ }
+ }
+
+ let Permutations { vals, state } = self;
+ match state {
+ PermutationState::StartUnknownLen { k } => {
+ let n = vals.len() + vals.it.count();
+ let complete_state = CompleteState::Start { n, k };
+
+ from_complete(complete_state)
+ }
+ PermutationState::OngoingUnknownLen { k, min_n } => {
+ let prev_iteration_count = min_n - k + 1;
+ let n = vals.len() + vals.it.count();
+ let complete_state = CompleteState::Start { n, k };
+
+ from_complete(complete_state) - prev_iteration_count
+ },
+ PermutationState::Complete(state) => from_complete(state),
+ PermutationState::Empty => 0
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ match self.state {
+ PermutationState::StartUnknownLen { .. } |
+            PermutationState::OngoingUnknownLen { .. } => (0, None), // TODO can we improve this lower bound?
+ PermutationState::Complete(ref state) => match state.remaining() {
+ CompleteStateRemaining::Known(count) => (count, Some(count)),
+ CompleteStateRemaining::Overflow => (::std::usize::MAX, None)
+ }
+ PermutationState::Empty => (0, Some(0))
+ }
+ }
+}
+
+impl<I> Permutations<I>
+where
+ I: Iterator,
+ I::Item: Clone
+{
+ fn advance(&mut self) {
+ let &mut Permutations { ref mut vals, ref mut state } = self;
+
+ *state = match *state {
+ PermutationState::StartUnknownLen { k } => {
+ PermutationState::OngoingUnknownLen { k, min_n: k }
+ }
+ PermutationState::OngoingUnknownLen { k, min_n } => {
+ if vals.get_next() {
+ PermutationState::OngoingUnknownLen { k, min_n: min_n + 1 }
+ } else {
+ let n = min_n;
+ let prev_iteration_count = n - k + 1;
+ let mut complete_state = CompleteState::Start { n, k };
+
+ // Advance the complete-state iterator to the correct point
+ for _ in 0..(prev_iteration_count + 1) {
+ complete_state.advance();
+ }
+
+ PermutationState::Complete(complete_state)
+ }
+ }
+ PermutationState::Complete(ref mut state) => {
+ state.advance();
+
+ return;
+ }
+ PermutationState::Empty => { return; }
+ };
+ }
+}
+
+impl CompleteState {
+ fn advance(&mut self) {
+ *self = match *self {
+ CompleteState::Start { n, k } => {
+ let indices = (0..n).collect();
+ let cycles = ((n - k)..n).rev().collect();
+
+ CompleteState::Ongoing {
+ cycles,
+ indices
+ }
+ },
+ CompleteState::Ongoing { ref mut indices, ref mut cycles } => {
+ let n = indices.len();
+ let k = cycles.len();
+
+ for i in (0..k).rev() {
+ if cycles[i] == 0 {
+ cycles[i] = n - i - 1;
+
+ let to_push = indices.remove(i);
+ indices.push(to_push);
+ } else {
+ let swap_index = n - cycles[i];
+ indices.swap(i, swap_index);
+
+ cycles[i] -= 1;
+ return;
+ }
+ }
+
+ CompleteState::Start { n, k }
+ }
+ }
+ }
+
+ fn remaining(&self) -> CompleteStateRemaining {
+ use self::CompleteStateRemaining::{Known, Overflow};
+
+ match *self {
+ CompleteState::Start { n, k } => {
+ if n < k {
+ return Known(0);
+ }
+
+                let count: Option<usize> = (n - k + 1..n + 1).fold(Some(1), |acc, i| {
+ acc.and_then(|acc| acc.checked_mul(i))
+ });
+
+ match count {
+ Some(count) => Known(count),
+ None => Overflow
+ }
+ }
+ CompleteState::Ongoing { ref indices, ref cycles } => {
+ let mut count: usize = 0;
+
+ for (i, &c) in cycles.iter().enumerate() {
+ let radix = indices.len() - i;
+ let next_count = count.checked_mul(radix)
+ .and_then(|count| count.checked_add(c));
+
+ count = match next_count {
+ Some(count) => count,
+ None => { return Overflow; }
+ };
+ }
+
+ Known(count)
+ }
+ }
+ }
+}
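A usage sketch of the `Permutations` adaptor above via the assumed `Itertools::permutations` method; the ordering shown is the one documented for this adaptor:

```rust
use itertools::Itertools;

fn main() {
    let perms: Vec<Vec<i32>> = (5..8).permutations(2).collect();
    assert_eq!(perms, vec![
        vec![5, 6], vec![5, 7],
        vec![6, 5], vec![6, 7],
        vec![7, 5], vec![7, 6],
    ]);

    // k = 0 is the special case above: exactly one empty permutation.
    assert_eq!((0..3).permutations(0).count(), 1);
}
```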
diff --git a/rust/hw/char/pl011/vendor/itertools/src/powerset.rs b/rust/hw/char/pl011/vendor/itertools/src/powerset.rs
new file mode 100644
index 0000000000..4d7685b12a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/powerset.rs
@@ -0,0 +1,90 @@
+use std::fmt;
+use std::iter::FusedIterator;
+use std::usize;
+use alloc::vec::Vec;
+
+use super::combinations::{Combinations, combinations};
+use super::size_hint;
+
+/// An iterator to iterate through the powerset of the elements from an iterator.
+///
+/// See [`.powerset()`](crate::Itertools::powerset) for more
+/// information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Powerset<I: Iterator> {
+ combs: Combinations<I>,
+ // Iterator `position` (equal to count of yielded elements).
+ pos: usize,
+}
+
+impl<I> Clone for Powerset<I>
+ where I: Clone + Iterator,
+ I::Item: Clone,
+{
+ clone_fields!(combs, pos);
+}
+
+impl<I> fmt::Debug for Powerset<I>
+ where I: Iterator + fmt::Debug,
+ I::Item: fmt::Debug,
+{
+ debug_fmt_fields!(Powerset, combs, pos);
+}
+
+/// Create a new `Powerset` from a clonable iterator.
+pub fn powerset<I>(src: I) -> Powerset<I>
+ where I: Iterator,
+ I::Item: Clone,
+{
+ Powerset {
+ combs: combinations(src, 0),
+ pos: 0,
+ }
+}
+
+impl<I> Iterator for Powerset<I>
+ where
+ I: Iterator,
+ I::Item: Clone,
+{
+ type Item = Vec<I::Item>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(elt) = self.combs.next() {
+ self.pos = self.pos.saturating_add(1);
+ Some(elt)
+ } else if self.combs.k() < self.combs.n()
+ || self.combs.k() == 0
+ {
+ self.combs.reset(self.combs.k() + 1);
+ self.combs.next().map(|elt| {
+ self.pos = self.pos.saturating_add(1);
+ elt
+ })
+ } else {
+ None
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // Total bounds for source iterator.
+        let src_total = size_hint::add_scalar(self.combs.src().size_hint(), self.combs.n());
+
+ // Total bounds for self ( length(powerset(set) == 2 ^ length(set) )
+ let self_total = size_hint::pow_scalar_base(2, src_total);
+
+ if self.pos < usize::MAX {
+ // Subtract count of elements already yielded from total.
+ size_hint::sub_scalar(self_total, self.pos)
+ } else {
+ // Fallback: self.pos is saturated and no longer reliable.
+ (0, self_total.1)
+ }
+ }
+}
+
+impl<I> FusedIterator for Powerset<I>
+ where
+ I: Iterator,
+ I::Item: Clone,
+{}
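A sketch of the `Powerset` adaptor above through the assumed `Itertools::powerset` method; subsets come out in order of increasing size, one `Combinations` pass per size:

```rust
use itertools::Itertools;

fn main() {
    let sets: Vec<Vec<i32>> = (1..4).powerset().collect();
    assert_eq!(sets, vec![
        vec![],
        vec![1], vec![2], vec![3],
        vec![1, 2], vec![1, 3], vec![2, 3],
        vec![1, 2, 3],
    ]);
    // 2^3 subsets of a 3-element collection.
    assert_eq!(sets.len(), 8);
}
```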
diff --git a/rust/hw/char/pl011/vendor/itertools/src/process_results_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/process_results_impl.rs
new file mode 100644
index 0000000000..713db45514
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/process_results_impl.rs
@@ -0,0 +1,68 @@
+#[cfg(doc)]
+use crate::Itertools;
+
+/// An iterator that produces only the `T` values as long as the
+/// inner iterator produces `Ok(T)`.
+///
+/// Used by [`process_results`](crate::process_results), see its docs
+/// for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+#[derive(Debug)]
+pub struct ProcessResults<'a, I, E: 'a> {
+ error: &'a mut Result<(), E>,
+ iter: I,
+}
+
+impl<'a, I, T, E> Iterator for ProcessResults<'a, I, E>
+ where I: Iterator<Item = Result<T, E>>
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.iter.next() {
+ Some(Ok(x)) => Some(x),
+ Some(Err(e)) => {
+ *self.error = Err(e);
+ None
+ }
+ None => None,
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.iter.size_hint().1)
+ }
+
+ fn fold<B, F>(mut self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ let error = self.error;
+ self.iter
+ .try_fold(init, |acc, opt| match opt {
+ Ok(x) => Ok(f(acc, x)),
+ Err(e) => {
+ *error = Err(e);
+ Err(acc)
+ }
+ })
+ .unwrap_or_else(|e| e)
+ }
+}
+
+/// “Lift” a function of the values of an iterator so that it can process
+/// an iterator of `Result` values instead.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::process_results`].
+pub fn process_results<I, F, T, E, R>(iterable: I, processor: F) -> Result<R, E>
+ where I: IntoIterator<Item = Result<T, E>>,
+ F: FnOnce(ProcessResults<I::IntoIter, E>) -> R
+{
+ let iter = iterable.into_iter();
+ let mut error = Ok(());
+
+ let result = processor(ProcessResults { error: &mut error, iter });
+
+ error.map(|_| result)
+}
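A sketch of `process_results` above (assuming the crate-root re-export): the closure sees a plain iterator of `T`, and the first `Err` both stops iteration and becomes the overall result:

```rust
use itertools::process_results;

fn main() {
    let ok: Vec<Result<i32, &str>> = vec![Ok(1), Ok(2), Ok(3)];
    assert_eq!(process_results(ok, |it| it.sum::<i32>()), Ok(6));

    let bad: Vec<Result<i32, &str>> = vec![Ok(1), Err("boom"), Ok(3)];
    // Iteration stops at the error; the partial sum is discarded.
    assert_eq!(process_results(bad, |it| it.sum::<i32>()), Err("boom"));
}
```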
diff --git a/rust/hw/char/pl011/vendor/itertools/src/put_back_n_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/put_back_n_impl.rs
new file mode 100644
index 0000000000..60ea8e6495
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/put_back_n_impl.rs
@@ -0,0 +1,61 @@
+use alloc::vec::Vec;
+
+use crate::size_hint;
+
+/// An iterator adaptor that allows putting multiple
+/// items in front of the iterator.
+///
+/// Iterator element type is `I::Item`.
+#[derive(Debug, Clone)]
+pub struct PutBackN<I: Iterator> {
+ top: Vec<I::Item>,
+ iter: I,
+}
+
+/// Create an iterator where you can put back multiple values to the front
+/// of the iteration.
+///
+/// Iterator element type is `I::Item`.
+pub fn put_back_n<I>(iterable: I) -> PutBackN<I::IntoIter>
+ where I: IntoIterator
+{
+ PutBackN {
+ top: Vec::new(),
+ iter: iterable.into_iter(),
+ }
+}
+
+impl<I: Iterator> PutBackN<I> {
+ /// Puts x in front of the iterator.
+ /// The values are yielded in order of the most recently put back
+ /// values first.
+ ///
+ /// ```rust
+ /// use itertools::put_back_n;
+ ///
+ /// let mut it = put_back_n(1..5);
+ /// it.next();
+ /// it.put_back(1);
+ /// it.put_back(0);
+ ///
+ /// assert!(itertools::equal(it, 0..5));
+ /// ```
+ #[inline]
+ pub fn put_back(&mut self, x: I::Item) {
+ self.top.push(x);
+ }
+}
+
+impl<I: Iterator> Iterator for PutBackN<I> {
+ type Item = I::Item;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.top.pop().or_else(|| self.iter.next())
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::add_scalar(self.iter.size_hint(), self.top.len())
+ }
+}
+
diff --git a/rust/hw/char/pl011/vendor/itertools/src/rciter_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/rciter_impl.rs
new file mode 100644
index 0000000000..7298350a88
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/rciter_impl.rs
@@ -0,0 +1,99 @@
+
+use std::iter::{FusedIterator, IntoIterator};
+use alloc::rc::Rc;
+use std::cell::RefCell;
+
+/// A wrapper for `Rc<RefCell<I>>`, that implements the `Iterator` trait.
+#[derive(Debug)]
+pub struct RcIter<I> {
+ /// The boxed iterator.
+ pub rciter: Rc<RefCell<I>>,
+}
+
+/// Return an iterator inside a `Rc<RefCell<_>>` wrapper.
+///
+/// The returned `RcIter` can be cloned, and each clone will refer back to the
+/// same original iterator.
+///
+/// `RcIter` allows doing interesting things like using `.zip()` on an iterator with
+/// itself, at the cost of runtime borrow checking which may have a performance
+/// penalty.
+///
+/// Iterator element type is `Self::Item`.
+///
+/// ```
+/// use itertools::rciter;
+/// use itertools::zip;
+///
+/// // In this example a range iterator is created and we iterate it using
+/// // three separate handles (two of them given to zip).
+/// // We also use the IntoIterator implementation for `&RcIter`.
+///
+/// let mut iter = rciter(0..9);
+/// let mut z = zip(&iter, &iter);
+///
+/// assert_eq!(z.next(), Some((0, 1)));
+/// assert_eq!(z.next(), Some((2, 3)));
+/// assert_eq!(z.next(), Some((4, 5)));
+/// assert_eq!(iter.next(), Some(6));
+/// assert_eq!(z.next(), Some((7, 8)));
+/// assert_eq!(z.next(), None);
+/// ```
+///
+/// **Panics** in iterator methods if a borrow error is encountered in the
+/// iterator methods. It can only happen if the `RcIter` is reentered in
+/// `.next()`, i.e. if it somehow participates in an “iterator knot”
+/// where it is an adaptor of itself.
+pub fn rciter<I>(iterable: I) -> RcIter<I::IntoIter>
+ where I: IntoIterator
+{
+ RcIter { rciter: Rc::new(RefCell::new(iterable.into_iter())) }
+}
+
+impl<I> Clone for RcIter<I> {
+ clone_fields!(rciter);
+}
+
+impl<A, I> Iterator for RcIter<I>
+ where I: Iterator<Item = A>
+{
+ type Item = A;
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.rciter.borrow_mut().next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // To work sanely with other API that assume they own an iterator,
+ // so it can't change in other places, we can't guarantee as much
+ // in our size_hint. Other clones may drain values under our feet.
+ (0, self.rciter.borrow().size_hint().1)
+ }
+}
+
+impl<I> DoubleEndedIterator for RcIter<I>
+ where I: DoubleEndedIterator
+{
+ #[inline]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.rciter.borrow_mut().next_back()
+ }
+}
+
+/// Return an iterator from `&RcIter<I>` (by simply cloning it).
+impl<'a, I> IntoIterator for &'a RcIter<I>
+ where I: Iterator
+{
+ type Item = I::Item;
+ type IntoIter = RcIter<I>;
+
+ fn into_iter(self) -> RcIter<I> {
+ self.clone()
+ }
+}
+
+
+impl<A, I> FusedIterator for RcIter<I>
+ where I: FusedIterator<Item = A>
+{}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/repeatn.rs b/rust/hw/char/pl011/vendor/itertools/src/repeatn.rs
new file mode 100644
index 0000000000..e025f6f6a5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/repeatn.rs
@@ -0,0 +1,59 @@
+use std::iter::FusedIterator;
+
+/// An iterator that produces *n* repetitions of an element.
+///
+/// See [`repeat_n()`](crate::repeat_n) for more information.
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[derive(Clone, Debug)]
+pub struct RepeatN<A> {
+ elt: Option<A>,
+ n: usize,
+}
+
+/// Create an iterator that produces `n` repetitions of `element`.
+pub fn repeat_n<A>(element: A, n: usize) -> RepeatN<A>
+ where A: Clone,
+{
+ if n == 0 {
+ RepeatN { elt: None, n, }
+ } else {
+ RepeatN { elt: Some(element), n, }
+ }
+}
+
+impl<A> Iterator for RepeatN<A>
+ where A: Clone
+{
+ type Item = A;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.n > 1 {
+ self.n -= 1;
+ self.elt.as_ref().cloned()
+ } else {
+ self.n = 0;
+ self.elt.take()
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.n, Some(self.n))
+ }
+}
+
+impl<A> DoubleEndedIterator for RepeatN<A>
+ where A: Clone
+{
+ #[inline]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.next()
+ }
+}
+
+impl<A> ExactSizeIterator for RepeatN<A>
+ where A: Clone
+{}
+
+impl<A> FusedIterator for RepeatN<A>
+ where A: Clone
+{}
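A short sketch of the `repeat_n` source above (assuming the crate-root re-export); note that `n == 0` never stores the element at all:

```rust
use itertools::repeat_n;

fn main() {
    let xs: Vec<&str> = repeat_n("ho", 3).collect();
    assert_eq!(xs, vec!["ho", "ho", "ho"]);

    // ExactSizeIterator is implemented, so the length is known up front.
    assert_eq!(repeat_n('x', 4).len(), 4);
    assert_eq!(repeat_n('x', 0).count(), 0);
}
```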
diff --git a/rust/hw/char/pl011/vendor/itertools/src/size_hint.rs b/rust/hw/char/pl011/vendor/itertools/src/size_hint.rs
new file mode 100644
index 0000000000..71ea1412b5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/size_hint.rs
@@ -0,0 +1,119 @@
+//! Arithmetic on `Iterator.size_hint()` values.
+//!
+
+use std::usize;
+use std::cmp;
+use std::u32;
+
+/// `SizeHint` is the return type of `Iterator::size_hint()`.
+pub type SizeHint = (usize, Option<usize>);
+
+/// Add `SizeHint` correctly.
+#[inline]
+pub fn add(a: SizeHint, b: SizeHint) -> SizeHint {
+ let min = a.0.saturating_add(b.0);
+ let max = match (a.1, b.1) {
+ (Some(x), Some(y)) => x.checked_add(y),
+ _ => None,
+ };
+
+ (min, max)
+}
+
+/// Add `x` correctly to a `SizeHint`.
+#[inline]
+pub fn add_scalar(sh: SizeHint, x: usize) -> SizeHint {
+ let (mut low, mut hi) = sh;
+ low = low.saturating_add(x);
+ hi = hi.and_then(|elt| elt.checked_add(x));
+ (low, hi)
+}
+
+/// Subtract `x` correctly from a `SizeHint`.
+#[inline]
+#[allow(dead_code)]
+pub fn sub_scalar(sh: SizeHint, x: usize) -> SizeHint {
+ let (mut low, mut hi) = sh;
+ low = low.saturating_sub(x);
+ hi = hi.map(|elt| elt.saturating_sub(x));
+ (low, hi)
+}
+
+
+/// Multiply `SizeHint` correctly
+///
+/// ```ignore
+/// use std::usize;
+/// use itertools::size_hint;
+///
+/// assert_eq!(size_hint::mul((3, Some(4)), (3, Some(4))),
+/// (9, Some(16)));
+///
+/// assert_eq!(size_hint::mul((3, Some(4)), (usize::MAX, None)),
+/// (usize::MAX, None));
+///
+/// assert_eq!(size_hint::mul((3, None), (0, Some(0))),
+/// (0, Some(0)));
+/// ```
+#[inline]
+pub fn mul(a: SizeHint, b: SizeHint) -> SizeHint {
+ let low = a.0.saturating_mul(b.0);
+ let hi = match (a.1, b.1) {
+ (Some(x), Some(y)) => x.checked_mul(y),
+ (Some(0), None) | (None, Some(0)) => Some(0),
+ _ => None,
+ };
+ (low, hi)
+}
+
+/// Multiply `x` correctly with a `SizeHint`.
+#[inline]
+pub fn mul_scalar(sh: SizeHint, x: usize) -> SizeHint {
+ let (mut low, mut hi) = sh;
+ low = low.saturating_mul(x);
+ hi = hi.and_then(|elt| elt.checked_mul(x));
+ (low, hi)
+}
+
+/// Raise `base` correctly by a `SizeHint` exponent.
+#[inline]
+pub fn pow_scalar_base(base: usize, exp: SizeHint) -> SizeHint {
+ let exp_low = cmp::min(exp.0, u32::MAX as usize) as u32;
+ let low = base.saturating_pow(exp_low);
+
+ let hi = exp.1.and_then(|exp| {
+ let exp_hi = cmp::min(exp, u32::MAX as usize) as u32;
+ base.checked_pow(exp_hi)
+ });
+
+ (low, hi)
+}
+
+/// Return the maximum
+#[inline]
+pub fn max(a: SizeHint, b: SizeHint) -> SizeHint {
+ let (a_lower, a_upper) = a;
+ let (b_lower, b_upper) = b;
+
+ let lower = cmp::max(a_lower, b_lower);
+
+ let upper = match (a_upper, b_upper) {
+ (Some(x), Some(y)) => Some(cmp::max(x, y)),
+ _ => None,
+ };
+
+ (lower, upper)
+}
+
+/// Return the minimum
+#[inline]
+pub fn min(a: SizeHint, b: SizeHint) -> SizeHint {
+ let (a_lower, a_upper) = a;
+ let (b_lower, b_upper) = b;
+ let lower = cmp::min(a_lower, b_lower);
+ let upper = match (a_upper, b_upper) {
+ (Some(u1), Some(u2)) => Some(cmp::min(u1, u2)),
+ _ => a_upper.or(b_upper),
+ };
+ (lower, upper)
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/sources.rs b/rust/hw/char/pl011/vendor/itertools/src/sources.rs
new file mode 100644
index 0000000000..3877ce3c8b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/sources.rs
@@ -0,0 +1,183 @@
+//! Iterators that are sources (produce elements from parameters,
+//! not from another iterator).
+#![allow(deprecated)]
+
+use std::fmt;
+use std::mem;
+
+/// See [`repeat_call`](crate::repeat_call) for more information.
+#[derive(Clone)]
+#[deprecated(note="Use std repeat_with() instead", since="0.8.0")]
+pub struct RepeatCall<F> {
+ f: F,
+}
+
+impl<F> fmt::Debug for RepeatCall<F>
+{
+ debug_fmt_fields!(RepeatCall, );
+}
+
+/// An iterator source that produces elements indefinitely by calling
+/// a given closure.
+///
+/// Iterator element type is the return type of the closure.
+///
+/// ```
+/// use itertools::repeat_call;
+/// use itertools::Itertools;
+/// use std::collections::BinaryHeap;
+///
+/// let mut heap = BinaryHeap::from(vec![2, 5, 3, 7, 8]);
+///
+/// // extract each element in sorted order
+/// for element in repeat_call(|| heap.pop()).while_some() {
+/// print!("{}", element);
+/// }
+///
+/// itertools::assert_equal(
+/// repeat_call(|| 1).take(5),
+/// vec![1, 1, 1, 1, 1]
+/// );
+/// ```
+#[deprecated(note="Use std repeat_with() instead", since="0.8.0")]
+pub fn repeat_call<F, A>(function: F) -> RepeatCall<F>
+ where F: FnMut() -> A
+{
+ RepeatCall { f: function }
+}
+
+impl<A, F> Iterator for RepeatCall<F>
+ where F: FnMut() -> A
+{
+ type Item = A;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ Some((self.f)())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (usize::max_value(), None)
+ }
+}
+
+/// Creates a new unfold source with the specified closure as the "iterator
+/// function" and an initial state to eventually pass to the closure
+///
+/// `unfold` is a general iterator builder: it has a mutable state value,
+/// and a closure with access to the state that produces the next value.
+///
+/// This is more or less equivalent to a regular struct with an [`Iterator`]
+/// implementation, and is useful for one-off iterators.
+///
+/// ```
+/// // an iterator that yields sequential Fibonacci numbers,
+/// // and stops at the maximum representable value.
+///
+/// use itertools::unfold;
+///
+/// let mut fibonacci = unfold((1u32, 1u32), |(x1, x2)| {
+/// // Attempt to get the next Fibonacci number
+/// let next = x1.saturating_add(*x2);
+///
+/// // Shift left: ret <- x1 <- x2 <- next
+/// let ret = *x1;
+/// *x1 = *x2;
+/// *x2 = next;
+///
+/// // If addition has saturated at the maximum, we are finished
+/// if ret == *x1 && ret > 1 {
+/// None
+/// } else {
+/// Some(ret)
+/// }
+/// });
+///
+/// itertools::assert_equal(fibonacci.by_ref().take(8),
+/// vec![1, 1, 2, 3, 5, 8, 13, 21]);
+/// assert_eq!(fibonacci.last(), Some(2_971_215_073))
+/// ```
+pub fn unfold<A, St, F>(initial_state: St, f: F) -> Unfold<St, F>
+ where F: FnMut(&mut St) -> Option<A>
+{
+ Unfold {
+ f,
+ state: initial_state,
+ }
+}
+
+impl<St, F> fmt::Debug for Unfold<St, F>
+ where St: fmt::Debug,
+{
+ debug_fmt_fields!(Unfold, state);
+}
+
+/// See [`unfold`](crate::unfold) for more information.
+#[derive(Clone)]
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+pub struct Unfold<St, F> {
+ f: F,
+ /// Internal state that will be passed to the closure on the next iteration
+ pub state: St,
+}
+
+impl<A, St, F> Iterator for Unfold<St, F>
+ where F: FnMut(&mut St) -> Option<A>
+{
+ type Item = A;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ (self.f)(&mut self.state)
+ }
+}
+
+/// An iterator that infinitely applies function to value and yields results.
+///
+/// This `struct` is created by the [`iterate()`](crate::iterate) function.
+/// See its documentation for more.
+#[derive(Clone)]
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+pub struct Iterate<St, F> {
+ state: St,
+ f: F,
+}
+
+impl<St, F> fmt::Debug for Iterate<St, F>
+ where St: fmt::Debug,
+{
+ debug_fmt_fields!(Iterate, state);
+}
+
+impl<St, F> Iterator for Iterate<St, F>
+ where F: FnMut(&St) -> St
+{
+ type Item = St;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ let next_state = (self.f)(&self.state);
+ Some(mem::replace(&mut self.state, next_state))
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (usize::max_value(), None)
+ }
+}
+
+/// Creates a new iterator that infinitely applies function to value and yields results.
+///
+/// ```
+/// use itertools::iterate;
+///
+/// itertools::assert_equal(iterate(1, |&i| i * 3).take(5), vec![1, 3, 9, 27, 81]);
+/// ```
+pub fn iterate<St, F>(initial_value: St, f: F) -> Iterate<St, F>
+ where F: FnMut(&St) -> St
+{
+ Iterate {
+ state: initial_value,
+ f,
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/take_while_inclusive.rs b/rust/hw/char/pl011/vendor/itertools/src/take_while_inclusive.rs
new file mode 100644
index 0000000000..e2a7479e0b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/take_while_inclusive.rs
@@ -0,0 +1,68 @@
+use core::iter::FusedIterator;
+use std::fmt;
+
+/// An iterator adaptor that consumes elements while the given predicate is
+/// `true`, including the element for which the predicate first returned
+/// `false`.
+///
+/// See [`.take_while_inclusive()`](crate::Itertools::take_while_inclusive)
+/// for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct TakeWhileInclusive<'a, I: 'a, F> {
+ iter: &'a mut I,
+ predicate: F,
+ done: bool,
+}
+
+impl<'a, I, F> TakeWhileInclusive<'a, I, F>
+where
+ I: Iterator,
+ F: FnMut(&I::Item) -> bool,
+{
+ /// Create a new [`TakeWhileInclusive`] from an iterator and a predicate.
+ pub fn new(iter: &'a mut I, predicate: F) -> Self {
+ Self { iter, predicate, done: false}
+ }
+}
+
+impl<'a, I, F> fmt::Debug for TakeWhileInclusive<'a, I, F>
+ where I: Iterator + fmt::Debug,
+{
+ debug_fmt_fields!(TakeWhileInclusive, iter);
+}
+
+impl<'a, I, F> Iterator for TakeWhileInclusive<'a, I, F>
+where
+ I: Iterator,
+ F: FnMut(&I::Item) -> bool
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.done {
+ None
+ } else {
+ self.iter.next().map(|item| {
+ if !(self.predicate)(&item) {
+ self.done = true;
+ }
+ item
+ })
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ if self.done {
+ (0, Some(0))
+ } else {
+ (0, self.iter.size_hint().1)
+ }
+ }
+}
+
+impl<I, F> FusedIterator for TakeWhileInclusive<'_, I, F>
+where
+ I: Iterator,
+ F: FnMut(&I::Item) -> bool
+{
+}
\ No newline at end of file
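A sketch of `take_while_inclusive` above, assuming the matching `Itertools` method; in this vendored version the adaptor borrows the base iterator, so iteration can resume after the first rejected-but-yielded element:

```rust
use itertools::Itertools;

fn main() {
    let mut it = vec![1, 2, 3, 4, 5].into_iter();
    // 3 is the first element failing the predicate; it is still yielded.
    let head: Vec<i32> = it.take_while_inclusive(|&x| x < 3).collect();
    assert_eq!(head, vec![1, 2, 3]);
    // The base iterator resumes after the inclusive element.
    assert_eq!(it.next(), Some(4));
}
```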
diff --git a/rust/hw/char/pl011/vendor/itertools/src/tee.rs b/rust/hw/char/pl011/vendor/itertools/src/tee.rs
new file mode 100644
index 0000000000..ea4752906f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/tee.rs
@@ -0,0 +1,78 @@
+use super::size_hint;
+
+use std::cell::RefCell;
+use alloc::collections::VecDeque;
+use alloc::rc::Rc;
+
+/// Common buffer object for the two tee halves
+#[derive(Debug)]
+struct TeeBuffer<A, I> {
+ backlog: VecDeque<A>,
+ iter: I,
+ /// The owner field indicates which id should read from the backlog
+ owner: bool,
+}
+
+/// One half of an iterator pair where both return the same elements.
+///
+/// See [`.tee()`](crate::Itertools::tee) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+#[derive(Debug)]
+pub struct Tee<I>
+ where I: Iterator
+{
+ rcbuffer: Rc<RefCell<TeeBuffer<I::Item, I>>>,
+ id: bool,
+}
+
+pub fn new<I>(iter: I) -> (Tee<I>, Tee<I>)
+ where I: Iterator
+{
+ let buffer = TeeBuffer{backlog: VecDeque::new(), iter, owner: false};
+ let t1 = Tee{rcbuffer: Rc::new(RefCell::new(buffer)), id: true};
+ let t2 = Tee{rcbuffer: t1.rcbuffer.clone(), id: false};
+ (t1, t2)
+}
+
+impl<I> Iterator for Tee<I>
+ where I: Iterator,
+ I::Item: Clone
+{
+ type Item = I::Item;
+ fn next(&mut self) -> Option<Self::Item> {
+        // .borrow_mut may fail here -- but only if the user has tied some kind of weird
+ // knot where the iterator refers back to itself.
+ let mut buffer = self.rcbuffer.borrow_mut();
+ if buffer.owner == self.id {
+ match buffer.backlog.pop_front() {
+ None => {}
+ some_elt => return some_elt,
+ }
+ }
+ match buffer.iter.next() {
+ None => None,
+ Some(elt) => {
+ buffer.backlog.push_back(elt.clone());
+ buffer.owner = !self.id;
+ Some(elt)
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let buffer = self.rcbuffer.borrow();
+ let sh = buffer.iter.size_hint();
+
+ if buffer.owner == self.id {
+ let log_len = buffer.backlog.len();
+ size_hint::add_scalar(sh, log_len)
+ } else {
+ sh
+ }
+ }
+}
+
+impl<I> ExactSizeIterator for Tee<I>
+ where I: ExactSizeIterator,
+ I::Item: Clone
+{}
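A sketch of the `tee` adaptor above (assuming the usual `Itertools::tee` method); both halves see every element, with items buffered in the shared `VecDeque` until the lagging half catches up:

```rust
use itertools::Itertools;

fn main() {
    let (mut a, mut b) = (1..4).tee();
    // Pulling from one half buffers the element for the other half.
    assert_eq!(a.next(), Some(1));
    assert_eq!(a.next(), Some(2));
    assert_eq!(b.next(), Some(1));
    assert_eq!(b.next(), Some(2));
    assert_eq!(a.next(), Some(3));
    assert_eq!(b.next(), Some(3));
    assert_eq!(a.next(), None);
    assert_eq!(b.next(), None);
}
```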
diff --git a/rust/hw/char/pl011/vendor/itertools/src/tuple_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/tuple_impl.rs
new file mode 100644
index 0000000000..fdf0865856
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/tuple_impl.rs
@@ -0,0 +1,331 @@
+//! Some iterators that produce tuples
+
+use std::iter::Fuse;
+use std::iter::FusedIterator;
+use std::iter::Take;
+use std::iter::Cycle;
+use std::marker::PhantomData;
+
+// `HomogeneousTuple` is a public facade for `TupleCollect`, allowing
+// tuple-related methods to be used by clients in generic contexts, while
+// hiding the implementation details of `TupleCollect`.
+// See https://github.com/rust-itertools/itertools/issues/387
+
+/// Implemented for homogeneous tuples of size up to 12.
+pub trait HomogeneousTuple
+ : TupleCollect
+{}
+
+impl<T: TupleCollect> HomogeneousTuple for T {}
+
+/// An iterator over an incomplete tuple.
+///
+/// See [`.tuples()`](crate::Itertools::tuples) and
+/// [`Tuples::into_buffer()`].
+#[derive(Clone, Debug)]
+pub struct TupleBuffer<T>
+ where T: HomogeneousTuple
+{
+ cur: usize,
+ buf: T::Buffer,
+}
+
+impl<T> TupleBuffer<T>
+ where T: HomogeneousTuple
+{
+ fn new(buf: T::Buffer) -> Self {
+ TupleBuffer {
+ cur: 0,
+ buf,
+ }
+ }
+}
+
+impl<T> Iterator for TupleBuffer<T>
+ where T: HomogeneousTuple
+{
+ type Item = T::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let s = self.buf.as_mut();
+ if let Some(ref mut item) = s.get_mut(self.cur) {
+ self.cur += 1;
+ item.take()
+ } else {
+ None
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let buffer = &self.buf.as_ref()[self.cur..];
+ let len = if buffer.is_empty() {
+ 0
+ } else {
+ buffer.iter()
+ .position(|x| x.is_none())
+ .unwrap_or_else(|| buffer.len())
+ };
+ (len, Some(len))
+ }
+}
+
+impl<T> ExactSizeIterator for TupleBuffer<T>
+ where T: HomogeneousTuple
+{
+}
+
+/// An iterator that groups the items in tuples of a specific size.
+///
+/// See [`.tuples()`](crate::Itertools::tuples) for more information.
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Tuples<I, T>
+ where I: Iterator<Item = T::Item>,
+ T: HomogeneousTuple
+{
+ iter: Fuse<I>,
+ buf: T::Buffer,
+}
+
+/// Create a new tuples iterator.
+pub fn tuples<I, T>(iter: I) -> Tuples<I, T>
+ where I: Iterator<Item = T::Item>,
+ T: HomogeneousTuple
+{
+ Tuples {
+ iter: iter.fuse(),
+ buf: Default::default(),
+ }
+}
+
+impl<I, T> Iterator for Tuples<I, T>
+ where I: Iterator<Item = T::Item>,
+ T: HomogeneousTuple
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ T::collect_from_iter(&mut self.iter, &mut self.buf)
+ }
+}
+
+impl<I, T> Tuples<I, T>
+ where I: Iterator<Item = T::Item>,
+ T: HomogeneousTuple
+{
+    /// Return a buffer with the produced items that were not enough to be grouped into a tuple.
+ ///
+ /// ```
+ /// use itertools::Itertools;
+ ///
+ /// let mut iter = (0..5).tuples();
+ /// assert_eq!(Some((0, 1, 2)), iter.next());
+ /// assert_eq!(None, iter.next());
+ /// itertools::assert_equal(vec![3, 4], iter.into_buffer());
+ /// ```
+ pub fn into_buffer(self) -> TupleBuffer<T> {
+ TupleBuffer::new(self.buf)
+ }
+}
+
+
+/// An iterator over all contiguous windows that produces tuples of a specific size.
+///
+/// See [`.tuple_windows()`](crate::Itertools::tuple_windows) for more
+/// information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+#[derive(Clone, Debug)]
+pub struct TupleWindows<I, T>
+ where I: Iterator<Item = T::Item>,
+ T: HomogeneousTuple
+{
+ iter: I,
+ last: Option<T>,
+}
+
+/// Create a new tuple windows iterator.
+pub fn tuple_windows<I, T>(mut iter: I) -> TupleWindows<I, T>
+ where I: Iterator<Item = T::Item>,
+ T: HomogeneousTuple,
+ T::Item: Clone
+{
+ use std::iter::once;
+
+ let mut last = None;
+ if T::num_items() != 1 {
+ // put in a duplicate item in front of the tuple; this simplifies
+ // .next() function.
+ if let Some(item) = iter.next() {
+ let iter = once(item.clone()).chain(once(item)).chain(&mut iter);
+ last = T::collect_from_iter_no_buf(iter);
+ }
+ }
+
+ TupleWindows {
+ iter,
+ last,
+ }
+}
+
+impl<I, T> Iterator for TupleWindows<I, T>
+ where I: Iterator<Item = T::Item>,
+ T: HomogeneousTuple + Clone,
+ T::Item: Clone
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if T::num_items() == 1 {
+ return T::collect_from_iter_no_buf(&mut self.iter)
+ }
+ if let Some(ref mut last) = self.last {
+ if let Some(new) = self.iter.next() {
+ last.left_shift_push(new);
+ return Some(last.clone());
+ }
+ }
+ None
+ }
+}
+
+impl<I, T> FusedIterator for TupleWindows<I, T>
+ where I: FusedIterator<Item = T::Item>,
+ T: HomogeneousTuple + Clone,
+ T::Item: Clone
+{}
+
+/// An iterator over all windows, wrapping back to the first elements when the
+/// window would otherwise exceed the length of the iterator, producing tuples
+/// of a specific size.
+///
+/// See [`.circular_tuple_windows()`](crate::Itertools::circular_tuple_windows) for more
+/// information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+#[derive(Debug, Clone)]
+pub struct CircularTupleWindows<I, T: Clone>
+ where I: Iterator<Item = T::Item> + Clone,
+ T: TupleCollect + Clone
+{
+ iter: Take<TupleWindows<Cycle<I>, T>>,
+ phantom_data: PhantomData<T>
+}
+
+pub fn circular_tuple_windows<I, T>(iter: I) -> CircularTupleWindows<I, T>
+ where I: Iterator<Item = T::Item> + Clone + ExactSizeIterator,
+ T: TupleCollect + Clone,
+ T::Item: Clone
+{
+ let len = iter.len();
+ let iter = tuple_windows(iter.cycle()).take(len);
+
+ CircularTupleWindows {
+ iter,
+ phantom_data: PhantomData{}
+ }
+}
+
+impl<I, T> Iterator for CircularTupleWindows<I, T>
+ where I: Iterator<Item = T::Item> + Clone,
+ T: TupleCollect + Clone,
+ T::Item: Clone
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.next()
+ }
+}
+
+pub trait TupleCollect: Sized {
+ type Item;
+    type Buffer: Default + AsRef<[Option<Self::Item>]> + AsMut<[Option<Self::Item>]>;
+
+ fn collect_from_iter<I>(iter: I, buf: &mut Self::Buffer) -> Option<Self>
+ where I: IntoIterator<Item = Self::Item>;
+
+ fn collect_from_iter_no_buf<I>(iter: I) -> Option<Self>
+ where I: IntoIterator<Item = Self::Item>;
+
+ fn num_items() -> usize;
+
+ fn left_shift_push(&mut self, item: Self::Item);
+}
+
+macro_rules! count_ident{
+ () => {0};
+ ($i0:ident, $($i:ident,)*) => {1 + count_ident!($($i,)*)};
+}
+macro_rules! rev_for_each_ident{
+ ($m:ident, ) => {};
+ ($m:ident, $i0:ident, $($i:ident,)*) => {
+ rev_for_each_ident!($m, $($i,)*);
+ $m!($i0);
+ };
+}
+
+macro_rules! impl_tuple_collect {
+ ($dummy:ident,) => {}; // stop
+ ($dummy:ident, $($Y:ident,)*) => (
+ impl_tuple_collect!($($Y,)*);
+ impl<A> TupleCollect for ($(ignore_ident!($Y, A),)*) {
+ type Item = A;
+ type Buffer = [Option<A>; count_ident!($($Y,)*) - 1];
+
+ #[allow(unused_assignments, unused_mut)]
+            fn collect_from_iter<I>(iter: I, buf: &mut Self::Buffer) -> Option<Self>
+ where I: IntoIterator<Item = A>
+ {
+ let mut iter = iter.into_iter();
+ $(
+ let mut $Y = None;
+ )*
+
+ loop {
+ $(
+ $Y = iter.next();
+ if $Y.is_none() {
+ break
+ }
+ )*
+ return Some(($($Y.unwrap()),*,))
+ }
+
+ let mut i = 0;
+ let mut s = buf.as_mut();
+ $(
+ if i < s.len() {
+ s[i] = $Y;
+ i += 1;
+ }
+ )*
+ return None;
+ }
+
+ fn collect_from_iter_no_buf<I>(iter: I) -> Option<Self>
+ where I: IntoIterator<Item = A>
+ {
+ let mut iter = iter.into_iter();
+
+ Some(($(
+ { let $Y = iter.next()?; $Y },
+ )*))
+ }
+
+ fn num_items() -> usize {
+ count_ident!($($Y,)*)
+ }
+
+ fn left_shift_push(&mut self, mut item: A) {
+ use std::mem::replace;
+
+ let &mut ($(ref mut $Y),*,) = self;
+ macro_rules! replace_item{($i:ident) => {
+ item = replace($i, item);
+ }}
+ rev_for_each_ident!(replace_item, $($Y,)*);
+ drop(item);
+ }
+ }
+ )
+}
+impl_tuple_collect!(dummy, a, b, c, d, e, f, g, h, i, j, k, l,);
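Usage sketches for the tuple adaptors above, assuming the usual `Itertools::tuples` and `Itertools::tuple_windows` methods; `tuples` yields non-overlapping groups while `tuple_windows` yields overlapping ones:

```rust
use itertools::Itertools;

fn main() {
    // Non-overlapping groups of two; a leftover would go to into_buffer().
    let pairs: Vec<(i32, i32)> = (1..5).tuples().collect();
    assert_eq!(pairs, vec![(1, 2), (3, 4)]);

    // Overlapping windows of two (requires Clone items).
    let windows: Vec<(i32, i32)> = (1..5).tuple_windows().collect();
    assert_eq!(windows, vec![(1, 2), (2, 3), (3, 4)]);
}
```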
diff --git a/rust/hw/char/pl011/vendor/itertools/src/unique_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/unique_impl.rs
new file mode 100644
index 0000000000..4e81e78ec0
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/unique_impl.rs
@@ -0,0 +1,179 @@
+use std::collections::HashMap;
+use std::collections::hash_map::Entry;
+use std::hash::Hash;
+use std::fmt;
+use std::iter::FusedIterator;
+
+/// An iterator adapter to filter out duplicate elements.
+///
+/// See [`.unique_by()`](crate::Itertools::unique) for more information.
+#[derive(Clone)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct UniqueBy<I: Iterator, V, F> {
+ iter: I,
+ // Use a Hashmap for the Entry API in order to prevent hashing twice.
+ // This can maybe be replaced with a HashSet once `get_or_insert_with`
+ // or a proper Entry API for Hashset is stable and meets this msrv
+ used: HashMap<V, ()>,
+ f: F,
+}
+
+impl<I, V, F> fmt::Debug for UniqueBy<I, V, F>
+ where I: Iterator + fmt::Debug,
+ V: fmt::Debug + Hash + Eq,
+{
+ debug_fmt_fields!(UniqueBy, iter, used);
+}
+
+/// Create a new `UniqueBy` iterator.
+pub fn unique_by<I, V, F>(iter: I, f: F) -> UniqueBy<I, V, F>
+ where V: Eq + Hash,
+ F: FnMut(&I::Item) -> V,
+ I: Iterator,
+{
+ UniqueBy {
+ iter,
+ used: HashMap::new(),
+ f,
+ }
+}
+
+// count the number of new unique keys in iterable (`used` is the set already seen)
+fn count_new_keys<I, K>(mut used: HashMap<K, ()>, iterable: I) -> usize
+ where I: IntoIterator<Item=K>,
+ K: Hash + Eq,
+{
+ let iter = iterable.into_iter();
+ let current_used = used.len();
+ used.extend(iter.map(|key| (key, ())));
+ used.len() - current_used
+}
+
+impl<I, V, F> Iterator for UniqueBy<I, V, F>
+ where I: Iterator,
+ V: Eq + Hash,
+ F: FnMut(&I::Item) -> V
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ while let Some(v) = self.iter.next() {
+ let key = (self.f)(&v);
+ if self.used.insert(key, ()).is_none() {
+ return Some(v);
+ }
+ }
+ None
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (low, hi) = self.iter.size_hint();
+ ((low > 0 && self.used.is_empty()) as usize, hi)
+ }
+
+ fn count(self) -> usize {
+ let mut key_f = self.f;
+ count_new_keys(self.used, self.iter.map(move |elt| key_f(&elt)))
+ }
+}
+
+impl<I, V, F> DoubleEndedIterator for UniqueBy<I, V, F>
+ where I: DoubleEndedIterator,
+ V: Eq + Hash,
+ F: FnMut(&I::Item) -> V
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ while let Some(v) = self.iter.next_back() {
+ let key = (self.f)(&v);
+ if self.used.insert(key, ()).is_none() {
+ return Some(v);
+ }
+ }
+ None
+ }
+}
+
+impl<I, V, F> FusedIterator for UniqueBy<I, V, F>
+ where I: FusedIterator,
+ V: Eq + Hash,
+ F: FnMut(&I::Item) -> V
+{}
+
+impl<I> Iterator for Unique<I>
+ where I: Iterator,
+ I::Item: Eq + Hash + Clone
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ while let Some(v) = self.iter.iter.next() {
+ if let Entry::Vacant(entry) = self.iter.used.entry(v) {
+ let elt = entry.key().clone();
+ entry.insert(());
+ return Some(elt);
+ }
+ }
+ None
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (low, hi) = self.iter.iter.size_hint();
+ ((low > 0 && self.iter.used.is_empty()) as usize, hi)
+ }
+
+ fn count(self) -> usize {
+ count_new_keys(self.iter.used, self.iter.iter)
+ }
+}
+
+impl<I> DoubleEndedIterator for Unique<I>
+ where I: DoubleEndedIterator,
+ I::Item: Eq + Hash + Clone
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ while let Some(v) = self.iter.iter.next_back() {
+ if let Entry::Vacant(entry) = self.iter.used.entry(v) {
+ let elt = entry.key().clone();
+ entry.insert(());
+ return Some(elt);
+ }
+ }
+ None
+ }
+}
+
+impl<I> FusedIterator for Unique<I>
+ where I: FusedIterator,
+ I::Item: Eq + Hash + Clone
+{}
+
+/// An iterator adapter to filter out duplicate elements.
+///
+/// See [`.unique()`](crate::Itertools::unique) for more information.
+#[derive(Clone)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Unique<I: Iterator> {
+ iter: UniqueBy<I, I::Item, ()>,
+}
+
+impl<I> fmt::Debug for Unique<I>
+ where I: Iterator + fmt::Debug,
+ I::Item: Hash + Eq + fmt::Debug,
+{
+ debug_fmt_fields!(Unique, iter);
+}
+
+pub fn unique<I>(iter: I) -> Unique<I>
+ where I: Iterator,
+ I::Item: Eq + Hash,
+{
+ Unique {
+ iter: UniqueBy {
+ iter,
+ used: HashMap::new(),
+ f: (),
+ }
+ }
+}
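Sketches for the `Unique`/`UniqueBy` adaptors above, assuming the usual `Itertools` methods; the first occurrence of each key is kept and later duplicates are dropped:

```rust
use itertools::Itertools;

fn main() {
    let firsts: Vec<i32> = vec![1, 4, 1, 3, 4, 2].into_iter().unique().collect();
    assert_eq!(firsts, vec![1, 4, 3, 2]);

    // unique_by keys each element through a closure (here, string length).
    let words: Vec<&str> = vec!["rust", "go", "java", "c"]
        .into_iter()
        .unique_by(|w| w.len())
        .collect();
    assert_eq!(words, vec!["rust", "go", "c"]);
}
```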
diff --git a/rust/hw/char/pl011/vendor/itertools/src/unziptuple.rs b/rust/hw/char/pl011/vendor/itertools/src/unziptuple.rs
new file mode 100644
index 0000000000..7af29ec4ab
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/unziptuple.rs
@@ -0,0 +1,80 @@
+/// Converts an iterator of tuples into a tuple of containers.
+///
+/// `unzip()` consumes an entire iterator of n-ary tuples, producing `n` collections, one for each
+/// column.
+///
+/// This function is, in some sense, the opposite of [`multizip`].
+///
+/// ```
+/// use itertools::multiunzip;
+///
+/// let inputs = vec![(1, 2, 3), (4, 5, 6), (7, 8, 9)];
+///
+/// let (a, b, c): (Vec<_>, Vec<_>, Vec<_>) = multiunzip(inputs);
+///
+/// assert_eq!(a, vec![1, 4, 7]);
+/// assert_eq!(b, vec![2, 5, 8]);
+/// assert_eq!(c, vec![3, 6, 9]);
+/// ```
+///
+/// [`multizip`]: crate::multizip
+pub fn multiunzip<FromI, I>(i: I) -> FromI
+where
+ I: IntoIterator,
+ I::IntoIter: MultiUnzip<FromI>,
+{
+ i.into_iter().multiunzip()
+}
+
+/// An iterator that can be unzipped into multiple collections.
+///
+/// See [`.multiunzip()`](crate::Itertools::multiunzip) for more information.
+pub trait MultiUnzip<FromI>: Iterator {
+ /// Unzip this iterator into multiple collections.
+ fn multiunzip(self) -> FromI;
+}
+
+macro_rules! impl_unzip_iter {
+ ($($T:ident => $FromT:ident),*) => (
+ #[allow(non_snake_case)]
+        impl<IT: Iterator<Item = ($($T,)*)>, $($T, $FromT: Default + Extend<$T>),* > MultiUnzip<($($FromT,)*)> for IT {
+ fn multiunzip(self) -> ($($FromT,)*) {
+                // This implementation mirrors the logic of Iterator::unzip resp. Extend for (A, B) as close as possible.
+                // Unfortunately a lot of the used api there is still unstable (https://github.com/rust-lang/rust/issues/72631).
+                //
+                // Iterator::unzip: https://doc.rust-lang.org/src/core/iter/traits/iterator.rs.html#2825-2865
+                // Extend for (A, B): https://doc.rust-lang.org/src/core/iter/traits/collect.rs.html#370-411
+
+ let mut res = ($($FromT::default(),)*);
+ let ($($FromT,)*) = &mut res;
+
+ // Still unstable #72631
+ // let (lower_bound, _) = self.size_hint();
+ // if lower_bound > 0 {
+ // $($FromT.extend_reserve(lower_bound);)*
+ // }
+
+ self.fold((), |(), ($($T,)*)| {
+ // Still unstable #72631
+ // $( $FromT.extend_one($T); )*
+ $( $FromT.extend(std::iter::once($T)); )*
+ });
+ res
+ }
+ }
+ );
+}
+
+impl_unzip_iter!();
+impl_unzip_iter!(A => FromA);
+impl_unzip_iter!(A => FromA, B => FromB);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI, J => FromJ);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI, J => FromJ, K => FromK);
+impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI, J => FromJ, K => FromK, L => FromL);
diff --git a/rust/hw/char/pl011/vendor/itertools/src/with_position.rs b/rust/hw/char/pl011/vendor/itertools/src/with_position.rs
new file mode 100644
index 0000000000..dda9b25dc3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/with_position.rs
@@ -0,0 +1,88 @@
+use std::iter::{Fuse,Peekable, FusedIterator};
+
+/// An iterator adaptor that wraps each element in a [`Position`].
+///
+/// Iterator element type is `(Position, I::Item)`.
+///
+/// See [`.with_position()`](crate::Itertools::with_position) for more information.
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct WithPosition<I>
+ where I: Iterator,
+{
+ handled_first: bool,
+ peekable: Peekable<Fuse<I>>,
+}
+
+impl<I> Clone for WithPosition<I>
+ where I: Clone + Iterator,
+ I::Item: Clone,
+{
+ clone_fields!(handled_first, peekable);
+}
+
+/// Create a new `WithPosition` iterator.
+pub fn with_position<I>(iter: I) -> WithPosition<I>
+ where I: Iterator,
+{
+ WithPosition {
+ handled_first: false,
+ peekable: iter.fuse().peekable(),
+ }
+}
+
+/// The first component of the value yielded by `WithPosition`.
+/// Indicates the position of this element in the iterator results.
+///
+/// See [`.with_position()`](crate::Itertools::with_position) for more information.
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum Position {
+ /// This is the first element.
+ First,
+ /// This is neither the first nor the last element.
+ Middle,
+ /// This is the last element.
+ Last,
+ /// This is the only element.
+ Only,
+}
+
+impl<I: Iterator> Iterator for WithPosition<I> {
+ type Item = (Position, I::Item);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.peekable.next() {
+ Some(item) => {
+ if !self.handled_first {
+                    // Haven't seen the first item yet, and there is one to give.
+ self.handled_first = true;
+ // Peek to see if this is also the last item,
+ // in which case tag it as `Only`.
+ match self.peekable.peek() {
+ Some(_) => Some((Position::First, item)),
+ None => Some((Position::Only, item)),
+ }
+ } else {
+ // Have seen the first item, and there's something left.
+ // Peek to see if this is the last item.
+ match self.peekable.peek() {
+ Some(_) => Some((Position::Middle, item)),
+ None => Some((Position::Last, item)),
+ }
+ }
+ }
+ // Iterator is finished.
+ None => None,
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.peekable.size_hint()
+ }
+}
+
+impl<I> ExactSizeIterator for WithPosition<I>
+ where I: ExactSizeIterator,
+{ }
+
+impl<I: Iterator> FusedIterator for WithPosition<I>
+{}
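A sketch of `with_position` above, assuming the usual `Itertools` method and the crate-root `Position` re-export; in this vendored version each item is yielded as a `(Position, item)` tuple:

```rust
use itertools::{Itertools, Position};

fn main() {
    let tagged: Vec<(Position, char)> = "abc".chars().with_position().collect();
    assert_eq!(tagged, vec![
        (Position::First, 'a'),
        (Position::Middle, 'b'),
        (Position::Last, 'c'),
    ]);

    // A single element is tagged Only.
    let only: Vec<(Position, i32)> = std::iter::once(7).with_position().collect();
    assert_eq!(only, vec![(Position::Only, 7)]);
}
```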
diff --git a/rust/hw/char/pl011/vendor/itertools/src/zip_eq_impl.rs b/rust/hw/char/pl011/vendor/itertools/src/zip_eq_impl.rs
new file mode 100644
index 0000000000..a079b326a4
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/zip_eq_impl.rs
@@ -0,0 +1,60 @@
+use super::size_hint;
+
+/// An iterator which iterates two other iterators simultaneously
+///
+/// See [`.zip_eq()`](crate::Itertools::zip_eq) for more information.
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct ZipEq<I, J> {
+ a: I,
+ b: J,
+}
+
+/// Iterate `i` and `j` in lock step.
+///
+/// **Panics** if the iterators are not of the same length.
+///
+/// [`IntoIterator`] enabled version of [`Itertools::zip_eq`](crate::Itertools::zip_eq).
+///
+/// ```
+/// use itertools::zip_eq;
+///
+/// let data = [1, 2, 3, 4, 5];
+/// for (a, b) in zip_eq(&data[..data.len() - 1], &data[1..]) {
+/// /* loop body */
+/// }
+/// ```
+pub fn zip_eq<I, J>(i: I, j: J) -> ZipEq<I::IntoIter, J::IntoIter>
+ where I: IntoIterator,
+ J: IntoIterator
+{
+ ZipEq {
+ a: i.into_iter(),
+ b: j.into_iter(),
+ }
+}
+
+impl<I, J> Iterator for ZipEq<I, J>
+ where I: Iterator,
+ J: Iterator
+{
+ type Item = (I::Item, J::Item);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match (self.a.next(), self.b.next()) {
+ (None, None) => None,
+ (Some(a), Some(b)) => Some((a, b)),
+ (None, Some(_)) | (Some(_), None) =>
+            panic!("itertools: .zip_eq() reached end of one iterator before the other")
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::min(self.a.size_hint(), self.b.size_hint())
+ }
+}
+
+impl<I, J> ExactSizeIterator for ZipEq<I, J>
+ where I: ExactSizeIterator,
+ J: ExactSizeIterator
+{}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/zip_longest.rs b/rust/hw/char/pl011/vendor/itertools/src/zip_longest.rs
new file mode 100644
index 0000000000..cb9a7bacb2
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/zip_longest.rs
@@ -0,0 +1,83 @@
+use std::cmp::Ordering::{Equal, Greater, Less};
+use super::size_hint;
+use std::iter::{Fuse, FusedIterator};
+
+use crate::either_or_both::EitherOrBoth;
+
+// ZipLongest originally written by SimonSapin,
+// and dedicated to itertools https://github.com/rust-lang/rust/pull/19283
+
+/// An iterator which iterates two other iterators simultaneously
+///
+/// This iterator is *fused*.
+///
+/// See [`.zip_longest()`](crate::Itertools::zip_longest) for more information.
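+///
+/// An illustrative usage sketch (added for clarity, assuming the crate-root
+/// re-exports `itertools::Itertools` and `itertools::EitherOrBoth`):
+///
+/// ```
+/// use itertools::Itertools;
+/// use itertools::EitherOrBoth::{Both, Left};
+///
+/// let pairs: Vec<_> = (0..3).zip_longest(0..2).collect();
+/// assert_eq!(pairs, vec![Both(0, 0), Both(1, 1), Left(2)]);
+/// ```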
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct ZipLongest<T, U> {
+ a: Fuse<T>,
+ b: Fuse<U>,
+}
+
+/// Create a new `ZipLongest` iterator.
+pub fn zip_longest<T, U>(a: T, b: U) -> ZipLongest<T, U>
+ where T: Iterator,
+ U: Iterator
+{
+ ZipLongest {
+ a: a.fuse(),
+ b: b.fuse(),
+ }
+}
+
+impl<T, U> Iterator for ZipLongest<T, U>
+ where T: Iterator,
+ U: Iterator
+{
+ type Item = EitherOrBoth<T::Item, U::Item>;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ match (self.a.next(), self.b.next()) {
+ (None, None) => None,
+ (Some(a), None) => Some(EitherOrBoth::Left(a)),
+ (None, Some(b)) => Some(EitherOrBoth::Right(b)),
+ (Some(a), Some(b)) => Some(EitherOrBoth::Both(a, b)),
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ size_hint::max(self.a.size_hint(), self.b.size_hint())
+ }
+}
+
+impl<T, U> DoubleEndedIterator for ZipLongest<T, U>
+ where T: DoubleEndedIterator + ExactSizeIterator,
+ U: DoubleEndedIterator + ExactSizeIterator
+{
+ #[inline]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ match self.a.len().cmp(&self.b.len()) {
+ Equal => match (self.a.next_back(), self.b.next_back()) {
+ (None, None) => None,
+ (Some(a), Some(b)) => Some(EitherOrBoth::Both(a, b)),
+ // These can only happen if .len() is inconsistent with .next_back()
+ (Some(a), None) => Some(EitherOrBoth::Left(a)),
+ (None, Some(b)) => Some(EitherOrBoth::Right(b)),
+ },
+ Greater => self.a.next_back().map(EitherOrBoth::Left),
+ Less => self.b.next_back().map(EitherOrBoth::Right),
+ }
+ }
+}
+
+impl<T, U> ExactSizeIterator for ZipLongest<T, U>
+ where T: ExactSizeIterator,
+ U: ExactSizeIterator
+{}
+
+impl<T, U> FusedIterator for ZipLongest<T, U>
+ where T: Iterator,
+ U: Iterator
+{}
diff --git a/rust/hw/char/pl011/vendor/itertools/src/ziptuple.rs b/rust/hw/char/pl011/vendor/itertools/src/ziptuple.rs
new file mode 100644
index 0000000000..6d3a584c49
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/src/ziptuple.rs
@@ -0,0 +1,138 @@
+use super::size_hint;
+
+/// See [`multizip`] for more information.
+#[derive(Clone, Debug)]
+#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
+pub struct Zip<T> {
+ t: T,
+}
+
+/// An iterator that generalizes *.zip()* and allows running multiple iterators in lockstep.
+///
+/// The iterator `Zip<(I, J, ..., M)>` is formed from a tuple of iterators (or values that
+/// implement [`IntoIterator`]) and yields elements
+/// until any of the subiterators yields `None`.
+///
+/// The iterator element type is a tuple like `(A, B, ..., E)` where `A` to `E` are the
+/// element types of the subiterator.
+///
+/// **Note:** The result of this macro is a value of a named type (`Zip<(I, J,
+/// ..)>` of each component iterator `I, J, ...`) if each component iterator is
+/// nameable.
+///
+/// Prefer [`izip!()`] over `multizip` for the performance benefits of using the
+/// standard library `.zip()`. Prefer `multizip` if a nameable type is needed.
+///
+/// ```
+/// use itertools::multizip;
+///
+/// // iterate over three sequences side-by-side
+/// let mut results = [0, 0, 0, 0];
+/// let inputs = [3, 7, 9, 6];
+///
+/// for (r, index, input) in multizip((&mut results, 0..10, &inputs)) {
+/// *r = index * 10 + input;
+/// }
+///
+/// assert_eq!(results, [0 + 3, 10 + 7, 29, 36]);
+/// ```
+/// [`izip!()`]: crate::izip
+pub fn multizip<T, U>(t: U) -> Zip<T>
+ where Zip<T>: From<U>,
+ Zip<T>: Iterator,
+{
+ Zip::from(t)
+}
+
+macro_rules! impl_zip_iter {
+ ($($B:ident),*) => (
+ #[allow(non_snake_case)]
+ impl<$($B: IntoIterator),*> From<($($B,)*)> for Zip<($($B::IntoIter,)*)> {
+ fn from(t: ($($B,)*)) -> Self {
+ let ($($B,)*) = t;
+ Zip { t: ($($B.into_iter(),)*) }
+ }
+ }
+
+ #[allow(non_snake_case)]
+ #[allow(unused_assignments)]
+ impl<$($B),*> Iterator for Zip<($($B,)*)>
+ where
+ $(
+ $B: Iterator,
+ )*
+ {
+ type Item = ($($B::Item,)*);
+
+ fn next(&mut self) -> Option<Self::Item>
+ {
+ let ($(ref mut $B,)*) = self.t;
+
+ // NOTE: Just like iter::Zip, we check the iterators
+ // for None in order. We may finish unevenly (some
+ // iterators gave n + 1 elements, some only n).
+ $(
+ let $B = match $B.next() {
+ None => return None,
+ Some(elt) => elt
+ };
+ )*
+ Some(($($B,)*))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>)
+ {
+ let sh = (::std::usize::MAX, None);
+ let ($(ref $B,)*) = self.t;
+ $(
+ let sh = size_hint::min($B.size_hint(), sh);
+ )*
+ sh
+ }
+ }
+
+ #[allow(non_snake_case)]
+ impl<$($B),*> ExactSizeIterator for Zip<($($B,)*)> where
+ $(
+ $B: ExactSizeIterator,
+ )*
+ { }
+
+ #[allow(non_snake_case)]
+ impl<$($B),*> DoubleEndedIterator for Zip<($($B,)*)> where
+ $(
+ $B: DoubleEndedIterator + ExactSizeIterator,
+ )*
+ {
+ #[inline]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ let ($(ref mut $B,)*) = self.t;
+ let size = *[$( $B.len(), )*].iter().min().unwrap();
+
+ $(
+ if $B.len() != size {
+ for _ in 0..$B.len() - size { $B.next_back(); }
+ }
+ )*
+
+ match ($($B.next_back(),)*) {
+ ($(Some($B),)*) => Some(($($B,)*)),
+ _ => None,
+ }
+ }
+ }
+ );
+}
+
+impl_zip_iter!(A);
+impl_zip_iter!(A, B);
+impl_zip_iter!(A, B, C);
+impl_zip_iter!(A, B, C, D);
+impl_zip_iter!(A, B, C, D, E);
+impl_zip_iter!(A, B, C, D, E, F);
+impl_zip_iter!(A, B, C, D, E, F, G);
+impl_zip_iter!(A, B, C, D, E, F, G, H);
+impl_zip_iter!(A, B, C, D, E, F, G, H, I);
+impl_zip_iter!(A, B, C, D, E, F, G, H, I, J);
+impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K);
+impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L);
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/adaptors_no_collect.rs b/rust/hw/char/pl011/vendor/itertools/tests/adaptors_no_collect.rs
new file mode 100644
index 0000000000..103db23f1e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/adaptors_no_collect.rs
@@ -0,0 +1,46 @@
+use itertools::Itertools;
+
+struct PanickingCounter {
+ curr: usize,
+ max: usize,
+}
+
+impl Iterator for PanickingCounter {
+ type Item = ();
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.curr += 1;
+
+ assert_ne!(
+ self.curr, self.max,
+ "Input iterator reached maximum of {} suggesting collection by adaptor",
+ self.max
+ );
+
+ Some(())
+ }
+}
+
+fn no_collect_test<A, T>(to_adaptor: T)
+ where A: Iterator, T: Fn(PanickingCounter) -> A
+{
+ let counter = PanickingCounter { curr: 0, max: 10_000 };
+ let adaptor = to_adaptor(counter);
+
+ for _ in adaptor.take(5) {}
+}
+
+#[test]
+fn permutations_no_collect() {
+ no_collect_test(|iter| iter.permutations(5))
+}
+
+#[test]
+fn combinations_no_collect() {
+ no_collect_test(|iter| iter.combinations(5))
+}
+
+#[test]
+fn combinations_with_replacement_no_collect() {
+ no_collect_test(|iter| iter.combinations_with_replacement(5))
+}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/flatten_ok.rs b/rust/hw/char/pl011/vendor/itertools/tests/flatten_ok.rs
new file mode 100644
index 0000000000..bf835b5d70
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/flatten_ok.rs
@@ -0,0 +1,76 @@
+use itertools::{assert_equal, Itertools};
+use std::{ops::Range, vec::IntoIter};
+
+fn mix_data() -> IntoIter<Result<Range<i32>, bool>> {
+ vec![Ok(0..2), Err(false), Ok(2..4), Err(true), Ok(4..6)].into_iter()
+}
+
+fn ok_data() -> IntoIter<Result<Range<i32>, bool>> {
+ vec![Ok(0..2), Ok(2..4), Ok(4..6)].into_iter()
+}
+
+#[test]
+fn flatten_ok_mixed_expected_forward() {
+ assert_equal(
+ mix_data().flatten_ok(),
+ vec![
+ Ok(0),
+ Ok(1),
+ Err(false),
+ Ok(2),
+ Ok(3),
+ Err(true),
+ Ok(4),
+ Ok(5),
+ ],
+ );
+}
+
+#[test]
+fn flatten_ok_mixed_expected_reverse() {
+ assert_equal(
+ mix_data().flatten_ok().rev(),
+ vec![
+ Ok(5),
+ Ok(4),
+ Err(true),
+ Ok(3),
+ Ok(2),
+ Err(false),
+ Ok(1),
+ Ok(0),
+ ],
+ );
+}
+
+#[test]
+fn flatten_ok_collect_mixed_forward() {
+ assert_eq!(
+ mix_data().flatten_ok().collect::<Result<Vec<_>, _>>(),
+ Err(false)
+ );
+}
+
+#[test]
+fn flatten_ok_collect_mixed_reverse() {
+ assert_eq!(
+ mix_data().flatten_ok().rev().collect::<Result<Vec<_>, _>>(),
+ Err(true)
+ );
+}
+
+#[test]
+fn flatten_ok_collect_ok_forward() {
+ assert_eq!(
+ ok_data().flatten_ok().collect::<Result<Vec<_>, _>>(),
+ Ok((0..6).collect())
+ );
+}
+
+#[test]
+fn flatten_ok_collect_ok_reverse() {
+ assert_eq!(
+ ok_data().flatten_ok().rev().collect::<Result<Vec<_>, _>>(),
+ Ok((0..6).rev().collect())
+ );
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/macros_hygiene.rs b/rust/hw/char/pl011/vendor/itertools/tests/macros_hygiene.rs
new file mode 100644
index 0000000000..d1111245d6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/macros_hygiene.rs
@@ -0,0 +1,13 @@
+#[test]
+fn iproduct_hygiene() {
+ let _ = itertools::iproduct!(0..6);
+ let _ = itertools::iproduct!(0..6, 0..9);
+ let _ = itertools::iproduct!(0..6, 0..9, 0..12);
+}
+
+#[test]
+fn izip_hygiene() {
+ let _ = itertools::izip!(0..6);
+ let _ = itertools::izip!(0..6, 0..9);
+ let _ = itertools::izip!(0..6, 0..9, 0..12);
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/merge_join.rs b/rust/hw/char/pl011/vendor/itertools/tests/merge_join.rs
new file mode 100644
index 0000000000..3280b7d4ec
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/merge_join.rs
@@ -0,0 +1,108 @@
+use itertools::EitherOrBoth;
+use itertools::free::merge_join_by;
+
+#[test]
+fn empty() {
+ let left: Vec<u32> = vec![];
+ let right: Vec<u32> = vec![];
+ let expected_result: Vec<EitherOrBoth<u32, u32>> = vec![];
+ let actual_result = merge_join_by(left, right, |l, r| l.cmp(r))
+ .collect::<Vec<_>>();
+ assert_eq!(expected_result, actual_result);
+}
+
+#[test]
+fn left_only() {
+ let left: Vec<u32> = vec![1,2,3];
+ let right: Vec<u32> = vec![];
+ let expected_result: Vec<EitherOrBoth<u32, u32>> = vec![
+ EitherOrBoth::Left(1),
+ EitherOrBoth::Left(2),
+ EitherOrBoth::Left(3)
+ ];
+ let actual_result = merge_join_by(left, right, |l, r| l.cmp(r))
+ .collect::<Vec<_>>();
+ assert_eq!(expected_result, actual_result);
+}
+
+#[test]
+fn right_only() {
+ let left: Vec<u32> = vec![];
+ let right: Vec<u32> = vec![1,2,3];
+ let expected_result: Vec<EitherOrBoth<u32, u32>> = vec![
+ EitherOrBoth::Right(1),
+ EitherOrBoth::Right(2),
+ EitherOrBoth::Right(3)
+ ];
+ let actual_result = merge_join_by(left, right, |l, r| l.cmp(r))
+ .collect::<Vec<_>>();
+ assert_eq!(expected_result, actual_result);
+}
+
+#[test]
+fn first_left_then_right() {
+ let left: Vec<u32> = vec![1,2,3];
+ let right: Vec<u32> = vec![4,5,6];
+ let expected_result: Vec<EitherOrBoth<u32, u32>> = vec![
+ EitherOrBoth::Left(1),
+ EitherOrBoth::Left(2),
+ EitherOrBoth::Left(3),
+ EitherOrBoth::Right(4),
+ EitherOrBoth::Right(5),
+ EitherOrBoth::Right(6)
+ ];
+ let actual_result = merge_join_by(left, right, |l, r| l.cmp(r))
+ .collect::<Vec<_>>();
+ assert_eq!(expected_result, actual_result);
+}
+
+#[test]
+fn first_right_then_left() {
+ let left: Vec<u32> = vec![4,5,6];
+ let right: Vec<u32> = vec![1,2,3];
+ let expected_result: Vec<EitherOrBoth<u32, u32>> = vec![
+ EitherOrBoth::Right(1),
+ EitherOrBoth::Right(2),
+ EitherOrBoth::Right(3),
+ EitherOrBoth::Left(4),
+ EitherOrBoth::Left(5),
+ EitherOrBoth::Left(6)
+ ];
+ let actual_result = merge_join_by(left, right, |l, r| l.cmp(r))
+ .collect::<Vec<_>>();
+ assert_eq!(expected_result, actual_result);
+}
+
+#[test]
+fn interspersed_left_and_right() {
+ let left: Vec<u32> = vec![1,3,5];
+ let right: Vec<u32> = vec![2,4,6];
+ let expected_result: Vec<EitherOrBoth<u32, u32>> = vec![
+ EitherOrBoth::Left(1),
+ EitherOrBoth::Right(2),
+ EitherOrBoth::Left(3),
+ EitherOrBoth::Right(4),
+ EitherOrBoth::Left(5),
+ EitherOrBoth::Right(6)
+ ];
+ let actual_result = merge_join_by(left, right, |l, r| l.cmp(r))
+ .collect::<Vec<_>>();
+ assert_eq!(expected_result, actual_result);
+}
+
+#[test]
+fn overlapping_left_and_right() {
+ let left: Vec<u32> = vec![1,3,4,6];
+ let right: Vec<u32> = vec![2,3,4,5];
+ let expected_result: Vec<EitherOrBoth<u32, u32>> = vec![
+ EitherOrBoth::Left(1),
+ EitherOrBoth::Right(2),
+ EitherOrBoth::Both(3, 3),
+ EitherOrBoth::Both(4, 4),
+ EitherOrBoth::Right(5),
+ EitherOrBoth::Left(6)
+ ];
+ let actual_result = merge_join_by(left, right, |l, r| l.cmp(r))
+ .collect::<Vec<_>>();
+ assert_eq!(expected_result, actual_result);
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/peeking_take_while.rs b/rust/hw/char/pl011/vendor/itertools/tests/peeking_take_while.rs
new file mode 100644
index 0000000000..5be97271dd
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/peeking_take_while.rs
@@ -0,0 +1,69 @@
+use itertools::Itertools;
+use itertools::{put_back, put_back_n};
+
+#[test]
+fn peeking_take_while_peekable() {
+ let mut r = (0..10).peekable();
+ r.peeking_take_while(|x| *x <= 3).count();
+ assert_eq!(r.next(), Some(4));
+}
+
+#[test]
+fn peeking_take_while_put_back() {
+ let mut r = put_back(0..10);
+ r.peeking_take_while(|x| *x <= 3).count();
+ assert_eq!(r.next(), Some(4));
+ r.peeking_take_while(|_| true).count();
+ assert_eq!(r.next(), None);
+}
+
+#[test]
+fn peeking_take_while_put_back_n() {
+ let mut r = put_back_n(6..10);
+ for elt in (0..6).rev() {
+ r.put_back(elt);
+ }
+ r.peeking_take_while(|x| *x <= 3).count();
+ assert_eq!(r.next(), Some(4));
+ r.peeking_take_while(|_| true).count();
+ assert_eq!(r.next(), None);
+}
+
+#[test]
+fn peeking_take_while_slice_iter() {
+ let v = [1, 2, 3, 4, 5, 6];
+ let mut r = v.iter();
+ r.peeking_take_while(|x| **x <= 3).count();
+ assert_eq!(r.next(), Some(&4));
+ r.peeking_take_while(|_| true).count();
+ assert_eq!(r.next(), None);
+}
+
+#[test]
+fn peeking_take_while_slice_iter_rev() {
+ let v = [1, 2, 3, 4, 5, 6];
+ let mut r = v.iter().rev();
+ r.peeking_take_while(|x| **x >= 3).count();
+ assert_eq!(r.next(), Some(&2));
+ r.peeking_take_while(|_| true).count();
+ assert_eq!(r.next(), None);
+}
+
+#[test]
+fn peeking_take_while_nested() {
+ let mut xs = (0..10).peekable();
+ let ys: Vec<_> = xs
+ .peeking_take_while(|x| *x < 6)
+ .peeking_take_while(|x| *x != 3)
+ .collect();
+ assert_eq!(ys, vec![0, 1, 2]);
+ assert_eq!(xs.next(), Some(3));
+
+ let mut xs = (4..10).peekable();
+ let ys: Vec<_> = xs
+ .peeking_take_while(|x| *x != 3)
+ .peeking_take_while(|x| *x < 6)
+ .collect();
+ assert_eq!(ys, vec![4, 5]);
+ assert_eq!(xs.next(), Some(6));
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/quick.rs b/rust/hw/char/pl011/vendor/itertools/tests/quick.rs
new file mode 100644
index 0000000000..c19af6c1ea
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/quick.rs
@@ -0,0 +1,1849 @@
+//! The purpose of these tests is to cover corner cases of iterators
+//! and adaptors.
+//!
+//! In particular we test the tedious size_hint and exact size correctness.
+
+use quickcheck as qc;
+use std::default::Default;
+use std::num::Wrapping;
+use std::ops::Range;
+use std::cmp::{max, min, Ordering};
+use std::collections::{HashMap, HashSet};
+use itertools::Itertools;
+use itertools::{
+ multizip,
+ EitherOrBoth,
+ iproduct,
+ izip,
+};
+use itertools::free::{
+ cloned,
+ enumerate,
+ multipeek,
+ peek_nth,
+ put_back,
+ put_back_n,
+ rciter,
+ zip,
+ zip_eq,
+};
+
+use rand::Rng;
+use rand::seq::SliceRandom;
+use quickcheck::TestResult;
+
+/// Trait for size hint modifier types
+trait HintKind: Copy + Send + qc::Arbitrary {
+ fn loosen_bounds(&self, org_hint: (usize, Option<usize>)) -> (usize, Option<usize>);
+}
+
+/// Exact size hint variant that leaves hints unchanged
+#[derive(Clone, Copy, Debug)]
+struct Exact {}
+
+impl HintKind for Exact {
+ fn loosen_bounds(&self, org_hint: (usize, Option<usize>)) -> (usize, Option<usize>) {
+ org_hint
+ }
+}
+
+impl qc::Arbitrary for Exact {
+ fn arbitrary<G: qc::Gen>(_: &mut G) -> Self {
+ Exact {}
+ }
+}
+
+/// Inexact size hint variant to simulate imprecise (but valid) size hints
+///
+/// Will always decrease the lower bound and increase the upper bound
+/// of the size hint by set amounts.
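+///
+/// For example (illustrative): with `underestimate = 1` and `overestimate = 2`,
+/// `loosen_bounds` turns a hint of `(5, Some(5))` into `(4, Some(7))`.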
+#[derive(Clone, Copy, Debug)]
+struct Inexact {
+ underestimate: usize,
+ overestimate: usize,
+}
+
+impl HintKind for Inexact {
+ fn loosen_bounds(&self, org_hint: (usize, Option<usize>)) -> (usize, Option<usize>) {
+ let (org_lower, org_upper) = org_hint;
+ (org_lower.saturating_sub(self.underestimate),
+ org_upper.and_then(move |x| x.checked_add(self.overestimate)))
+ }
+}
+
+impl qc::Arbitrary for Inexact {
+ fn arbitrary<G: qc::Gen>(g: &mut G) -> Self {
+ let ue_value = usize::arbitrary(g);
+ let oe_value = usize::arbitrary(g);
+ // Compensate for quickcheck using extreme values too rarely
+ let ue_choices = &[0, ue_value, usize::max_value()];
+ let oe_choices = &[0, oe_value, usize::max_value()];
+ Inexact {
+ underestimate: *ue_choices.choose(g).unwrap(),
+ overestimate: *oe_choices.choose(g).unwrap(),
+ }
+ }
+
+ fn shrink(&self) -> Box<dyn Iterator<Item=Self>> {
+ let underestimate_value = self.underestimate;
+ let overestimate_value = self.overestimate;
+ Box::new(
+ underestimate_value.shrink().flat_map(move |ue_value|
+ overestimate_value.shrink().map(move |oe_value|
+ Inexact {
+ underestimate: ue_value,
+ overestimate: oe_value,
+ }
+ )
+ )
+ )
+ }
+}
+
+/// Our base iterator that we can impl Arbitrary for
+///
+/// By default we'll return inexact bounds estimates for size_hint
+/// to make tests harder to pass.
+///
+/// NOTE: Iter is tricky and is not fused, to help catch bugs.
+/// At the end it will return None once, then return Some(0),
+/// then return None again.
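+///
+/// For example (illustrative), `Iter` over `0..2` yields:
+/// `Some(0), Some(1), None, Some(0), None, None, ...`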
+#[derive(Clone, Debug)]
+struct Iter<T, SK: HintKind = Inexact> {
+ iterator: Range<T>,
+ // fuse/done flag
+ fuse_flag: i32,
+ hint_kind: SK,
+}
+
+impl<T, HK> Iter<T, HK> where HK: HintKind
+{
+ fn new(it: Range<T>, hint_kind: HK) -> Self {
+ Iter {
+ iterator: it,
+ fuse_flag: 0,
+ hint_kind,
+ }
+ }
+}
+
+impl<T, HK> Iterator for Iter<T, HK>
+ where Range<T>: Iterator,
+ <Range<T> as Iterator>::Item: Default,
+ HK: HintKind,
+{
+ type Item = <Range<T> as Iterator>::Item;
+
+ fn next(&mut self) -> Option<Self::Item>
+ {
+ let elt = self.iterator.next();
+ if elt.is_none() {
+ self.fuse_flag += 1;
+ // check fuse flag
+ if self.fuse_flag == 2 {
+ return Some(Default::default())
+ }
+ }
+ elt
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>)
+ {
+ let org_hint = self.iterator.size_hint();
+ self.hint_kind.loosen_bounds(org_hint)
+ }
+}
+
+impl<T, HK> DoubleEndedIterator for Iter<T, HK>
+ where Range<T>: DoubleEndedIterator,
+ <Range<T> as Iterator>::Item: Default,
+ HK: HintKind
+{
+ fn next_back(&mut self) -> Option<Self::Item> { self.iterator.next_back() }
+}
+
+impl<T> ExactSizeIterator for Iter<T, Exact> where Range<T>: ExactSizeIterator,
+ <Range<T> as Iterator>::Item: Default,
+{ }
+
+impl<T, HK> qc::Arbitrary for Iter<T, HK>
+ where T: qc::Arbitrary,
+ HK: HintKind,
+{
+ fn arbitrary<G: qc::Gen>(g: &mut G) -> Self
+ {
+ Iter::new(T::arbitrary(g)..T::arbitrary(g), HK::arbitrary(g))
+ }
+
+ fn shrink(&self) -> Box<dyn Iterator<Item=Iter<T, HK>>>
+ {
+ let r = self.iterator.clone();
+ let hint_kind = self.hint_kind;
+ Box::new(
+ r.start.shrink().flat_map(move |a|
+ r.end.shrink().map(move |b|
+ Iter::new(a.clone()..b, hint_kind)
+ )
+ )
+ )
+ }
+}
+
+/// A meta-iterator which yields `Iter<i32>`s whose start/endpoints are
+/// increased or decreased linearly on each iteration.
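+///
+/// For example (illustrative): with `range_start = 0`, `range_end = 3`,
+/// `start_step = 1`, `end_step = 2` and `iter_count = 2`, the yielded
+/// ranges are `0..3` and then `1..5`.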
+#[derive(Clone, Debug)]
+struct ShiftRange<HK = Inexact> {
+ range_start: i32,
+ range_end: i32,
+ start_step: i32,
+ end_step: i32,
+ iter_count: u32,
+ hint_kind: HK,
+}
+
+impl<HK> Iterator for ShiftRange<HK> where HK: HintKind {
+ type Item = Iter<i32, HK>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.iter_count == 0 {
+ return None;
+ }
+
+ let iter = Iter::new(self.range_start..self.range_end, self.hint_kind);
+
+ self.range_start += self.start_step;
+ self.range_end += self.end_step;
+ self.iter_count -= 1;
+
+ Some(iter)
+ }
+}
+
+impl ExactSizeIterator for ShiftRange<Exact> { }
+
+impl<HK> qc::Arbitrary for ShiftRange<HK>
+ where HK: HintKind
+{
+ fn arbitrary<G: qc::Gen>(g: &mut G) -> Self {
+ const MAX_STARTING_RANGE_DIFF: i32 = 32;
+ const MAX_STEP_MODULO: i32 = 8;
+ const MAX_ITER_COUNT: u32 = 3;
+
+ let range_start = qc::Arbitrary::arbitrary(g);
+ let range_end = range_start + g.gen_range(0, MAX_STARTING_RANGE_DIFF + 1);
+ let start_step = g.gen_range(-MAX_STEP_MODULO, MAX_STEP_MODULO + 1);
+ let end_step = g.gen_range(-MAX_STEP_MODULO, MAX_STEP_MODULO + 1);
+ let iter_count = g.gen_range(0, MAX_ITER_COUNT + 1);
+ let hint_kind = qc::Arbitrary::arbitrary(g);
+
+ ShiftRange {
+ range_start,
+ range_end,
+ start_step,
+ end_step,
+ iter_count,
+ hint_kind,
+ }
+ }
+}
+
+fn correct_count<I, F>(get_it: F) -> bool
+where
+ I: Iterator,
+ F: Fn() -> I
+{
+ let mut counts = vec![get_it().count()];
+
+ 'outer: loop {
+ let mut it = get_it();
+
+ for _ in 0..(counts.len() - 1) {
+ #[allow(clippy::manual_assert)]
+ if it.next().is_none() {
+ panic!("Iterator shouldn't be finished, may not be deterministic");
+ }
+ }
+
+ if it.next().is_none() {
+ break 'outer;
+ }
+
+ counts.push(it.count());
+ }
+
+ let total_actual_count = counts.len() - 1;
+
+ for (i, returned_count) in counts.into_iter().enumerate() {
+ let actual_count = total_actual_count - i;
+ if actual_count != returned_count {
+ println!("Total iterations: {} True count: {} returned count: {}", i, actual_count, returned_count);
+
+ return false;
+ }
+ }
+
+ true
+}
+
+fn correct_size_hint<I: Iterator>(mut it: I) -> bool {
+ // record size hint at each iteration
+ let initial_hint = it.size_hint();
+ let mut hints = Vec::with_capacity(initial_hint.0 + 1);
+ hints.push(initial_hint);
+ while let Some(_) = it.next() {
+ hints.push(it.size_hint())
+ }
+
+ let mut true_count = hints.len(); // start off +1 too much
+
+ // check all the size hints
+ for &(low, hi) in &hints {
+ true_count -= 1;
+ if low > true_count ||
+ (hi.is_some() && hi.unwrap() < true_count)
+ {
+ println!("True size: {:?}, size hint: {:?}", true_count, (low, hi));
+ //println!("All hints: {:?}", hints);
+ return false
+ }
+ }
+ true
+}
+
+fn exact_size<I: ExactSizeIterator>(mut it: I) -> bool {
+ // check every iteration
+ let (mut low, mut hi) = it.size_hint();
+ if Some(low) != hi { return false; }
+ while let Some(_) = it.next() {
+ let (xlow, xhi) = it.size_hint();
+ if low != xlow + 1 { return false; }
+ low = xlow;
+ hi = xhi;
+ if Some(low) != hi { return false; }
+ }
+ let (low, hi) = it.size_hint();
+ low == 0 && hi == Some(0)
+}
+
+// Exact size for this case, without ExactSizeIterator
+fn exact_size_for_this<I: Iterator>(mut it: I) -> bool {
+ // check every iteration
+ let (mut low, mut hi) = it.size_hint();
+ if Some(low) != hi { return false; }
+ while let Some(_) = it.next() {
+ let (xlow, xhi) = it.size_hint();
+ if low != xlow + 1 { return false; }
+ low = xlow;
+ hi = xhi;
+ if Some(low) != hi { return false; }
+ }
+ let (low, hi) = it.size_hint();
+ low == 0 && hi == Some(0)
+}
+
+/*
+ * NOTE: Range<i8> is broken!
+ * (all signed ranges are)
+#[quickcheck]
+fn size_range_i8(a: Iter<i8>) -> bool {
+ exact_size(a)
+}
+
+#[quickcheck]
+fn size_range_i16(a: Iter<i16>) -> bool {
+ exact_size(a)
+}
+
+#[quickcheck]
+fn size_range_u8(a: Iter<u8>) -> bool {
+ exact_size(a)
+}
+ */
+
+macro_rules! quickcheck {
+ // accept several property function definitions
+ // The property functions can use pattern matching and `mut` as usual
+ // in the function arguments, but the functions can not be generic.
+ {$($(#$attr:tt)* fn $fn_name:ident($($arg:tt)*) -> $ret:ty { $($code:tt)* })*} => (
+ $(
+ #[test]
+ $(#$attr)*
+ fn $fn_name() {
+ fn prop($($arg)*) -> $ret {
+ $($code)*
+ }
+ ::quickcheck::quickcheck(quickcheck!(@fn prop [] $($arg)*));
+ }
+ )*
+ );
+ // parse argument list (with patterns allowed) into prop as fn(_, _) -> _
+ (@fn $f:ident [$($t:tt)*]) => {
+ $f as fn($($t),*) -> _
+ };
+ (@fn $f:ident [$($p:tt)*] : $($tail:tt)*) => {
+ quickcheck!(@fn $f [$($p)* _] $($tail)*)
+ };
+ (@fn $f:ident [$($p:tt)*] $t:tt $($tail:tt)*) => {
+ quickcheck!(@fn $f [$($p)*] $($tail)*)
+ };
+}
+
+quickcheck! {
+
+ fn size_product(a: Iter<u16>, b: Iter<u16>) -> bool {
+ correct_size_hint(a.cartesian_product(b))
+ }
+ fn size_product3(a: Iter<u16>, b: Iter<u16>, c: Iter<u16>) -> bool {
+ correct_size_hint(iproduct!(a, b, c))
+ }
+
+ fn correct_cartesian_product3(a: Iter<u16>, b: Iter<u16>, c: Iter<u16>, take_manual: usize) -> ()
+ {
+ // test correctness of iproduct through regular iteration (take)
+ // and through fold.
+ let ac = a.clone();
+ let br = &b.clone();
+ let cr = &c.clone();
+ let answer: Vec<_> = ac.flat_map(move |ea| br.clone().flat_map(move |eb| cr.clone().map(move |ec| (ea, eb, ec)))).collect();
+ let mut product_iter = iproduct!(a, b, c);
+ let mut actual = Vec::new();
+
+ actual.extend((&mut product_iter).take(take_manual));
+ if actual.len() == take_manual {
+ product_iter.fold((), |(), elt| actual.push(elt));
+ }
+ assert_eq!(answer, actual);
+ }
+
+ fn size_multi_product(a: ShiftRange) -> bool {
+ correct_size_hint(a.multi_cartesian_product())
+ }
+ fn correct_multi_product3(a: ShiftRange, take_manual: usize) -> () {
+ // Fix no. of iterators at 3
+ let a = ShiftRange { iter_count: 3, ..a };
+
+ // test correctness of MultiProduct through regular iteration (take)
+ // and through fold.
+ let mut iters = a.clone();
+ let i0 = iters.next().unwrap();
+ let i1r = &iters.next().unwrap();
+ let i2r = &iters.next().unwrap();
+ let answer: Vec<_> = i0.flat_map(move |ei0| i1r.clone().flat_map(move |ei1| i2r.clone().map(move |ei2| vec![ei0, ei1, ei2]))).collect();
+ let mut multi_product = a.clone().multi_cartesian_product();
+ let mut actual = Vec::new();
+
+ actual.extend((&mut multi_product).take(take_manual));
+ if actual.len() == take_manual {
+ multi_product.fold((), |(), elt| actual.push(elt));
+ }
+ assert_eq!(answer, actual);
+
+ assert_eq!(answer.into_iter().last(), a.multi_cartesian_product().last());
+ }
+
+ #[allow(deprecated)]
+ fn size_step(a: Iter<i16, Exact>, s: usize) -> bool {
+ let mut s = s;
+ if s == 0 {
+ s += 1; // never zero
+ }
+ let filt = a.clone().dedup();
+ correct_size_hint(filt.step(s)) &&
+ exact_size(a.step(s))
+ }
+
+ #[allow(deprecated)]
+ fn equal_step(a: Iter<i16>, s: usize) -> bool {
+ let mut s = s;
+ if s == 0 {
+ s += 1; // never zero
+ }
+ let mut i = 0;
+ itertools::equal(a.clone().step(s), a.filter(|_| {
+ let keep = i % s == 0;
+ i += 1;
+ keep
+ }))
+ }
+
+ #[allow(deprecated)]
+ fn equal_step_vec(a: Vec<i16>, s: usize) -> bool {
+ let mut s = s;
+ if s == 0 {
+ s += 1; // never zero
+ }
+ let mut i = 0;
+ itertools::equal(a.iter().step(s), a.iter().filter(|_| {
+ let keep = i % s == 0;
+ i += 1;
+ keep
+ }))
+ }
+
+ fn size_multipeek(a: Iter<u16, Exact>, s: u8) -> bool {
+ let mut it = multipeek(a);
+ // peek a few times
+ for _ in 0..s {
+ it.peek();
+ }
+ exact_size(it)
+ }
+
+ fn size_peek_nth(a: Iter<u16, Exact>, s: u8) -> bool {
+ let mut it = peek_nth(a);
+ // peek a few times
+ for n in 0..s {
+ it.peek_nth(n as usize);
+ }
+ exact_size(it)
+ }
+
+ fn equal_merge(mut a: Vec<i16>, mut b: Vec<i16>) -> bool {
+ a.sort();
+ b.sort();
+ let mut merged = a.clone();
+ merged.extend(b.iter().cloned());
+ merged.sort();
+ itertools::equal(&merged, a.iter().merge(&b))
+ }
+ fn size_merge(a: Iter<u16>, b: Iter<u16>) -> bool {
+ correct_size_hint(a.merge(b))
+ }
+ fn size_zip(a: Iter<i16, Exact>, b: Iter<i16, Exact>, c: Iter<i16, Exact>) -> bool {
+ let filt = a.clone().dedup();
+ correct_size_hint(multizip((filt, b.clone(), c.clone()))) &&
+ exact_size(multizip((a, b, c)))
+ }
+ fn size_zip_rc(a: Iter<i16>, b: Iter<i16>) -> bool {
+ let rc = rciter(a);
+ correct_size_hint(multizip((&rc, &rc, b)))
+ }
+
+ fn size_zip_macro(a: Iter<i16, Exact>, b: Iter<i16, Exact>, c: Iter<i16, Exact>) -> bool {
+ let filt = a.clone().dedup();
+ correct_size_hint(izip!(filt, b.clone(), c.clone())) &&
+ exact_size(izip!(a, b, c))
+ }
+ fn equal_kmerge(mut a: Vec<i16>, mut b: Vec<i16>, mut c: Vec<i16>) -> bool {
+ use itertools::free::kmerge;
+ a.sort();
+ b.sort();
+ c.sort();
+ let mut merged = a.clone();
+ merged.extend(b.iter().cloned());
+ merged.extend(c.iter().cloned());
+ merged.sort();
+ itertools::equal(merged.into_iter(), kmerge(vec![a, b, c]))
+ }
+
+ // Any number of input iterators
+ fn equal_kmerge_2(mut inputs: Vec<Vec<i16>>) -> bool {
+ use itertools::free::kmerge;
+ // sort the inputs
+ for input in &mut inputs {
+ input.sort();
+ }
+ let mut merged = inputs.concat();
+ merged.sort();
+ itertools::equal(merged.into_iter(), kmerge(inputs))
+ }
+
+ // Any number of input iterators
+ fn equal_kmerge_by_ge(mut inputs: Vec<Vec<i16>>) -> bool {
+ // sort the inputs
+ for input in &mut inputs {
+ input.sort();
+ input.reverse();
+ }
+ let mut merged = inputs.concat();
+ merged.sort();
+ merged.reverse();
+ itertools::equal(merged.into_iter(), inputs.into_iter().kmerge_by(|x, y| x >= y))
+ }
+
+ // Any number of input iterators
+ fn equal_kmerge_by_lt(mut inputs: Vec<Vec<i16>>) -> bool {
+ // sort the inputs
+ for input in &mut inputs {
+ input.sort();
+ }
+ let mut merged = inputs.concat();
+ merged.sort();
+ itertools::equal(merged.into_iter(), inputs.into_iter().kmerge_by(|x, y| x < y))
+ }
+
+ // Any number of input iterators
+ fn equal_kmerge_by_le(mut inputs: Vec<Vec<i16>>) -> bool {
+ // sort the inputs
+ for input in &mut inputs {
+ input.sort();
+ }
+ let mut merged = inputs.concat();
+ merged.sort();
+ itertools::equal(merged.into_iter(), inputs.into_iter().kmerge_by(|x, y| x <= y))
+ }
+ fn size_kmerge(a: Iter<i16>, b: Iter<i16>, c: Iter<i16>) -> bool {
+ use itertools::free::kmerge;
+ correct_size_hint(kmerge(vec![a, b, c]))
+ }
+ fn equal_zip_eq(a: Vec<i32>, b: Vec<i32>) -> bool {
+ let len = std::cmp::min(a.len(), b.len());
+ let a = &a[..len];
+ let b = &b[..len];
+ itertools::equal(zip_eq(a, b), zip(a, b))
+ }
+ fn size_zip_longest(a: Iter<i16, Exact>, b: Iter<i16, Exact>) -> bool {
+ let filt = a.clone().dedup();
+ let filt2 = b.clone().dedup();
+ correct_size_hint(filt.zip_longest(b.clone())) &&
+ correct_size_hint(a.clone().zip_longest(filt2)) &&
+ exact_size(a.zip_longest(b))
+ }
+ fn size_2_zip_longest(a: Iter<i16>, b: Iter<i16>) -> bool {
+ let it = a.clone().zip_longest(b.clone());
+ let jt = a.clone().zip_longest(b.clone());
+ itertools::equal(a,
+ it.filter_map(|elt| match elt {
+ EitherOrBoth::Both(x, _) => Some(x),
+ EitherOrBoth::Left(x) => Some(x),
+ _ => None,
+ }
+ ))
+ &&
+ itertools::equal(b,
+ jt.filter_map(|elt| match elt {
+ EitherOrBoth::Both(_, y) => Some(y),
+ EitherOrBoth::Right(y) => Some(y),
+ _ => None,
+ }
+ ))
+ }
+ fn size_interleave(a: Iter<i16>, b: Iter<i16>) -> bool {
+ correct_size_hint(a.interleave(b))
+ }
+ fn exact_interleave(a: Iter<i16, Exact>, b: Iter<i16, Exact>) -> bool {
+ exact_size_for_this(a.interleave(b))
+ }
+ fn size_interleave_shortest(a: Iter<i16>, b: Iter<i16>) -> bool {
+ correct_size_hint(a.interleave_shortest(b))
+ }
+ fn exact_interleave_shortest(a: Vec<()>, b: Vec<()>) -> bool {
+ exact_size_for_this(a.iter().interleave_shortest(&b))
+ }
+ fn size_intersperse(a: Iter<i16>, x: i16) -> bool {
+ correct_size_hint(a.intersperse(x))
+ }
+ fn equal_intersperse(a: Vec<i32>, x: i32) -> bool {
+ let mut inter = false;
+ let mut i = 0;
+ for elt in a.iter().cloned().intersperse(x) {
+ if inter {
+ if elt != x { return false }
+ } else {
+ if elt != a[i] { return false }
+ i += 1;
+ }
+ inter = !inter;
+ }
+ true
+ }
+
+ fn equal_combinations_2(a: Vec<u8>) -> bool {
+ let mut v = Vec::new();
+ for (i, x) in enumerate(&a) {
+ for y in &a[i + 1..] {
+ v.push((x, y));
+ }
+ }
+ itertools::equal(a.iter().tuple_combinations::<(_, _)>(), v)
+ }
+
+ fn collect_tuple_matches_size(a: Iter<i16>) -> bool {
+ let size = a.clone().count();
+ a.collect_tuple::<(_, _, _)>().is_some() == (size == 3)
+ }
+
+ fn correct_permutations(vals: HashSet<i32>, k: usize) -> () {
+ // Test permutations only on iterators of distinct integers, to prevent
+ // false positives.
+
+ const MAX_N: usize = 5;
+
+ let n = min(vals.len(), MAX_N);
+ let vals: HashSet<i32> = vals.into_iter().take(n).collect();
+
+ let perms = vals.iter().permutations(k);
+
+ let mut actual = HashSet::new();
+
+ for perm in perms {
+ assert_eq!(perm.len(), k);
+
+ let all_items_valid = perm.iter().all(|p| vals.contains(p));
+ assert!(all_items_valid, "perm contains value not from input: {:?}", perm);
+
+ // Check that all perm items are distinct
+ let distinct_len = {
+ let perm_set: HashSet<_> = perm.iter().collect();
+ perm_set.len()
+ };
+ assert_eq!(perm.len(), distinct_len);
+
+ // Check that the perm is new
+ assert!(actual.insert(perm.clone()), "perm already encountered: {:?}", perm);
+ }
+ }
+
+ fn permutations_lexic_order(a: usize, b: usize) -> () {
+ let a = a % 6;
+ let b = b % 6;
+
+ let n = max(a, b);
+ let k = min (a, b);
+
+ let expected_first: Vec<usize> = (0..k).collect();
+ let expected_last: Vec<usize> = ((n - k)..n).rev().collect();
+
+ let mut perms = (0..n).permutations(k);
+
+ let mut curr_perm = match perms.next() {
+ Some(p) => p,
+ None => { return; }
+ };
+
+ assert_eq!(expected_first, curr_perm);
+
+ for next_perm in perms {
+ assert!(
+ next_perm > curr_perm,
+ "next perm isn't greater-than current; next_perm={:?} curr_perm={:?} n={}",
+ next_perm, curr_perm, n
+ );
+
+ curr_perm = next_perm;
+ }
+
+ assert_eq!(expected_last, curr_perm);
+
+ }
+
+ fn permutations_count(n: usize, k: usize) -> bool {
+ let n = n % 6;
+
+ correct_count(|| (0..n).permutations(k))
+ }
+
+ fn permutations_size(a: Iter<i32>, k: usize) -> bool {
+ correct_size_hint(a.take(5).permutations(k))
+ }
+
+ fn permutations_k0_yields_once(n: usize) -> () {
+ let k = 0;
+ let expected: Vec<Vec<usize>> = vec![vec![]];
+ let actual = (0..n).permutations(k).collect_vec();
+
+ assert_eq!(expected, actual);
+ }
+}
+
+quickcheck! {
+ fn dedup_via_coalesce(a: Vec<i32>) -> bool {
+ let mut b = a.clone();
+ b.dedup();
+ itertools::equal(
+ &b,
+ a
+ .iter()
+ .coalesce(|x, y| {
+ if x==y {
+ Ok(x)
+ } else {
+ Err((x, y))
+ }
+ })
+ .fold(vec![], |mut v, n| {
+ v.push(n);
+ v
+ })
+ )
+ }
+}
+
+quickcheck! {
+ fn equal_dedup(a: Vec<i32>) -> bool {
+ let mut b = a.clone();
+ b.dedup();
+ itertools::equal(&b, a.iter().dedup())
+ }
+}
+
+quickcheck! {
+ fn equal_dedup_by(a: Vec<(i32, i32)>) -> bool {
+ let mut b = a.clone();
+ b.dedup_by(|x, y| x.0==y.0);
+ itertools::equal(&b, a.iter().dedup_by(|x, y| x.0==y.0))
+ }
+}
+
+quickcheck! {
+ fn size_dedup(a: Vec<i32>) -> bool {
+ correct_size_hint(a.iter().dedup())
+ }
+}
+
+quickcheck! {
+ fn size_dedup_by(a: Vec<(i32, i32)>) -> bool {
+ correct_size_hint(a.iter().dedup_by(|x, y| x.0==y.0))
+ }
+}
+
+quickcheck! {
+ fn exact_repeatn((n, x): (usize, i32)) -> bool {
+ let it = itertools::repeat_n(x, n);
+ exact_size(it)
+ }
+}
+
+quickcheck! {
+ fn size_put_back(a: Vec<u8>, x: Option<u8>) -> bool {
+ let mut it = put_back(a.into_iter());
+ match x {
+ Some(t) => it.put_back(t),
+ None => {}
+ }
+ correct_size_hint(it)
+ }
+}
+
+quickcheck! {
+ fn size_put_backn(a: Vec<u8>, b: Vec<u8>) -> bool {
+ let mut it = put_back_n(a.into_iter());
+ for elt in b {
+ it.put_back(elt)
+ }
+ correct_size_hint(it)
+ }
+}
+
+quickcheck! {
+ fn merge_join_by_ordering_vs_bool(a: Vec<u8>, b: Vec<u8>) -> bool {
+ use either::Either;
+ use itertools::free::merge_join_by;
+ let mut has_equal = false;
+ let it_ord = merge_join_by(a.clone(), b.clone(), Ord::cmp).flat_map(|v| match v {
+ EitherOrBoth::Both(l, r) => {
+ has_equal = true;
+ vec![Either::Left(l), Either::Right(r)]
+ }
+ EitherOrBoth::Left(l) => vec![Either::Left(l)],
+ EitherOrBoth::Right(r) => vec![Either::Right(r)],
+ });
+ let it_bool = merge_join_by(a, b, PartialOrd::le);
+ itertools::equal(it_ord, it_bool) || has_equal
+ }
+ fn merge_join_by_bool_unwrapped_is_merge_by(a: Vec<u8>, b: Vec<u8>) -> bool {
+ use either::Either;
+ use itertools::free::merge_join_by;
+ let it = a.clone().into_iter().merge_by(b.clone(), PartialOrd::ge);
+ let it_join = merge_join_by(a, b, PartialOrd::ge).map(Either::into_inner);
+ itertools::equal(it, it_join)
+ }
+}
+
+quickcheck! {
+ fn size_tee(a: Vec<u8>) -> bool {
+ let (mut t1, mut t2) = a.iter().tee();
+ t1.next();
+ t1.next();
+ t2.next();
+ exact_size(t1) && exact_size(t2)
+ }
+}
+
+quickcheck! {
+ fn size_tee_2(a: Vec<u8>) -> bool {
+ let (mut t1, mut t2) = a.iter().dedup().tee();
+ t1.next();
+ t1.next();
+ t2.next();
+ correct_size_hint(t1) && correct_size_hint(t2)
+ }
+}
+
+quickcheck! {
+ fn size_take_while_ref(a: Vec<u8>, stop: u8) -> bool {
+ correct_size_hint(a.iter().take_while_ref(|x| **x != stop))
+ }
+}
+
+quickcheck! {
+ fn equal_partition(a: Vec<i32>) -> bool {
+ let mut a = a;
+ let mut ap = a.clone();
+ let split_index = itertools::partition(&mut ap, |x| *x >= 0);
+ let parted = (0..split_index).all(|i| ap[i] >= 0) &&
+ (split_index..a.len()).all(|i| ap[i] < 0);
+
+ a.sort();
+ ap.sort();
+ parted && (a == ap)
+ }
+}
+
+quickcheck! {
+ fn size_combinations(it: Iter<i16>) -> bool {
+ correct_size_hint(it.tuple_combinations::<(_, _)>())
+ }
+}
+
+quickcheck! {
+ fn equal_combinations(it: Iter<i16>) -> bool {
+ let values = it.clone().collect_vec();
+ let mut cmb = it.tuple_combinations();
+ for i in 0..values.len() {
+ for j in i+1..values.len() {
+ let pair = (values[i], values[j]);
+ if pair != cmb.next().unwrap() {
+ return false;
+ }
+ }
+ }
+ cmb.next() == None
+ }
+}
+
+quickcheck! {
+ fn size_pad_tail(it: Iter<i8>, pad: u8) -> bool {
+ correct_size_hint(it.clone().pad_using(pad as usize, |_| 0)) &&
+ correct_size_hint(it.dropping(1).rev().pad_using(pad as usize, |_| 0))
+ }
+}
+
+quickcheck! {
+ fn size_pad_tail2(it: Iter<i8, Exact>, pad: u8) -> bool {
+ exact_size(it.pad_using(pad as usize, |_| 0))
+ }
+}
+
+quickcheck! {
+ fn size_powerset(it: Iter<u8, Exact>) -> bool {
+ // Powerset cardinality gets large very quickly, limit input to keep test fast.
+ correct_size_hint(it.take(12).powerset())
+ }
+}
+
+quickcheck! {
+ fn size_duplicates(it: Iter<i8>) -> bool {
+ correct_size_hint(it.duplicates())
+ }
+}
+
+quickcheck! {
+ fn size_unique(it: Iter<i8>) -> bool {
+ correct_size_hint(it.unique())
+ }
+
+ fn count_unique(it: Vec<i8>, take_first: u8) -> () {
+ let answer = {
+ let mut v = it.clone();
+ v.sort(); v.dedup();
+ v.len()
+ };
+ let mut iter = cloned(&it).unique();
+ let first_count = (&mut iter).take(take_first as usize).count();
+ let rest_count = iter.count();
+ assert_eq!(answer, first_count + rest_count);
+ }
+}
+
+quickcheck! {
+ fn fuzz_group_by_lazy_1(it: Iter<u8>) -> bool {
+ let jt = it.clone();
+ let groups = it.group_by(|k| *k);
+ itertools::equal(jt, groups.into_iter().flat_map(|(_, x)| x))
+ }
+}
+
+quickcheck! {
+ fn fuzz_group_by_lazy_2(data: Vec<u8>) -> bool {
+ let groups = data.iter().group_by(|k| *k / 10);
+ let res = itertools::equal(data.iter(), groups.into_iter().flat_map(|(_, x)| x));
+ res
+ }
+}
+
+quickcheck! {
+ fn fuzz_group_by_lazy_3(data: Vec<u8>) -> bool {
+ let grouper = data.iter().group_by(|k| *k / 10);
+ let groups = grouper.into_iter().collect_vec();
+ let res = itertools::equal(data.iter(), groups.into_iter().flat_map(|(_, x)| x));
+ res
+ }
+}
+
+quickcheck! {
+ fn fuzz_group_by_lazy_duo(data: Vec<u8>, order: Vec<(bool, bool)>) -> bool {
+ let grouper = data.iter().group_by(|k| *k / 3);
+ let mut groups1 = grouper.into_iter();
+ let mut groups2 = grouper.into_iter();
+ let mut elts = Vec::<&u8>::new();
+ let mut old_groups = Vec::new();
+
+ let tup1 = |(_, b)| b;
+ for &(ord, consume_now) in &order {
+ let iter = &mut [&mut groups1, &mut groups2][ord as usize];
+ match iter.next() {
+ Some((_, gr)) => if consume_now {
+ for og in old_groups.drain(..) {
+ elts.extend(og);
+ }
+ elts.extend(gr);
+ } else {
+ old_groups.push(gr);
+ },
+ None => break,
+ }
+ }
+ for og in old_groups.drain(..) {
+ elts.extend(og);
+ }
+ for gr in groups1.map(&tup1) { elts.extend(gr); }
+ for gr in groups2.map(&tup1) { elts.extend(gr); }
+ itertools::assert_equal(&data, elts);
+ true
+ }
+}
+
+quickcheck! {
+ fn chunk_clone_equal(a: Vec<u8>, size: u8) -> () {
+ let mut size = size;
+ if size == 0 {
+ size += 1;
+ }
+ let it = a.chunks(size as usize);
+ itertools::assert_equal(it.clone(), it);
+ }
+}
+
+quickcheck! {
+ fn equal_chunks_lazy(a: Vec<u8>, size: u8) -> bool {
+ let mut size = size;
+ if size == 0 {
+ size += 1;
+ }
+ let chunks = a.iter().chunks(size as usize);
+ let it = a.chunks(size as usize);
+ for (a, b) in chunks.into_iter().zip(it) {
+ if !itertools::equal(a, b) {
+ return false;
+ }
+ }
+ true
+ }
+}
+
+// tuple iterators
+quickcheck! {
+ fn equal_circular_tuple_windows_1(a: Vec<u8>) -> bool {
+ let x = a.iter().map(|e| (e,) );
+ let y = a.iter().circular_tuple_windows::<(_,)>();
+ itertools::assert_equal(x,y);
+ true
+ }
+
+ fn equal_circular_tuple_windows_2(a: Vec<u8>) -> bool {
+ let x = (0..a.len()).map(|start_idx| (
+ &a[start_idx],
+ &a[(start_idx + 1) % a.len()],
+ ));
+ let y = a.iter().circular_tuple_windows::<(_, _)>();
+ itertools::assert_equal(x,y);
+ true
+ }
+
+ fn equal_circular_tuple_windows_3(a: Vec<u8>) -> bool {
+ let x = (0..a.len()).map(|start_idx| (
+ &a[start_idx],
+ &a[(start_idx + 1) % a.len()],
+ &a[(start_idx + 2) % a.len()],
+ ));
+ let y = a.iter().circular_tuple_windows::<(_, _, _)>();
+ itertools::assert_equal(x,y);
+ true
+ }
+
+ fn equal_circular_tuple_windows_4(a: Vec<u8>) -> bool {
+ let x = (0..a.len()).map(|start_idx| (
+ &a[start_idx],
+ &a[(start_idx + 1) % a.len()],
+ &a[(start_idx + 2) % a.len()],
+ &a[(start_idx + 3) % a.len()],
+ ));
+ let y = a.iter().circular_tuple_windows::<(_, _, _, _)>();
+ itertools::assert_equal(x,y);
+ true
+ }
+
+ fn equal_cloned_circular_tuple_windows(a: Vec<u8>) -> bool {
+ let x = a.iter().circular_tuple_windows::<(_, _, _, _)>();
+ let y = x.clone();
+ itertools::assert_equal(x,y);
+ true
+ }
+
+ fn equal_cloned_circular_tuple_windows_noninitial(a: Vec<u8>) -> bool {
+ let mut x = a.iter().circular_tuple_windows::<(_, _, _, _)>();
+ let _ = x.next();
+ let y = x.clone();
+ itertools::assert_equal(x,y);
+ true
+ }
+
+ fn equal_cloned_circular_tuple_windows_complete(a: Vec<u8>) -> bool {
+ let mut x = a.iter().circular_tuple_windows::<(_, _, _, _)>();
+ for _ in x.by_ref() {}
+ let y = x.clone();
+ itertools::assert_equal(x,y);
+ true
+ }
+
+ fn equal_tuple_windows_1(a: Vec<u8>) -> bool {
+ let x = a.windows(1).map(|s| (&s[0], ));
+ let y = a.iter().tuple_windows::<(_,)>();
+ itertools::equal(x, y)
+ }
+
+ fn equal_tuple_windows_2(a: Vec<u8>) -> bool {
+ let x = a.windows(2).map(|s| (&s[0], &s[1]));
+ let y = a.iter().tuple_windows::<(_, _)>();
+ itertools::equal(x, y)
+ }
+
+ fn equal_tuple_windows_3(a: Vec<u8>) -> bool {
+ let x = a.windows(3).map(|s| (&s[0], &s[1], &s[2]));
+ let y = a.iter().tuple_windows::<(_, _, _)>();
+ itertools::equal(x, y)
+ }
+
+ fn equal_tuple_windows_4(a: Vec<u8>) -> bool {
+ let x = a.windows(4).map(|s| (&s[0], &s[1], &s[2], &s[3]));
+ let y = a.iter().tuple_windows::<(_, _, _, _)>();
+ itertools::equal(x, y)
+ }
+
+ fn equal_tuples_1(a: Vec<u8>) -> bool {
+ let x = a.chunks(1).map(|s| (&s[0], ));
+ let y = a.iter().tuples::<(_,)>();
+ itertools::equal(x, y)
+ }
+
+ fn equal_tuples_2(a: Vec<u8>) -> bool {
+ let x = a.chunks(2).filter(|s| s.len() == 2).map(|s| (&s[0], &s[1]));
+ let y = a.iter().tuples::<(_, _)>();
+ itertools::equal(x, y)
+ }
+
+ fn equal_tuples_3(a: Vec<u8>) -> bool {
+ let x = a.chunks(3).filter(|s| s.len() == 3).map(|s| (&s[0], &s[1], &s[2]));
+ let y = a.iter().tuples::<(_, _, _)>();
+ itertools::equal(x, y)
+ }
+
+ fn equal_tuples_4(a: Vec<u8>) -> bool {
+ let x = a.chunks(4).filter(|s| s.len() == 4).map(|s| (&s[0], &s[1], &s[2], &s[3]));
+ let y = a.iter().tuples::<(_, _, _, _)>();
+ itertools::equal(x, y)
+ }
+
+ fn exact_tuple_buffer(a: Vec<u8>) -> bool {
+ let mut iter = a.iter().tuples::<(_, _, _, _)>();
+ (&mut iter).last();
+ let buffer = iter.into_buffer();
+ assert_eq!(buffer.len(), a.len() % 4);
+ exact_size(buffer)
+ }
+}
+
+// with_position
+quickcheck! {
+ fn with_position_exact_size_1(a: Vec<u8>) -> bool {
+ exact_size_for_this(a.iter().with_position())
+ }
+ fn with_position_exact_size_2(a: Iter<u8, Exact>) -> bool {
+ exact_size_for_this(a.with_position())
+ }
+}
+
+quickcheck! {
+ fn correct_group_map_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let count = a.len();
+ let lookup = a.into_iter().map(|i| (i % modulo, i)).into_group_map();
+
+ assert_eq!(lookup.values().flat_map(|vals| vals.iter()).count(), count);
+
+ for (&key, vals) in lookup.iter() {
+ assert!(vals.iter().all(|&val| val % modulo == key));
+ }
+ }
+}
+
+/// A peculiar type: Equality compares both tuple items, but ordering only the
+/// first item. This is so we can check the stability property easily.
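+///
+/// For example (illustrative): `Val(1, 2) != Val(1, 3)` under the derived
+/// `PartialEq`, yet `Val(1, 2).cmp(&Val(1, 3))` is `Ordering::Equal`, since
+/// ordering only inspects the first field.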
+#[derive(Clone, Debug, PartialEq, Eq)]
+struct Val(u32, u32);
+
+impl PartialOrd<Val> for Val {
+ fn partial_cmp(&self, other: &Val) -> Option<Ordering> {
+ self.0.partial_cmp(&other.0)
+ }
+}
+
+impl Ord for Val {
+ fn cmp(&self, other: &Val) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+}
+
+impl qc::Arbitrary for Val {
+ fn arbitrary<G: qc::Gen>(g: &mut G) -> Self {
+ let (x, y) = <(u32, u32)>::arbitrary(g);
+ Val(x, y)
+ }
+ fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
+ Box::new((self.0, self.1).shrink().map(|(x, y)| Val(x, y)))
+ }
+}
+
+quickcheck! {
+ fn minmax(a: Vec<Val>) -> bool {
+ use itertools::MinMaxResult;
+
+
+ let minmax = a.iter().minmax();
+ let expected = match a.len() {
+ 0 => MinMaxResult::NoElements,
+ 1 => MinMaxResult::OneElement(&a[0]),
+ _ => MinMaxResult::MinMax(a.iter().min().unwrap(), a.iter().max().unwrap()),
+ };
+ minmax == expected
+ }
+}
+
+quickcheck! {
+ fn minmax_f64(a: Vec<f64>) -> TestResult {
+ use itertools::MinMaxResult;
+
+ if a.iter().any(|x| x.is_nan()) {
+ return TestResult::discard();
+ }
+
+ let min = cloned(&a).fold1(f64::min);
+ let max = cloned(&a).fold1(f64::max);
+
+ let minmax = cloned(&a).minmax();
+ let expected = match a.len() {
+ 0 => MinMaxResult::NoElements,
+ 1 => MinMaxResult::OneElement(min.unwrap()),
+ _ => MinMaxResult::MinMax(min.unwrap(), max.unwrap()),
+ };
+ TestResult::from_bool(minmax == expected)
+ }
+}
+
+quickcheck! {
+ #[allow(deprecated)]
+ fn tree_fold1_f64(mut a: Vec<f64>) -> TestResult {
+ fn collapse_adjacent<F>(x: Vec<f64>, mut f: F) -> Vec<f64>
+ where F: FnMut(f64, f64) -> f64
+ {
+ let mut out = Vec::new();
+ for i in (0..x.len()).step(2) {
+ if i == x.len()-1 {
+ out.push(x[i])
+ } else {
+ out.push(f(x[i], x[i+1]));
+ }
+ }
+ out
+ }
+
+ if a.iter().any(|x| x.is_nan()) {
+ return TestResult::discard();
+ }
+
+ let actual = a.iter().cloned().tree_fold1(f64::atan2);
+
+ while a.len() > 1 {
+ a = collapse_adjacent(a, f64::atan2);
+ }
+ let expected = a.pop();
+
+ TestResult::from_bool(actual == expected)
+ }
+}
+
+quickcheck! {
+ fn exactly_one_i32(a: Vec<i32>) -> TestResult {
+ let ret = a.iter().cloned().exactly_one();
+ match a.len() {
+ 1 => TestResult::from_bool(ret.unwrap() == a[0]),
+ _ => TestResult::from_bool(ret.unwrap_err().eq(a.iter().cloned())),
+ }
+ }
+}
+
+quickcheck! {
+ fn at_most_one_i32(a: Vec<i32>) -> TestResult {
+ let ret = a.iter().cloned().at_most_one();
+ match a.len() {
+ 0 => TestResult::from_bool(ret.unwrap() == None),
+ 1 => TestResult::from_bool(ret.unwrap() == Some(a[0])),
+ _ => TestResult::from_bool(ret.unwrap_err().eq(a.iter().cloned())),
+ }
+ }
+}
+
+quickcheck! {
+ fn consistent_grouping_map_with_by(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+
+ let lookup_grouping_map = a.iter().copied().map(|i| (i % modulo, i)).into_grouping_map().collect::<Vec<_>>();
+ let lookup_grouping_map_by = a.iter().copied().into_grouping_map_by(|i| i % modulo).collect::<Vec<_>>();
+
+ assert_eq!(lookup_grouping_map, lookup_grouping_map_by);
+ }
+
+ fn correct_grouping_map_by_aggregate_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo < 2 { 2 } else { modulo } as u64; // Avoid `% 0`
+ let lookup = a.iter()
+ .map(|&b| b as u64) // Avoid overflows
+ .into_grouping_map_by(|i| i % modulo)
+ .aggregate(|acc, &key, val| {
+ assert!(val % modulo == key);
+ if val % (modulo - 1) == 0 {
+ None
+ } else {
+ Some(acc.unwrap_or(0) + val)
+ }
+ });
+
+ let group_map_lookup = a.iter()
+ .map(|&b| b as u64)
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .filter_map(|(key, vals)| {
+ vals.into_iter().fold(None, |acc, val| {
+ if val % (modulo - 1) == 0 {
+ None
+ } else {
+ Some(acc.unwrap_or(0) + val)
+ }
+ }).map(|new_val| (key, new_val))
+ })
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for m in 0..modulo {
+ assert_eq!(
+ lookup.get(&m).copied(),
+ a.iter()
+ .map(|&b| b as u64)
+ .filter(|&val| val % modulo == m)
+ .fold(None, |acc, val| {
+ if val % (modulo - 1) == 0 {
+ None
+ } else {
+ Some(acc.unwrap_or(0) + val)
+ }
+ })
+ );
+ }
+ }
+
+ fn correct_grouping_map_by_fold_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo } as u64; // Avoid `% 0`
+ let lookup = a.iter().map(|&b| b as u64) // Avoid overflows
+ .into_grouping_map_by(|i| i % modulo)
+ .fold(0u64, |acc, &key, val| {
+ assert!(val % modulo == key);
+ acc + val
+ });
+
+ let group_map_lookup = a.iter()
+ .map(|&b| b as u64)
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().sum()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &sum) in lookup.iter() {
+ assert_eq!(sum, a.iter().map(|&b| b as u64).filter(|&val| val % modulo == key).sum::<u64>());
+ }
+ }
+
+ fn correct_grouping_map_by_fold_first_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo } as u64; // Avoid `% 0`
+ let lookup = a.iter().map(|&b| b as u64) // Avoid overflows
+ .into_grouping_map_by(|i| i % modulo)
+ .fold_first(|acc, &key, val| {
+ assert!(val % modulo == key);
+ acc + val
+ });
+
+ // TODO: Swap `fold1` with stdlib's `fold_first` when it's stabilized
+ let group_map_lookup = a.iter()
+ .map(|&b| b as u64)
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().fold1(|acc, val| acc + val).unwrap()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &sum) in lookup.iter() {
+ assert_eq!(sum, a.iter().map(|&b| b as u64).filter(|&val| val % modulo == key).sum::<u64>());
+ }
+ }
+
+ fn correct_grouping_map_by_collect_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup_grouping_map = a.iter().copied().into_grouping_map_by(|i| i % modulo).collect::<Vec<_>>();
+ let lookup_group_map = a.iter().copied().map(|i| (i % modulo, i)).into_group_map();
+
+ assert_eq!(lookup_grouping_map, lookup_group_map);
+ }
+
+ fn correct_grouping_map_by_max_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).max();
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().max().unwrap()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &max) in lookup.iter() {
+ assert_eq!(Some(max), a.iter().copied().filter(|&val| val % modulo == key).max());
+ }
+ }
+
+ fn correct_grouping_map_by_max_by_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).max_by(|_, v1, v2| v1.cmp(v2));
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().max_by(|v1, v2| v1.cmp(v2)).unwrap()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &max) in lookup.iter() {
+ assert_eq!(Some(max), a.iter().copied().filter(|&val| val % modulo == key).max_by(|v1, v2| v1.cmp(v2)));
+ }
+ }
+
+ fn correct_grouping_map_by_max_by_key_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).max_by_key(|_, &val| val);
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().max_by_key(|&val| val).unwrap()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &max) in lookup.iter() {
+ assert_eq!(Some(max), a.iter().copied().filter(|&val| val % modulo == key).max_by_key(|&val| val));
+ }
+ }
+
+ fn correct_grouping_map_by_min_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).min();
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().min().unwrap()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &min) in lookup.iter() {
+ assert_eq!(Some(min), a.iter().copied().filter(|&val| val % modulo == key).min());
+ }
+ }
+
+ fn correct_grouping_map_by_min_by_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).min_by(|_, v1, v2| v1.cmp(v2));
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().min_by(|v1, v2| v1.cmp(v2)).unwrap()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &min) in lookup.iter() {
+ assert_eq!(Some(min), a.iter().copied().filter(|&val| val % modulo == key).min_by(|v1, v2| v1.cmp(v2)));
+ }
+ }
+
+ fn correct_grouping_map_by_min_by_key_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).min_by_key(|_, &val| val);
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().min_by_key(|&val| val).unwrap()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &min) in lookup.iter() {
+ assert_eq!(Some(min), a.iter().copied().filter(|&val| val % modulo == key).min_by_key(|&val| val));
+ }
+ }
+
+ fn correct_grouping_map_by_minmax_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).minmax();
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().minmax()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &minmax) in lookup.iter() {
+ assert_eq!(minmax, a.iter().copied().filter(|&val| val % modulo == key).minmax());
+ }
+ }
+
+ fn correct_grouping_map_by_minmax_by_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).minmax_by(|_, v1, v2| v1.cmp(v2));
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().minmax_by(|v1, v2| v1.cmp(v2))))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &minmax) in lookup.iter() {
+ assert_eq!(minmax, a.iter().copied().filter(|&val| val % modulo == key).minmax_by(|v1, v2| v1.cmp(v2)));
+ }
+ }
+
+ fn correct_grouping_map_by_minmax_by_key_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo }; // Avoid `% 0`
+ let lookup = a.iter().copied().into_grouping_map_by(|i| i % modulo).minmax_by_key(|_, &val| val);
+
+ let group_map_lookup = a.iter().copied()
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().minmax_by_key(|&val| val)))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &minmax) in lookup.iter() {
+ assert_eq!(minmax, a.iter().copied().filter(|&val| val % modulo == key).minmax_by_key(|&val| val));
+ }
+ }
+
+ fn correct_grouping_map_by_sum_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = if modulo == 0 { 1 } else { modulo } as u64; // Avoid `% 0`
+ let lookup = a.iter().map(|&b| b as u64) // Avoid overflows
+ .into_grouping_map_by(|i| i % modulo)
+ .sum();
+
+ let group_map_lookup = a.iter().map(|&b| b as u64)
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().sum()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &sum) in lookup.iter() {
+ assert_eq!(sum, a.iter().map(|&b| b as u64).filter(|&val| val % modulo == key).sum::<u64>());
+ }
+ }
+
+ fn correct_grouping_map_by_product_modulo_key(a: Vec<u8>, modulo: u8) -> () {
+ let modulo = Wrapping(if modulo == 0 { 1 } else { modulo } as u64); // Avoid `% 0`
+ let lookup = a.iter().map(|&b| Wrapping(b as u64)) // Avoid overflows
+ .into_grouping_map_by(|i| i % modulo)
+ .product();
+
+ let group_map_lookup = a.iter().map(|&b| Wrapping(b as u64))
+ .map(|i| (i % modulo, i))
+ .into_group_map()
+ .into_iter()
+ .map(|(key, vals)| (key, vals.into_iter().product::<Wrapping<u64>>()))
+ .collect::<HashMap<_,_>>();
+ assert_eq!(lookup, group_map_lookup);
+
+ for (&key, &prod) in lookup.iter() {
+ assert_eq!(
+ prod,
+ a.iter()
+ .map(|&b| Wrapping(b as u64))
+ .filter(|&val| val % modulo == key)
+ .product::<Wrapping<u64>>()
+ );
+ }
+ }
+
+ // This should check that if multiple elements are equally minimum or maximum
+ // then `max`, `min` and `minmax` pick the first minimum and the last maximum.
+ // This is to be consistent with `std::iter::max` and `std::iter::min`.
+ fn correct_grouping_map_by_min_max_minmax_order_modulo_key() -> () {
+ use itertools::MinMaxResult;
+
+ let lookup = (0..=10)
+ .into_grouping_map_by(|_| 0)
+ .max_by(|_, _, _| Ordering::Equal);
+
+ assert_eq!(lookup[&0], 10);
+
+ let lookup = (0..=10)
+ .into_grouping_map_by(|_| 0)
+ .min_by(|_, _, _| Ordering::Equal);
+
+ assert_eq!(lookup[&0], 0);
+
+ let lookup = (0..=10)
+ .into_grouping_map_by(|_| 0)
+ .minmax_by(|_, _, _| Ordering::Equal);
+
+ assert_eq!(lookup[&0], MinMaxResult::MinMax(0, 10));
+ }
+}
+
+quickcheck! {
+ fn counts(nums: Vec<isize>) -> TestResult {
+ let counts = nums.iter().counts();
+ for (&item, &count) in counts.iter() {
+ #[allow(clippy::absurd_extreme_comparisons)]
+ if count <= 0 {
+ return TestResult::failed();
+ }
+ if count != nums.iter().filter(|&x| x == item).count() {
+ return TestResult::failed();
+ }
+ }
+ for item in nums.iter() {
+ if !counts.contains_key(item) {
+ return TestResult::failed();
+ }
+ }
+ TestResult::passed()
+ }
+}
+
+quickcheck! {
+ fn test_double_ended_zip_2(a: Vec<u8>, b: Vec<u8>) -> TestResult {
+ let mut x =
+ multizip((a.clone().into_iter(), b.clone().into_iter()))
+ .collect_vec();
+ x.reverse();
+
+ let y =
+ multizip((a.into_iter(), b.into_iter()))
+ .rfold(Vec::new(), |mut vec, e| { vec.push(e); vec });
+
+ TestResult::from_bool(itertools::equal(x, y))
+ }
+
+ fn test_double_ended_zip_3(a: Vec<u8>, b: Vec<u8>, c: Vec<u8>) -> TestResult {
+ let mut x =
+ multizip((a.clone().into_iter(), b.clone().into_iter(), c.clone().into_iter()))
+ .collect_vec();
+ x.reverse();
+
+ let y =
+ multizip((a.into_iter(), b.into_iter(), c.into_iter()))
+ .rfold(Vec::new(), |mut vec, e| { vec.push(e); vec });
+
+ TestResult::from_bool(itertools::equal(x, y))
+ }
+}
+
+
+fn is_fused<I: Iterator>(mut it: I) -> bool
+{
+ for _ in it.by_ref() {}
+ for _ in 0..10{
+ if it.next().is_some(){
+ return false;
+ }
+ }
+ true
+}
+
+quickcheck! {
+ fn fused_combination(a: Iter<i16>) -> bool
+ {
+ is_fused(a.clone().combinations(1)) &&
+ is_fused(a.combinations(3))
+ }
+
+ fn fused_combination_with_replacement(a: Iter<i16>) -> bool
+ {
+ is_fused(a.clone().combinations_with_replacement(1)) &&
+ is_fused(a.combinations_with_replacement(3))
+ }
+
+ fn fused_tuple_combination(a: Iter<i16>) -> bool
+ {
+ is_fused(a.clone().fuse().tuple_combinations::<(_,)>()) &&
+ is_fused(a.fuse().tuple_combinations::<(_,_,_)>())
+ }
+
+ fn fused_unique(a: Iter<i16>) -> bool
+ {
+ is_fused(a.fuse().unique())
+ }
+
+ fn fused_unique_by(a: Iter<i16>) -> bool
+ {
+ is_fused(a.fuse().unique_by(|x| x % 100))
+ }
+
+ fn fused_interleave_shortest(a: Iter<i16>, b: Iter<i16>) -> bool
+ {
+ !is_fused(a.clone().interleave_shortest(b.clone())) &&
+ is_fused(a.fuse().interleave_shortest(b.fuse()))
+ }
+
+ fn fused_product(a: Iter<i16>, b: Iter<i16>) -> bool
+ {
+ is_fused(a.fuse().cartesian_product(b.fuse()))
+ }
+
+ fn fused_merge(a: Iter<i16>, b: Iter<i16>) -> bool
+ {
+ is_fused(a.fuse().merge(b.fuse()))
+ }
+
+ fn fused_filter_ok(a: Iter<i16>) -> bool
+ {
+ is_fused(a.map(|x| if x % 2 == 0 {Ok(x)} else {Err(x)} )
+ .filter_ok(|x| x % 3 == 0)
+ .fuse())
+ }
+
+ fn fused_filter_map_ok(a: Iter<i16>) -> bool
+ {
+ is_fused(a.map(|x| if x % 2 == 0 {Ok(x)} else {Err(x)} )
+ .filter_map_ok(|x| if x % 3 == 0 {Some(x / 3)} else {None})
+ .fuse())
+ }
+
+ fn fused_positions(a: Iter<i16>) -> bool
+ {
+ !is_fused(a.clone().positions(|x|x%2==0)) &&
+ is_fused(a.fuse().positions(|x|x%2==0))
+ }
+
+ fn fused_update(a: Iter<i16>) -> bool
+ {
+ !is_fused(a.clone().update(|x|*x+=1)) &&
+ is_fused(a.fuse().update(|x|*x+=1))
+ }
+
+ fn fused_tuple_windows(a: Iter<i16>) -> bool
+ {
+ is_fused(a.fuse().tuple_windows::<(_,_)>())
+ }
+
+ fn fused_pad_using(a: Iter<i16>) -> bool
+ {
+ is_fused(a.fuse().pad_using(100,|_|0))
+ }
+}
+
+quickcheck! {
+ fn min_set_contains_min(a: Vec<(usize, char)>) -> bool {
+ let result_set = a.iter().min_set();
+ if let Some(result_element) = a.iter().min() {
+ result_set.contains(&result_element)
+ } else {
+ result_set.is_empty()
+ }
+ }
+
+ fn min_set_by_contains_min(a: Vec<(usize, char)>) -> bool {
+ let compare = |x: &&(usize, char), y: &&(usize, char)| x.1.cmp(&y.1);
+ let result_set = a.iter().min_set_by(compare);
+ if let Some(result_element) = a.iter().min_by(compare) {
+ result_set.contains(&result_element)
+ } else {
+ result_set.is_empty()
+ }
+ }
+
+ fn min_set_by_key_contains_min(a: Vec<(usize, char)>) -> bool {
+ let key = |x: &&(usize, char)| x.1;
+ let result_set = a.iter().min_set_by_key(&key);
+ if let Some(result_element) = a.iter().min_by_key(&key) {
+ result_set.contains(&result_element)
+ } else {
+ result_set.is_empty()
+ }
+ }
+
+ fn max_set_contains_max(a: Vec<(usize, char)>) -> bool {
+ let result_set = a.iter().max_set();
+ if let Some(result_element) = a.iter().max() {
+ result_set.contains(&result_element)
+ } else {
+ result_set.is_empty()
+ }
+ }
+
+ fn max_set_by_contains_max(a: Vec<(usize, char)>) -> bool {
+ let compare = |x: &&(usize, char), y: &&(usize, char)| x.1.cmp(&y.1);
+ let result_set = a.iter().max_set_by(compare);
+ if let Some(result_element) = a.iter().max_by(compare) {
+ result_set.contains(&result_element)
+ } else {
+ result_set.is_empty()
+ }
+ }
+
+ fn max_set_by_key_contains_max(a: Vec<(usize, char)>) -> bool {
+ let key = |x: &&(usize, char)| x.1;
+ let result_set = a.iter().max_set_by_key(&key);
+ if let Some(result_element) = a.iter().max_by_key(&key) {
+ result_set.contains(&result_element)
+ } else {
+ result_set.is_empty()
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/specializations.rs b/rust/hw/char/pl011/vendor/itertools/tests/specializations.rs
new file mode 100644
index 0000000000..057e11c9f6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/specializations.rs
@@ -0,0 +1,153 @@
+use itertools::Itertools;
+use std::fmt::Debug;
+use quickcheck::quickcheck;
+
+struct Unspecialized<I>(I);
+impl<I> Iterator for Unspecialized<I>
+where
+ I: Iterator,
+{
+ type Item = I::Item;
+
+ #[inline(always)]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+macro_rules! check_specialized {
+ ($src:expr, |$it:pat| $closure:expr) => {
+ let $it = $src.clone();
+ let v1 = $closure;
+
+ let $it = Unspecialized($src.clone());
+ let v2 = $closure;
+
+ assert_eq!(v1, v2);
+ }
+}
+
+fn test_specializations<IterItem, Iter>(
+ it: &Iter,
+) where
+ IterItem: Eq + Debug + Clone,
+ Iter: Iterator<Item = IterItem> + Clone,
+{
+ check_specialized!(it, |i| i.count());
+ check_specialized!(it, |i| i.last());
+ check_specialized!(it, |i| i.collect::<Vec<_>>());
+ check_specialized!(it, |i| {
+ let mut parameters_from_fold = vec![];
+ let fold_result = i.fold(vec![], |mut acc, v: IterItem| {
+ parameters_from_fold.push((acc.clone(), v.clone()));
+ acc.push(v);
+ acc
+ });
+ (parameters_from_fold, fold_result)
+ });
+ check_specialized!(it, |mut i| {
+ let mut parameters_from_all = vec![];
+ let first = i.next();
+ let all_result = i.all(|x| {
+ parameters_from_all.push(x.clone());
+ Some(x)==first
+ });
+ (parameters_from_all, all_result)
+ });
+ let size = it.clone().count();
+ for n in 0..size + 2 {
+ check_specialized!(it, |mut i| i.nth(n));
+ }
+ // size_hint is a bit harder to check
+ let mut it_sh = it.clone();
+ for n in 0..size + 2 {
+ let len = it_sh.clone().count();
+ let (min, max) = it_sh.size_hint();
+ assert_eq!(size - n.min(size), len);
+ assert!(min <= len);
+ if let Some(max) = max {
+ assert!(len <= max);
+ }
+ it_sh.next();
+ }
+}
+
+quickcheck! {
+ fn intersperse(v: Vec<u8>) -> () {
+ test_specializations(&v.into_iter().intersperse(0));
+ }
+}
+
+quickcheck! {
+ fn put_back_qc(test_vec: Vec<i32>) -> () {
+ test_specializations(&itertools::put_back(test_vec.iter()));
+ let mut pb = itertools::put_back(test_vec.into_iter());
+ pb.put_back(1);
+ test_specializations(&pb);
+ }
+}
+
+quickcheck! {
+ fn merge_join_by_qc(i1: Vec<usize>, i2: Vec<usize>) -> () {
+ test_specializations(&i1.into_iter().merge_join_by(i2.into_iter(), std::cmp::Ord::cmp));
+ }
+}
+
+quickcheck! {
+ fn map_into(v: Vec<u8>) -> () {
+ test_specializations(&v.into_iter().map_into::<u32>());
+ }
+}
+
+quickcheck! {
+ fn map_ok(v: Vec<Result<u8, char>>) -> () {
+ test_specializations(&v.into_iter().map_ok(|u| u.checked_add(1)));
+ }
+}
+
+quickcheck! {
+ fn process_results(v: Vec<Result<u8, u8>>) -> () {
+ helper(v.iter().copied());
+ helper(v.iter().copied().filter(Result::is_ok));
+
+ fn helper(it: impl Iterator<Item = Result<u8, u8>> + Clone) {
+ macro_rules! check_results_specialized {
+ ($src:expr, |$it:pat| $closure:expr) => {
+ assert_eq!(
+ itertools::process_results($src.clone(), |$it| $closure),
+ itertools::process_results($src.clone(), |i| {
+ let $it = Unspecialized(i);
+ $closure
+ }),
+ )
+ }
+ }
+
+ check_results_specialized!(it, |i| i.count());
+ check_results_specialized!(it, |i| i.last());
+ check_results_specialized!(it, |i| i.collect::<Vec<_>>());
+ check_results_specialized!(it, |i| {
+ let mut parameters_from_fold = vec![];
+ let fold_result = i.fold(vec![], |mut acc, v| {
+ parameters_from_fold.push((acc.clone(), v));
+ acc.push(v);
+ acc
+ });
+ (parameters_from_fold, fold_result)
+ });
+ check_results_specialized!(it, |mut i| {
+ let mut parameters_from_all = vec![];
+ let first = i.next();
+ let all_result = i.all(|x| {
+ parameters_from_all.push(x);
+ Some(x)==first
+ });
+ (parameters_from_all, all_result)
+ });
+ let size = it.clone().count();
+ for n in 0..size + 2 {
+ check_results_specialized!(it, |mut i| i.nth(n));
+ }
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/test_core.rs b/rust/hw/char/pl011/vendor/itertools/tests/test_core.rs
new file mode 100644
index 0000000000..df94eb665f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/test_core.rs
@@ -0,0 +1,317 @@
+//! Licensed under the Apache License, Version 2.0
+//! https://www.apache.org/licenses/LICENSE-2.0 or the MIT license
+//! https://opensource.org/licenses/MIT, at your
+//! option. This file may not be copied, modified, or distributed
+//! except according to those terms.
+#![no_std]
+
+use core::iter;
+use itertools as it;
+use crate::it::Itertools;
+use crate::it::interleave;
+use crate::it::intersperse;
+use crate::it::intersperse_with;
+use crate::it::multizip;
+use crate::it::free::put_back;
+use crate::it::iproduct;
+use crate::it::izip;
+use crate::it::chain;
+
+#[test]
+fn product2() {
+ let s = "αβ";
+
+ let mut prod = iproduct!(s.chars(), 0..2);
+ assert!(prod.next() == Some(('α', 0)));
+ assert!(prod.next() == Some(('α', 1)));
+ assert!(prod.next() == Some(('β', 0)));
+ assert!(prod.next() == Some(('β', 1)));
+ assert!(prod.next() == None);
+}
+
+#[test]
+fn product_temporary() {
+ for (_x, _y, _z) in iproduct!(
+ [0, 1, 2].iter().cloned(),
+ [0, 1, 2].iter().cloned(),
+ [0, 1, 2].iter().cloned())
+ {
+ // ok
+ }
+}
+
+
+#[test]
+fn izip_macro() {
+ let mut zip = izip!(2..3);
+ assert!(zip.next() == Some(2));
+ assert!(zip.next().is_none());
+
+ let mut zip = izip!(0..3, 0..2, 0..2i8);
+ for i in 0..2 {
+ assert!((i as usize, i, i as i8) == zip.next().unwrap());
+ }
+ assert!(zip.next().is_none());
+
+ let xs: [isize; 0] = [];
+ let mut zip = izip!(0..3, 0..2, 0..2i8, &xs);
+ assert!(zip.next().is_none());
+}
+
+#[test]
+fn izip2() {
+ let _zip1: iter::Zip<_, _> = izip!(1.., 2..);
+ let _zip2: iter::Zip<_, _> = izip!(1.., 2.., );
+}
+
+#[test]
+fn izip3() {
+ let mut zip: iter::Map<iter::Zip<_, _>, _> = izip!(0..3, 0..2, 0..2i8);
+ for i in 0..2 {
+ assert!((i as usize, i, i as i8) == zip.next().unwrap());
+ }
+ assert!(zip.next().is_none());
+}
+
+#[test]
+fn multizip3() {
+ let mut zip = multizip((0..3, 0..2, 0..2i8));
+ for i in 0..2 {
+ assert!((i as usize, i, i as i8) == zip.next().unwrap());
+ }
+ assert!(zip.next().is_none());
+
+ let xs: [isize; 0] = [];
+ let mut zip = multizip((0..3, 0..2, 0..2i8, xs.iter()));
+ assert!(zip.next().is_none());
+
+ for (_, _, _, _, _) in multizip((0..3, 0..2, xs.iter(), &xs, xs.to_vec())) {
+ /* test compiles */
+ }
+}
+
+#[test]
+fn chain_macro() {
+ let mut chain = chain!(2..3);
+ assert!(chain.next() == Some(2));
+ assert!(chain.next().is_none());
+
+ let mut chain = chain!(0..2, 2..3, 3..5i8);
+ for i in 0..5i8 {
+ assert_eq!(Some(i), chain.next());
+ }
+ assert!(chain.next().is_none());
+
+ let mut chain = chain!();
+ assert_eq!(chain.next(), Option::<()>::None);
+}
+
+#[test]
+fn chain2() {
+ let _ = chain!(1.., 2..);
+ let _ = chain!(1.., 2.., );
+}
+
+#[test]
+fn write_to() {
+ let xs = [7, 9, 8];
+ let mut ys = [0; 5];
+ let cnt = ys.iter_mut().set_from(xs.iter().copied());
+ assert!(cnt == xs.len());
+ assert!(ys == [7, 9, 8, 0, 0]);
+
+ let cnt = ys.iter_mut().set_from(0..10);
+ assert!(cnt == ys.len());
+ assert!(ys == [0, 1, 2, 3, 4]);
+}
+
+#[test]
+fn test_interleave() {
+ let xs: [u8; 0] = [];
+ let ys = [7u8, 9, 8, 10];
+ let zs = [2u8, 77];
+ let it = interleave(xs.iter(), ys.iter());
+ it::assert_equal(it, ys.iter());
+
+ let rs = [7u8, 2, 9, 77, 8, 10];
+ let it = interleave(ys.iter(), zs.iter());
+ it::assert_equal(it, rs.iter());
+}
+
+#[test]
+fn test_intersperse() {
+ let xs = [1u8, 2, 3];
+ let ys = [1u8, 0, 2, 0, 3];
+ let it = intersperse(&xs, &0);
+ it::assert_equal(it, ys.iter());
+}
+
+#[test]
+fn test_intersperse_with() {
+ let xs = [1u8, 2, 3];
+ let ys = [1u8, 10, 2, 10, 3];
+ let i = 10;
+ let it = intersperse_with(&xs, || &i);
+ it::assert_equal(it, ys.iter());
+}
+
+#[allow(deprecated)]
+#[test]
+fn foreach() {
+ let xs = [1i32, 2, 3];
+ let mut sum = 0;
+ xs.iter().foreach(|elt| sum += *elt);
+ assert!(sum == 6);
+}
+
+#[test]
+fn dropping() {
+ let xs = [1, 2, 3];
+ let mut it = xs.iter().dropping(2);
+ assert_eq!(it.next(), Some(&3));
+ assert!(it.next().is_none());
+ let mut it = xs.iter().dropping(5);
+ assert!(it.next().is_none());
+}
+
+#[test]
+fn batching() {
+ let xs = [0, 1, 2, 1, 3];
+ let ys = [(0, 1), (2, 1)];
+
+ // An iterator that gathers elements up in pairs
+ let pit = xs
+ .iter()
+ .cloned()
+ .batching(|it| it.next().and_then(|x| it.next().map(|y| (x, y))));
+ it::assert_equal(pit, ys.iter().cloned());
+}
+
+#[test]
+fn test_put_back() {
+ let xs = [0, 1, 1, 1, 2, 1, 3, 3];
+ let mut pb = put_back(xs.iter().cloned());
+ pb.next();
+ pb.put_back(1);
+ pb.put_back(0);
+ it::assert_equal(pb, xs.iter().cloned());
+}
+
+#[allow(deprecated)]
+#[test]
+fn step() {
+ it::assert_equal((0..10).step(1), 0..10);
+ it::assert_equal((0..10).step(2), (0..10).filter(|x: &i32| *x % 2 == 0));
+ it::assert_equal((0..10).step(10), 0..1);
+}
+
+#[allow(deprecated)]
+#[test]
+fn merge() {
+ it::assert_equal((0..10).step(2).merge((1..10).step(2)), 0..10);
+}
+
+
+#[test]
+fn repeatn() {
+ let s = "α";
+ let mut it = it::repeat_n(s, 3);
+ assert_eq!(it.len(), 3);
+ assert_eq!(it.next(), Some(s));
+ assert_eq!(it.next(), Some(s));
+ assert_eq!(it.next(), Some(s));
+ assert_eq!(it.next(), None);
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn count_clones() {
+ // Check that RepeatN only clones N - 1 times.
+
+ use core::cell::Cell;
+ #[derive(PartialEq, Debug)]
+ struct Foo {
+ n: Cell<usize>
+ }
+
+ impl Clone for Foo
+ {
+ fn clone(&self) -> Self
+ {
+ let n = self.n.get();
+ self.n.set(n + 1);
+ Foo { n: Cell::new(n + 1) }
+ }
+ }
+
+
+ for n in 0..10 {
+ let f = Foo{n: Cell::new(0)};
+ let it = it::repeat_n(f, n);
+ // drain it
+ let last = it.last();
+ if n == 0 {
+ assert_eq!(last, None);
+ } else {
+ assert_eq!(last, Some(Foo{n: Cell::new(n - 1)}));
+ }
+ }
+}
+
+#[test]
+fn part() {
+ let mut data = [7, 1, 1, 9, 1, 1, 3];
+ let i = it::partition(&mut data, |elt| *elt >= 3);
+ assert_eq!(i, 3);
+ assert_eq!(data, [7, 3, 9, 1, 1, 1, 1]);
+
+ let i = it::partition(&mut data, |elt| *elt == 1);
+ assert_eq!(i, 4);
+ assert_eq!(data, [1, 1, 1, 1, 9, 3, 7]);
+
+ let mut data = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let i = it::partition(&mut data, |elt| *elt % 3 == 0);
+ assert_eq!(i, 3);
+ assert_eq!(data, [9, 6, 3, 4, 5, 2, 7, 8, 1]);
+}
+
+#[test]
+fn tree_fold1() {
+ for i in 0..100 {
+ assert_eq!((0..i).tree_fold1(|x, y| x + y), (0..i).fold1(|x, y| x +
y));
+ }
+}
+
+#[test]
+fn exactly_one() {
+ assert_eq!((0..10).filter(|&x| x == 2).exactly_one().unwrap(), 2);
+ assert!((0..10).filter(|&x| x > 1 && x < 4).exactly_one().unwrap_err().eq(2..4));
+ assert!((0..10).filter(|&x| x > 1 && x < 5).exactly_one().unwrap_err().eq(2..5));
+ assert!((0..10).filter(|&_| false).exactly_one().unwrap_err().eq(0..0));
+}
+
+#[test]
+fn at_most_one() {
+ assert_eq!((0..10).filter(|&x| x == 2).at_most_one().unwrap(), Some(2));
+ assert!((0..10).filter(|&x| x > 1 && x < 4).at_most_one().unwrap_err().eq(2..4));
+ assert!((0..10).filter(|&x| x > 1 && x < 5).at_most_one().unwrap_err().eq(2..5));
+ assert_eq!((0..10).filter(|&_| false).at_most_one().unwrap(), None);
+}
+
+#[test]
+fn sum1() {
+ let v: &[i32] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+ assert_eq!(v[..0].iter().cloned().sum1::<i32>(), None);
+ assert_eq!(v[1..2].iter().cloned().sum1::<i32>(), Some(1));
+ assert_eq!(v[1..3].iter().cloned().sum1::<i32>(), Some(3));
+ assert_eq!(v.iter().cloned().sum1::<i32>(), Some(55));
+}
+
+#[test]
+fn product1() {
+ let v: &[i32] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+ assert_eq!(v[..0].iter().cloned().product1::<i32>(), None);
+ assert_eq!(v[..1].iter().cloned().product1::<i32>(), Some(0));
+ assert_eq!(v[1..3].iter().cloned().product1::<i32>(), Some(2));
+ assert_eq!(v[1..5].iter().cloned().product1::<i32>(), Some(24));
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/test_std.rs b/rust/hw/char/pl011/vendor/itertools/tests/test_std.rs
new file mode 100644
index 0000000000..77207d87e3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/test_std.rs
@@ -0,0 +1,1184 @@
+use quickcheck as qc;
+use rand::{distributions::{Distribution, Standard}, Rng, SeedableRng, rngs::StdRng};
+use rand::{seq::SliceRandom, thread_rng};
+use std::{cmp::min, fmt::Debug, marker::PhantomData};
+use itertools as it;
+use crate::it::Itertools;
+use crate::it::ExactlyOneError;
+use crate::it::multizip;
+use crate::it::multipeek;
+use crate::it::peek_nth;
+use crate::it::free::rciter;
+use crate::it::free::put_back_n;
+use crate::it::FoldWhile;
+use crate::it::cloned;
+use crate::it::iproduct;
+use crate::it::izip;
+
+#[test]
+fn product3() {
+ let prod = iproduct!(0..3, 0..2, 0..2);
+ assert_eq!(prod.size_hint(), (12, Some(12)));
+ let v = prod.collect_vec();
+ for i in 0..3 {
+ for j in 0..2 {
+ for k in 0..2 {
+ assert!((i, j, k) == v[(i * 2 * 2 + j * 2 + k) as usize]);
+ }
+ }
+ }
+ for (_, _, _, _) in iproduct!(0..3, 0..2, 0..2, 0..3) {
+ /* test compiles */
+ }
+}
+
+#[test]
+fn interleave_shortest() {
+ let v0: Vec<i32> = vec![0, 2, 4];
+ let v1: Vec<i32> = vec![1, 3, 5, 7];
+ let it = v0.into_iter().interleave_shortest(v1.into_iter());
+ assert_eq!(it.size_hint(), (6, Some(6)));
+ assert_eq!(it.collect_vec(), vec![0, 1, 2, 3, 4, 5]);
+
+ let v0: Vec<i32> = vec![0, 2, 4, 6, 8];
+ let v1: Vec<i32> = vec![1, 3, 5];
+ let it = v0.into_iter().interleave_shortest(v1.into_iter());
+ assert_eq!(it.size_hint(), (7, Some(7)));
+ assert_eq!(it.collect_vec(), vec![0, 1, 2, 3, 4, 5, 6]);
+
+ let i0 = ::std::iter::repeat(0);
+ let v1: Vec<_> = vec![1, 3, 5];
+ let it = i0.interleave_shortest(v1.into_iter());
+ assert_eq!(it.size_hint(), (7, Some(7)));
+
+ let v0: Vec<_> = vec![0, 2, 4];
+ let i1 = ::std::iter::repeat(1);
+ let it = v0.into_iter().interleave_shortest(i1);
+ assert_eq!(it.size_hint(), (6, Some(6)));
+}
+
+#[test]
+fn duplicates_by() {
+ let xs = ["aaa", "bbbbb", "aa", "ccc", "bbbb", "aaaaa", "cccc"];
+ let ys = ["aa", "bbbb", "cccc"];
+ it::assert_equal(ys.iter(), xs.iter().duplicates_by(|x| x[..2].to_string()));
+ it::assert_equal(ys.iter(), xs.iter().rev().duplicates_by(|x| x[..2].to_string()).rev());
+ let ys_rev = ["ccc", "aa", "bbbbb"];
+ it::assert_equal(ys_rev.iter(), xs.iter().duplicates_by(|x| x[..2].to_string()).rev());
+}
+
+#[test]
+fn duplicates() {
+ let xs = [0, 1, 2, 3, 2, 1, 3];
+ let ys = [2, 1, 3];
+ it::assert_equal(ys.iter(), xs.iter().duplicates());
+ it::assert_equal(ys.iter(), xs.iter().rev().duplicates().rev());
+ let ys_rev = [3, 2, 1];
+ it::assert_equal(ys_rev.iter(), xs.iter().duplicates().rev());
+
+ let xs = [0, 1, 0, 1];
+ let ys = [0, 1];
+ it::assert_equal(ys.iter(), xs.iter().duplicates());
+ it::assert_equal(ys.iter(), xs.iter().rev().duplicates().rev());
+ let ys_rev = [1, 0];
+ it::assert_equal(ys_rev.iter(), xs.iter().duplicates().rev());
+
+ let xs = vec![0, 1, 2, 1, 2];
+ let ys = vec![1, 2];
+ assert_eq!(ys, xs.iter().duplicates().cloned().collect_vec());
+ assert_eq!(ys, xs.iter().rev().duplicates().rev().cloned().collect_vec());
+ let ys_rev = vec![2, 1];
+ assert_eq!(ys_rev, xs.iter().duplicates().rev().cloned().collect_vec());
+}
+
+#[test]
+fn unique_by() {
+ let xs = ["aaa", "bbbbb", "aa", "ccc", "bbbb", "aaaaa", "cccc"];
+ let ys = ["aaa", "bbbbb", "ccc"];
+ it::assert_equal(ys.iter(), xs.iter().unique_by(|x| x[..2].to_string()));
+ it::assert_equal(ys.iter(), xs.iter().rev().unique_by(|x| x[..2].to_string()).rev());
+ let ys_rev = ["cccc", "aaaaa", "bbbb"];
+ it::assert_equal(ys_rev.iter(), xs.iter().unique_by(|x| x[..2].to_string()).rev());
+}
+
+#[test]
+fn unique() {
+ let xs = [0, 1, 2, 3, 2, 1, 3];
+ let ys = [0, 1, 2, 3];
+ it::assert_equal(ys.iter(), xs.iter().unique());
+ it::assert_equal(ys.iter(), xs.iter().rev().unique().rev());
+ let ys_rev = [3, 1, 2, 0];
+ it::assert_equal(ys_rev.iter(), xs.iter().unique().rev());
+
+ let xs = [0, 1];
+ let ys = [0, 1];
+ it::assert_equal(ys.iter(), xs.iter().unique());
+ it::assert_equal(ys.iter(), xs.iter().rev().unique().rev());
+ let ys_rev = [1, 0];
+ it::assert_equal(ys_rev.iter(), xs.iter().unique().rev());
+}
+
+#[test]
+fn intersperse() {
+ let xs = ["a", "", "b", "c"];
+ let v: Vec<&str> = xs.iter().cloned().intersperse(", ").collect();
+ let text: String = v.concat();
+ assert_eq!(text, "a, , b, c".to_string());
+
+ let ys = [0, 1, 2, 3];
+ let mut it = ys[..0].iter().copied().intersperse(1);
+ assert!(it.next() == None);
+}
+
+#[test]
+fn dedup() {
+ let xs = [0, 1, 1, 1, 2, 1, 3, 3];
+ let ys = [0, 1, 2, 1, 3];
+ it::assert_equal(ys.iter(), xs.iter().dedup());
+ let xs = [0, 0, 0, 0, 0];
+ let ys = [0];
+ it::assert_equal(ys.iter(), xs.iter().dedup());
+
+ let xs = [0, 1, 1, 1, 2, 1, 3, 3];
+ let ys = [0, 1, 2, 1, 3];
+ let mut xs_d = Vec::new();
+ xs.iter().dedup().fold((), |(), &elt| xs_d.push(elt));
+ assert_eq!(&xs_d, &ys);
+}
+
+#[test]
+fn coalesce() {
+ let data = vec![-1., -2., -3., 3., 1., 0., -1.];
+ let it = data.iter().cloned().coalesce(|x, y|
+ if (x >= 0.) == (y >= 0.) {
+ Ok(x + y)
+ } else {
+ Err((x, y))
+ }
+ );
+ itertools::assert_equal(it.clone(), vec![-6., 4., -1.]);
+ assert_eq!(
+ it.fold(vec![], |mut v, n| {
+ v.push(n);
+ v
+ }),
+ vec![-6., 4., -1.]
+ );
+}
+
+#[test]
+fn dedup_by() {
+ let xs = [(0, 0), (0, 1), (1, 1), (2, 1), (0, 2), (3, 1), (0, 3), (1, 3)];
+ let ys = [(0, 0), (0, 1), (0, 2), (3, 1), (0, 3)];
+ it::assert_equal(ys.iter(), xs.iter().dedup_by(|x, y| x.1==y.1));
+ let xs = [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)];
+ let ys = [(0, 1)];
+ it::assert_equal(ys.iter(), xs.iter().dedup_by(|x, y| x.0==y.0));
+
+ let xs = [(0, 0), (0, 1), (1, 1), (2, 1), (0, 2), (3, 1), (0, 3), (1, 3)];
+ let ys = [(0, 0), (0, 1), (0, 2), (3, 1), (0, 3)];
+ let mut xs_d = Vec::new();
+ xs.iter().dedup_by(|x, y| x.1==y.1).fold((), |(), &elt| xs_d.push(elt));
+ assert_eq!(&xs_d, &ys);
+}
+
+#[test]
+fn dedup_with_count() {
+ let xs: [i32; 8] = [0, 1, 1, 1, 2, 1, 3, 3];
+ let ys: [(usize, &i32); 5] = [(1, &0), (3, &1), (1, &2), (1, &1), (2, &3)];
+
+ it::assert_equal(ys.iter().cloned(), xs.iter().dedup_with_count());
+
+ let xs: [i32; 5] = [0, 0, 0, 0, 0];
+ let ys: [(usize, &i32); 1] = [(5, &0)];
+
+ it::assert_equal(ys.iter().cloned(), xs.iter().dedup_with_count());
+}
+
+
+#[test]
+fn dedup_by_with_count() {
+ let xs = [(0, 0), (0, 1), (1, 1), (2, 1), (0, 2), (3, 1), (0, 3), (1, 3)];
+ let ys = [(1, &(0, 0)), (3, &(0, 1)), (1, &(0, 2)), (1, &(3, 1)), (2, &(0, 3))];
+
+ it::assert_equal(ys.iter().cloned(), xs.iter().dedup_by_with_count(|x, y| x.1==y.1));
+
+ let xs = [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)];
+ let ys = [( 5, &(0, 1))];
+
+ it::assert_equal(ys.iter().cloned(), xs.iter().dedup_by_with_count(|x, y| x.0==y.0));
+}
+
+#[test]
+fn all_equal() {
+ assert!("".chars().all_equal());
+ assert!("A".chars().all_equal());
+ assert!(!"AABBCCC".chars().all_equal());
+ assert!("AAAAAAA".chars().all_equal());
+ for (_key, mut sub) in &"AABBCCC".chars().group_by(|&x| x) {
+ assert!(sub.all_equal());
+ }
+}
+
+#[test]
+fn all_equal_value() {
+ assert_eq!("".chars().all_equal_value(), Err(None));
+ assert_eq!("A".chars().all_equal_value(), Ok('A'));
+ assert_eq!("AABBCCC".chars().all_equal_value(), Err(Some(('A', 'B'))));
+ assert_eq!("AAAAAAA".chars().all_equal_value(), Ok('A'));
+ {
+ let mut it = [1,2,3].iter().copied();
+ let result = it.all_equal_value();
+ assert_eq!(result, Err(Some((1, 2))));
+ let remaining = it.next();
+ assert_eq!(remaining, Some(3));
+ assert!(it.next().is_none());
+ }
+}
+
+#[test]
+fn all_unique() {
+ assert!("ABCDEFGH".chars().all_unique());
+ assert!(!"ABCDEFGA".chars().all_unique());
+ assert!(::std::iter::empty::<usize>().all_unique());
+}
+
+#[test]
+fn test_put_back_n() {
+ let xs = [0, 1, 1, 1, 2, 1, 3, 3];
+ let mut pb = put_back_n(xs.iter().cloned());
+ pb.next();
+ pb.next();
+ pb.put_back(1);
+ pb.put_back(0);
+ it::assert_equal(pb, xs.iter().cloned());
+}
+
+#[test]
+fn tee() {
+ let xs = [0, 1, 2, 3];
+ let (mut t1, mut t2) = xs.iter().cloned().tee();
+ assert_eq!(t1.next(), Some(0));
+ assert_eq!(t2.next(), Some(0));
+ assert_eq!(t1.next(), Some(1));
+ assert_eq!(t1.next(), Some(2));
+ assert_eq!(t1.next(), Some(3));
+ assert_eq!(t1.next(), None);
+ assert_eq!(t2.next(), Some(1));
+ assert_eq!(t2.next(), Some(2));
+ assert_eq!(t1.next(), None);
+ assert_eq!(t2.next(), Some(3));
+ assert_eq!(t2.next(), None);
+ assert_eq!(t1.next(), None);
+ assert_eq!(t2.next(), None);
+
+ let (t1, t2) = xs.iter().cloned().tee();
+ it::assert_equal(t1, xs.iter().cloned());
+ it::assert_equal(t2, xs.iter().cloned());
+
+ let (t1, t2) = xs.iter().cloned().tee();
+ it::assert_equal(t1.zip(t2), xs.iter().cloned().zip(xs.iter().cloned()));
+}
+
+
+#[test]
+fn test_rciter() {
+ let xs = [0, 1, 1, 1, 2, 1, 3, 5, 6];
+
+ let mut r1 = rciter(xs.iter().cloned());
+ let mut r2 = r1.clone();
+ assert_eq!(r1.next(), Some(0));
+ assert_eq!(r2.next(), Some(1));
+ let mut z = r1.zip(r2);
+ assert_eq!(z.next(), Some((1, 1)));
+ assert_eq!(z.next(), Some((2, 1)));
+ assert_eq!(z.next(), Some((3, 5)));
+ assert_eq!(z.next(), None);
+
+ // test intoiterator
+ let r1 = rciter(0..5);
+ let mut z = izip!(&r1, r1);
+ assert_eq!(z.next(), Some((0, 1)));
+}
+
+#[allow(deprecated)]
+#[test]
+fn trait_pointers() {
+ struct ByRef<'r, I: ?Sized>(&'r mut I) ;
+
+ impl<'r, X, I: ?Sized> Iterator for ByRef<'r, I> where
+ I: 'r + Iterator<Item=X>
+ {
+ type Item = X;
+ fn next(&mut self) -> Option<Self::Item>
+ {
+ self.0.next()
+ }
+ }
+
+ let mut it = Box::new(0..10) as Box<dyn Iterator<Item=i32>>;
+ assert_eq!(it.next(), Some(0));
+
+ {
+ /* make sure foreach works on non-Sized */
+ let jt: &mut dyn Iterator<Item = i32> = &mut *it;
+ assert_eq!(jt.next(), Some(1));
+
+ {
+ let mut r = ByRef(jt);
+ assert_eq!(r.next(), Some(2));
+ }
+
+ assert_eq!(jt.find_position(|x| *x == 4), Some((1, 4)));
+ jt.foreach(|_| ());
+ }
+}
+
+#[test]
+fn merge_by() {
+ let odd : Vec<(u32, &str)> = vec![(1, "hello"), (3, "world"), (5, "!")];
+ let even = vec![(2, "foo"), (4, "bar"), (6, "baz")];
+ let expected = vec![(1, "hello"), (2, "foo"), (3, "world"), (4, "bar"), (5, "!"), (6, "baz")];
+ let results = odd.iter().merge_by(even.iter(), |a, b| a.0 <= b.0);
+ it::assert_equal(results, expected.iter());
+}
+
+#[test]
+fn merge_by_btree() {
+ use std::collections::BTreeMap;
+ let mut bt1 = BTreeMap::new();
+ bt1.insert("hello", 1);
+ bt1.insert("world", 3);
+ let mut bt2 = BTreeMap::new();
+ bt2.insert("foo", 2);
+ bt2.insert("bar", 4);
+ let results = bt1.into_iter().merge_by(bt2.into_iter(), |a, b| a.0 <= b.0 );
+ let expected = vec![("bar", 4), ("foo", 2), ("hello", 1), ("world", 3)];
+ it::assert_equal(results, expected.into_iter());
+}
+
+#[allow(deprecated)]
+#[test]
+fn kmerge() {
+ let its = (0..4).map(|s| (s..10).step(4));
+
+ it::assert_equal(its.kmerge(), 0..10);
+}
+
+#[allow(deprecated)]
+#[test]
+fn kmerge_2() {
+ let its = vec![3, 2, 1, 0].into_iter().map(|s| (s..10).step(4));
+
+ it::assert_equal(its.kmerge(), 0..10);
+}
+
+#[test]
+fn kmerge_empty() {
+ let its = (0..4).map(|_| 0..0);
+ assert_eq!(its.kmerge().next(), None);
+}
+
+#[test]
+fn kmerge_size_hint() {
+ let its = (0..5).map(|_| (0..10));
+ assert_eq!(its.kmerge().size_hint(), (50, Some(50)));
+}
+
+#[test]
+fn kmerge_empty_size_hint() {
+ let its = (0..5).map(|_| (0..0));
+ assert_eq!(its.kmerge().size_hint(), (0, Some(0)));
+}
+
+#[test]
+fn join() {
+ let many = [1, 2, 3];
+ let one = [1];
+ let none: Vec<i32> = vec![];
+
+ assert_eq!(many.iter().join(", "), "1, 2, 3");
+ assert_eq!( one.iter().join(", "), "1");
+ assert_eq!(none.iter().join(", "), "");
+}
+
+#[test]
+fn sorted_unstable_by() {
+ let sc = [3, 4, 1, 2].iter().cloned().sorted_by(|&a, &b| {
+ a.cmp(&b)
+ });
+ it::assert_equal(sc, vec![1, 2, 3, 4]);
+
+ let v = (0..5).sorted_unstable_by(|&a, &b| a.cmp(&b).reverse());
+ it::assert_equal(v, vec![4, 3, 2, 1, 0]);
+}
+
+#[test]
+fn sorted_unstable_by_key() {
+ let sc = [3, 4, 1, 2].iter().cloned().sorted_unstable_by_key(|&x| x);
+ it::assert_equal(sc, vec![1, 2, 3, 4]);
+
+ let v = (0..5).sorted_unstable_by_key(|&x| -x);
+ it::assert_equal(v, vec![4, 3, 2, 1, 0]);
+}
+
+#[test]
+fn sorted_by() {
+ let sc = [3, 4, 1, 2].iter().cloned().sorted_by(|&a, &b| {
+ a.cmp(&b)
+ });
+ it::assert_equal(sc, vec![1, 2, 3, 4]);
+
+ let v = (0..5).sorted_by(|&a, &b| a.cmp(&b).reverse());
+ it::assert_equal(v, vec![4, 3, 2, 1, 0]);
+}
+
+qc::quickcheck! {
+ fn k_smallest_range(n: u64, m: u16, k: u16) -> () {
+ // u16 is used to constrain k and m to 0..2¹⁶,
+ // otherwise the test could use too much memory.
+ let (k, m) = (k as u64, m as u64);
+
+ // Generate a random permutation of n..n+m
+ let i = {
+ let mut v: Vec<u64> = (n..n.saturating_add(m)).collect();
+ v.shuffle(&mut thread_rng());
+ v.into_iter()
+ };
+
+ // Check that taking the k smallest elements yields n..n+min(k, m)
+ it::assert_equal(
+ i.k_smallest(k as usize),
+ n..n.saturating_add(min(k, m))
+ );
+ }
+}
+
+#[derive(Clone, Debug)]
+struct RandIter<T: 'static + Clone + Send, R: 'static + Clone + Rng + SeedableRng + Send = StdRng> {
+ idx: usize,
+ len: usize,
+ rng: R,
+ _t: PhantomData<T>
+}
+
+impl<T: Clone + Send, R: Clone + Rng + SeedableRng + Send> Iterator for RandIter<T, R>
+where Standard: Distribution<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<T> {
+ if self.idx == self.len {
+ None
+ } else {
+ self.idx += 1;
+ Some(self.rng.gen())
+ }
+ }
+}
+
+impl<T: Clone + Send, R: Clone + Rng + SeedableRng + Send> qc::Arbitrary for RandIter<T, R> {
+ fn arbitrary<G: qc::Gen>(g: &mut G) -> Self {
+ RandIter {
+ idx: 0,
+ len: g.size(),
+ rng: R::seed_from_u64(g.next_u64()),
+ _t : PhantomData{},
+ }
+ }
+}
+
+// Check that taking the k smallest is the same as
+// sorting then taking the k first elements
+fn k_smallest_sort<I>(i: I, k: u16)
+where
+ I: Iterator + Clone,
+ I::Item: Ord + Debug,
+{
+ let j = i.clone();
+ let k = k as usize;
+ it::assert_equal(
+ i.k_smallest(k),
+ j.sorted().take(k)
+ )
+}
+
+macro_rules! generic_test {
+ ($f:ident, $($t:ty),+) => {
+ $(paste::item! {
+ qc::quickcheck! {
+ fn [< $f _ $t >](i: RandIter<$t>, k: u16) -> () {
+ $f(i, k)
+ }
+ }
+ })+
+ };
+}
+
+generic_test!(k_smallest_sort, u8, u16, u32, u64, i8, i16, i32, i64);
+
+#[test]
+fn sorted_by_key() {
+ let sc = [3, 4, 1, 2].iter().cloned().sorted_by_key(|&x| x);
+ it::assert_equal(sc, vec![1, 2, 3, 4]);
+
+ let v = (0..5).sorted_by_key(|&x| -x);
+ it::assert_equal(v, vec![4, 3, 2, 1, 0]);
+}
+
+#[test]
+fn sorted_by_cached_key() {
+ // Track calls to key function
+ let mut ncalls = 0;
+
+ let sorted = [3, 4, 1, 2].iter().cloned().sorted_by_cached_key(|&x| {
+ ncalls += 1;
+ x.to_string()
+ });
+ it::assert_equal(sorted, vec![1, 2, 3, 4]);
+ // Check key function called once per element
+ assert_eq!(ncalls, 4);
+
+ let mut ncalls = 0;
+
+ let sorted = (0..5).sorted_by_cached_key(|&x| {
+ ncalls += 1;
+ -x
+ });
+ it::assert_equal(sorted, vec![4, 3, 2, 1, 0]);
+ // Check key function called once per element
+ assert_eq!(ncalls, 5);
+}
+
+#[test]
+fn test_multipeek() {
+ let nums = vec![1u8,2,3,4,5];
+
+ let mp = multipeek(nums.iter().copied());
+ assert_eq!(nums, mp.collect::<Vec<_>>());
+
+ let mut mp = multipeek(nums.iter().copied());
+ assert_eq!(mp.peek(), Some(&1));
+ assert_eq!(mp.next(), Some(1));
+ assert_eq!(mp.peek(), Some(&2));
+ assert_eq!(mp.peek(), Some(&3));
+ assert_eq!(mp.next(), Some(2));
+ assert_eq!(mp.peek(), Some(&3));
+ assert_eq!(mp.peek(), Some(&4));
+ assert_eq!(mp.peek(), Some(&5));
+ assert_eq!(mp.peek(), None);
+ assert_eq!(mp.next(), Some(3));
+ assert_eq!(mp.next(), Some(4));
+ assert_eq!(mp.peek(), Some(&5));
+ assert_eq!(mp.peek(), None);
+ assert_eq!(mp.next(), Some(5));
+ assert_eq!(mp.next(), None);
+ assert_eq!(mp.peek(), None);
+}
+
+#[test]
+fn test_multipeek_reset() {
+ let data = [1, 2, 3, 4];
+
+ let mut mp = multipeek(cloned(&data));
+ assert_eq!(mp.peek(), Some(&1));
+ assert_eq!(mp.next(), Some(1));
+ assert_eq!(mp.peek(), Some(&2));
+ assert_eq!(mp.peek(), Some(&3));
+ mp.reset_peek();
+ assert_eq!(mp.peek(), Some(&2));
+ assert_eq!(mp.next(), Some(2));
+}
+
+#[test]
+fn test_multipeek_peeking_next() {
+ use crate::it::PeekingNext;
+ let nums = vec![1u8,2,3,4,5,6,7];
+
+ let mut mp = multipeek(nums.iter().copied());
+ assert_eq!(mp.peeking_next(|&x| x != 0), Some(1));
+ assert_eq!(mp.next(), Some(2));
+ assert_eq!(mp.peek(), Some(&3));
+ assert_eq!(mp.peek(), Some(&4));
+ assert_eq!(mp.peeking_next(|&x| x == 3), Some(3));
+ assert_eq!(mp.peek(), Some(&4));
+ assert_eq!(mp.peeking_next(|&x| x != 4), None);
+ assert_eq!(mp.peeking_next(|&x| x == 4), Some(4));
+ assert_eq!(mp.peek(), Some(&5));
+ assert_eq!(mp.peek(), Some(&6));
+ assert_eq!(mp.peeking_next(|&x| x != 5), None);
+ assert_eq!(mp.peek(), Some(&7));
+ assert_eq!(mp.peeking_next(|&x| x == 5), Some(5));
+ assert_eq!(mp.peeking_next(|&x| x == 6), Some(6));
+ assert_eq!(mp.peek(), Some(&7));
+ assert_eq!(mp.peek(), None);
+ assert_eq!(mp.next(), Some(7));
+ assert_eq!(mp.peek(), None);
+}
+
+#[test]
+fn test_peek_nth() {
+ let nums = vec![1u8,2,3,4,5];
+
+ let iter = peek_nth(nums.iter().copied());
+ assert_eq!(nums, iter.collect::<Vec<_>>());
+
+ let mut iter = peek_nth(nums.iter().copied());
+
+ assert_eq!(iter.peek_nth(0), Some(&1));
+ assert_eq!(iter.peek_nth(0), Some(&1));
+ assert_eq!(iter.next(), Some(1));
+
+ assert_eq!(iter.peek_nth(0), Some(&2));
+ assert_eq!(iter.peek_nth(1), Some(&3));
+ assert_eq!(iter.next(), Some(2));
+
+ assert_eq!(iter.peek_nth(0), Some(&3));
+ assert_eq!(iter.peek_nth(1), Some(&4));
+ assert_eq!(iter.peek_nth(2), Some(&5));
+ assert_eq!(iter.peek_nth(3), None);
+
+ assert_eq!(iter.next(), Some(3));
+ assert_eq!(iter.next(), Some(4));
+
+ assert_eq!(iter.peek_nth(0), Some(&5));
+ assert_eq!(iter.peek_nth(1), None);
+ assert_eq!(iter.next(), Some(5));
+ assert_eq!(iter.next(), None);
+
+ assert_eq!(iter.peek_nth(0), None);
+ assert_eq!(iter.peek_nth(1), None);
+}
+
+#[test]
+fn test_peek_nth_peeking_next() {
+ use it::PeekingNext;
+ let nums = vec![1u8,2,3,4,5,6,7];
+ let mut iter = peek_nth(nums.iter().copied());
+
+ assert_eq!(iter.peeking_next(|&x| x != 0), Some(1));
+ assert_eq!(iter.next(), Some(2));
+
+ assert_eq!(iter.peek_nth(0), Some(&3));
+ assert_eq!(iter.peek_nth(1), Some(&4));
+ assert_eq!(iter.peeking_next(|&x| x == 3), Some(3));
+ assert_eq!(iter.peek(), Some(&4));
+
+ assert_eq!(iter.peeking_next(|&x| x != 4), None);
+ assert_eq!(iter.peeking_next(|&x| x == 4), Some(4));
+ assert_eq!(iter.peek_nth(0), Some(&5));
+ assert_eq!(iter.peek_nth(1), Some(&6));
+
+ assert_eq!(iter.peeking_next(|&x| x != 5), None);
+ assert_eq!(iter.peek(), Some(&5));
+
+ assert_eq!(iter.peeking_next(|&x| x == 5), Some(5));
+ assert_eq!(iter.peeking_next(|&x| x == 6), Some(6));
+ assert_eq!(iter.peek_nth(0), Some(&7));
+ assert_eq!(iter.peek_nth(1), None);
+ assert_eq!(iter.next(), Some(7));
+ assert_eq!(iter.peek(), None);
+}
+
+#[test]
+fn pad_using() {
+ it::assert_equal((0..0).pad_using(1, |_| 1), 1..2);
+
+ let v: Vec<usize> = vec![0, 1, 2];
+ let r = v.into_iter().pad_using(5, |n| n);
+ it::assert_equal(r, vec![0, 1, 2, 3, 4]);
+
+ let v: Vec<usize> = vec![0, 1, 2];
+ let r = v.into_iter().pad_using(1, |_| panic!());
+ it::assert_equal(r, vec![0, 1, 2]);
+}
+
+#[test]
+fn group_by() {
+ for (ch1, sub) in &"AABBCCC".chars().group_by(|&x| x) {
+ for ch2 in sub {
+ assert_eq!(ch1, ch2);
+ }
+ }
+
+ for (ch1, sub) in &"AAABBBCCCCDDDD".chars().group_by(|&x| x) {
+ for ch2 in sub {
+ assert_eq!(ch1, ch2);
+ if ch1 == 'C' {
+ break;
+ }
+ }
+ }
+
+ let toupper = |ch: &char| ch.to_uppercase().next().unwrap();
+
+ // try all possible orderings
+ for indices in permutohedron::Heap::new(&mut [0, 1, 2, 3]) {
+ let groups = "AaaBbbccCcDDDD".chars().group_by(&toupper);
+ let mut subs = groups.into_iter().collect_vec();
+
+ for &idx in &indices[..] {
+ let (key, text) = match idx {
+ 0 => ('A', "Aaa".chars()),
+ 1 => ('B', "Bbb".chars()),
+ 2 => ('C', "ccCc".chars()),
+ 3 => ('D', "DDDD".chars()),
+ _ => unreachable!(),
+ };
+ assert_eq!(key, subs[idx].0);
+ it::assert_equal(&mut subs[idx].1, text);
+ }
+ }
+
+ let groups = "AAABBBCCCCDDDD".chars().group_by(|&x| x);
+ let mut subs = groups.into_iter().map(|(_, g)| g).collect_vec();
+
+ let sd = subs.pop().unwrap();
+ let sc = subs.pop().unwrap();
+ let sb = subs.pop().unwrap();
+ let sa = subs.pop().unwrap();
+ for (a, b, c, d) in multizip((sa, sb, sc, sd)) {
+ assert_eq!(a, 'A');
+ assert_eq!(b, 'B');
+ assert_eq!(c, 'C');
+ assert_eq!(d, 'D');
+ }
+
+ // check that the key closure is called exactly n times
+ {
+ let mut ntimes = 0;
+ let text = "AABCCC";
+ for (_, sub) in &text.chars().group_by(|&x| { ntimes += 1; x}) {
+ for _ in sub {
+ }
+ }
+ assert_eq!(ntimes, text.len());
+ }
+
+ {
+ let mut ntimes = 0;
+ let text = "AABCCC";
+ for _ in &text.chars().group_by(|&x| { ntimes += 1; x}) {
+ }
+ assert_eq!(ntimes, text.len());
+ }
+
+ {
+ let text = "ABCCCDEEFGHIJJKK";
+ let gr = text.chars().group_by(|&x| x);
+ it::assert_equal(gr.into_iter().flat_map(|(_, sub)| sub), text.chars());
+ }
+}
+
+#[test]
+fn group_by_lazy_2() {
+ let data = vec![0, 1];
+ let groups = data.iter().group_by(|k| *k);
+ let gs = groups.into_iter().collect_vec();
+ it::assert_equal(data.iter(), gs.into_iter().flat_map(|(_k, g)| g));
+
+ let data = vec![0, 1, 1, 0, 0];
+ let groups = data.iter().group_by(|k| *k);
+ let mut gs = groups.into_iter().collect_vec();
+ gs[1..].reverse();
+ it::assert_equal(&[0, 0, 0, 1, 1], gs.into_iter().flat_map(|(_, g)| g));
+
+ let grouper = data.iter().group_by(|k| *k);
+ let mut groups = Vec::new();
+ for (k, group) in &grouper {
+ if *k == 1 {
+ groups.push(group);
+ }
+ }
+ it::assert_equal(&mut groups[0], &[1, 1]);
+
+ let data = vec![0, 0, 0, 1, 1, 0, 0, 2, 2, 3, 3];
+ let grouper = data.iter().group_by(|k| *k);
+ let mut groups = Vec::new();
+ for (i, (_, group)) in grouper.into_iter().enumerate() {
+ if i < 2 {
+ groups.push(group);
+ } else if i < 4 {
+ for _ in group {
+ }
+ } else {
+ groups.push(group);
+ }
+ }
+ it::assert_equal(&mut groups[0], &[0, 0, 0]);
+ it::assert_equal(&mut groups[1], &[1, 1]);
+ it::assert_equal(&mut groups[2], &[3, 3]);
+
+ // use groups as chunks
+ let data = vec![0, 0, 0, 1, 1, 0, 0, 2, 2, 3, 3];
+ let mut i = 0;
+ let grouper = data.iter().group_by(move |_| { let k = i / 3; i += 1; k });
+ for (i, group) in &grouper {
+ match i {
+ 0 => it::assert_equal(group, &[0, 0, 0]),
+ 1 => it::assert_equal(group, &[1, 1, 0]),
+ 2 => it::assert_equal(group, &[0, 2, 2]),
+ 3 => it::assert_equal(group, &[3, 3]),
+ _ => unreachable!(),
+ }
+ }
+}
+
+#[test]
+fn group_by_lazy_3() {
+ // test consuming each group on the lap after it was produced
+ let data = vec![0, 0, 0, 1, 1, 0, 0, 1, 1, 2, 2];
+ let grouper = data.iter().group_by(|elt| *elt);
+ let mut last = None;
+ for (key, group) in &grouper {
+ if let Some(gr) = last.take() {
+ for elt in gr {
+ assert!(elt != key && i32::abs(elt - key) == 1);
+ }
+ }
+ last = Some(group);
+ }
+}
+
+#[test]
+fn chunks() {
+ let data = vec![0, 0, 0, 1, 1, 0, 0, 2, 2, 3, 3];
+ let grouper = data.iter().chunks(3);
+ for (i, chunk) in grouper.into_iter().enumerate() {
+ match i {
+ 0 => it::assert_equal(chunk, &[0, 0, 0]),
+ 1 => it::assert_equal(chunk, &[1, 1, 0]),
+ 2 => it::assert_equal(chunk, &[0, 2, 2]),
+ 3 => it::assert_equal(chunk, &[3, 3]),
+ _ => unreachable!(),
+ }
+ }
+}
+
+#[test]
+fn concat_empty() {
+ let data: Vec<Vec<()>> = Vec::new();
+ assert_eq!(data.into_iter().concat(), Vec::new())
+}
+
+#[test]
+fn concat_non_empty() {
+ let data = vec![vec![1,2,3], vec![4,5,6], vec![7,8,9]];
+ assert_eq!(data.into_iter().concat(), vec![1,2,3,4,5,6,7,8,9])
+}
+
+#[test]
+fn combinations() {
+ assert!((1..3).combinations(5).next().is_none());
+
+ let it = (1..3).combinations(2);
+ it::assert_equal(it, vec![
+ vec![1, 2],
+ ]);
+
+ let it = (1..5).combinations(2);
+ it::assert_equal(it, vec![
+ vec![1, 2],
+ vec![1, 3],
+ vec![1, 4],
+ vec![2, 3],
+ vec![2, 4],
+ vec![3, 4],
+ ]);
+
+ it::assert_equal((0..0).tuple_combinations::<(_, _)>(), <Vec<_>>::new());
+ it::assert_equal((0..1).tuple_combinations::<(_, _)>(), <Vec<_>>::new());
+ it::assert_equal((0..2).tuple_combinations::<(_, _)>(), vec![(0, 1)]);
+
+ it::assert_equal((0..0).combinations(2), <Vec<Vec<_>>>::new());
+ it::assert_equal((0..1).combinations(1), vec![vec![0]]);
+ it::assert_equal((0..2).combinations(1), vec![vec![0], vec![1]]);
+ it::assert_equal((0..2).combinations(2), vec![vec![0, 1]]);
+}
+
+#[test]
+fn combinations_of_too_short() {
+ for i in 1..10 {
+ assert!((0..0).combinations(i).next().is_none());
+ assert!((0..i - 1).combinations(i).next().is_none());
+ }
+}
+
+
+#[test]
+fn combinations_zero() {
+ it::assert_equal((1..3).combinations(0), vec![vec![]]);
+ it::assert_equal((0..0).combinations(0), vec![vec![]]);
+}
+
+#[test]
+fn permutations_zero() {
+ it::assert_equal((1..3).permutations(0), vec![vec![]]);
+ it::assert_equal((0..0).permutations(0), vec![vec![]]);
+}
+
+#[test]
+fn combinations_with_replacement() {
+ // Pool smaller than n
+ it::assert_equal((0..1).combinations_with_replacement(2), vec![vec![0, 0]]);
+ // Pool larger than n
+ it::assert_equal(
+ (0..3).combinations_with_replacement(2),
+ vec![
+ vec![0, 0],
+ vec![0, 1],
+ vec![0, 2],
+ vec![1, 1],
+ vec![1, 2],
+ vec![2, 2],
+ ],
+ );
+ // Zero size
+ it::assert_equal(
+ (0..3).combinations_with_replacement(0),
+ vec![vec![]],
+ );
+ // Zero size on empty pool
+ it::assert_equal(
+ (0..0).combinations_with_replacement(0),
+ vec![vec![]],
+ );
+ // Empty pool
+ it::assert_equal(
+ (0..0).combinations_with_replacement(2),
+ <Vec<Vec<_>>>::new(),
+ );
+}
+
+#[test]
+fn powerset() {
+ it::assert_equal((0..0).powerset(), vec![vec![]]);
+ it::assert_equal((0..1).powerset(), vec![vec![], vec![0]]);
+ it::assert_equal((0..2).powerset(), vec![vec![], vec![0], vec![1], vec![0, 1]]);
+ it::assert_equal((0..3).powerset(), vec![
+ vec![],
+ vec![0], vec![1], vec![2],
+ vec![0, 1], vec![0, 2], vec![1, 2],
+ vec![0, 1, 2]
+ ]);
+
+ assert_eq!((0..4).powerset().count(), 1 << 4);
+ assert_eq!((0..8).powerset().count(), 1 << 8);
+ assert_eq!((0..16).powerset().count(), 1 << 16);
+}
+
+#[test]
+fn diff_mismatch() {
+ let a = vec![1, 2, 3, 4];
+ let b = vec![1.0, 5.0, 3.0, 4.0];
+ let b_map = b.into_iter().map(|f| f as i32);
+ let diff = it::diff_with(a.iter(), b_map, |a, b| *a == b);
+
+ assert!(match diff {
+ Some(it::Diff::FirstMismatch(1, _, from_diff)) =>
+ from_diff.collect::<Vec<_>>() == vec![5, 3, 4],
+ _ => false,
+ });
+}
+
+#[test]
+fn diff_longer() {
+ let a = vec![1, 2, 3, 4];
+ let b = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0];
+ let b_map = b.into_iter().map(|f| f as i32);
+ let diff = it::diff_with(a.iter(), b_map, |a, b| *a == b);
+
+ assert!(match diff {
+ Some(it::Diff::Longer(_, remaining)) =>
+ remaining.collect::<Vec<_>>() == vec![5, 6],
+ _ => false,
+ });
+}
+
+#[test]
+fn diff_shorter() {
+ let a = vec![1, 2, 3, 4];
+ let b = vec![1.0, 2.0];
+ let b_map = b.into_iter().map(|f| f as i32);
+ let diff = it::diff_with(a.iter(), b_map, |a, b| *a == b);
+
+ assert!(match diff {
+ Some(it::Diff::Shorter(len, _)) => len == 2,
+ _ => false,
+ });
+}
+
+#[test]
+fn extrema_set() {
+ use std::cmp::Ordering;
+
+ // A peculiar type: Equality compares both tuple items, but ordering only the
+ // first item. Used to distinguish equal elements.
+ #[derive(Clone, Debug, PartialEq, Eq)]
+ struct Val(u32, u32);
+
+ impl PartialOrd<Val> for Val {
+ fn partial_cmp(&self, other: &Val) -> Option<Ordering> {
+ self.0.partial_cmp(&other.0)
+ }
+ }
+
+ impl Ord for Val {
+ fn cmp(&self, other: &Val) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+ }
+
+ assert_eq!(None::<u32>.iter().min_set(), Vec::<&u32>::new());
+ assert_eq!(None::<u32>.iter().max_set(), Vec::<&u32>::new());
+
+ assert_eq!(Some(1u32).iter().min_set(), vec![&1]);
+ assert_eq!(Some(1u32).iter().max_set(), vec![&1]);
+
+ let data = vec![Val(0, 1), Val(2, 0), Val(0, 2), Val(1, 0), Val(2, 1)];
+
+ let min_set = data.iter().min_set();
+ assert_eq!(min_set, vec![&Val(0, 1), &Val(0, 2)]);
+
+ let min_set_by_key = data.iter().min_set_by_key(|v| v.1);
+ assert_eq!(min_set_by_key, vec![&Val(2, 0), &Val(1, 0)]);
+
+ let min_set_by = data.iter().min_set_by(|x, y| x.1.cmp(&y.1));
+ assert_eq!(min_set_by, vec![&Val(2, 0), &Val(1, 0)]);
+
+ let max_set = data.iter().max_set();
+ assert_eq!(max_set, vec![&Val(2, 0), &Val(2, 1)]);
+
+ let max_set_by_key = data.iter().max_set_by_key(|v| v.1);
+ assert_eq!(max_set_by_key, vec![&Val(0, 2)]);
+
+ let max_set_by = data.iter().max_set_by(|x, y| x.1.cmp(&y.1));
+ assert_eq!(max_set_by, vec![&Val(0, 2)]);
+}
+
+#[test]
+fn minmax() {
+ use std::cmp::Ordering;
+ use crate::it::MinMaxResult;
+
+ // A peculiar type: Equality compares both tuple items, but ordering only the
+ // first item. This is so we can check the stability property easily.
+ #[derive(Clone, Debug, PartialEq, Eq)]
+ struct Val(u32, u32);
+
+ impl PartialOrd<Val> for Val {
+ fn partial_cmp(&self, other: &Val) -> Option<Ordering> {
+ self.0.partial_cmp(&other.0)
+ }
+ }
+
+ impl Ord for Val {
+ fn cmp(&self, other: &Val) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+ }
+
+ assert_eq!(None::<Option<u32>>.iter().minmax(), MinMaxResult::NoElements);
+
+ assert_eq!(Some(1u32).iter().minmax(), MinMaxResult::OneElement(&1));
+
+ let data = vec![Val(0, 1), Val(2, 0), Val(0, 2), Val(1, 0), Val(2, 1)];
+
+ let minmax = data.iter().minmax();
+ assert_eq!(minmax, MinMaxResult::MinMax(&Val(0, 1), &Val(2, 1)));
+
+ let (min, max) = data.iter().minmax_by_key(|v| v.1).into_option().unwrap();
+ assert_eq!(min, &Val(2, 0));
+ assert_eq!(max, &Val(0, 2));
+
+ let (min, max) = data.iter().minmax_by(|x, y| x.1.cmp(&y.1)).into_option().unwrap();
+ assert_eq!(min, &Val(2, 0));
+ assert_eq!(max, &Val(0, 2));
+}
+
+#[test]
+fn format() {
+ let data = [0, 1, 2, 3];
+ let ans1 = "0, 1, 2, 3";
+ let ans2 = "0--1--2--3";
+
+ let t1 = format!("{}", data.iter().format(", "));
+ assert_eq!(t1, ans1);
+ let t2 = format!("{:?}", data.iter().format("--"));
+ assert_eq!(t2, ans2);
+
+ let dataf = [1.1, 5.71828, -22.];
+ let t3 = format!("{:.2e}", dataf.iter().format(", "));
+ assert_eq!(t3, "1.10e0, 5.72e0, -2.20e1");
+}
+
+#[test]
+fn while_some() {
+ let ns = (1..10).map(|x| if x % 5 != 0 { Some(x) } else { None })
+ .while_some();
+ it::assert_equal(ns, vec![1, 2, 3, 4]);
+}
+
+#[allow(deprecated)]
+#[test]
+fn fold_while() {
+ let mut iterations = 0;
+ let vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+ let sum = vec.into_iter().fold_while(0, |acc, item| {
+ iterations += 1;
+ let new_sum = acc + item;
+ if new_sum <= 20 {
+ FoldWhile::Continue(new_sum)
+ } else {
+ FoldWhile::Done(acc)
+ }
+ }).into_inner();
+ assert_eq!(iterations, 6);
+ assert_eq!(sum, 15);
+}
+
+#[test]
+fn tree_fold1() {
+ let x = [
+ "",
+ "0",
+ "0 1 x",
+ "0 1 x 2 x",
+ "0 1 x 2 3 x x",
+ "0 1 x 2 3 x x 4 x",
+ "0 1 x 2 3 x x 4 5 x x",
+ "0 1 x 2 3 x x 4 5 x 6 x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 9 x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 9 x 10 x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 9 x 10 11 x x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 9 x 10 11 x x 12 x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 9 x 10 11 x x 12 13 x x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 9 x 10 11 x x 12 13 x 14 x x x",
+ "0 1 x 2 3 x x 4 5 x 6 7 x x x 8 9 x 10 11 x x 12 13 x 14 15 x x x x",
+ ];
+ for (i, &s) in x.iter().enumerate() {
+ let expected = if s.is_empty() { None } else { Some(s.to_string()) };
+ let num_strings = (0..i).map(|x| x.to_string());
+ let actual = num_strings.tree_fold1(|a, b| format!("{} {} x", a, b));
+ assert_eq!(actual, expected);
+ }
+}
+
+#[test]
+fn exactly_one_question_mark_syntax_works() {
+ exactly_one_question_mark_return().unwrap_err();
+}
+
+fn exactly_one_question_mark_return() -> Result<(), ExactlyOneError<std::slice::Iter<'static, ()>>> {
+ [].iter().exactly_one()?;
+ Ok(())
+}
+
+#[test]
+fn multiunzip() {
+ let (a, b, c): (Vec<_>, Vec<_>, Vec<_>) = [(0, 1, 2), (3, 4, 5), (6, 7, 8)].iter().cloned().multiunzip();
+ assert_eq!((a, b, c), (vec![0, 3, 6], vec![1, 4, 7], vec![2, 5, 8]));
+ let (): () = [(), (), ()].iter().cloned().multiunzip();
+ let t: (Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>) = [(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)].iter().cloned().multiunzip();
+ assert_eq!(t, (vec![0], vec![1], vec![2], vec![3], vec![4], vec![5], vec![6], vec![7], vec![8], vec![9], vec![10], vec![11]));
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/tuples.rs b/rust/hw/char/pl011/vendor/itertools/tests/tuples.rs
new file mode 100644
index 0000000000..9fc8b3cc78
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/tuples.rs
@@ -0,0 +1,86 @@
+use itertools::Itertools;
+
+#[test]
+fn tuples() {
+ let v = [1, 2, 3, 4, 5];
+ let mut iter = v.iter().cloned().tuples();
+ assert_eq!(Some((1,)), iter.next());
+ assert_eq!(Some((2,)), iter.next());
+ assert_eq!(Some((3,)), iter.next());
+ assert_eq!(Some((4,)), iter.next());
+ assert_eq!(Some((5,)), iter.next());
+ assert_eq!(None, iter.next());
+ assert_eq!(None, iter.into_buffer().next());
+
+ let mut iter = v.iter().cloned().tuples();
+ assert_eq!(Some((1, 2)), iter.next());
+ assert_eq!(Some((3, 4)), iter.next());
+ assert_eq!(None, iter.next());
+ itertools::assert_equal(vec![5], iter.into_buffer());
+
+ let mut iter = v.iter().cloned().tuples();
+ assert_eq!(Some((1, 2, 3)), iter.next());
+ assert_eq!(None, iter.next());
+ itertools::assert_equal(vec![4, 5], iter.into_buffer());
+
+ let mut iter = v.iter().cloned().tuples();
+ assert_eq!(Some((1, 2, 3, 4)), iter.next());
+ assert_eq!(None, iter.next());
+ itertools::assert_equal(vec![5], iter.into_buffer());
+}
+
+#[test]
+fn tuple_windows() {
+ let v = [1, 2, 3, 4, 5];
+
+ let mut iter = v.iter().cloned().tuple_windows();
+ assert_eq!(Some((1,)), iter.next());
+ assert_eq!(Some((2,)), iter.next());
+ assert_eq!(Some((3,)), iter.next());
+
+ let mut iter = v.iter().cloned().tuple_windows();
+ assert_eq!(Some((1, 2)), iter.next());
+ assert_eq!(Some((2, 3)), iter.next());
+ assert_eq!(Some((3, 4)), iter.next());
+ assert_eq!(Some((4, 5)), iter.next());
+ assert_eq!(None, iter.next());
+
+ let mut iter = v.iter().cloned().tuple_windows();
+ assert_eq!(Some((1, 2, 3)), iter.next());
+ assert_eq!(Some((2, 3, 4)), iter.next());
+ assert_eq!(Some((3, 4, 5)), iter.next());
+ assert_eq!(None, iter.next());
+
+ let mut iter = v.iter().cloned().tuple_windows();
+ assert_eq!(Some((1, 2, 3, 4)), iter.next());
+ assert_eq!(Some((2, 3, 4, 5)), iter.next());
+ assert_eq!(None, iter.next());
+
+ let v = [1, 2, 3];
+ let mut iter = v.iter().cloned().tuple_windows::<(_, _, _, _)>();
+ assert_eq!(None, iter.next());
+}
+
+#[test]
+fn next_tuple() {
+ let v = [1, 2, 3, 4, 5];
+ let mut iter = v.iter();
+ assert_eq!(iter.next_tuple().map(|(&x, &y)| (x, y)), Some((1, 2)));
+ assert_eq!(iter.next_tuple().map(|(&x, &y)| (x, y)), Some((3, 4)));
+ assert_eq!(iter.next_tuple::<(_, _)>(), None);
+}
+
+#[test]
+fn collect_tuple() {
+ let v = [1, 2];
+ let iter = v.iter().cloned();
+ assert_eq!(iter.collect_tuple(), Some((1, 2)));
+
+ let v = [1];
+ let iter = v.iter().cloned();
+ assert_eq!(iter.collect_tuple::<(_, _)>(), None);
+
+ let v = [1, 2, 3];
+ let iter = v.iter().cloned();
+ assert_eq!(iter.collect_tuple::<(_, _)>(), None);
+}
diff --git a/rust/hw/char/pl011/vendor/itertools/tests/zip.rs b/rust/hw/char/pl011/vendor/itertools/tests/zip.rs
new file mode 100644
index 0000000000..75157d34f3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/itertools/tests/zip.rs
@@ -0,0 +1,77 @@
+use itertools::Itertools;
+use itertools::EitherOrBoth::{Both, Left, Right};
+use itertools::free::zip_eq;
+use itertools::multizip;
+
+#[test]
+fn zip_longest_fused() {
+ let a = [Some(1), None, Some(3), Some(4)];
+ let b = [1, 2, 3];
+
+ let unfused = a.iter().batching(|it| *it.next().unwrap())
+ .zip_longest(b.iter().cloned());
+ itertools::assert_equal(unfused,
+ vec![Both(1, 1), Right(2), Right(3)]);
+}
+
+#[test]
+fn test_zip_longest_size_hint() {
+ let c = (1..10).cycle();
+ let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let v2 = &[10, 11, 12];
+
+ assert_eq!(c.zip_longest(v.iter()).size_hint(), (std::usize::MAX, None));
+
+ assert_eq!(v.iter().zip_longest(v2.iter()).size_hint(), (10, Some(10)));
+}
+
+#[test]
+fn test_double_ended_zip_longest() {
+ let xs = [1, 2, 3, 4, 5, 6];
+ let ys = [1, 2, 3, 7];
+ let a = xs.iter().copied();
+ let b = ys.iter().copied();
+ let mut it = a.zip_longest(b);
+ assert_eq!(it.next(), Some(Both(1, 1)));
+ assert_eq!(it.next(), Some(Both(2, 2)));
+ assert_eq!(it.next_back(), Some(Left(6)));
+ assert_eq!(it.next_back(), Some(Left(5)));
+ assert_eq!(it.next_back(), Some(Both(4, 7)));
+ assert_eq!(it.next(), Some(Both(3, 3)));
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn test_double_ended_zip() {
+ let xs = [1, 2, 3, 4, 5, 6];
+ let ys = [1, 2, 3, 7];
+ let a = xs.iter().copied();
+ let b = ys.iter().copied();
+ let mut it = multizip((a, b));
+ assert_eq!(it.next_back(), Some((4, 7)));
+ assert_eq!(it.next_back(), Some((3, 3)));
+ assert_eq!(it.next_back(), Some((2, 2)));
+ assert_eq!(it.next_back(), Some((1, 1)));
+ assert_eq!(it.next_back(), None);
+}
+
+
+#[should_panic]
+#[test]
+fn zip_eq_panic1()
+{
+ let a = [1, 2];
+ let b = [1, 2, 3];
+
+ zip_eq(&a, &b).count();
+}
+
+#[should_panic]
+#[test]
+fn zip_eq_panic2()
+{
+ let a: [i32; 0] = [];
+ let b = [1, 2, 3];
+
+ zip_eq(&a, &b).count();
+}
diff --git a/rust/hw/char/pl011/vendor/meson.build b/rust/hw/char/pl011/vendor/meson.build
new file mode 100644
index 0000000000..4611d2f11d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/meson.build
@@ -0,0 +1,18 @@
+subdir('arbitrary-int')
+subdir('unicode-ident')
+# subdir('version_check')
+subdir('either')
+
+subdir('itertools')
+subdir('proc-macro2')
+
+subdir('quote')
+
+subdir('proc-macro-error-attr')
+subdir('syn')
+
+subdir('proc-macro-error')
+
+subdir('bilge-impl')
+
+subdir('bilge')
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/.cargo-checksum.json b/rust/hw/char/pl011/vendor/proc-macro-error-attr/.cargo-checksum.json
new file mode 100644
index 0000000000..c30b5418a8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"fbd3ce928441a0b43859bbbe36549f05e7a1ebfee62e5982710671a8f41de527","LICENSE-APACHE":"6fd0f3522047150ca7c1939f02bc4a15662a4741a89bc03ae784eefa18caa299","LICENSE-MIT":"544b3aed1fd723d0cadea567affdcfe0431e43e18d997a718f9d67256b814fde","build.rs":"37b0aca3c4a14dfc050c2df38ae633311d7a1532cdbb8eb57182802c4a1983eb","src/lib.rs":"9e3d13c266376b688642572bb4091e094ff5277fce4bee72bcc3c5f982dd831c","src/parse.rs":"2d8f220f91235be8ed0ddcab55ec3699b9d3b28d538ed24197797cc20194c473","src/settings.rs":"be9382479d7a857b55e5a0b1014f72150c9ee7f2bbb5a5bdeabc0f8de2d95c26"},"package":"a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/Cargo.toml b/rust/hw/char/pl011/vendor/proc-macro-error-attr/Cargo.toml
new file mode 100644
index 0000000000..a2c766de9b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/Cargo.toml
@@ -0,0 +1,33 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "proc-macro-error-attr"
+version = "1.0.4"
+authors = ["CreepySkeleton <creepy-skeleton@yandex.ru>"]
+build = "build.rs"
+description = "Attribute macro for proc-macro-error crate"
+license = "MIT OR Apache-2.0"
+repository = "https://gitlab.com/CreepySkeleton/proc-macro-error"
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[lib]
+proc-macro = true
+[dependencies.proc-macro2]
+version = "1"
+
+[dependencies.quote]
+version = "1"
+[build-dependencies.version_check]
+version = "0.9"
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-APACHE b/rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-APACHE
new file mode 100644
index 0000000000..658240a840
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2019-2020 CreepySkeleton <creepy-skeleton@yandex.ru>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-MIT b/rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-MIT
new file mode 100644
index 0000000000..fc73e591d7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/LICENSE-MIT
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019-2020 CreepySkeleton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/build.rs b/rust/hw/char/pl011/vendor/proc-macro-error-attr/build.rs
new file mode 100644
index 0000000000..f2ac6a70ee
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/build.rs
@@ -0,0 +1,5 @@
+fn main() {
+ if version_check::is_max_version("1.36.0").unwrap_or(false) {
+ println!("cargo:rustc-cfg=always_assert_unwind");
+ }
+}
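The build script above uses `version_check` to probe the active toolchain and, on rustc 1.36.0 or older, emits the `always_assert_unwind` cfg; `src/lib.rs` further down in this patch uses that cfg to pick between its two `gen_body()` variants. A minimal, self-contained sketch of this cfg-gating pattern (the function name and strings are illustrative, not part of the vendored crate):

```rust
// Sketch only: a cfg emitted by a build script via
// println!("cargo:rustc-cfg=always_assert_unwind") selects one of two
// implementations at compile time, mirroring gen_body() in src/lib.rs below.
#[cfg(always_assert_unwind)]
fn unwind_strategy() -> &'static str {
    "old rustc: always wrap the macro body in AssertUnwindSafe"
}

#[cfg(not(always_assert_unwind))]
fn unwind_strategy() -> &'static str {
    "newer rustc: wrap only when the macro author opts in"
}

fn main() {
    println!("{}", unwind_strategy());
}
```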
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/meson.build b/rust/hw/char/pl011/vendor/proc-macro-error-attr/meson.build
new file mode 100644
index 0000000000..63cd12ccf2
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/meson.build
@@ -0,0 +1,20 @@
+rust = import('rust')
+
+_proc_macro_error_attr_rs = rust.proc_macro(
+ 'proc_macro_error_attr',
+ files('src/lib.rs'),
+ rust_args: rust_args + [
+ '--edition', '2018',
+ '--cfg', 'use_fallback',
+ '--cfg', 'feature="syn-error"',
+ '--cfg', 'feature="proc-macro"'
+ ],
+ dependencies: [
+ dep_proc_macro2,
+ dep_quote,
+ ],
+)
+
+dep_proc_macro_error_attr = declare_dependency(
+ link_with: _proc_macro_error_attr_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/lib.rs b/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/lib.rs
new file mode 100644
index 0000000000..ac0ac21a26
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/lib.rs
@@ -0,0 +1,121 @@
+//! This is `#[proc_macro_error]` attribute to be used with
+//! [`proc-macro-error`](https://docs.rs/proc-macro-error/). There you go.
+
+extern crate proc_macro;
+
+use crate::parse::parse_input;
+use crate::parse::Attribute;
+use proc_macro::TokenStream;
+use proc_macro2::{Literal, Span, TokenStream as TokenStream2, TokenTree};
+use quote::{quote, quote_spanned};
+
+use crate::settings::{Setting::*, *};
+
+mod parse;
+mod settings;
+
+type Result<T> = std::result::Result<T, Error>;
+
+struct Error {
+ span: Span,
+ message: String,
+}
+
+impl Error {
+ fn new(span: Span, message: String) -> Self {
+ Error { span, message }
+ }
+
+ fn into_compile_error(self) -> TokenStream2 {
+ let mut message = Literal::string(&self.message);
+ message.set_span(self.span);
+ quote_spanned!(self.span=> compile_error!{#message})
+ }
+}
+
+#[proc_macro_attribute]
+pub fn proc_macro_error(attr: TokenStream, input: TokenStream) -> TokenStream {
+ match impl_proc_macro_error(attr.into(), input.clone().into()) {
+ Ok(ts) => ts,
+ Err(e) => {
+ let error = e.into_compile_error();
+ let input = TokenStream2::from(input);
+
+ quote!(#input #error).into()
+ }
+ }
+}
+
+fn impl_proc_macro_error(attr: TokenStream2, input: TokenStream2) -> Result<TokenStream> {
+ let (attrs, signature, body) = parse_input(input)?;
+ let mut settings = parse_settings(attr)?;
+
+ let is_proc_macro = is_proc_macro(&attrs);
+ if is_proc_macro {
+ settings.set(AssertUnwindSafe);
+ }
+
+ if detect_proc_macro_hack(&attrs) {
+ settings.set(ProcMacroHack);
+ }
+
+ if settings.is_set(ProcMacroHack) {
+ settings.set(AllowNotMacro);
+ }
+
+ if !(settings.is_set(AllowNotMacro) || is_proc_macro) {
+ return Err(Error::new(
+ Span::call_site(),
+ "#[proc_macro_error] attribute can be used only with procedural macros\n\n \
+ = hint: if you are really sure that #[proc_macro_error] should be applied \
+ to this exact function, use #[proc_macro_error(allow_not_macro)]\n"
+ .into(),
+ ));
+ }
+
+ let body = gen_body(body, settings);
+
+ let res = quote! {
+ #(#attrs)*
+ #(#signature)*
+ { #body }
+ };
+ Ok(res.into())
+}
+
+#[cfg(not(always_assert_unwind))]
+fn gen_body(block: TokenTree, settings: Settings) -> proc_macro2::TokenStream {
+ let is_proc_macro_hack = settings.is_set(ProcMacroHack);
+ let closure = if settings.is_set(AssertUnwindSafe) {
+ quote!(::std::panic::AssertUnwindSafe(|| #block ))
+ } else {
+ quote!(|| #block)
+ };
+
+ quote!( ::proc_macro_error::entry_point(#closure, #is_proc_macro_hack) )
+}
+
+// FIXME:
+// proc_macro::TokenStream does not implement UnwindSafe until 1.37.0.
+// Considering this is the closure's return type the unwind safety check would fail
+// for virtually every closure possible, the check is meaningless.
+#[cfg(always_assert_unwind)]
+fn gen_body(block: TokenTree, settings: Settings) -> proc_macro2::TokenStream {
+ let is_proc_macro_hack = settings.is_set(ProcMacroHack);
+ let closure = quote!(::std::panic::AssertUnwindSafe(|| #block ));
+ quote!( ::proc_macro_error::entry_point(#closure, #is_proc_macro_hack) )
+}
+
+fn detect_proc_macro_hack(attrs: &[Attribute]) -> bool {
+ attrs
+ .iter()
+ .any(|attr| attr.path_is_ident("proc_macro_hack"))
+}
+
+fn is_proc_macro(attrs: &[Attribute]) -> bool {
+ attrs.iter().any(|attr| {
+ attr.path_is_ident("proc_macro")
+ || attr.path_is_ident("proc_macro_derive")
+ || attr.path_is_ident("proc_macro_attribute")
+ })
+}
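The wrapper generated above hands the annotated function's body, as a closure, to `proc_macro_error::entry_point`, which catches the panic raised by `abort!` and renders it as a compile error. A rough, standard-library-only sketch of that idea (the names and the string standing in for a token stream are illustrative, not the crate's actual API):

```rust
use std::panic::{self, AssertUnwindSafe};

// Run a "macro body" and recover from a deliberate panic by emitting a
// compile_error! invocation instead of crashing; this mirrors the
// entry_point() wrapping used by the generated code above.
fn run_macro_body<F: FnOnce() -> String>(body: F) -> String {
    match panic::catch_unwind(AssertUnwindSafe(body)) {
        Ok(tokens) => tokens,
        // The real crate inspects the panic payload; here we just
        // substitute a fixed error invocation.
        Err(_) => r#"compile_error!("macro aborted")"#.to_string(),
    }
}

fn main() {
    let ok = run_macro_body(|| "fn generated() {}".to_string());
    let aborted = run_macro_body(|| panic!("bad input"));
    println!("{}\n{}", ok, aborted);
}
```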
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/parse.rs b/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/parse.rs
new file mode 100644
index 0000000000..6f4663f80e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/parse.rs
@@ -0,0 +1,89 @@
+use crate::{Error, Result};
+use proc_macro2::{Delimiter, Ident, Span, TokenStream, TokenTree};
+use quote::ToTokens;
+use std::iter::Peekable;
+
+pub(crate) fn parse_input(
+ input: TokenStream,
+) -> Result<(Vec<Attribute>, Vec<TokenTree>, TokenTree)> {
+ let mut input = input.into_iter().peekable();
+ let mut attrs = Vec::new();
+
+ while let Some(attr) = parse_next_attr(&mut input)? {
+ attrs.push(attr);
+ }
+
+ let sig = parse_signature(&mut input);
+ let body = input.next().ok_or_else(|| {
+ Error::new(
+ Span::call_site(),
+ "`#[proc_macro_error]` can be applied only to functions".to_string(),
+ )
+ })?;
+
+ Ok((attrs, sig, body))
+}
+
+fn parse_next_attr(
+ input: &mut Peekable<impl Iterator<Item = TokenTree>>,
+) -> Result<Option<Attribute>> {
+ let shebang = match input.peek() {
+ Some(TokenTree::Punct(ref punct)) if punct.as_char() == '#' => input.next().unwrap(),
+ _ => return Ok(None),
+ };
+
+ let group = match input.peek() {
+ Some(TokenTree::Group(ref group)) if group.delimiter() == Delimiter::Bracket => {
+ let res = group.clone();
+ input.next();
+ res
+ }
+ other => {
+ let span = other.map_or(Span::call_site(), |tt| tt.span());
+ return Err(Error::new(span, "expected `[`".to_string()));
+ }
+ };
+
+ let path = match group.stream().into_iter().next() {
+ Some(TokenTree::Ident(ident)) => Some(ident),
+ _ => None,
+ };
+
+ Ok(Some(Attribute {
+ shebang,
+ group: TokenTree::Group(group),
+ path,
+ }))
+}
+
+fn parse_signature(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Vec<TokenTree> {
+ let mut sig = Vec::new();
+ loop {
+ match input.peek() {
+ Some(TokenTree::Group(ref group)) if group.delimiter() == Delimiter::Brace => {
+ return sig;
+ }
+ None => return sig,
+ _ => sig.push(input.next().unwrap()),
+ }
+ }
+}
+
+pub(crate) struct Attribute {
+ pub(crate) shebang: TokenTree,
+ pub(crate) group: TokenTree,
+ pub(crate) path: Option<Ident>,
+}
+
+impl Attribute {
+ pub(crate) fn path_is_ident(&self, ident: &str) -> bool {
+ self.path.as_ref().map_or(false, |p| *p == ident)
+ }
+}
+
+impl ToTokens for Attribute {
+ fn to_tokens(&self, ts: &mut TokenStream) {
+ self.shebang.to_tokens(ts);
+ self.group.to_tokens(ts);
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/settings.rs b/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/settings.rs
new file mode 100644
index 0000000000..0b7ec766f6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error-attr/src/settings.rs
@@ -0,0 +1,72 @@
+use crate::{Error, Result};
+use proc_macro2::{Ident, Span, TokenStream, TokenTree};
+
+macro_rules! decl_settings {
+ ($($val:expr => $variant:ident),+ $(,)*) => {
+ #[derive(PartialEq)]
+ pub(crate) enum Setting {
+ $($variant),*
+ }
+
+ fn ident_to_setting(ident: Ident) -> Result<Setting> {
+ match &*ident.to_string() {
+ $($val => Ok(Setting::$variant),)*
+ _ => {
+ let possible_vals = [$($val),*]
+ .iter()
+ .map(|v| format!("`{}`", v))
+ .collect::<Vec<_>>()
+ .join(", ");
+
+ Err(Error::new(
+ ident.span(),
+ format!("unknown setting `{}`, expected one of {}", ident, possible_vals)))
+ }
+ }
+ }
+ };
+}
+
+decl_settings! {
+ "assert_unwind_safe" => AssertUnwindSafe,
+ "allow_not_macro" => AllowNotMacro,
+ "proc_macro_hack" => ProcMacroHack,
+}
+
+pub(crate) fn parse_settings(input: TokenStream) -> Result<Settings> {
+ let mut input = input.into_iter();
+ let mut res = Settings(Vec::new());
+ loop {
+ match input.next() {
+ Some(TokenTree::Ident(ident)) => {
+ res.0.push(ident_to_setting(ident)?);
+ }
+ None => return Ok(res),
+ other => {
+ let span = other.map_or(Span::call_site(), |tt| tt.span());
+ return Err(Error::new(span, "expected identifier".to_string()));
+ }
+ }
+
+ match input.next() {
+ Some(TokenTree::Punct(ref punct)) if punct.as_char() == ',' => {}
+ None => return Ok(res),
+ other => {
+ let span = other.map_or(Span::call_site(), |tt| tt.span());
+ return Err(Error::new(span, "expected `,`".to_string()));
+ }
+ }
+ }
+}
+
+pub(crate) struct Settings(Vec<Setting>);
+
+impl Settings {
+ pub(crate) fn is_set(&self, setting: Setting) -> bool {
+ self.0.iter().any(|s| *s == setting)
+ }
+
+ pub(crate) fn set(&mut self, setting: Setting) {
+ self.0.push(setting)
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/.cargo-checksum.json b/rust/hw/char/pl011/vendor/proc-macro-error/.cargo-checksum.json
new file mode 100644
index 0000000000..79bcfa696f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"b84c4baa5fb093c6aaca44b98f9f28ef54d399ef6dd43c91f1dca618ab366b45","Cargo.toml":"50db093e1a4617606939dfb1f098cb59babbea0d7b390e973a3ed6bb1406170d","LICENSE-APACHE":"4665f973ccb9393807a7fb1264add6b3513d19abc6b357e4cb52c6fe59cc6a3b","LICENSE-MIT":"544b3aed1fd723d0cadea567affdcfe0431e43e18d997a718f9d67256b814fde","README.md":"72d59787d0a1f7bf161e292d0bc1bc25fdfb08cd6ad379a34cc3ed1b388d11fa","build.rs":"6238a0ad4f1146fbf55112419609e6449986154cf7ded1b5fdc978b06f4413b3","src/diagnostic.rs":"cb8724bb0bf9d2eee2f7119d0960fd5349edaa80e147abdef060ebf4572eca01","src/dummy.rs":"b44728091ddcdf9786523c01178efcedd83462bfe8bac6b97b1c2ffb19c96d09","src/imp/delegate.rs":"81da3a602a883240161dd98deb52b3b4ae29e626bfd2e1e07ef5e38d1be00679","src/imp/fallback.rs":"c3d333aba1122ac7e26f038f69750aa02e6a1222433a7cffd1c2f961befedd93","src/lib.rs":"e563d5dceaeb81551a5cb2610c1a3ad1a46200a6cbf8c3c3b394d8ac307b8cfa","src/macros.rs":"3be6feccd343cd9dc4bf03780f3107909bf70e02c6c7c72912e4b160dc6a68fc","src/sealed.rs":"dcf569c4c7ce1d372ff51b0fa73fa67f995bdca8e520cb225cde789c71276439","tests/macro-errors.rs":"7f793921dfbec692bfb2bbb067faf0480c0e7eeec83982b5e9fcddd817817616","tests/ok.rs":"a8c1925ac8647d185c7490ed1e33e3ce3203f5945bd3db4dcaf50ea55078df29","tests/runtime-errors.rs":"e53aa7d8e6c0e5128a90e856105eb05e4e7e72ea6db1bd580f3fe439bff62f24","tests/ui/abort.rs":"e209c8dd9dde6bde7440f8795624ad84b0f8486f563c8fe838238818f459bb67","tests/ui/abort.stderr":"dd0605e79be0309f92b251d055f087b0375c48ec60da978df354b48e8563fa10","tests/ui/append_dummy.rs":"ecaf939c8aabd94eef2dd1c10e9515489ba78e4db5b25195e19833b020d2483c","tests/ui/append_dummy.stderr":"ef03b01fc823aba8cfb9eb6d116640ca953fec569e61ed6ed6b7b7fa3bbad686","tests/ui/children_messages.rs":"32299679804133cb5295ed7a9341bf1ab86a4f1277679ee9738a83115c6b1d2b","tests/ui/children_messages.stderr":"dadeb86e1c7094d5fb38664b1766212b3d083fbe04962c137f8281fb3f5d162e","tests/ui/dummy.rs":"ba51c9158cef88ff2ddf0185be26fcd35a641d73c7124fab9ace0bbd546de847","tests/ui/dummy.stderr":"0635fd563d26691d07a2a875111f0b5e155caa45c37ad9cbaefe5fe617eac703","tests/ui/emit.rs":"82aaf06bcee56b7e139bbcba3a92c29448af14974d6806a28c9961aa764026e5","tests/ui/emit.stderr":"d3daa6d304453d436317495b7fc1d9d36bbebb7705bef75a5260d6d8fcfad5b1","tests/ui/explicit_span_range.rs":"3c37d5fc75b2bd460a091acd97a19acc80a40ba8d1d4ac7f36cd2f0e171bf5e7","tests/ui/explicit_span_range.stderr":"d7562847c326badbce2df8546e6f625eef0725b1dd2c786a037cc46357e4d2e8","tests/ui/misuse.rs":"0d66c61ab5c9723cf2f85cd12216751ab09722e9386cc27928034ee17f1c34e3","tests/ui/misuse.stderr":"52568a2208423e8e4050774559f269e79181a350f0805a34880bfa208e08c6bb","tests/ui/multiple_tokens.rs":"74997da1fdd3bce88a04ab42866c651723861fba4f59e826ee602d905398dcca","tests/ui/multiple_tokens.stderr":"e347ef1c18949711ce41957848e255095132f855c94db1e7e28d32e7d2c79a74","tests/ui/not_proc_macro.rs":"ca448d832ccf0cfdcda6f04281d8134a76c61b3ad96437e972b2cb5c6e0844c4","tests/ui/not_proc_macro.stderr":"a22c53a7dd5a03ddfaee5a7fb7fe5d61cb588b2d81a30c1e935b789baf0d2676","tests/ui/option_ext.rs":"1db81c17172f155c0ca8bcf92d55b5852b92107a3ba1d4b2ae6d14020df67f96","tests/ui/option_ext.stderr":"3b363759db60ee4f249dfde4d4571963032d5f0043249de40bd3b38eecc65404","tests/ui/proc_macro_hack.rs":"1d08c3e2c4c331e230c7cdaa2635ca1e43077252f90d3a430dcd091c646a842c","tests/ui/proc_macro_hack.stderr":"65e887dc208b92bfcd44405e76d5d05e315c3c5c5f637070954b7d593c723731","tests/ui/result_ext.rs":"ef398e76aab82a574ca5a988a91353e1a87fcfcb459d30314eceed3cbcf6fcd8","tests/ui/result_ext.stderr":"9e1e387b1378d9ec40ccb29be9f8cdaa5b42060c3f4f9b3c09fb307d5dcf7d85","tests/ui/to_tokens_span.rs":"d017a3c4cd583defe9806cdc51220bde89ced871ddd4d65b7cd089882feb1f61","tests/ui/to_tokens_span.stderr":"0b88e659ab214d6c7dfcd99274d327fe72da4b9bd009477e0e65165ddde65e02","tests/ui/unknown_setting.rs":"16fe9631b51023909497e857a6c674cd216ba9802fbdba360bb8273d6e00fa31","tests/ui/unknown_setting.stderr":"d605f151ce8eba5b2f867667394bd2d2adf0a233145516a9d6b801817521e587","tests/ui/unrelated_panic.rs":"438db25f8f14f1263152545a1c5135e20b3f5063dc4ab223fd8145b891039b24","tests/ui/unrelated_panic.stderr":"04cd814f2bd57d5271f93f90f0dd078b09ee3fd73137245a914d698e4a33ed57"},"package":"da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/CHANGELOG.md b/rust/hw/char/pl011/vendor/proc-macro-error/CHANGELOG.md
new file mode 100644
index 0000000000..3c422f1c45
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/CHANGELOG.md
@@ -0,0 +1,162 @@
+# v1.0.4 (2020-7-31)
+
+* `SpanRange` facility is now public.
+* Docs have been improved.
+* Introduced the `syn-error` feature so you can opt-out from the `syn` dependency.
+
+# v1.0.3 (2020-6-26)
+
+* Corrected a few typos.
+* Fixed the `emit_call_site_warning` macro.
+
+# v1.0.2 (2020-4-9)
+
+* An obsolete note was removed from documentation.
+
+# v1.0.1 (2020-4-9)
+
+* `proc-macro-hack` is now well tested and supported. Not sure about `proc-macro-nested`,
+ please fill a request if you need it.
+* Fixed `emit_call_site_error`.
+* Documentation improvements.
+
+# v1.0.0 (2020-3-25)
+
+I believe the API can be considered stable because it's been a few months without
+breaking changes, and I also don't think this crate will receive much further evolution.
+It's perfect, admit it.
+
+Hence, meet the new, stable release!
+
+### Improvements
+
+* Supported nested `#[proc_macro_error]` attributes. Well, you aren't supposed to do that,
+ but I caught myself doing it by accident on one occasion and the behavior was... surprising.
+ Better to handle this smooth.
+
+# v0.4.12 (2020-3-23)
+
+* Error message on macros' misuse is now a bit more understandable.
+
+# v0.4.11 (2020-3-02)
+
+* `build.rs` no longer fails when `rustc` date could not be determined,
+ (thanks to [`Fabian Möller`](https://gitlab.com/CreepySkeleton/proc-macro-error/issues/8)
+ for noticing and to [`Igor Gnatenko`](https://gitlab.com/CreepySkeleton/proc-macro-error/-/merge_requests/25)
+ for fixing).
+
+# v0.4.10 (2020-2-29)
+
+* `proc-macro-error` doesn't depend on syn\[full\] anymore, the compilation
+ is \~30secs faster.
+
+# v0.4.9 (2020-2-13)
+
+* New function: `append_dummy`.
+
+# v0.4.8 (2020-2-01)
+
+* Support for children messages
+
+# v0.4.7 (2020-1-31)
+
+* Now any type that implements `quote::ToTokens` can be used instead of spans.
+ This allows for high quality error messages.
+
+# v0.4.6 (2020-1-31)
+
+* `From<syn::Error>` implementation doesn't lose span info anymore, see
+ [#6](https://gitlab.com/CreepySkeleton/proc-macro-error/issues/6).
+
+# v0.4.5 (2020-1-20)
+Just a small intermediate release.
+
+* Fix some bugs.
+* Populate license files into subfolders.
+
+# v0.4.4 (2019-11-13)
+* Fix `abort_if_dirty` + warnings bug
+* Allow trailing commas in macros
+
+# v0.4.2 (2019-11-7)
+* FINALLY fixed `__pme__suggestions not found` bug
+
+# v0.4.1 (2019-11-7) YANKED
+* Fixed `__pme__suggestions not found` bug
+* Documentation improvements, links checked
+
+# v0.4.0 (2019-11-6) YANKED
+
+## New features
+* "help" messages that can have their own span on nightly, they
+ inherit parent span on stable.
+ ```rust
+ let cond_help = if condition { Some("some help message") else { None } };
+ abort!(
+ span, // parent span
+ "something's wrong, {} wrongs in total", 10; // main message
+ help = "here's a help for you, {}", "take it"; // unconditional help message
+ help =? cond_help; // conditional help message, must be Option
+ note = note_span => "don't forget the note, {}", "would you?" // notes can have their own span but it's effective only on nightly
+ )
+ ```
+* Warnings via `emit_warning` and `emit_warning_call_site`. Nightly only, they're ignored on stable.
+* Now `proc-macro-error` delegates to `proc_macro::Diagnostic` on nightly.
+
+## Breaking changes
+* `MacroError` is now replaced by `Diagnostic`. Its API resembles `proc_macro::Diagnostic`.
+* `Diagnostic` does not implement `From<&str/String>` so `Result<T, &str/String>::abort_or_exit()`
+ won't work anymore (nobody used it anyway).
+* `macro_error!` macro is replaced with `diagnostic!`.
+
+## Improvements
+* Now `proc-macro-error` renders notes exactly just like rustc does.
+* We don't parse a body of a function annotated with `#[proc_macro_error]` anymore,
+ only looking at the signature. This should somewhat decrease expansion time for large functions.
+
+# v0.3.3 (2019-10-16)
+* Now you can use any word instead of "help", undocumented.
+
+# v0.3.2 (2019-10-16)
+* Introduced support for "help" messages, undocumented.
+
+# v0.3.0 (2019-10-8)
+
+## The crate has been completely rewritten from scratch!
+
+## Changes (most are breaking):
+* Renamed macros:
+ * `span_error` => `abort`
+ * `call_site_error` => `abort_call_site`
+* `filter_macro_errors` was replaced by `#[proc_macro_error]` attribute.
+* `set_dummy` now takes `TokenStream` instead of `Option<TokenStream>`
+* Support for multiple errors via `emit_error` and `emit_call_site_error`
+* New `macro_error` macro for building errors in format=like style.
+* `MacroError` API had been reconsidered. It also now implements `quote::ToTokens`.
+
+# v0.2.6 (2019-09-02)
+* Introduce support for dummy implementations via `dummy::set_dummy`
+* `multi::*` is now deprecated, will be completely rewritten in v0.3
+
+# v0.2.0 (2019-08-15)
+
+## Breaking changes
+* `trigger_error` replaced with `MacroError::trigger` and `filter_macro_error_panics`
+ is hidden from docs.
+ This is not quite a breaking change since users weren't supposed to use these functions directly anyway.
+* All dependencies are updated to `v1.*`.
+
+## New features
+* Ability to stack multiple errors via `multi::MultiMacroErrors` and emit them at once.
+
+## Improvements
+* Now `MacroError` implements `std::fmt::Display` instead of `std::string::ToString`.
+* `MacroError::span` inherent method.
+* `From<MacroError> for proc_macro/proc_macro2::TokenStream` implementations.
+* `AsRef/AsMut<String> for MacroError` implementations.
+
+# v0.1.x (2019-07-XX)
+
+## New features
+* An easy way to report errors inside within a proc-macro via `span_error`,
+ `call_site_error` and `filter_macro_errors`.
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/Cargo.toml b/rust/hw/char/pl011/vendor/proc-macro-error/Cargo.toml
new file mode 100644
index 0000000000..869585ffc2
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/Cargo.toml
@@ -0,0 +1,56 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "proc-macro-error"
+version = "1.0.4"
+authors = ["CreepySkeleton <creepy-skeleton@yandex.ru>"]
+build = "build.rs"
+description = "Almost drop-in replacement to panics in proc-macros"
+readme = "README.md"
+keywords = ["proc-macro", "error", "errors"]
+categories = ["development-tools::procedural-macro-helpers"]
+license = "MIT OR Apache-2.0"
+repository = "https://gitlab.com/CreepySkeleton/proc-macro-error"
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+[dependencies.proc-macro-error-attr]
+version = "=1.0.4"
+
+[dependencies.proc-macro2]
+version = "1"
+
+[dependencies.quote]
+version = "1"
+
+[dependencies.syn]
+version = "1"
+optional = true
+default-features = false
+[dev-dependencies.serde_derive]
+version = "=1.0.107"
+
+[dev-dependencies.toml]
+version = "=0.5.2"
+
+[dev-dependencies.trybuild]
+version = "1.0.19"
+features = ["diff"]
+[build-dependencies.version_check]
+version = "0.9"
+
+[features]
+default = ["syn-error"]
+syn-error = ["syn"]
+[badges.maintenance]
+status = "passively-maintained"
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-APACHE b/rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-APACHE
new file mode 100644
index 0000000000..cc17374b25
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2019-2020 CreepySkeleton <creepy-skeleton@yandex.ru>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-MIT b/rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-MIT
new file mode 100644
index 0000000000..fc73e591d7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/LICENSE-MIT
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019-2020 CreepySkeleton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/README.md b/rust/hw/char/pl011/vendor/proc-macro-error/README.md
new file mode 100644
index 0000000000..7fbe07c53a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/README.md
@@ -0,0 +1,258 @@
+# Makes error reporting in procedural macros nice and easy
+
+[![travis ci](https://travis-ci.org/CreepySkeleton/proc-macro-error.svg?branch=master)](https://travis-ci.org/CreepySkeleton/proc-macro-error)
+[![docs.rs](https://docs.rs/proc-macro-error/badge.svg)](https://docs.rs/proc-macro-error)
+[![unsafe forbidden](https://img.shields.io/badge/unsafe-forbidden-success.svg)](https://github.com/rust-secure-code/safety-dance/)
+
+This crate aims to make error reporting in proc-macros simple and easy to use.
+Migrate from `panic!`-based errors for as little effort as possible!
+
+Also, you can explicitly [append a dummy token stream][crate::dummy] to your errors.
+
+To achieve his, this crate serves as a tiny shim around `proc_macro::Diagnostic` and
+`compile_error!`. It detects the most preferable way to emit errors based on compiler's version.
+When the underlying diagnostic type is finally stabilized, this crate will be simply
+delegating to it, requiring no changes in your code!
+
+So you can just use this crate and have *both* some of `proc_macro::Diagnostic` functionality
+available on stable ahead of time and your error-reporting code future-proof.
+
+```toml
+[dependencies]
+proc-macro-error = "1.0"
+```
+
+*Supports rustc 1.31 and up*
+
+[Documentation and guide][guide]
+
+## Quick example
+
+Code:
+
+```rust
+#[proc_macro]
+#[proc_macro_error]
+pub fn make_fn(input: TokenStream) -> TokenStream {
+ let mut input = TokenStream2::from(input).into_iter();
+ let name = input.next().unwrap();
+ if let Some(second) = input.next() {
+ abort! { second,
+ "I don't like this part!";
+ note = "I see what you did there...";
+ help = "I need only one part, you know?";
+ }
+ }
+
+ quote!( fn #name() {} ).into()
+}
+```
+
+This is how the error is rendered in a terminal:
+
+<p align="center">
+<img src="https://user-images.githubusercontent.com/50968528/78830016-d3b46a80-79d6-11ea-9de2-972e8d7904ef.png" width="600">
+</p>
+
+And this is what your users will see in their IDE:
+
+<p align="center">
+<img src="https://user-images.githubusercontent.com/50968528/78830547-a9af7800-79d7-11ea-822e-59e29bda335c.png" width="600">
+</p>
+
+## Examples
+
+### Panic-like usage
+
+```rust
+use proc_macro_error::{
+ proc_macro_error,
+ abort,
+ abort_call_site,
+ ResultExt,
+ OptionExt,
+};
+use proc_macro::TokenStream;
+use syn::{DeriveInput, parse_macro_input};
+use quote::quote;
+
+// This is your main entry point
+#[proc_macro]
+// This attribute *MUST* be placed on top of the #[proc_macro] function
+#[proc_macro_error]
+pub fn make_answer(input: TokenStream) -> TokenStream {
+ let input = parse_macro_input!(input as DeriveInput);
+
+ if let Err(err) = some_logic(&input) {
+ // we've got a span to blame, let's use it
+ // This immediately aborts the proc-macro and shows the error
+ //
+ // You can use `proc_macro::Span`, `proc_macro2::Span`, and
+ // anything that implements `quote::ToTokens` (almost every type from
+ // `syn` and `proc_macro2`)
+ abort!(err, "You made an error, go fix it: {}", err.msg);
+ }
+
+ // `Result` has some handy shortcuts if your error type implements
+ // `Into<Diagnostic>`. `Option` has one unconditionally.
+ more_logic(&input).expect_or_abort("What a careless user, behave!");
+
+ if !more_logic_for_logic_god(&input) {
+ // We don't have an exact location this time,
+ // so just highlight the proc-macro invocation itself
+ abort_call_site!(
+ "Bad, bad user! Now go stand in the corner and think about what you did!");
+ }
+
+ // Now all the processing is done, return `proc_macro::TokenStream`
+ quote!(/* stuff */).into()
+}
+```
+
+### `proc_macro::Diagnostic`-like usage
+
+```rust
+use proc_macro_error::*;
+use proc_macro::TokenStream;
+use syn::{spanned::Spanned, DeriveInput, ItemStruct, Fields, Attribute , parse_macro_input};
+use quote::quote;
+
+fn process_attrs(attrs: &[Attribute]) -> Vec<Attribute> {
+ attrs
+ .iter()
+ .filter_map(|attr| match process_attr(attr) {
+ Ok(res) => Some(res),
+ Err(msg) => {
+ emit_error!(attr, "Invalid attribute: {}", msg);
+ None
+ }
+ })
+ .collect()
+}
+
+fn process_fields(_attrs: &Fields) -> Vec<TokenStream> {
+ // processing fields in pretty much the same way as attributes
+ unimplemented!()
+}
+
+#[proc_macro]
+#[proc_macro_error]
+pub fn make_answer(input: TokenStream) -> TokenStream {
+ let input = parse_macro_input!(input as ItemStruct);
+ let attrs = process_attrs(&input.attrs);
+
+ // abort right now if some errors were encountered
+ // at the attributes processing stage
+ abort_if_dirty();
+
+ let fields = process_fields(&input.fields);
+
+ // no need to think about emitted errors
+ // #[proc_macro_error] will handle them for you
+ //
+ // just return a TokenStream as you normally would
+ quote!(/* stuff */).into()
+}
+```
+
+## Real world examples
+
+* [`structopt-derive`](https://github.com/TeXitoi/structopt/tree/master/structopt-derive)
+ (abort-like usage)
+* [`auto-impl`](https://github.com/auto-impl-rs/auto_impl/) (emit-like usage)
+
+## Limitations
+
+- Warnings are emitted only on nightly, they are ignored on stable.
+- "help" suggestions can't have their own span info on stable,
+ (essentially inheriting the parent span).
+- If your macro happens to trigger a panic, no errors will be displayed. This is not a
+ technical limitation but rather intentional design. `panic` is not for error reporting.
+
+## MSRV policy
+
+`proc_macro_error` will always be compatible with proc-macro Holy Trinity:
+`proc_macro2`, `syn`, `quote` crates. In other words, if the Trinity is available
+to you - `proc_macro_error` is available too.
+
+> **Important!**
+>
+> If you want to use `#[proc_macro_error]` with `synstructure`, you're going
+> to have to put the attribute inside the `decl_derive!` invocation. Unfortunately,
+> due to some bug in pre-1.34 rustc, putting proc-macro attributes inside macro
+> invocations doesn't work, so your MSRV is effectively 1.34.
+
+## Motivation
+
+Error handling in proc-macros sucks. There's not much of a choice today:
+you either "bubble up" the error up to the top-level of the macro and convert it to
+a [`compile_error!`][compl_err] invocation or just use a good old panic. Both these ways suck:
+
+- Former sucks because it's quite redundant to unroll a proper error handling
+ just for critical errors that will crash the macro anyway; so people mostly
+ choose not to bother with it at all and use panic. Simple `.expect` is too tempting.
+
+ Also, if you do decide to implement this `Result`-based architecture in your macro
+ you're going to have to rewrite it entirely once [`proc_macro::Diagnostic`][] is finally
+ stable. Not cool.
+
+- Later sucks because there's no way to carry out the span info via `panic!`.
+ `rustc` will highlight the invocation itself but not some specific token inside it.
+
+ Furthermore, panics aren't for error-reporting at all; panics are for bug-detecting
+ (like unwrapping on `None` or out-of-range indexing) or for early development stages
+ when you need a prototype ASAP so error handling can wait. Mixing these usages only
+ messes things up.
+
+- There is [`proc_macro::Diagnostic`][] which is awesome but it has been experimental
+ for more than a year and is unlikely to be stabilized any time soon.
+
+ This crate's API is intentionally designed to be compatible with `proc_macro::Diagnostic`
+ and delegates to it whenever possible. Once `Diagnostics` is stable this crate
+ will **always** delegate to it, no code changes will be required on user side.
+
+That said, we need a solution, but this solution must meet these conditions:
+
+- It must be better than `panic!`. The main point: it must offer a way to carry the span
+  information over to the user.
+- It must take as little effort as possible to migrate from `panic!`. Ideally, a new
+  macro with similar semantics plus the ability to carry span info.
+- It must maintain compatibility with [`proc_macro::Diagnostic`][].
+- **It must be usable on stable**.
+
+This crate aims to provide such a mechanism. All you have to do is annotate your top-level
+`#[proc_macro]` function with the `#[proc_macro_error]` attribute and change panics to
+[`abort!`]/[`abort_call_site!`] where appropriate; see [the Guide][guide]
+(a brief migration sketch also follows this README).
+
+## Disclaimer
+Please note that **this crate is not intended to be used in any way other
+than error reporting in procedural macros**; use `Result` and `?` (possibly along with one of the
+many helpers out there) for anything else.
+
+<br>
+
+#### License
+
+<sup>
+Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+</sup>
+
+<br>
+
+<sub>
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
+</sub>
+
+
+[compl_err]: https://doc.rust-lang.org/std/macro.compile_error.html
+[`proc_macro::Diagnostic`]: https://doc.rust-lang.org/proc_macro/struct.Diagnostic.html
+
+[crate::dummy]: https://docs.rs/proc-macro-error/1/proc_macro_error/dummy/index.html
+[crate::multi]: https://docs.rs/proc-macro-error/1/proc_macro_error/multi/index.html
+
+[`abort_call_site!`]: https://docs.rs/proc-macro-error/1/proc_macro_error/macro.abort_call_site.html
+[`abort!`]: https://docs.rs/proc-macro-error/1/proc_macro_error/macro.abort.html
+[guide]: https://docs.rs/proc-macro-error
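
For illustration, a minimal sketch of the migration this README describes; the derive name `MyDerive`, the generated items, and the generics check are illustrative and not part of the vendored crate:

```rust
use proc_macro::TokenStream;
use proc_macro_error::{abort, proc_macro_error};
use quote::quote;
use syn::{parse_macro_input, DeriveInput};

#[proc_macro_derive(MyDerive)]
#[proc_macro_error]
pub fn my_derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);

    // Instead of `panic!`, point the error at the offending tokens.
    if !input.generics.params.is_empty() {
        abort!(input.generics, "`MyDerive` does not support generic parameters");
    }

    let name = &input.ident;
    quote!(impl #name { pub fn hello() {} }).into()
}
```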
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/build.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/build.rs
new file mode 100644
index 0000000000..3c1196f269
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/build.rs
@@ -0,0 +1,11 @@
+fn main() {
+ if !version_check::is_feature_flaggable().unwrap_or(false) {
+ println!("cargo:rustc-cfg=use_fallback");
+ }
+
+ if version_check::is_max_version("1.38.0").unwrap_or(false)
+ || !version_check::Channel::read().unwrap().is_stable()
+ {
+ println!("cargo:rustc-cfg=skip_ui_tests");
+ }
+}
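
For context, the two cfgs emitted by this build script are consumed later in this same patch: `use_fallback` selects the stable `compile_error!`-based backend in `src/lib.rs`, and `skip_ui_tests` marks the trybuild UI test in `tests/macro-errors.rs` as ignored on older compilers. A condensed sketch of that consumption:

```rust
// Backend selection in src/lib.rs: stable fallback vs. nightly Diagnostic delegate.
#[cfg(use_fallback)]
#[path = "imp/fallback.rs"]
mod imp;

#[cfg(not(use_fallback))]
#[path = "imp/delegate.rs"]
mod imp;

// UI test gating in tests/macro-errors.rs.
#[cfg_attr(skip_ui_tests, ignore)]
#[test]
fn ui() {
    let t = trybuild::TestCases::new();
    t.compile_fail("tests/ui/*.rs");
}
```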
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/meson.build
b/rust/hw/char/pl011/vendor/proc-macro-error/meson.build
new file mode 100644
index 0000000000..db5d09f5db
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/meson.build
@@ -0,0 +1,22 @@
+_proc_macro_error_rs = static_library(
+ 'proc_macro_error',
+ files('src/lib.rs'),
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2018',
+ '--cfg', 'use_fallback',
+ '--cfg', 'feature="syn-error"',
+ '--cfg', 'feature="proc-macro"',
+ '-A', 'non_fmt_panics'
+ ],
+ dependencies: [
+ dep_proc_macro_error_attr,
+ dep_proc_macro2,
+ dep_quote,
+ dep_syn,
+ ],
+)
+
+dep_proc_macro_error = declare_dependency(
+ link_with: _proc_macro_error_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/src/diagnostic.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/src/diagnostic.rs
new file mode 100644
index 0000000000..983e6174fe
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/src/diagnostic.rs
@@ -0,0 +1,349 @@
+use crate::{abort_now, check_correctness, sealed::Sealed, SpanRange};
+use proc_macro2::Span;
+use proc_macro2::TokenStream;
+
+use quote::{quote_spanned, ToTokens};
+
+/// Represents a diagnostic level
+///
+/// # Warnings
+///
+/// Warnings are ignored on stable/beta
+#[derive(Debug, PartialEq)]
+pub enum Level {
+ Error,
+ Warning,
+ #[doc(hidden)]
+ NonExhaustive,
+}
+
+/// Represents a single diagnostic message
+#[derive(Debug)]
+pub struct Diagnostic {
+ pub(crate) level: Level,
+ pub(crate) span_range: SpanRange,
+ pub(crate) msg: String,
+ pub(crate) suggestions: Vec<(SuggestionKind, String, Option<SpanRange>)>,
+ pub(crate) children: Vec<(SpanRange, String)>,
+}
+
+/// A collection of methods that do not exist in `proc_macro::Diagnostic`
+/// but are still useful to have around.
+///
+/// This trait is sealed and cannot be implemented outside of `proc_macro_error`.
+pub trait DiagnosticExt: Sealed {
+ /// Create a new diagnostic message that points to the `span_range`.
+ ///
+ /// This function is the same as `Diagnostic::spanned` but produces considerably
+ /// better error messages for multi-token spans on stable.
+ fn spanned_range(span_range: SpanRange, level: Level, message: String) -> Self;
+
+ /// Add another error message to self such that it will be emitted right after
+ /// the main message.
+ ///
+ /// This function is the same as `Diagnostic::span_error` but produces considerably
+ /// better error messages for multi-token spans on stable.
+ fn span_range_error(self, span_range: SpanRange, msg: String) -> Self;
+
+ /// Attach a "help" note to your main message, the note will have it's own
span on nightly.
+ ///
+ /// This function is the same as `Diagnostic::span_help` but produces
considerably
+ /// better error messages for multi-token spans on stable.
+ ///
+ /// # Span
+ ///
+ /// The span is ignored on stable, the note effectively inherits its
parent's (main message) span
+ fn span_range_help(self, span_range: SpanRange, msg: String) -> Self;
+
+ /// Attach a note to your main message; the note will have its own span on nightly.
+ ///
+ /// This function is the same as `Diagnostic::span_note` but produces considerably
+ /// better error messages for multi-token spans on stable.
+ ///
+ /// # Span
+ ///
+ /// The span is ignored on stable; the note effectively inherits its parent's
+ /// (main message) span.
+ fn span_range_note(self, span_range: SpanRange, msg: String) -> Self;
+}
+
+impl DiagnosticExt for Diagnostic {
+ fn spanned_range(span_range: SpanRange, level: Level, message: String) -> Self {
+ Diagnostic {
+ level,
+ span_range,
+ msg: message,
+ suggestions: vec![],
+ children: vec![],
+ }
+ }
+
+ fn span_range_error(mut self, span_range: SpanRange, msg: String) -> Self {
+ self.children.push((span_range, msg));
+ self
+ }
+
+ fn span_range_help(mut self, span_range: SpanRange, msg: String) -> Self {
+ self.suggestions
+ .push((SuggestionKind::Help, msg, Some(span_range)));
+ self
+ }
+
+ fn span_range_note(mut self, span_range: SpanRange, msg: String) -> Self {
+ self.suggestions
+ .push((SuggestionKind::Note, msg, Some(span_range)));
+ self
+ }
+}
+
+impl Diagnostic {
+ /// Create a new diagnostic message that points to `Span::call_site()`
+ pub fn new(level: Level, message: String) -> Self {
+ Diagnostic::spanned(Span::call_site(), level, message)
+ }
+
+ /// Create a new diagnostic message that points to the `span`
+ pub fn spanned(span: Span, level: Level, message: String) -> Self {
+ Diagnostic::spanned_range(
+ SpanRange {
+ first: span,
+ last: span,
+ },
+ level,
+ message,
+ )
+ }
+
+ /// Add another error message to self such that it will be emitted right after
+ /// the main message.
+ pub fn span_error(self, span: Span, msg: String) -> Self {
+ self.span_range_error(
+ SpanRange {
+ first: span,
+ last: span,
+ },
+ msg,
+ )
+ }
+
+ /// Attach a "help" note to your main message, the note will have it's own
span on nightly.
+ ///
+ /// # Span
+ ///
+ /// The span is ignored on stable, the note effectively inherits its
parent's (main message) span
+ pub fn span_help(self, span: Span, msg: String) -> Self {
+ self.span_range_help(
+ SpanRange {
+ first: span,
+ last: span,
+ },
+ msg,
+ )
+ }
+
+ /// Attach a "help" note to your main message.
+ pub fn help(mut self, msg: String) -> Self {
+ self.suggestions.push((SuggestionKind::Help, msg, None));
+ self
+ }
+
+ /// Attach a note to your main message; the note will have its own span on nightly.
+ ///
+ /// # Span
+ ///
+ /// The span is ignored on stable; the note effectively inherits its parent's
+ /// (main message) span.
+ pub fn span_note(self, span: Span, msg: String) -> Self {
+ self.span_range_note(
+ SpanRange {
+ first: span,
+ last: span,
+ },
+ msg,
+ )
+ }
+
+ /// Attach a note to your main message
+ pub fn note(mut self, msg: String) -> Self {
+ self.suggestions.push((SuggestionKind::Note, msg, None));
+ self
+ }
+
+ /// The message of main warning/error (no notes attached)
+ pub fn message(&self) -> &str {
+ &self.msg
+ }
+
+ /// Abort the proc-macro's execution and display the diagnostic.
+ ///
+ /// # Warnings
+ ///
+ /// Warnings are not emitted on stable and beta, but this function will abort anyway.
+ pub fn abort(self) -> ! {
+ self.emit();
+ abort_now()
+ }
+
+ /// Display the diagnostic while not aborting macro execution.
+ ///
+ /// # Warnings
+ ///
+ /// Warnings are ignored on stable/beta
+ pub fn emit(self) {
+ check_correctness();
+ crate::imp::emit_diagnostic(self);
+ }
+}
+
+/// **NOT PUBLIC API! NOTHING TO SEE HERE!!!**
+#[doc(hidden)]
+impl Diagnostic {
+ pub fn span_suggestion(self, span: Span, suggestion: &str, msg: String) -> Self {
+ match suggestion {
+ "help" | "hint" => self.span_help(span, msg),
+ _ => self.span_note(span, msg),
+ }
+ }
+
+ pub fn suggestion(self, suggestion: &str, msg: String) -> Self {
+ match suggestion {
+ "help" | "hint" => self.help(msg),
+ _ => self.note(msg),
+ }
+ }
+}
+
+impl ToTokens for Diagnostic {
+ fn to_tokens(&self, ts: &mut TokenStream) {
+ use std::borrow::Cow;
+
+ fn ensure_lf(buf: &mut String, s: &str) {
+ if s.ends_with('\n') {
+ buf.push_str(s);
+ } else {
+ buf.push_str(s);
+ buf.push('\n');
+ }
+ }
+
+ fn diag_to_tokens(
+ span_range: SpanRange,
+ level: &Level,
+ msg: &str,
+ suggestions: &[(SuggestionKind, String, Option<SpanRange>)],
+ ) -> TokenStream {
+ if *level == Level::Warning {
+ return TokenStream::new();
+ }
+
+ let message = if suggestions.is_empty() {
+ Cow::Borrowed(msg)
+ } else {
+ let mut message = String::new();
+ ensure_lf(&mut message, msg);
+ message.push('\n');
+
+ for (kind, note, _span) in suggestions {
+ message.push_str(" = ");
+ message.push_str(kind.name());
+ message.push_str(": ");
+ ensure_lf(&mut message, note);
+ }
+ message.push('\n');
+
+ Cow::Owned(message)
+ };
+
+ let mut msg = proc_macro2::Literal::string(&message);
+ msg.set_span(span_range.last);
+ let group = quote_spanned!(span_range.last=> { #msg } );
+ quote_spanned!(span_range.first=> compile_error!#group)
+ }
+
+ ts.extend(diag_to_tokens(
+ self.span_range,
+ &self.level,
+ &self.msg,
+ &self.suggestions,
+ ));
+ ts.extend(
+ self.children
+ .iter()
+ .map(|(span_range, msg)| diag_to_tokens(*span_range,
&Level::Error, &msg, &[])),
+ );
+ }
+}
+
+#[derive(Debug)]
+pub(crate) enum SuggestionKind {
+ Help,
+ Note,
+}
+
+impl SuggestionKind {
+ fn name(&self) -> &'static str {
+ match self {
+ SuggestionKind::Note => "note",
+ SuggestionKind::Help => "help",
+ }
+ }
+}
+
+#[cfg(feature = "syn-error")]
+impl From<syn::Error> for Diagnostic {
+ fn from(err: syn::Error) -> Self {
+ use proc_macro2::{Delimiter, TokenTree};
+
+ fn gut_error(ts: &mut impl Iterator<Item = TokenTree>) ->
Option<(SpanRange, String)> {
+ let first = match ts.next() {
+ // compile_error
+ None => return None,
+ Some(tt) => tt.span(),
+ };
+ ts.next().unwrap(); // !
+
+ let lit = match ts.next().unwrap() {
+ TokenTree::Group(group) => {
+ // Currently `syn` builds `compile_error!` invocations
+ // exclusively in `ident{"..."}` (braced) form which is not
+ // followed by `;` (semicolon).
+ //
+ // But if it changes to `ident("...");` (parenthesized)
+ // or `ident["..."];` (bracketed) form,
+ // we will need to skip the `;` as well.
+ // Highly unlikely, but better safe than sorry.
+
+ if group.delimiter() == Delimiter::Parenthesis
+ || group.delimiter() == Delimiter::Bracket
+ {
+ ts.next().unwrap(); // ;
+ }
+
+ match group.stream().into_iter().next().unwrap() {
+ TokenTree::Literal(lit) => lit,
+ _ => unreachable!(),
+ }
+ }
+ _ => unreachable!(),
+ };
+
+ let last = lit.span();
+ let mut msg = lit.to_string();
+
+ // "abc" => abc
+ msg.pop();
+ msg.remove(0);
+
+ Some((SpanRange { first, last }, msg))
+ }
+
+ let mut ts = err.to_compile_error().into_iter();
+
+ let (span_range, msg) = gut_error(&mut ts).unwrap();
+ let mut res = Diagnostic::spanned_range(span_range, Level::Error, msg);
+
+ while let Some((span_range, msg)) = gut_error(&mut ts) {
+ res = res.span_range_error(span_range, msg);
+ }
+
+ res
+ }
+}
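
For illustration, a minimal sketch of driving the `Diagnostic` builder above directly; the spans and messages are made up, and the function is assumed to run inside a `#[proc_macro_error]`-annotated macro, since `emit()` checks for the entry point:

```rust
use proc_macro2::Span;
use proc_macro_error::{Diagnostic, Level};

// Reports an error on `struct_span` with a help note attached to `field_span`.
fn report_missing_field(struct_span: Span, field_span: Span) {
    Diagnostic::spanned(struct_span, Level::Error, "missing `id` field".to_string())
        .span_help(field_span, "consider adding an `id: u32` field here".to_string())
        .note("`id` is required by this derive".to_string())
        .emit();
}
```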
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/src/dummy.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/src/dummy.rs
new file mode 100644
index 0000000000..571a595aa9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/src/dummy.rs
@@ -0,0 +1,150 @@
+//! Facility to emit dummy implementations (or whatever) in case
+//! an error happens.
+//!
+//! `compile_error!` does not abort the compilation right away. This means
+//! `rustc` doesn't just show you the error and abort; it carries on the
+//! compilation process, looking for other errors to report.
+//!
+//! Let's consider an example:
+//!
+//! ```rust,ignore
+//! use proc_macro::TokenStream;
+//! use proc_macro_error::*;
+//!
+//! trait MyTrait {
+//! fn do_thing();
+//! }
+//!
+//! // this proc macro is supposed to generate MyTrait impl
+//! #[proc_macro_derive(MyTrait)]
+//! #[proc_macro_error]
+//! fn example(input: TokenStream) -> TokenStream {
+//! // somewhere deep inside
+//! abort!(span, "something's wrong");
+//!
+//! // this implementation will be generated if no error happened
+//! quote! {
+//! impl MyTrait for #name {
+//! fn do_thing() {/* whatever */}
+//! }
+//! }
+//! }
+//!
+//! // ================
+//! // in main.rs
+//!
+//! // this derive triggers an error
+//! #[derive(MyTrait)] // first BOOM!
+//! struct Foo;
+//!
+//! fn main() {
+//! Foo::do_thing(); // second BOOM!
+//! }
+//! ```
+//!
+//! The problem is: the generated token stream contains only the `compile_error!`
+//! invocation; the impl was not generated. That means the user will see two
+//! compilation errors:
+//!
+//! ```text
+//! error: something's wrong
+//! --> $DIR/probe.rs:9:10
+//! |
+//! 9 |#[proc_macro_derive(MyTrait)]
+//! | ^^^^^^^
+//!
+//! error[E0599]: no function or associated item named `do_thing` found for type `Foo` in the current scope
+//! --> src\main.rs:3:10
+//! |
+//! 1 | struct Foo;
+//! | ----------- function or associated item `do_thing` not found for this
+//! 2 | fn main() {
+//! 3 | Foo::do_thing(); // second BOOM!
+//! | ^^^^^^^^ function or associated item not found in `Foo`
+//! ```
+//!
+//! But the second error is meaningless! We definitely need to fix this.
+//!
+//! The most common approach in cases like this is a "dummy implementation":
+//! omit `impl MyTrait for #name` and fill function bodies with `unimplemented!()`.
+//!
+//! This is how you do it:
+//!
+//! ```rust,ignore
+//! use proc_macro::TokenStream;
+//! use proc_macro_error::*;
+//!
+//! trait MyTrait {
+//! fn do_thing();
+//! }
+//!
+//! // this proc macro is supposed to generate MyTrait impl
+//! #[proc_macro_derive(MyTrait)]
+//! #[proc_macro_error]
+//! fn example(input: TokenStream) -> TokenStream {
+//! // first of all, we set a dummy impl which will be appended to
+//! // `compile_error!` invocations in case an error is triggered
+//! set_dummy(quote! {
+//! impl MyTrait for #name {
+//! fn do_thing() { unimplemented!() }
+//! }
+//! });
+//!
+//! // somewhere deep inside
+//! abort!(span, "something's wrong");
+//!
+//! // this implementation will be generated if no error happened
+//! quote! {
+//! impl MyTrait for #name {
+//! fn do_thing() {/* whatever */}
+//! }
+//! }
+//! }
+//!
+//! // ================
+//! // in main.rs
+//!
+//! // this derive triggers an error
+//! #[derive(MyTrait)] // first BOOM!
+//! struct Foo;
+//!
+//! fn main() {
+//! Foo::do_thing(); // no more errors!
+//! }
+//! ```
+
+use proc_macro2::TokenStream;
+use std::cell::RefCell;
+
+use crate::check_correctness;
+
+thread_local! {
+ static DUMMY_IMPL: RefCell<Option<TokenStream>> = RefCell::new(None);
+}
+
+/// Sets the dummy token stream which will be appended to `compile_error!(msg);...`
+/// invocations in case you emit any errors.
+///
+/// See [guide](../index.html#guide).
+pub fn set_dummy(dummy: TokenStream) -> Option<TokenStream> {
+ check_correctness();
+ DUMMY_IMPL.with(|old_dummy| old_dummy.replace(Some(dummy)))
+}
+
+/// Same as [`set_dummy`] but, instead of resetting, appends tokens to the
+/// existing dummy (if any). Behaves as `set_dummy` if no dummy is present.
+pub fn append_dummy(dummy: TokenStream) {
+ check_correctness();
+ DUMMY_IMPL.with(|old_dummy| {
+ let mut cell = old_dummy.borrow_mut();
+ if let Some(ts) = cell.as_mut() {
+ ts.extend(dummy);
+ } else {
+ *cell = Some(dummy);
+ }
+ });
+}
+
+pub(crate) fn cleanup() -> Option<TokenStream> {
+ DUMMY_IMPL.with(|old_dummy| old_dummy.replace(None))
+}
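
For illustration, a hedged sketch of the dummy mechanism documented above, mirroring the module docs; `MyTrait`, `expand`, and the spans are illustrative, and the function is assumed to be called from a `#[proc_macro_error]`-annotated macro:

```rust
use proc_macro2::TokenStream;
use proc_macro_error::{abort, set_dummy};
use quote::quote;

fn expand(name: &syn::Ident, bad: Option<proc_macro2::Span>) -> TokenStream {
    // Registered up front: if any abort!/emit_error! fires later, this impl is
    // appended to the compile_error! tokens so follow-up errors are avoided.
    set_dummy(quote! {
        impl MyTrait for #name {
            fn do_thing() { unimplemented!() }
        }
    });

    if let Some(span) = bad {
        abort!(span, "something's wrong");
    }

    // The real implementation, generated when no error occurred.
    quote! {
        impl MyTrait for #name {
            fn do_thing() { /* whatever */ }
        }
    }
}
```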
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/src/imp/delegate.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/src/imp/delegate.rs
new file mode 100644
index 0000000000..07def2b98e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/src/imp/delegate.rs
@@ -0,0 +1,69 @@
+//! This implementation uses [`proc_macro::Diagnostic`], nightly only.
+
+use std::cell::Cell;
+
+use proc_macro::{Diagnostic as PDiag, Level as PLevel};
+
+use crate::{
+ abort_now, check_correctness,
+ diagnostic::{Diagnostic, Level, SuggestionKind},
+};
+
+pub fn abort_if_dirty() {
+ check_correctness();
+ if IS_DIRTY.with(|c| c.get()) {
+ abort_now()
+ }
+}
+
+pub(crate) fn cleanup() -> Vec<Diagnostic> {
+ IS_DIRTY.with(|c| c.set(false));
+ vec![]
+}
+
+pub(crate) fn emit_diagnostic(diag: Diagnostic) {
+ let Diagnostic {
+ level,
+ span_range,
+ msg,
+ suggestions,
+ children,
+ } = diag;
+
+ let span = span_range.collapse().unwrap();
+
+ let level = match level {
+ Level::Warning => PLevel::Warning,
+ Level::Error => {
+ IS_DIRTY.with(|c| c.set(true));
+ PLevel::Error
+ }
+ _ => unreachable!(),
+ };
+
+ let mut res = PDiag::spanned(span, level, msg);
+
+ for (kind, msg, span) in suggestions {
+ res = match (kind, span) {
+ (SuggestionKind::Note, Some(span_range)) => {
+ res.span_note(span_range.collapse().unwrap(), msg)
+ }
+ (SuggestionKind::Help, Some(span_range)) => {
+ res.span_help(span_range.collapse().unwrap(), msg)
+ }
+ (SuggestionKind::Note, None) => res.note(msg),
+ (SuggestionKind::Help, None) => res.help(msg),
+ }
+ }
+
+ for (span_range, msg) in children {
+ let span = span_range.collapse().unwrap();
+ res = res.span_error(span, msg);
+ }
+
+ res.emit()
+}
+
+thread_local! {
+ static IS_DIRTY: Cell<bool> = Cell::new(false);
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/src/imp/fallback.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/src/imp/fallback.rs
new file mode 100644
index 0000000000..ad1f730bfc
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/src/imp/fallback.rs
@@ -0,0 +1,30 @@
+//! This implementation uses self-written stable facilities.
+
+use crate::{
+ abort_now, check_correctness,
+ diagnostic::{Diagnostic, Level},
+};
+use std::cell::RefCell;
+
+pub fn abort_if_dirty() {
+ check_correctness();
+ ERR_STORAGE.with(|storage| {
+ if !storage.borrow().is_empty() {
+ abort_now()
+ }
+ });
+}
+
+pub(crate) fn cleanup() -> Vec<Diagnostic> {
+ ERR_STORAGE.with(|storage| storage.replace(Vec::new()))
+}
+
+pub(crate) fn emit_diagnostic(diag: Diagnostic) {
+ if diag.level == Level::Error {
+ ERR_STORAGE.with(|storage| storage.borrow_mut().push(diag));
+ }
+}
+
+thread_local! {
+ static ERR_STORAGE: RefCell<Vec<Diagnostic>> = RefCell::new(Vec::new());
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/src/lib.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/src/lib.rs
new file mode 100644
index 0000000000..fb867fdc03
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/src/lib.rs
@@ -0,0 +1,560 @@
+//! # proc-macro-error
+//!
+//! This crate aims to make error reporting in proc-macros simple and easy to use.
+//! Migrate from `panic!`-based errors with as little effort as possible!
+//!
+//! (Also, you can explicitly [append a dummy token stream](dummy/index.html) to your errors.)
+//!
+//! To achieve this, this crate serves as a tiny shim around `proc_macro::Diagnostic` and
+//! `compile_error!`. It detects the best way of emitting available based on the compiler's version.
+//! When the underlying diagnostic type is finally stabilized, this crate will simply
+//! delegate to it, requiring no changes in your code!
+//!
+//! So you can just use this crate and have *both* some of the `proc_macro::Diagnostic`
+//! functionality available on stable ahead of time *and* future-proof error-reporting code.
+//!
+//! ## Cargo features
+//!
+//! This crate provides the `syn-error` feature (enabled by default) that gates
+//! the `impl From<syn::Error> for Diagnostic` conversion. If you don't use `syn` and want
+//! to cut down compilation time, you can disable it via
+//!
+//! ```toml
+//! [dependencies]
+//! proc-macro-error = { version = "1", default-features = false }
+//! ```
+//!
+//! **Please note that disabling this feature makes sense only if you don't depend on `syn`
+//! directly or indirectly, and you very likely do.**
+//!
+//! ## Real world examples
+//!
+//! * [`structopt-derive`](https://github.com/TeXitoi/structopt/tree/master/structopt-derive) (abort-like usage)
+//! * [`auto-impl`](https://github.com/auto-impl-rs/auto_impl/) (emit-like usage)
+//!
+//! ## Limitations
+//!
+//! - Warnings are emitted only on nightly; they are ignored on stable.
+//! - "help" suggestions can't have their own span info on stable
+//!   (they essentially inherit the parent span).
+//! - If a panic occurs somewhere in your macro, no errors will be displayed. This is not a
+//!   technical limitation but rather intentional design. `panic` is not for error reporting.
+//!
+//! ### `#[proc_macro_error]` attribute
+//!
+//! **This attribute MUST be present on the top level of your macro** (the function
+//! annotated with any of `#[proc_macro]`, `#[proc_macro_derive]`, `#[proc_macro_attribute]`).
+//!
+//! This attribute performs the setup and cleanup necessary to make things work.
+//!
+//! In most cases you'll need the simple `#[proc_macro_error]` form without any
+//! additional settings. Feel free to [skip the "Syntax" section](#macros).
+//!
+//! #### Syntax
+//!
+//! `#[proc_macro_error]` or `#[proc_macro_error(settings...)]`, where `settings...`
+//! is a comma-separated list of:
+//!
+//! - `proc_macro_hack`:
+//!
+//! In order to correctly cooperate with `#[proc_macro_hack]`, the `#[proc_macro_error]`
+//! attribute must be placed *before* (above) it, like this:
+//!
+//! ```no_run
+//! # use proc_macro2::TokenStream;
+//! # const IGNORE: &str = "
+//! #[proc_macro_error]
+//! #[proc_macro_hack]
+//! #[proc_macro]
+//! # ";
+//! fn my_macro(input: TokenStream) -> TokenStream {
+//! unimplemented!()
+//! }
+//! ```
+//!
+//! If, for some reason, you can't place it like that, you can use
+//! `#[proc_macro_error(proc_macro_hack)]` instead.
+//!
+//! # Note
+//!
+//! If `proc-macro-hack` was detected (by any means), `allow_not_macro`
+//! and `assert_unwind_safe` will be applied automatically.
+//!
+//! - `allow_not_macro`:
+//!
+//! By default, the attribute checks that it's applied to a proc-macro.
+//! If none of `#[proc_macro]`, `#[proc_macro_derive]`, or `#[proc_macro_attribute]` is
+//! present, it will panic. This is intentional: this crate is supposed to be used only with
+//! proc-macros.
+//!
+//! This setting is made to bypass the check; it is useful in certain circumstances.
+//!
+//! Pay attention: the function this attribute is applied to must return
+//! `proc_macro::TokenStream`.
+//!
+//! This setting is implied if `proc-macro-hack` was detected.
+//!
+//! - `assert_unwind_safe`:
+//!
+//! By default, your code must be [unwind safe]. If your code is not unwind safe,
+//! but you believe it's correct, you can use this setting to bypass the check.
+//! You would need this for code that uses `lazy_static` or `thread_local` with
+//! `Cell/RefCell` inside (and the like).
+//!
+//! This setting is implied if `#[proc_macro_error]` is applied to a function
+//! marked as `#[proc_macro]`, `#[proc_macro_derive]` or `#[proc_macro_attribute]`.
+//!
+//! This setting is also implied if `proc-macro-hack` was detected.
+//!
+//! ## Macros
+//!
+//! Most of the time you want to use the macros. Syntax is described in the next section below.
+//!
+//! You'll need to decide how you want to emit errors:
+//!
+//! * Emit the error and abort. Very much panic-like usage. Served by [`abort!`] and
+//!   [`abort_call_site!`].
+//! * Emit the error but do not abort right away, looking for other errors to report.
+//!   Served by [`emit_error!`] and [`emit_call_site_error!`].
+//!
+//! You **can** mix these usages.
+//!
+//! `abort` and `emit_error` take a "source span" as the first argument. This
source
+//! will be used to highlight the place the error originates from. It must be
one of:
+//!
+//! * *Something* that implements [`ToTokens`] (most types in `syn` and
`proc-macro2` do).
+//! This source is the preferable one since it doesn't lose span information
on multi-token
+//! spans, see [this
issue](https://gitlab.com/CreepySkeleton/proc-macro-error/-/issues/6)
+//! for details.
+//! * [`proc_macro::Span`]
+//! * [`proc-macro2::Span`]
+//!
+//! The rest is your message in format-like style.
+//!
+//! See [the next section](#syntax-1) for detailed syntax.
+//!
+//! - [`abort!`]:
+//!
+//! Very much panic-like usage - abort right away and show the error.
+//! Expands to [`!`] (never type).
+//!
+//! - [`abort_call_site!`]:
+//!
+//! Shortcut for `abort!(Span::call_site(), ...)`. Expands to [`!`] (never type).
+//!
+//! - [`emit_error!`]:
+//!
+//! [`proc_macro::Diagnostic`]-like usage - emit the error but keep going,
+//! looking for other errors to report.
+//! The compilation will fail nonetheless. Expands to [`()`] (unit type).
+//!
+//! - [`emit_call_site_error!`]:
+//!
+//! Shortcut for `emit_error!(Span::call_site(), ...)`. Expands to [`()`] (unit type).
+//!
+//! - [`emit_warning!`]:
+//!
+//! Like `emit_error!` but emits a warning instead of an error. The compilation won't fail
+//! because of warnings.
+//! Expands to [`()`] (unit type).
+//!
+//! **Beware**: warnings are nightly-only; they are completely ignored on stable.
+//!
+//! - [`emit_call_site_warning!`]:
+//!
+//! Shortcut for `emit_warning!(Span::call_site(), ...)`. Expands to [`()`] (unit type).
+//!
+//! - [`diagnostic`]:
+//!
+//! Build an instance of `Diagnostic` in format-like style.
+//!
+//! #### Syntax
+//!
+//! All the macros have pretty much the same syntax:
+//!
+//! 1. ```ignore
+//! abort!(single_expr)
+//! ```
+//! Shortcut for `Diagnostic::from(expr).abort()`.
+//!
+//! 2. ```ignore
+//! abort!(span, message)
+//! ```
+//! The first argument is an expression the span info should be taken from.
+//!
+//! The second argument is the error message; it must implement [`ToString`].
+//!
+//! 3. ```ignore
+//! abort!(span, format_literal, format_args...)
+//! ```
+//!
+//! This form is pretty much the same as 2, except `format!(format_literal, format_args...)`
+//! will be used for the message instead of [`ToString`].
+//!
+//! That's it. `abort!`, `emit_warning!`, and `emit_error!` share this exact syntax.
+//!
+//! `abort_call_site!`, `emit_call_site_warning!`, and `emit_call_site_error!` lack the first form
+//! and do not take a span in the second and third forms. Those are essentially shortcuts for
+//! `macro!(Span::call_site(), args...)`.
+//!
+//! `diagnostic!` requires a [`Level`] instance between `span` and the second argument
+//! (the first form is the same).
+//!
+//! > **Important!**
+//! >
+//! > If you have some type from `proc_macro` or `syn` to point to, do not call `.span()`
+//! > on it but rather use it directly:
+//! > ```no_run
+//! > # use proc_macro_error::abort;
+//! > # let input = proc_macro2::TokenStream::new();
+//! > let ty: syn::Type = syn::parse2(input).unwrap();
+//! > abort!(ty, "BOOM");
+//! > // ^^ <-- avoid .span()
+//! > ```
+//! >
+//! > `.span()` calls work too, but you may experience regressions in message quality.
+//!
+//! #### Note attachments
+//!
+//! Every macro can have "note" attachments (only in the second and third forms).
+//! ```ignore
+//! let opt_help = if have_some_info { Some("did you mean `this`?") } else { None };
+//!
+//! abort!(
+//! span, message; // <--- attachments start with `;` (semicolon)
+//!
+//! help = "format {} {}", "arg1", "arg2"; // <--- every attachment ends
with `;`,
+//! // maybe except the last
one
+//!
+//! note = "to_string"; // <--- one arg uses `.to_string()` instead of
`format!()`
+//!
+//! yay = "I see what {} did here", "you"; // <--- "help =" and "hint ="
are mapped
+//! // to Diagnostic::help,
+//! // anything else is
Diagnostic::note
+//!
+//! wow = note_span => "custom span"; // <--- attachments can have their
own span
+//! // it takes effect only on
nightly though
+//!
+//! hint =? opt_help; // <-- "optional" attachment, get displayed only
if `Some`
+//! // must be single `Option` expression
+//!
+//! note =? note_span => opt_help // <-- optional attachments can have
custom spans too
+//! );
+//! ```
+//!
+
+//! ### Diagnostic type
+//!
+//! The [`Diagnostic`] type is intentionally designed to be API-compatible with [`proc_macro::Diagnostic`].
+//! Not all of the API is implemented, only the part that can be reasonably implemented on stable.
+//!
+//!
+//! [`abort!`]: macro.abort.html
+//! [`abort_call_site!`]: macro.abort_call_site.html
+//! [`emit_warning!`]: macro.emit_warning.html
+//! [`emit_error!`]: macro.emit_error.html
+//! [`emit_call_site_warning!`]: macro.emit_call_site_warning.html
+//! [`emit_call_site_error!`]: macro.emit_call_site_error.html
+//! [`diagnostic!`]: macro.diagnostic.html
+//! [`Diagnostic`]: struct.Diagnostic.html
+//!
+//! [`proc_macro::Span`]: https://doc.rust-lang.org/proc_macro/struct.Span.html
+//! [`proc_macro::Diagnostic`]: https://doc.rust-lang.org/proc_macro/struct.Diagnostic.html
+//!
+//! [unwind safe]: https://doc.rust-lang.org/std/panic/trait.UnwindSafe.html#what-is-unwind-safety
+//! [`!`]: https://doc.rust-lang.org/std/primitive.never.html
+//! [`()`]: https://doc.rust-lang.org/std/primitive.unit.html
+//! [`ToString`]: https://doc.rust-lang.org/std/string/trait.ToString.html
+//!
+//! [`proc-macro2::Span`]: https://docs.rs/proc-macro2/1.0.10/proc_macro2/struct.Span.html
+//! [`ToTokens`]: https://docs.rs/quote/1.0.3/quote/trait.ToTokens.html
+//!
+
+#![cfg_attr(not(use_fallback), feature(proc_macro_diagnostic))]
+#![forbid(unsafe_code)]
+#![allow(clippy::needless_doctest_main)]
+
+extern crate proc_macro;
+
+pub use crate::{
+ diagnostic::{Diagnostic, DiagnosticExt, Level},
+ dummy::{append_dummy, set_dummy},
+};
+pub use proc_macro_error_attr::proc_macro_error;
+
+use proc_macro2::Span;
+use quote::{quote, ToTokens};
+
+use std::cell::Cell;
+use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
+
+pub mod dummy;
+
+mod diagnostic;
+mod macros;
+mod sealed;
+
+#[cfg(use_fallback)]
+#[path = "imp/fallback.rs"]
+mod imp;
+
+#[cfg(not(use_fallback))]
+#[path = "imp/delegate.rs"]
+mod imp;
+
+#[derive(Debug, Clone, Copy)]
+pub struct SpanRange {
+ pub first: Span,
+ pub last: Span,
+}
+
+impl SpanRange {
+ /// Create a range with the `first` and `last` spans being the same.
+ pub fn single_span(span: Span) -> Self {
+ SpanRange {
+ first: span,
+ last: span,
+ }
+ }
+
+ /// Create a `SpanRange` resolving at call site.
+ pub fn call_site() -> Self {
+ SpanRange::single_span(Span::call_site())
+ }
+
+ /// Construct a span range from a `TokenStream`. This method always preserves the full
+ /// range.
+ ///
+ /// ### Note
+ ///
+ /// If the stream is empty, the result is `SpanRange::call_site()`. If the stream
+ /// consists of only one `TokenTree`, the result is `SpanRange::single_span(tt.span())`,
+ /// which doesn't lose anything.
+ pub fn from_tokens(ts: &dyn ToTokens) -> Self {
+ let mut spans = ts.to_token_stream().into_iter().map(|tt| tt.span());
+ let first = spans.next().unwrap_or_else(|| Span::call_site());
+ let last = spans.last().unwrap_or(first);
+
+ SpanRange { first, last }
+ }
+
+ /// Join two span ranges. The resulting range will start at `self.first` and end at
+ /// `other.last`.
+ pub fn join_range(self, other: SpanRange) -> Self {
+ SpanRange {
+ first: self.first,
+ last: other.last,
+ }
+ }
+
+ /// Collapse the range into a single span, preserving as much information as possible.
+ pub fn collapse(self) -> Span {
+ self.first.join(self.last).unwrap_or(self.first)
+ }
+}
+
+/// This trait extends `Result<T, E: Into<Diagnostic>>` with some handy shortcuts.
+pub trait ResultExt {
+ type Ok;
+
+ /// Behaves like `Result::unwrap`: if self is `Ok`, yield the contained value,
+ /// otherwise abort macro execution via `abort!`.
+ fn unwrap_or_abort(self) -> Self::Ok;
+
+ /// Behaves like `Result::expect`: if self is `Ok`, yield the contained value,
+ /// otherwise abort macro execution via `abort!`.
+ /// If it aborts, the resulting error message will be preceded with `message`.
+ fn expect_or_abort(self, msg: &str) -> Self::Ok;
+}
+
+/// This trait extends `Option` with some handy shortcuts.
+pub trait OptionExt {
+ type Some;
+
+ /// Behaves like `Option::expect`: if self is `Some`, yield the contained value,
+ /// otherwise abort macro execution via `abort_call_site!`.
+ /// If it aborts, the `message` will be used for the [`compile_error!`][compl_err] invocation.
+ ///
+ /// [compl_err]: https://doc.rust-lang.org/std/macro.compile_error.html
+ fn expect_or_abort(self, msg: &str) -> Self::Some;
+}
+
+/// Abort macro execution and display all the emitted errors, if any.
+///
+/// Does nothing if no errors were emitted (warnings do not count).
+pub fn abort_if_dirty() {
+ imp::abort_if_dirty();
+}
+
+impl<T, E: Into<Diagnostic>> ResultExt for Result<T, E> {
+ type Ok = T;
+
+ fn unwrap_or_abort(self) -> T {
+ match self {
+ Ok(res) => res,
+ Err(e) => e.into().abort(),
+ }
+ }
+
+ fn expect_or_abort(self, message: &str) -> T {
+ match self {
+ Ok(res) => res,
+ Err(e) => {
+ let mut e = e.into();
+ e.msg = format!("{}: {}", message, e.msg);
+ e.abort()
+ }
+ }
+ }
+}
+
+impl<T> OptionExt for Option<T> {
+ type Some = T;
+
+ fn expect_or_abort(self, message: &str) -> T {
+ match self {
+ Some(res) => res,
+ None => abort_call_site!(message),
+ }
+ }
+}
+
+/// This is the entry point for a proc-macro.
+///
+/// **NOT PUBLIC API, SUBJECT TO CHANGE WITHOUT ANY NOTICE**
+#[doc(hidden)]
+pub fn entry_point<F>(f: F, proc_macro_hack: bool) -> proc_macro::TokenStream
+where
+ F: FnOnce() -> proc_macro::TokenStream + UnwindSafe,
+{
+ ENTERED_ENTRY_POINT.with(|flag| flag.set(flag.get() + 1));
+ let caught = catch_unwind(f);
+ let dummy = dummy::cleanup();
+ let err_storage = imp::cleanup();
+ ENTERED_ENTRY_POINT.with(|flag| flag.set(flag.get() - 1));
+
+ let gen_error = || {
+ if proc_macro_hack {
+ quote! {{
+ macro_rules! proc_macro_call {
+ () => ( unimplemented!() )
+ }
+
+ #(#err_storage)*
+ #dummy
+
+ unimplemented!()
+ }}
+ } else {
+ quote!( #(#err_storage)* #dummy )
+ }
+ };
+
+ match caught {
+ Ok(ts) => {
+ if err_storage.is_empty() {
+ ts
+ } else {
+ gen_error().into()
+ }
+ }
+
+ Err(boxed) => match boxed.downcast::<AbortNow>() {
+ Ok(_) => gen_error().into(),
+ Err(boxed) => resume_unwind(boxed),
+ },
+ }
+}
+
+fn abort_now() -> ! {
+ check_correctness();
+ panic!(AbortNow)
+}
+
+thread_local! {
+ static ENTERED_ENTRY_POINT: Cell<usize> = Cell::new(0);
+}
+
+struct AbortNow;
+
+fn check_correctness() {
+ if ENTERED_ENTRY_POINT.with(|flag| flag.get()) == 0 {
+ panic!(
+ "proc-macro-error API cannot be used outside of `entry_point`
invocation, \
+ perhaps you forgot to annotate your #[proc_macro] function with
`#[proc_macro_error]"
+ );
+ }
+}
+
+/// **ALL THE STUFF INSIDE IS NOT PUBLIC API!!!**
+#[doc(hidden)]
+pub mod __export {
+ // reexports for use in macros
+ pub extern crate proc_macro;
+ pub extern crate proc_macro2;
+
+ use proc_macro2::Span;
+ use quote::ToTokens;
+
+ use crate::SpanRange;
+
+ // inspired by
+ //
https://github.com/dtolnay/case-studies/blob/master/autoref-specialization/README.md#simple-application
+
+ pub trait SpanAsSpanRange {
+ #[allow(non_snake_case)]
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange;
+ }
+
+ pub trait Span2AsSpanRange {
+ #[allow(non_snake_case)]
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange;
+ }
+
+ pub trait ToTokensAsSpanRange {
+ #[allow(non_snake_case)]
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange;
+ }
+
+ pub trait SpanRangeAsSpanRange {
+ #[allow(non_snake_case)]
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange;
+ }
+
+ impl<T: ToTokens> ToTokensAsSpanRange for &T {
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange {
+ let mut ts = self.to_token_stream().into_iter();
+ let first = ts
+ .next()
+ .map(|tt| tt.span())
+ .unwrap_or_else(Span::call_site);
+ let last = ts.last().map(|tt| tt.span()).unwrap_or(first);
+ SpanRange { first, last }
+ }
+ }
+
+ impl Span2AsSpanRange for Span {
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange {
+ SpanRange {
+ first: *self,
+ last: *self,
+ }
+ }
+ }
+
+ impl SpanAsSpanRange for proc_macro::Span {
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange {
+ SpanRange {
+ first: self.clone().into(),
+ last: self.clone().into(),
+ }
+ }
+ }
+
+ impl SpanRangeAsSpanRange for SpanRange {
+ fn
FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange(&self) ->
SpanRange {
+ *self
+ }
+ }
+}
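
For illustration, a brief sketch of the `ResultExt`/`OptionExt` shortcuts defined in this file, assuming the default `syn-error` feature (so `syn::Error` converts into `Diagnostic`) and a `#[proc_macro_error]`-annotated caller; the helper name and messages are made up:

```rust
use proc_macro2::TokenStream;
use proc_macro_error::{abort_call_site, OptionExt, ResultExt};
use syn::{Data, DeriveInput, Fields};

// Returns the name of the first named field, aborting with spanned errors otherwise.
fn first_field_name(input: TokenStream) -> syn::Ident {
    // A parse failure aborts with the original syn error and its spans,
    // instead of panicking.
    let ast: DeriveInput = syn::parse2(input).unwrap_or_abort();

    let fields = match ast.data {
        Data::Struct(s) => s.fields,
        _ => abort_call_site!("only structs are supported"),
    };

    match fields {
        Fields::Named(named) => named
            .named
            .first()
            .and_then(|f| f.ident.clone())
            .expect_or_abort("expected at least one named field"),
        _ => abort_call_site!("expected named fields"),
    }
}
```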
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/src/macros.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/src/macros.rs
new file mode 100644
index 0000000000..747b684d56
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/src/macros.rs
@@ -0,0 +1,288 @@
+// FIXME: this can be greatly simplified via $()?
+// as soon as MSRV hits 1.32
+
+/// Build a [`Diagnostic`](struct.Diagnostic.html) instance from the provided arguments.
+///
+/// # Syntax
+///
+/// See [the guide](index.html#guide).
+///
+#[macro_export]
+macro_rules! diagnostic {
+ // from alias
+ ($err:expr) => { $crate::Diagnostic::from($err) };
+
+ // span, message, help
+ ($span:expr, $level:expr, $fmt:expr, $($args:expr),+ ; $($rest:tt)+) => {{
+ #[allow(unused_imports)]
+ use $crate::__export::{
+ ToTokensAsSpanRange,
+ Span2AsSpanRange,
+ SpanAsSpanRange,
+ SpanRangeAsSpanRange
+ };
+ use $crate::DiagnosticExt;
+ let span_range =
(&$span).FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange();
+
+ let diag = $crate::Diagnostic::spanned_range(
+ span_range,
+ $level,
+ format!($fmt, $($args),*)
+ );
+ $crate::__pme__suggestions!(diag $($rest)*);
+ diag
+ }};
+
+ ($span:expr, $level:expr, $msg:expr ; $($rest:tt)+) => {{
+ #[allow(unused_imports)]
+ use $crate::__export::{
+ ToTokensAsSpanRange,
+ Span2AsSpanRange,
+ SpanAsSpanRange,
+ SpanRangeAsSpanRange
+ };
+ use $crate::DiagnosticExt;
+ let span_range =
(&$span).FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange();
+
+ let diag = $crate::Diagnostic::spanned_range(span_range, $level,
$msg.to_string());
+ $crate::__pme__suggestions!(diag $($rest)*);
+ diag
+ }};
+
+ // span, message, no help
+ ($span:expr, $level:expr, $fmt:expr, $($args:expr),+) => {{
+ #[allow(unused_imports)]
+ use $crate::__export::{
+ ToTokensAsSpanRange,
+ Span2AsSpanRange,
+ SpanAsSpanRange,
+ SpanRangeAsSpanRange
+ };
+ use $crate::DiagnosticExt;
+ let span_range =
(&$span).FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange();
+
+ $crate::Diagnostic::spanned_range(
+ span_range,
+ $level,
+ format!($fmt, $($args),*)
+ )
+ }};
+
+ ($span:expr, $level:expr, $msg:expr) => {{
+ #[allow(unused_imports)]
+ use $crate::__export::{
+ ToTokensAsSpanRange,
+ Span2AsSpanRange,
+ SpanAsSpanRange,
+ SpanRangeAsSpanRange
+ };
+ use $crate::DiagnosticExt;
+ let span_range =
(&$span).FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange();
+
+ $crate::Diagnostic::spanned_range(span_range, $level, $msg.to_string())
+ }};
+
+
+ // trailing commas
+
+ ($span:expr, $level:expr, $fmt:expr, $($args:expr),+, ; $($rest:tt)+) => {
+ $crate::diagnostic!($span, $level, $fmt, $($args),* ; $($rest)*)
+ };
+ ($span:expr, $level:expr, $msg:expr, ; $($rest:tt)+) => {
+ $crate::diagnostic!($span, $level, $msg ; $($rest)*)
+ };
+ ($span:expr, $level:expr, $fmt:expr, $($args:expr),+,) => {
+ $crate::diagnostic!($span, $level, $fmt, $($args),*)
+ };
+ ($span:expr, $level:expr, $msg:expr,) => {
+ $crate::diagnostic!($span, $level, $msg)
+ };
+ // ($err:expr,) => { $crate::diagnostic!($err) };
+}
+
+/// Abort proc-macro execution right now and display the error.
+///
+/// # Syntax
+///
+/// See [the guide](index.html#guide).
+#[macro_export]
+macro_rules! abort {
+ ($err:expr) => {
+ $crate::diagnostic!($err).abort()
+ };
+
+ ($span:expr, $($tts:tt)*) => {
+ $crate::diagnostic!($span, $crate::Level::Error, $($tts)*).abort()
+ };
+}
+
+/// Shortcut for `abort!(Span::call_site(), msg...)`. This macro
+/// is still preferable over a plain panic; panics are not for error reporting.
+///
+/// # Syntax
+///
+/// See [the guide](index.html#guide).
+///
+#[macro_export]
+macro_rules! abort_call_site {
+ ($($tts:tt)*) => {
+ $crate::abort!($crate::__export::proc_macro2::Span::call_site(),
$($tts)*)
+ };
+}
+
+/// Emit an error while not aborting the proc-macro right away.
+///
+/// # Syntax
+///
+/// See [the guide](index.html#guide).
+///
+#[macro_export]
+macro_rules! emit_error {
+ ($err:expr) => {
+ $crate::diagnostic!($err).emit()
+ };
+
+ ($span:expr, $($tts:tt)*) => {{
+ let level = $crate::Level::Error;
+ $crate::diagnostic!($span, level, $($tts)*).emit()
+ }};
+}
+
+/// Shortcut for `emit_error!(Span::call_site(), ...)`. This macro
+/// is still preferable over a plain panic; panics are not for error reporting.
+///
+/// # Syntax
+///
+/// See [the guide](index.html#guide).
+///
+#[macro_export]
+macro_rules! emit_call_site_error {
+ ($($tts:tt)*) => {
+ $crate::emit_error!($crate::__export::proc_macro2::Span::call_site(),
$($tts)*)
+ };
+}
+
+/// Emit a warning. Warnings are not errors and compilation won't fail because of them.
+///
+/// **Does nothing on stable**
+///
+/// # Syntax
+///
+/// See [the guide](index.html#guide).
+///
+#[macro_export]
+macro_rules! emit_warning {
+ ($span:expr, $($tts:tt)*) => {
+ $crate::diagnostic!($span, $crate::Level::Warning, $($tts)*).emit()
+ };
+}
+
+/// Shortcut for `emit_warning!(Span::call_site(), ...)`.
+///
+/// **Does nothing on stable**
+///
+/// # Syntax
+///
+/// See [the guide](index.html#guide).
+///
+#[macro_export]
+macro_rules! emit_call_site_warning {
+ ($($tts:tt)*) => {{
+
$crate::emit_warning!($crate::__export::proc_macro2::Span::call_site(),
$($tts)*)
+ }};
+}
+
+#[doc(hidden)]
+#[macro_export]
+macro_rules! __pme__suggestions {
+ ($var:ident) => ();
+
+ ($var:ident $help:ident =? $msg:expr) => {
+ let $var = if let Some(msg) = $msg {
+ $var.suggestion(stringify!($help), msg.to_string())
+ } else {
+ $var
+ };
+ };
+ ($var:ident $help:ident =? $span:expr => $msg:expr) => {
+ let $var = if let Some(msg) = $msg {
+ $var.span_suggestion($span.into(), stringify!($help),
msg.to_string())
+ } else {
+ $var
+ };
+ };
+
+ ($var:ident $help:ident =? $msg:expr ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help =? $msg);
+ $crate::__pme__suggestions!($var $($rest)*);
+ };
+ ($var:ident $help:ident =? $span:expr => $msg:expr ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help =? $span => $msg);
+ $crate::__pme__suggestions!($var $($rest)*);
+ };
+
+
+ ($var:ident $help:ident = $msg:expr) => {
+ let $var = $var.suggestion(stringify!($help), $msg.to_string());
+ };
+ ($var:ident $help:ident = $fmt:expr, $($args:expr),+) => {
+ let $var = $var.suggestion(
+ stringify!($help),
+ format!($fmt, $($args),*)
+ );
+ };
+ ($var:ident $help:ident = $span:expr => $msg:expr) => {
+ let $var = $var.span_suggestion($span.into(), stringify!($help),
$msg.to_string());
+ };
+ ($var:ident $help:ident = $span:expr => $fmt:expr, $($args:expr),+) => {
+ let $var = $var.span_suggestion(
+ $span.into(),
+ stringify!($help),
+ format!($fmt, $($args),*)
+ );
+ };
+
+ ($var:ident $help:ident = $msg:expr ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $msg);
+ $crate::__pme__suggestions!($var $($rest)*);
+ };
+ ($var:ident $help:ident = $fmt:expr, $($args:expr),+ ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $fmt, $($args),*);
+ $crate::__pme__suggestions!($var $($rest)*);
+ };
+ ($var:ident $help:ident = $span:expr => $msg:expr ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $span => $msg);
+ $crate::__pme__suggestions!($var $($rest)*);
+ };
+ ($var:ident $help:ident = $span:expr => $fmt:expr, $($args:expr),+ ;
$($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $span => $fmt, $($args),*);
+ $crate::__pme__suggestions!($var $($rest)*);
+ };
+
+ // trailing commas
+
+ ($var:ident $help:ident = $msg:expr,) => {
+ $crate::__pme__suggestions!($var $help = $msg)
+ };
+ ($var:ident $help:ident = $fmt:expr, $($args:expr),+,) => {
+ $crate::__pme__suggestions!($var $help = $fmt, $($args)*)
+ };
+ ($var:ident $help:ident = $span:expr => $msg:expr,) => {
+ $crate::__pme__suggestions!($var $help = $span => $msg)
+ };
+ ($var:ident $help:ident = $span:expr => $fmt:expr, $($args:expr),*,) => {
+ $crate::__pme__suggestions!($var $help = $span => $fmt, $($args)*)
+ };
+ ($var:ident $help:ident = $msg:expr, ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $msg; $($rest)*)
+ };
+ ($var:ident $help:ident = $fmt:expr, $($args:expr),+, ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $fmt, $($args),*; $($rest)*)
+ };
+ ($var:ident $help:ident = $span:expr => $msg:expr, ; $($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $span => $msg; $($rest)*)
+ };
+ ($var:ident $help:ident = $span:expr => $fmt:expr, $($args:expr),+, ;
$($rest:tt)*) => {
+ $crate::__pme__suggestions!($var $help = $span => $fmt, $($args),*;
$($rest)*)
+ };
+}
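
For reference, a hedged sketch of the attachment syntax these `__pme__suggestions` rules implement, mirroring the example in the crate-level docs; the `reject` helper, its arguments, and the messages are illustrative:

```rust
use proc_macro_error::abort;

// Aborts with a spanned error plus help/note attachments; `hint =?` is shown
// only when `candidate` is `Some` ("help"/"hint" map to Diagnostic::help,
// everything else maps to Diagnostic::note).
fn reject(ty: &syn::Type, candidate: Option<&str>) -> ! {
    abort!(
        ty, "type `{}` is not supported", quote::quote!(#ty);

        help = "wrap the field in `Option<...>`";
        note = "supported types are integers and strings";
        hint =? candidate
    );
}
```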
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/src/sealed.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/src/sealed.rs
new file mode 100644
index 0000000000..a2d5081e55
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/src/sealed.rs
@@ -0,0 +1,3 @@
+pub trait Sealed {}
+
+impl Sealed for crate::Diagnostic {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/macro-errors.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/macro-errors.rs
new file mode 100644
index 0000000000..dd60f88a80
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/macro-errors.rs
@@ -0,0 +1,8 @@
+extern crate trybuild;
+
+#[cfg_attr(skip_ui_tests, ignore)]
+#[test]
+fn ui() {
+ let t = trybuild::TestCases::new();
+ t.compile_fail("tests/ui/*.rs");
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ok.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ok.rs
new file mode 100644
index 0000000000..cf64c027f8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ok.rs
@@ -0,0 +1,10 @@
+extern crate test_crate;
+
+use test_crate::*;
+
+ok!(it_works);
+
+#[test]
+fn check_it_works() {
+ it_works();
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/runtime-errors.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/runtime-errors.rs
new file mode 100644
index 0000000000..13108a2d91
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/runtime-errors.rs
@@ -0,0 +1,13 @@
+use proc_macro_error::*;
+
+#[test]
+#[should_panic = "proc-macro-error API cannot be used outside of"]
+fn missing_attr_emit() {
+ emit_call_site_error!("You won't see me");
+}
+
+#[test]
+#[should_panic = "proc-macro-error API cannot be used outside of"]
+fn missing_attr_abort() {
+ abort_call_site!("You won't see me");
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.rs
new file mode 100644
index 0000000000..f63118251e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.rs
@@ -0,0 +1,11 @@
+extern crate test_crate;
+use test_crate::*;
+
+abort_from!(one, two);
+abort_to_string!(one, two);
+abort_format!(one, two);
+direct_abort!(one, two);
+abort_notes!(one, two);
+abort_call_site_test!(one, two);
+
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.stderr
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.stderr
new file mode 100644
index 0000000000..c5399d9d91
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/abort.stderr
@@ -0,0 +1,48 @@
+error: abort!(span, from) test
+ --> $DIR/abort.rs:4:13
+ |
+4 | abort_from!(one, two);
+ | ^^^
+
+error: abort!(span, single_expr) test
+ --> $DIR/abort.rs:5:18
+ |
+5 | abort_to_string!(one, two);
+ | ^^^
+
+error: abort!(span, expr1, expr2) test
+ --> $DIR/abort.rs:6:15
+ |
+6 | abort_format!(one, two);
+ | ^^^
+
+error: Diagnostic::abort() test
+ --> $DIR/abort.rs:7:15
+ |
+7 | direct_abort!(one, two);
+ | ^^^
+
+error: This is an error
+
+ = note: simple note
+ = help: simple help
+ = help: simple hint
+ = note: simple yay
+ = note: format note
+ = note: Some note
+ = note: spanned simple note
+ = note: spanned format note
+ = note: Some note
+
+ --> $DIR/abort.rs:8:14
+ |
+8 | abort_notes!(one, two);
+ | ^^^
+
+error: abort_call_site! test
+ --> $DIR/abort.rs:9:1
+ |
+9 | abort_call_site_test!(one, two);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in a macro (in Nightly builds, run with -Z
macro-backtrace for more info)
diff --git
a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.rs
new file mode 100644
index 0000000000..53d6feacc1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.rs
@@ -0,0 +1,13 @@
+extern crate test_crate;
+use test_crate::*;
+
+enum NeedDefault {
+ A,
+ B
+}
+
+append_dummy!(need_default);
+
+fn main() {
+ let _ = NeedDefault::default();
+}
diff --git
a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.stderr
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.stderr
new file mode 100644
index 0000000000..8a47ddaac4
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/append_dummy.stderr
@@ -0,0 +1,5 @@
+error: append_dummy test
+ --> $DIR/append_dummy.rs:9:15
+ |
+9 | append_dummy!(need_default);
+ | ^^^^^^^^^^^^
diff --git
a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.rs
new file mode 100644
index 0000000000..fb9e6dc697
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.rs
@@ -0,0 +1,6 @@
+extern crate test_crate;
+use test_crate::*;
+
+children_messages!(one, two, three, four);
+
+fn main() {}
diff --git
a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.stderr
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.stderr
new file mode 100644
index 0000000000..3b49d83165
--- /dev/null
+++
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/children_messages.stderr
@@ -0,0 +1,23 @@
+error: main macro message
+ --> $DIR/children_messages.rs:4:20
+ |
+4 | children_messages!(one, two, three, four);
+ | ^^^
+
+error: child message
+ --> $DIR/children_messages.rs:4:25
+ |
+4 | children_messages!(one, two, three, four);
+ | ^^^
+
+error: main syn::Error
+ --> $DIR/children_messages.rs:4:30
+ |
+4 | children_messages!(one, two, three, four);
+ | ^^^^^
+
+error: child syn::Error
+ --> $DIR/children_messages.rs:4:37
+ |
+4 | children_messages!(one, two, three, four);
+ | ^^^^
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.rs
new file mode 100644
index 0000000000..caa4827886
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.rs
@@ -0,0 +1,13 @@
+extern crate test_crate;
+use test_crate::*;
+
+enum NeedDefault {
+ A,
+ B
+}
+
+dummy!(need_default);
+
+fn main() {
+ let _ = NeedDefault::default();
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.stderr
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.stderr
new file mode 100644
index 0000000000..bae078afa8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/dummy.stderr
@@ -0,0 +1,5 @@
+error: set_dummy test
+ --> $DIR/dummy.rs:9:8
+ |
+9 | dummy!(need_default);
+ | ^^^^^^^^^^^^
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.rs
new file mode 100644
index 0000000000..c5c7db095f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.rs
@@ -0,0 +1,7 @@
+extern crate test_crate;
+use test_crate::*;
+
+emit!(one, two, three, four, five);
+emit_notes!(one, two);
+
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.stderr
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.stderr
new file mode 100644
index 0000000000..9484bd628b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/emit.stderr
@@ -0,0 +1,48 @@
+error: emit!(span, from) test
+ --> $DIR/emit.rs:4:7
+ |
+4 | emit!(one, two, three, four, five);
+ | ^^^
+
+error: emit!(span, expr1, expr2) test
+ --> $DIR/emit.rs:4:12
+ |
+4 | emit!(one, two, three, four, five);
+ | ^^^
+
+error: emit!(span, single_expr) test
+ --> $DIR/emit.rs:4:17
+ |
+4 | emit!(one, two, three, four, five);
+ | ^^^^^
+
+error: Diagnostic::emit() test
+ --> $DIR/emit.rs:4:24
+ |
+4 | emit!(one, two, three, four, five);
+ | ^^^^
+
+error: emit_call_site_error!(expr) test
+ --> $DIR/emit.rs:4:1
+ |
+4 | emit!(one, two, three, four, five);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in a macro (in Nightly builds, run with -Z
macro-backtrace for more info)
+
+error: This is an error
+
+ = note: simple note
+ = help: simple help
+ = help: simple hint
+ = note: simple yay
+ = note: format note
+ = note: Some note
+ = note: spanned simple note
+ = note: spanned format note
+ = note: Some note
+
+ --> $DIR/emit.rs:5:13
+ |
+5 | emit_notes!(one, two);
+ | ^^^
diff --git
a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.rs
new file mode 100644
index 0000000000..82bbebcc55
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.rs
@@ -0,0 +1,6 @@
+extern crate test_crate;
+use test_crate::*;
+
+explicit_span_range!(one, two, three, four);
+
+fn main() {}
diff --git
a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.stderr
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.stderr
new file mode 100644
index 0000000000..781a71e76a
--- /dev/null
+++
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/explicit_span_range.stderr
@@ -0,0 +1,5 @@
+error: explicit SpanRange
+ --> $DIR/explicit_span_range.rs:4:22
+ |
+4 | explicit_span_range!(one, two, three, four);
+ | ^^^^^^^^^^^^^^^
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.rs
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.rs
new file mode 100644
index 0000000000..e6d2d24971
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.rs
@@ -0,0 +1,11 @@
+extern crate proc_macro_error;
+use proc_macro_error::abort;
+
+struct Foo;
+
+#[allow(unused)]
+fn foo() {
+ abort!(Foo, "BOOM");
+}
+
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.stderr
b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.stderr
new file mode 100644
index 0000000000..8eaf6456fd
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/misuse.stderr
@@ -0,0 +1,13 @@
+error[E0599]: no method named
`FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange` found for
reference `&Foo` in the current scope
+ --> $DIR/misuse.rs:8:5
+ |
+4 | struct Foo;
+ | ----------- doesn't satisfy `Foo: quote::to_tokens::ToTokens`
+...
+8 | abort!(Foo, "BOOM");
+ | ^^^^^^^^^^^^^^^^^^^^ method not found in `&Foo`
+ |
+ = note: the method `FIRST_ARG_MUST_EITHER_BE_Span_OR_IMPLEMENT_ToTokens_OR_BE_SpanRange` exists but the following trait bounds were not satisfied:
+ `Foo: quote::to_tokens::ToTokens`
+ which is required by `&Foo: proc_macro_error::__export::ToTokensAsSpanRange`
+ = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.rs
new file mode 100644
index 0000000000..215928f6f4
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.rs
@@ -0,0 +1,6 @@
+extern crate test_crate;
+
+#[test_crate::multiple_tokens]
+type T = ();
+
+fn main() {}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.stderr
new file mode 100644
index 0000000000..c6172c6cc6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/multiple_tokens.stderr
@@ -0,0 +1,5 @@
+error: ...
+ --> $DIR/multiple_tokens.rs:4:1
+ |
+4 | type T = ();
+ | ^^^^^^^^^^^^
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.rs
new file mode 100644
index 0000000000..e241c5cd28
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.rs
@@ -0,0 +1,4 @@
+use proc_macro_error::proc_macro_error;
+
+#[proc_macro_error]
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.stderr
new file mode 100644
index 0000000000..f19f01bd8e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/not_proc_macro.stderr
@@ -0,0 +1,10 @@
+error: #[proc_macro_error] attribute can be used only with procedural macros
+
+ = hint: if you are really sure that #[proc_macro_error] should be applied to this exact function, use #[proc_macro_error(allow_not_macro)]
+
+ --> $DIR/not_proc_macro.rs:3:1
+ |
+3 | #[proc_macro_error]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in an attribute macro (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.rs
new file mode 100644
index 0000000000..dfbfc03835
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.rs
@@ -0,0 +1,6 @@
+extern crate test_crate;
+use test_crate::*;
+
+option_ext!(one, two);
+
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.stderr
new file mode 100644
index 0000000000..91b151ec2f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/option_ext.stderr
@@ -0,0 +1,7 @@
+error: Option::expect_or_abort() test
+ --> $DIR/option_ext.rs:4:1
+ |
+4 | option_ext!(one, two);
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.rs
new file mode 100644
index 0000000000..2504bdd401
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.rs
@@ -0,0 +1,10 @@
+// Adapted from https://github.com/dtolnay/proc-macro-hack/blob/master/example/src/main.rs
+// Licensed under either of Apache License, Version 2.0 or MIT license at your option.
+
+use proc_macro_hack_test::add_one;
+
+fn main() {
+ let two = 2;
+ let nine = add_one!(two) + add_one!(2 + 3);
+ println!("nine = {}", nine);
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.stderr
new file mode 100644
index 0000000000..0e984f918d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/proc_macro_hack.stderr
@@ -0,0 +1,26 @@
+error: BOOM
+ --> $DIR/proc_macro_hack.rs:8:25
+ |
+8 | let nine = add_one!(two) + add_one!(2 + 3);
+ | ^^^
+ |
+ = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: BOOM
+ --> $DIR/proc_macro_hack.rs:8:41
+ |
+8 | let nine = add_one!(two) + add_one!(2 + 3);
+ | ^^^^^
+ |
+ = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
+
+warning: unreachable expression
+ --> $DIR/proc_macro_hack.rs:8:32
+ |
+8 | let nine = add_one!(two) + add_one!(2 + 3);
+ | ------------- ^^^^^^^^^^^^^^^ unreachable expression
+ | |
+ | any code following this expression is unreachable
+ |
+ = note: `#[warn(unreachable_code)]` on by default
+ = note: this warning originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.rs
new file mode 100644
index 0000000000..bdd560dba9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.rs
@@ -0,0 +1,7 @@
+extern crate test_crate;
+use test_crate::*;
+
+result_unwrap_or_abort!(one, two);
+result_expect_or_abort!(one, two);
+
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.stderr
new file mode 100644
index 0000000000..f2dc0e4235
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/result_ext.stderr
@@ -0,0 +1,11 @@
+error: Result::unwrap_or_abort() test
+ --> $DIR/result_ext.rs:4:25
+ |
+4 | result_unwrap_or_abort!(one, two);
+ | ^^^
+
+error: BOOM: Result::expect_or_abort() test
+ --> $DIR/result_ext.rs:5:25
+ |
+5 | result_expect_or_abort!(one, two);
+ | ^^^
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.rs
new file mode 100644
index 0000000000..a7c3fc976c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.rs
@@ -0,0 +1,6 @@
+extern crate test_crate;
+use test_crate::*;
+
+to_tokens_span!(std::option::Option);
+
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.stderr
new file mode 100644
index 0000000000..b8c4968263
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/to_tokens_span.stderr
@@ -0,0 +1,11 @@
+error: whole type
+ --> $DIR/to_tokens_span.rs:4:17
+ |
+4 | to_tokens_span!(std::option::Option);
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: explicit .span()
+ --> $DIR/to_tokens_span.rs:4:17
+ |
+4 | to_tokens_span!(std::option::Option);
+ | ^^^
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.rs
new file mode 100644
index 0000000000..d8e58eaf87
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.rs
@@ -0,0 +1,4 @@
+use proc_macro_error::proc_macro_error;
+
+#[proc_macro_error(allow_not_macro, assert_unwind_safe, trololo)]
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.stderr
new file mode 100644
index 0000000000..a55de0b31b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unknown_setting.stderr
@@ -0,0 +1,5 @@
+error: unknown setting `trololo`, expected one of `assert_unwind_safe`, `allow_not_macro`, `proc_macro_hack`
+ --> $DIR/unknown_setting.rs:3:57
+ |
+3 | #[proc_macro_error(allow_not_macro, assert_unwind_safe, trololo)]
+ | ^^^^^^^
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.rs b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.rs
new file mode 100644
index 0000000000..c74e3e0623
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.rs
@@ -0,0 +1,6 @@
+extern crate test_crate;
+use test_crate::*;
+
+unrelated_panic!();
+
+fn main() {}
diff --git a/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.stderr b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.stderr
new file mode 100644
index 0000000000..d46d689f2f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro-error/tests/ui/unrelated_panic.stderr
@@ -0,0 +1,7 @@
+error: proc macro panicked
+ --> $DIR/unrelated_panic.rs:4:1
+ |
+4 | unrelated_panic!();
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: message: unrelated panic test
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/.cargo-checksum.json b/rust/hw/char/pl011/vendor/proc-macro2/.cargo-checksum.json
new file mode 100644
index 0000000000..83f4c8a5ec
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"cdfebba5c7483fd052619894a923bd5aa5959a0fcfd7a2fc7e695c6a6231e87a","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"c609b6865476d6c35879784e9155367a97a0da496aa5c3c61488440a20f59883","build.rs":"c385804afdf08a6292ed1f44afec6cfd0d9600410030ab5dc5bba842fbf0b6b3","build/probe.rs":"971fd2178dc506ccdc5c2065c37b77696a4aee8e00330ca52625db4a857f68d3","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/extra.rs":"29f094473279a29b71c3cc9f5fa27c2e2c30c670390cf7e4b7cf451486cc857e","src/fallback.rs":"be1ce5e32c88c29d41d2ab663375951817d52decce3dc9e335ec22378be8fa65","src/lib.rs":"4bd042e054d240332664d67f537419a4fa5e29a4c020d1fac3b6f1f58378ae49","src/location.rs":"9225c5a55f03b56cce42bc55ceb509e8216a5e0b24c94aa1cd071b04e3d6c15f","src/marker.rs":"c11c5a1be8bdf18be3fcd224393f350a9aae7ce282e19ce583c84910c6903a8f","src/parse.rs":"4b77cddbc2752bc4d38a65acd8b96b6786c5220d19b1e1b37810257b5d24132d","src/rcvec.rs":"1c3c48c4f819927cc445ae15ca3bb06775feff2fd1cb21901ae4c40c7e6b4e82","src/wrapper.rs":"e41df9abc846b40f0cf01150d22b91944d07cde93bc72aa34798101652675844","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"473e962ee1aa0633dd5cf9a973b3bbd0ef43b740d4b7f6d008ff455a6b89d386","tests/test.rs":"2e7106f582367d168638be7364d4e9aadbe0affca8b51dd80f0b3977cc2fcf83","tests/test_fmt.rs":"b7743b612af65f2c88cbe109d50a093db7aa7e87f9e37bf45b7bbaeb240aa020","tests/test_size.rs":"08fb1d6bcf867707dfa18d30fceb18c58e8c44c89e058d8d6bfd2b281c77e14e"},"package":"ec96c6a92621310b51366f1e28d05ef11489516e93be030060e5fc12024a49d6"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/Cargo.toml b/rust/hw/char/pl011/vendor/proc-macro2/Cargo.toml
new file mode 100644
index 0000000000..193a898a8c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/Cargo.toml
@@ -0,0 +1,104 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.56"
+name = "proc-macro2"
+version = "1.0.84"
+authors = [
+ "David Tolnay <dtolnay@gmail.com>",
+ "Alex Crichton <alex@alexcrichton.com>",
+]
+build = "build.rs"
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case."
+documentation = "https://docs.rs/proc-macro2"
+readme = "README.md"
+keywords = [
+ "macros",
+ "syn",
+]
+categories = ["development-tools::procedural-macro-helpers"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/proc-macro2"
+
+[package.metadata.docs.rs]
+rustc-args = [
+ "--cfg",
+ "procmacro2_semver_exempt",
+]
+rustdoc-args = [
+ "--cfg",
+ "procmacro2_semver_exempt",
+ "--generate-link-to-definition",
+]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[package.metadata.playground]
+features = ["span-locations"]
+
+[lib]
+name = "proc_macro2"
+path = "src/lib.rs"
+doc-scrape-examples = false
+
+[[test]]
+name = "comments"
+path = "tests/comments.rs"
+
+[[test]]
+name = "test_fmt"
+path = "tests/test_fmt.rs"
+
+[[test]]
+name = "features"
+path = "tests/features.rs"
+
+[[test]]
+name = "marker"
+path = "tests/marker.rs"
+
+[[test]]
+name = "test_size"
+path = "tests/test_size.rs"
+
+[[test]]
+name = "test"
+path = "tests/test.rs"
+
+[dependencies.unicode-ident]
+version = "1.0"
+
+[dev-dependencies.flate2]
+version = "1.0"
+
+[dev-dependencies.quote]
+version = "1.0"
+default-features = false
+
+[dev-dependencies.rayon]
+version = "1.0"
+
+[dev-dependencies.rustversion]
+version = "1"
+
+[dev-dependencies.tar]
+version = "0.4"
+
+[features]
+default = ["proc-macro"]
+nightly = []
+proc-macro = []
+span-locations = []
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/LICENSE-APACHE b/rust/hw/char/pl011/vendor/proc-macro2/LICENSE-APACHE
new file mode 100644
index 0000000000..1b5ec8b78e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/LICENSE-APACHE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/LICENSE-MIT b/rust/hw/char/pl011/vendor/proc-macro2/LICENSE-MIT
new file mode 100644
index 0000000000..31aa79387f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/README.md b/rust/hw/char/pl011/vendor/proc-macro2/README.md
new file mode 100644
index 0000000000..3a29ce8b89
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/README.md
@@ -0,0 +1,94 @@
+# proc-macro2
+
+[<img alt="github"
src="https://img.shields.io/badge/github-dtolnay/proc--macro2-8da0cb?style=for-the-badge&labelColor=555555&logo=github"
height="20">](https://github.com/dtolnay/proc-macro2)
+[<img alt="crates.io"
src="https://img.shields.io/crates/v/proc-macro2.svg?style=for-the-badge&color=fc8d62&logo=rust"
height="20">](https://crates.io/crates/proc-macro2)
+[<img alt="docs.rs"
src="https://img.shields.io/badge/docs.rs-proc--macro2-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs"
height="20">](https://docs.rs/proc-macro2)
+[<img alt="build status"
src="https://img.shields.io/github/actions/workflow/status/dtolnay/proc-macro2/ci.yml?branch=master&style=for-the-badge"
height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster)
+
+A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
+This library serves two purposes:
+
+- **Bring proc-macro-like functionality to other contexts like build.rs and
+ main.rs.** Types from `proc_macro` are entirely specific to procedural macros
+ and cannot ever exist in code outside of a procedural macro. Meanwhile
+ `proc_macro2` types may exist anywhere including non-macro code. By developing
+ foundational libraries like [syn] and [quote] against `proc_macro2` rather
+ than `proc_macro`, the procedural macro ecosystem becomes easily applicable to
+ many other use cases and we avoid reimplementing non-macro equivalents of
+ those libraries.
+
+- **Make procedural macros unit testable.** As a consequence of being specific
+ to procedural macros, nothing that uses `proc_macro` can be executed from a
+ unit test. In order for helper libraries or components of a macro to be
+ testable in isolation, they must be implemented using `proc_macro2`.
+
+[syn]: https://github.com/dtolnay/syn
+[quote]: https://github.com/dtolnay/quote
+
+## Usage
+
+```toml
+[dependencies]
+proc-macro2 = "1.0"
+```
+
+The skeleton of a typical procedural macro typically looks like this:
+
+```rust
+extern crate proc_macro;
+
+#[proc_macro_derive(MyDerive)]
+pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+ let input = proc_macro2::TokenStream::from(input);
+
+ let output: proc_macro2::TokenStream = {
+ /* transform input */
+ };
+
+ proc_macro::TokenStream::from(output)
+}
+```
+
+If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
+parse errors correctly back to the compiler when parsing fails.
+
+[`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html
+
+## Unstable features
+
+The default feature set of proc-macro2 tracks the most recent stable compiler
+API. Functionality in `proc_macro` that is not yet stable is not exposed by
+proc-macro2 by default.
+
+To opt into the additional APIs available in the most recent nightly compiler,
+the `procmacro2_semver_exempt` config flag must be passed to rustc. We will
+polyfill those nightly-only APIs back to Rust 1.56.0. As these are unstable APIs
+that track the nightly compiler, minor versions of proc-macro2 may make breaking
+changes to them at any time.
+
+```
+RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
+```
+
+Note that this must not only be done for your crate, but for any crate that
+depends on your crate. This infectious nature is intentional, as it serves as a
+reminder that you are outside of the normal semver guarantees.
+
+Semver exempt methods are marked as such in the proc-macro2 documentation.
+
+<br>
+
+#### License
+
+<sup>
+Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+</sup>
+
+<br>
+
+<sub>
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
+</sub>
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/build.rs b/rust/hw/char/pl011/vendor/proc-macro2/build.rs
new file mode 100644
index 0000000000..0a95c22661
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/build.rs
@@ -0,0 +1,227 @@
+// rustc-cfg emitted by the build script:
+//
+// "wrap_proc_macro"
+// Wrap types from libproc_macro rather than polyfilling the whole API.
+// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
+// because we can't emulate the unstable API without emulating everything
+// else. Also enabled unconditionally on nightly, in which case the
+// procmacro2_semver_exempt surface area is implemented by using the
+// nightly-only proc_macro API.
+//
+// "hygiene"
+// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
+// and Span::located_at. Enabled on Rust 1.45+.
+//
+// "proc_macro_span"
+// Enable non-dummy behavior of Span::start and Span::end methods which
+// requires an unstable compiler feature. Enabled when building with
+// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
+// features.
+//
+// "super_unstable"
+// Implement the semver exempt API in terms of the nightly-only proc_macro
+// API. Enabled when using procmacro2_semver_exempt on a nightly compiler.
+//
+// "span_locations"
+// Provide methods Span::start and Span::end which give the line/column
+// location of a token. Enabled by procmacro2_semver_exempt or the
+// "span-locations" Cargo cfg. This is behind a cfg because tracking
+// location inside spans is a performance hit.
+//
+// "is_available"
+// Use proc_macro::is_available() to detect if the proc macro API is
+// available or needs to be polyfilled instead of trying to use the proc
+// macro API and catching a panic if it isn't available. Enabled on Rust
+// 1.57+.
+
+#![allow(unknown_lints)]
+#![allow(unexpected_cfgs)]
+
+use std::env;
+use std::ffi::OsString;
+use std::iter;
+use std::path::Path;
+use std::process::{self, Command, Stdio};
+use std::str;
+
+fn main() {
+ let rustc = rustc_minor_version().unwrap_or(u32::MAX);
+
+ if rustc >= 80 {
+ println!("cargo:rustc-check-cfg=cfg(fuzzing)");
+ println!("cargo:rustc-check-cfg=cfg(no_is_available)");
+ println!("cargo:rustc-check-cfg=cfg(no_literal_byte_character)");
+ println!("cargo:rustc-check-cfg=cfg(no_literal_c_string)");
+ println!("cargo:rustc-check-cfg=cfg(no_source_text)");
+ println!("cargo:rustc-check-cfg=cfg(proc_macro_span)");
+ println!("cargo:rustc-check-cfg=cfg(procmacro2_backtrace)");
+ println!("cargo:rustc-check-cfg=cfg(procmacro2_nightly_testing)");
+ println!("cargo:rustc-check-cfg=cfg(procmacro2_semver_exempt)");
+ println!("cargo:rustc-check-cfg=cfg(randomize_layout)");
+ println!("cargo:rustc-check-cfg=cfg(span_locations)");
+ println!("cargo:rustc-check-cfg=cfg(super_unstable)");
+ println!("cargo:rustc-check-cfg=cfg(wrap_proc_macro)");
+ }
+
+ let docs_rs = env::var_os("DOCS_RS").is_some();
+ let semver_exempt = cfg!(procmacro2_semver_exempt) || docs_rs;
+ if semver_exempt {
+ // https://github.com/dtolnay/proc-macro2/issues/147
+ println!("cargo:rustc-cfg=procmacro2_semver_exempt");
+ }
+
+ if semver_exempt || cfg!(feature = "span-locations") {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
+ if rustc < 57 {
+ println!("cargo:rustc-cfg=no_is_available");
+ }
+
+ if rustc < 66 {
+ println!("cargo:rustc-cfg=no_source_text");
+ }
+
+ if rustc < 79 {
+ println!("cargo:rustc-cfg=no_literal_byte_character");
+ println!("cargo:rustc-cfg=no_literal_c_string");
+ }
+
+ if !cfg!(feature = "proc-macro") {
+ println!("cargo:rerun-if-changed=build.rs");
+ return;
+ }
+
+ println!("cargo:rerun-if-changed=build/probe.rs");
+
+ let proc_macro_span;
+ let consider_rustc_bootstrap;
+ if compile_probe(false) {
+ // This is a nightly or dev compiler, so it supports unstable features
+ // regardless of RUSTC_BOOTSTRAP. No need to rerun build script if
+ // RUSTC_BOOTSTRAP is changed.
+ proc_macro_span = true;
+ consider_rustc_bootstrap = false;
+ } else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") {
+ if compile_probe(true) {
+ // This is a stable or beta compiler for which the user has set
+ // RUSTC_BOOTSTRAP to turn on unstable features. Rerun build script
+ // if they change it.
+ proc_macro_span = true;
+ consider_rustc_bootstrap = true;
+ } else if rustc_bootstrap == "1" {
+ // This compiler does not support the proc macro Span API in the
+ // form that proc-macro2 expects. No need to pay attention to
+ // RUSTC_BOOTSTRAP.
+ proc_macro_span = false;
+ consider_rustc_bootstrap = false;
+ } else {
+ // This is a stable or beta compiler for which RUSTC_BOOTSTRAP is
+ // set to restrict the use of unstable features by this crate.
+ proc_macro_span = false;
+ consider_rustc_bootstrap = true;
+ }
+ } else {
+ // Without RUSTC_BOOTSTRAP, this compiler does not support the proc
+ // macro Span API in the form that proc-macro2 expects, but try again if
+ // the user turns on unstable features.
+ proc_macro_span = false;
+ consider_rustc_bootstrap = true;
+ }
+
+ if proc_macro_span || !semver_exempt {
+ println!("cargo:rustc-cfg=wrap_proc_macro");
+ }
+
+ if proc_macro_span {
+ println!("cargo:rustc-cfg=proc_macro_span");
+ }
+
+ if semver_exempt && proc_macro_span {
+ println!("cargo:rustc-cfg=super_unstable");
+ }
+
+ if consider_rustc_bootstrap {
+ println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP");
+ }
+}
+
+fn compile_probe(rustc_bootstrap: bool) -> bool {
+ if env::var_os("RUSTC_STAGE").is_some() {
+ // We are running inside rustc bootstrap. This is a highly non-standard
+ // environment with issues such as:
+ //
+ // https://github.com/rust-lang/cargo/issues/11138
+ // https://github.com/rust-lang/rust/issues/114839
+ //
+ // Let's just not use nightly features here.
+ return false;
+ }
+
+ let rustc = cargo_env_var("RUSTC");
+ let out_dir = cargo_env_var("OUT_DIR");
+ let probefile = Path::new("build").join("probe.rs");
+
+ let rustc_wrapper = env::var_os("RUSTC_WRAPPER").filter(|wrapper| !wrapper.is_empty());
+ let rustc_workspace_wrapper =
+ env::var_os("RUSTC_WORKSPACE_WRAPPER").filter(|wrapper|
!wrapper.is_empty());
+ let mut rustc = rustc_wrapper
+ .into_iter()
+ .chain(rustc_workspace_wrapper)
+ .chain(iter::once(rustc));
+ let mut cmd = Command::new(rustc.next().unwrap());
+ cmd.args(rustc);
+
+ if !rustc_bootstrap {
+ cmd.env_remove("RUSTC_BOOTSTRAP");
+ }
+
+ cmd.stderr(Stdio::null())
+ .arg("--edition=2021")
+ .arg("--crate-name=proc_macro2")
+ .arg("--crate-type=lib")
+ .arg("--cap-lints=allow")
+ .arg("--emit=dep-info,metadata")
+ .arg("--out-dir")
+ .arg(out_dir)
+ .arg(probefile);
+
+ if let Some(target) = env::var_os("TARGET") {
+ cmd.arg("--target").arg(target);
+ }
+
+ // If Cargo wants to set RUSTFLAGS, use that.
+ if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") {
+ if !rustflags.is_empty() {
+ for arg in rustflags.split('\x1f') {
+ cmd.arg(arg);
+ }
+ }
+ }
+
+ match cmd.status() {
+ Ok(status) => status.success(),
+ Err(_) => false,
+ }
+}
+
+fn rustc_minor_version() -> Option<u32> {
+ let rustc = cargo_env_var("RUSTC");
+ let output = Command::new(rustc).arg("--version").output().ok()?;
+ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+ pieces.next()?.parse().ok()
+}
+
+fn cargo_env_var(key: &str) -> OsString {
+ env::var_os(key).unwrap_or_else(|| {
+ eprintln!(
+ "Environment variable ${} is not set during execution of build
script",
+ key,
+ );
+ process::exit(1);
+ })
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/build/probe.rs b/rust/hw/char/pl011/vendor/proc-macro2/build/probe.rs
new file mode 100644
index 0000000000..2c4947a0b8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/build/probe.rs
@@ -0,0 +1,25 @@
+// This code exercises the surface area that we expect of Span's unstable API.
+// If the current toolchain is able to compile it, then proc-macro2 is able to
+// offer these APIs too.
+
+#![feature(proc_macro_span)]
+
+extern crate proc_macro;
+
+use core::ops::{Range, RangeBounds};
+use proc_macro::{Literal, Span};
+
+pub fn byte_range(this: &Span) -> Range<usize> {
+ this.byte_range()
+}
+
+pub fn join(this: &Span, other: Span) -> Option<Span> {
+ this.join(other)
+}
+
+pub fn subspan<R: RangeBounds<usize>>(this: &Literal, range: R) -> Option<Span> {
+ this.subspan(range)
+}
+
+// Include in sccache cache key.
+const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP");
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/meson.build b/rust/hw/char/pl011/vendor/proc-macro2/meson.build
new file mode 100644
index 0000000000..2a97df4a70
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/meson.build
@@ -0,0 +1,19 @@
+_proc_macro2_rs = static_library(
+ 'proc_macro2',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2021',
+ '--cfg', 'feature="proc-macro"',
+ '--cfg', 'span_locations',
+ '--cfg', 'wrap_proc_macro',
+ ],
+ dependencies: [
+ dep_unicode_ident,
+ ],
+)
+
+dep_proc_macro2 = declare_dependency(
+ link_with: _proc_macro2_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/rust-toolchain.toml b/rust/hw/char/pl011/vendor/proc-macro2/rust-toolchain.toml
new file mode 100644
index 0000000000..20fe888c30
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/rust-toolchain.toml
@@ -0,0 +1,2 @@
+[toolchain]
+components = ["rust-src"]
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/detection.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/detection.rs
new file mode 100644
index 0000000000..beba7b2373
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/detection.rs
@@ -0,0 +1,75 @@
+use core::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Once;
+
+static WORKS: AtomicUsize = AtomicUsize::new(0);
+static INIT: Once = Once::new();
+
+pub(crate) fn inside_proc_macro() -> bool {
+ match WORKS.load(Ordering::Relaxed) {
+ 1 => return false,
+ 2 => return true,
+ _ => {}
+ }
+
+ INIT.call_once(initialize);
+ inside_proc_macro()
+}
+
+pub(crate) fn force_fallback() {
+ WORKS.store(1, Ordering::Relaxed);
+}
+
+pub(crate) fn unforce_fallback() {
+ initialize();
+}
+
+#[cfg(not(no_is_available))]
+fn initialize() {
+ let available = proc_macro::is_available();
+ WORKS.store(available as usize + 1, Ordering::Relaxed);
+}
+
+// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+// then use catch_unwind to determine whether the compiler's proc_macro is
+// working. When proc-macro2 is used from outside of a procedural macro all
+// of the proc_macro crate's APIs currently panic.
+//
+// The Once is to prevent the possibility of this ordering:
+//
+// thread 1 calls take_hook, gets the user's original hook
+// thread 1 calls set_hook with the null hook
+// thread 2 calls take_hook, thinks null hook is the original hook
+// thread 2 calls set_hook with the null hook
+// thread 1 calls set_hook with the actual original hook
+// thread 2 calls set_hook with what it thinks is the original hook
+//
+// in which the user's hook has been lost.
+//
+// There is still a race condition where a panic in a different thread can
+// happen during the interval that the user's original panic hook is
+// unregistered such that their hook is incorrectly not called. This is
+// sufficiently unlikely and less bad than printing panic messages to stderr
+// on correct use of this crate. Maybe there is a libstd feature request
+// here. For now, if a user needs to guarantee that this failure mode does
+// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+// the main thread before launching any other threads.
+#[cfg(no_is_available)]
+fn initialize() {
+ use std::panic::{self, PanicInfo};
+
+ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+
+ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+ let sanity_check = &*null_hook as *const PanicHook;
+ let original_hook = panic::take_hook();
+ panic::set_hook(null_hook);
+
+ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
+ WORKS.store(works as usize + 1, Ordering::Relaxed);
+
+ let hopefully_null_hook = panic::take_hook();
+ panic::set_hook(original_hook);
+ if sanity_check != &*hopefully_null_hook {
+ panic!("observed race condition in proc_macro2::inside_proc_macro");
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/extra.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/extra.rs
new file mode 100644
index 0000000000..522a90e136
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/extra.rs
@@ -0,0 +1,151 @@
+//! Items which do not have a correspondence to any API in the proc_macro crate,
+//! but are necessary to include in proc-macro2.
+
+use crate::fallback;
+use crate::imp;
+use crate::marker::{ProcMacroAutoTraits, MARKER};
+use crate::Span;
+use core::fmt::{self, Debug};
+
+/// Invalidate any `proc_macro2::Span` that exist on the current thread.
+///
+/// The implementation of `Span` uses thread-local data structures and this
+/// function clears them. Calling any method on a `Span` on the current thread
+/// created prior to the invalidation will return incorrect values or crash.
+///
+/// This function is useful for programs that process more than 2<sup>32</sup>
+/// bytes of Rust source code on the same thread. Just like rustc, proc-macro2
+/// uses 32-bit source locations, and these wrap around when the total source
+/// code processed by the same thread exceeds 2<sup>32</sup> bytes (4
+/// gigabytes). After a wraparound, `Span` methods such as `source_text()` can
+/// return wrong data.
+///
+/// # Example
+///
+/// As of late 2023, there is 200 GB of Rust code published on crates.io.
+/// Looking at just the newest version of every crate, it is 16 GB of code. So a
+/// workload that involves parsing it all would overflow a 32-bit source
+/// location unless spans are being invalidated.
+///
+/// ```
+/// use flate2::read::GzDecoder;
+/// use std::ffi::OsStr;
+/// use std::io::{BufReader, Read};
+/// use std::str::FromStr;
+/// use tar::Archive;
+///
+/// rayon::scope(|s| {
+/// for krate in every_version_of_every_crate() {
+/// s.spawn(move |_| {
+/// proc_macro2::extra::invalidate_current_thread_spans();
+///
+/// let reader = BufReader::new(krate);
+/// let tar = GzDecoder::new(reader);
+/// let mut archive = Archive::new(tar);
+/// for entry in archive.entries().unwrap() {
+/// let mut entry = entry.unwrap();
+/// let path = entry.path().unwrap();
+/// if path.extension() != Some(OsStr::new("rs")) {
+/// continue;
+/// }
+/// let mut content = String::new();
+/// entry.read_to_string(&mut content).unwrap();
+/// match proc_macro2::TokenStream::from_str(&content) {
+/// Ok(tokens) => {/* ... */},
+/// Err(_) => continue,
+/// }
+/// }
+/// });
+/// }
+/// });
+/// #
+/// # fn every_version_of_every_crate() -> Vec<std::fs::File> {
+/// # Vec::new()
+/// # }
+/// ```
+///
+/// # Panics
+///
+/// This function is not applicable to and will panic if called from a
+/// procedural macro.
+#[cfg(span_locations)]
+#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
+pub fn invalidate_current_thread_spans() {
+ crate::imp::invalidate_current_thread_spans();
+}
+
+/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together
+/// in a more compact representation than holding those 2 spans individually.
+///
+/// [`Group`]: crate::Group
+#[derive(Copy, Clone)]
+pub struct DelimSpan {
+ inner: DelimSpanEnum,
+ _marker: ProcMacroAutoTraits,
+}
+
+#[derive(Copy, Clone)]
+enum DelimSpanEnum {
+ #[cfg(wrap_proc_macro)]
+ Compiler {
+ join: proc_macro::Span,
+ open: proc_macro::Span,
+ close: proc_macro::Span,
+ },
+ Fallback(fallback::Span),
+}
+
+impl DelimSpan {
+ pub(crate) fn new(group: &imp::Group) -> Self {
+ #[cfg(wrap_proc_macro)]
+ let inner = match group {
+ imp::Group::Compiler(group) => DelimSpanEnum::Compiler {
+ join: group.span(),
+ open: group.span_open(),
+ close: group.span_close(),
+ },
+ imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()),
+ };
+
+ #[cfg(not(wrap_proc_macro))]
+ let inner = DelimSpanEnum::Fallback(group.span());
+
+ DelimSpan {
+ inner,
+ _marker: MARKER,
+ }
+ }
+
+ /// Returns a span covering the entire delimited group.
+ pub fn join(&self) -> Span {
+ match &self.inner {
+ #[cfg(wrap_proc_macro)]
+ DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)),
+ DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span),
+ }
+ }
+
+ /// Returns a span for the opening punctuation of the group only.
+ pub fn open(&self) -> Span {
+ match &self.inner {
+ #[cfg(wrap_proc_macro)]
+ DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)),
+ DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()),
+ }
+ }
+
+ /// Returns a span for the closing punctuation of the group only.
+ pub fn close(&self) -> Span {
+ match &self.inner {
+ #[cfg(wrap_proc_macro)]
+ DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)),
+ DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()),
+ }
+ }
+}
+
+impl Debug for DelimSpan {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.join(), f)
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/fallback.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/fallback.rs
new file mode 100644
index 0000000000..2d1c991997
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/fallback.rs
@@ -0,0 +1,1226 @@
+#[cfg(span_locations)]
+use crate::location::LineColumn;
+use crate::parse::{self, Cursor};
+use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut};
+use crate::{Delimiter, Spacing, TokenTree};
+#[cfg(all(span_locations, not(fuzzing)))]
+use alloc::collections::BTreeMap;
+#[cfg(all(span_locations, not(fuzzing)))]
+use core::cell::RefCell;
+#[cfg(span_locations)]
+use core::cmp;
+use core::fmt::{self, Debug, Display, Write};
+use core::mem::ManuallyDrop;
+#[cfg(span_locations)]
+use core::ops::Range;
+use core::ops::RangeBounds;
+use core::ptr;
+use core::str::{self, FromStr};
+use std::ffi::CStr;
+#[cfg(procmacro2_semver_exempt)]
+use std::path::PathBuf;
+
+/// Force use of proc-macro2's fallback implementation of the API for now, even
+/// if the compiler's implementation is available.
+pub fn force() {
+ #[cfg(wrap_proc_macro)]
+ crate::detection::force_fallback();
+}
+
+/// Resume using the compiler's implementation of the proc macro API if it is
+/// available.
+pub fn unforce() {
+ #[cfg(wrap_proc_macro)]
+ crate::detection::unforce_fallback();
+}
+
+#[derive(Clone)]
+pub(crate) struct TokenStream {
+ inner: RcVec<TokenTree>,
+}
+
+#[derive(Debug)]
+pub(crate) struct LexError {
+ pub(crate) span: Span,
+}
+
+impl LexError {
+ pub(crate) fn span(&self) -> Span {
+ self.span
+ }
+
+ pub(crate) fn call_site() -> Self {
+ LexError {
+ span: Span::call_site(),
+ }
+ }
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream {
+ inner: RcVecBuilder::new().build(),
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
+
+ fn take_inner(self) -> RcVecBuilder<TokenTree> {
+ let nodrop = ManuallyDrop::new(self);
+ unsafe { ptr::read(&nodrop.inner) }.make_owned()
+ }
+}
+
+fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) {
+ // https://github.com/dtolnay/proc-macro2/issues/235
+ match token {
+ TokenTree::Literal(crate::Literal {
+ #[cfg(wrap_proc_macro)]
+ inner: crate::imp::Literal::Fallback(literal),
+ #[cfg(not(wrap_proc_macro))]
+ inner: literal,
+ ..
+ }) if literal.repr.starts_with('-') => {
+ push_negative_literal(vec, literal);
+ }
+ _ => vec.push(token),
+ }
+
+ #[cold]
+ fn push_negative_literal(mut vec: RcVecMut<TokenTree>, mut literal: Literal) {
+ literal.repr.remove(0);
+ let mut punct = crate::Punct::new('-', Spacing::Alone);
+ punct.set_span(crate::Span::_new_fallback(literal.span));
+ vec.push(TokenTree::Punct(punct));
+ vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal)));
+ }
+}
+
+// Nonrecursive to prevent stack overflow.
+impl Drop for TokenStream {
+ fn drop(&mut self) {
+ let mut inner = match self.inner.get_mut() {
+ Some(inner) => inner,
+ None => return,
+ };
+ while let Some(token) = inner.pop() {
+ let group = match token {
+ TokenTree::Group(group) => group.inner,
+ _ => continue,
+ };
+ #[cfg(wrap_proc_macro)]
+ let group = match group {
+ crate::imp::Group::Fallback(group) => group,
+ crate::imp::Group::Compiler(_) => continue,
+ };
+ inner.extend(group.stream.take_inner());
+ }
+ }
+}
+
+pub(crate) struct TokenStreamBuilder {
+ inner: RcVecBuilder<TokenTree>,
+}
+
+impl TokenStreamBuilder {
+ pub fn new() -> Self {
+ TokenStreamBuilder {
+ inner: RcVecBuilder::new(),
+ }
+ }
+
+ pub fn with_capacity(cap: usize) -> Self {
+ TokenStreamBuilder {
+ inner: RcVecBuilder::with_capacity(cap),
+ }
+ }
+
+ pub fn push_token_from_parser(&mut self, tt: TokenTree) {
+ self.inner.push(tt);
+ }
+
+ pub fn build(self) -> TokenStream {
+ TokenStream {
+ inner: self.inner.build(),
+ }
+ }
+}
+
+#[cfg(span_locations)]
+fn get_cursor(src: &str) -> Cursor {
+ #[cfg(fuzzing)]
+ return Cursor { rest: src, off: 1 };
+
+ // Create a dummy file & add it to the source map
+ #[cfg(not(fuzzing))]
+ SOURCE_MAP.with(|sm| {
+ let mut sm = sm.borrow_mut();
+ let span = sm.add_file(src);
+ Cursor {
+ rest: src,
+ off: span.lo,
+ }
+ })
+}
+
+#[cfg(not(span_locations))]
+fn get_cursor(src: &str) -> Cursor {
+ Cursor { rest: src }
+}
+
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ // Create a dummy file & add it to the source map
+ let mut cursor = get_cursor(src);
+
+ // Strip a byte order mark if present
+ const BYTE_ORDER_MARK: &str = "\u{feff}";
+ if cursor.starts_with(BYTE_ORDER_MARK) {
+ cursor = cursor.advance(BYTE_ORDER_MARK.len());
+ }
+
+ parse::token_stream(cursor)
+ }
+}
+
+impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+ if i != 0 && !joint {
+ write!(f, " ")?;
+ }
+ joint = false;
+ match tt {
+ TokenTree::Group(tt) => Display::fmt(tt, f),
+ TokenTree::Ident(tt) => Display::fmt(tt, f),
+ TokenTree::Punct(tt) => {
+ joint = tt.spacing() == Spacing::Joint;
+ Display::fmt(tt, f)
+ }
+ TokenTree::Literal(tt) => Display::fmt(tt, f),
+ }?;
+ }
+
+ Ok(())
+ }
+}
+
+impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+impl From<proc_macro::TokenStream> for TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> Self {
+ inner
+ .to_string()
+ .parse()
+ .expect("compiler token stream parse failed")
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+impl From<TokenStream> for proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> Self {
+ inner
+ .to_string()
+ .parse()
+ .expect("failed to parse to compiler tokens")
+ }
+}
+
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> Self {
+ let mut stream = RcVecBuilder::new();
+ push_token_from_proc_macro(stream.as_mut(), tree);
+ TokenStream {
+ inner: stream.build(),
+ }
+ }
+}
+
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
+ let mut stream = TokenStream::new();
+ stream.extend(tokens);
+ stream
+ }
+}
+
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = RcVecBuilder::new();
+
+ for stream in streams {
+ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v.build() }
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
+ let mut vec = self.inner.make_mut();
+ tokens
+ .into_iter()
+ .for_each(|token| push_token_from_proc_macro(vec.as_mut(), token));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ self.inner.make_mut().extend(streams.into_iter().flatten());
+ }
+}
+
+pub(crate) type TokenTreeIter = RcVecIntoIter<TokenTree>;
+
+impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+ fn into_iter(self) -> TokenTreeIter {
+ self.take_inner().into_iter()
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[derive(Clone, PartialEq, Eq)]
+pub(crate) struct SourceFile {
+ path: PathBuf,
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl SourceFile {
+ /// Get the path to this source file as a string.
+ pub fn path(&self) -> PathBuf {
+ self.path.clone()
+ }
+
+ pub fn is_real(&self) -> bool {
+ false
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+#[cfg(all(span_locations, not(fuzzing)))]
+thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // Start with a single dummy file which all call_site() and def_site()
+ // spans reference.
+ files: vec![FileInfo {
+ source_text: String::new(),
+ span: Span { lo: 0, hi: 0 },
+ lines: vec![0],
+ char_index_to_byte_offset: BTreeMap::new(),
+ }],
+ });
+}
+
+#[cfg(span_locations)]
+pub(crate) fn invalidate_current_thread_spans() {
+ #[cfg(not(fuzzing))]
+ SOURCE_MAP.with(|sm| sm.borrow_mut().files.truncate(1));
+}
+
+#[cfg(all(span_locations, not(fuzzing)))]
+struct FileInfo {
+ source_text: String,
+ span: Span,
+ lines: Vec<usize>,
+ char_index_to_byte_offset: BTreeMap<usize, usize>,
+}
+
+#[cfg(all(span_locations, not(fuzzing)))]
+impl FileInfo {
+ fn offset_line_column(&self, offset: usize) -> LineColumn {
+ assert!(self.span_within(Span {
+ lo: offset as u32,
+ hi: offset as u32,
+ }));
+ let offset = offset - self.span.lo as usize;
+ match self.lines.binary_search(&offset) {
+ Ok(found) => LineColumn {
+ line: found + 1,
+ column: 0,
+ },
+ Err(idx) => LineColumn {
+ line: idx,
+ column: offset - self.lines[idx - 1],
+ },
+ }
+ }
+
+ fn span_within(&self, span: Span) -> bool {
+ span.lo >= self.span.lo && span.hi <= self.span.hi
+ }
+
+ fn byte_range(&mut self, span: Span) -> Range<usize> {
+ let lo_char = (span.lo - self.span.lo) as usize;
+
+ // Look up offset of the largest already-computed char index that is
+ // less than or equal to the current requested one. We resume counting
+ // chars from that point.
+ let (&last_char_index, &last_byte_offset) = self
+ .char_index_to_byte_offset
+ .range(..=lo_char)
+ .next_back()
+ .unwrap_or((&0, &0));
+
+ let lo_byte = if last_char_index == lo_char {
+ last_byte_offset
+ } else {
+ let total_byte_offset = match self.source_text[last_byte_offset..]
+ .char_indices()
+ .nth(lo_char - last_char_index)
+ {
+ Some((additional_offset, _ch)) => last_byte_offset + additional_offset,
+ None => self.source_text.len(),
+ };
+ self.char_index_to_byte_offset
+ .insert(lo_char, total_byte_offset);
+ total_byte_offset
+ };
+
+ let trunc_lo = &self.source_text[lo_byte..];
+ let char_len = (span.hi - span.lo) as usize;
+ lo_byte..match trunc_lo.char_indices().nth(char_len) {
+ Some((offset, _ch)) => lo_byte + offset,
+ None => self.source_text.len(),
+ }
+ }
+
+ fn source_text(&mut self, span: Span) -> String {
+ let byte_range = self.byte_range(span);
+ self.source_text[byte_range].to_owned()
+ }
+}
+
+/// Computes the offsets of each line in the given source string
+/// and the total number of characters
+#[cfg(all(span_locations, not(fuzzing)))]
+fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+ let mut total = 0;
+
+ for ch in s.chars() {
+ total += 1;
+ if ch == '\n' {
+ lines.push(total);
+ }
+ }
+
+ (total, lines)
+}
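+
+// The following is a minimal standalone sketch (not part of the vendored
+// sources) of the same char-based line bookkeeping used above, showing the
+// invariants that offset_line_column() relies on: offsets count chars, and
+// every '\n' records the start of the next line.
```rust
fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
    let mut lines = vec![0];
    let mut total = 0;
    for ch in s.chars() {
        total += 1;
        if ch == '\n' {
            lines.push(total);
        }
    }
    (total, lines)
}

fn main() {
    // "ab\ncd" is 5 chars; line 1 starts at char 0, line 2 at char 3.
    assert_eq!(lines_offsets("ab\ncd"), (5, vec![0, 3]));
    // A trailing newline still records the start of the (empty) next line.
    assert_eq!(lines_offsets("x\n"), (2, vec![0, 2]));
}
```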
+
+#[cfg(all(span_locations, not(fuzzing)))]
+struct SourceMap {
+ files: Vec<FileInfo>,
+}
+
+#[cfg(all(span_locations, not(fuzzing)))]
+impl SourceMap {
+ fn next_start_pos(&self) -> u32 {
+ // Add 1 so there's always space between files.
+ //
+ // We'll always have at least 1 file, as we initialize our files list
+ // with a dummy file.
+ self.files.last().unwrap().span.hi + 1
+ }
+
+ fn add_file(&mut self, src: &str) -> Span {
+ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ let span = Span {
+ lo,
+ hi: lo + (len as u32),
+ };
+
+ self.files.push(FileInfo {
+ source_text: src.to_owned(),
+ span,
+ lines,
+ // Populated lazily by source_text().
+ char_index_to_byte_offset: BTreeMap::new(),
+ });
+
+ span
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ fn filepath(&self, span: Span) -> PathBuf {
+ for (i, file) in self.files.iter().enumerate() {
+ if file.span_within(span) {
+ return PathBuf::from(if i == 0 {
+ "<unspecified>".to_owned()
+ } else {
+ format!("<parsed string {}>", i)
+ });
+ }
+ }
+ unreachable!("Invalid span with no related FileInfo!");
+ }
+
+ fn fileinfo(&self, span: Span) -> &FileInfo {
+ for file in &self.files {
+ if file.span_within(span) {
+ return file;
+ }
+ }
+ unreachable!("Invalid span with no related FileInfo!");
+ }
+
+ fn fileinfo_mut(&mut self, span: Span) -> &mut FileInfo {
+ for file in &mut self.files {
+ if file.span_within(span) {
+ return file;
+ }
+ }
+ unreachable!("Invalid span with no related FileInfo!");
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub(crate) struct Span {
+ #[cfg(span_locations)]
+ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+ pub(crate) hi: u32,
+}
+
+impl Span {
+ #[cfg(not(span_locations))]
+ pub fn call_site() -> Self {
+ Span {}
+ }
+
+ #[cfg(span_locations)]
+ pub fn call_site() -> Self {
+ Span { lo: 0, hi: 0 }
+ }
+
+ pub fn mixed_site() -> Self {
+ Span::call_site()
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Self {
+ Span::call_site()
+ }
+
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+ // caller wants line/column information from.
+ *self
+ }
+
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn source_file(&self) -> SourceFile {
+ #[cfg(fuzzing)]
+ return SourceFile {
+ path: PathBuf::from("<unspecified>"),
+ };
+
+ #[cfg(not(fuzzing))]
+ SOURCE_MAP.with(|sm| {
+ let sm = sm.borrow();
+ let path = sm.filepath(*self);
+ SourceFile { path }
+ })
+ }
+
+ #[cfg(span_locations)]
+ pub fn byte_range(&self) -> Range<usize> {
+ #[cfg(fuzzing)]
+ return 0..0;
+
+ #[cfg(not(fuzzing))]
+ {
+ if self.is_call_site() {
+ 0..0
+ } else {
+ SOURCE_MAP.with(|sm| sm.borrow_mut().fileinfo_mut(*self).byte_range(*self))
+ }
+ }
+ }
+
+ #[cfg(span_locations)]
+ pub fn start(&self) -> LineColumn {
+ #[cfg(fuzzing)]
+ return LineColumn { line: 0, column: 0 };
+
+ #[cfg(not(fuzzing))]
+ SOURCE_MAP.with(|sm| {
+ let sm = sm.borrow();
+ let fi = sm.fileinfo(*self);
+ fi.offset_line_column(self.lo as usize)
+ })
+ }
+
+ #[cfg(span_locations)]
+ pub fn end(&self) -> LineColumn {
+ #[cfg(fuzzing)]
+ return LineColumn { line: 0, column: 0 };
+
+ #[cfg(not(fuzzing))]
+ SOURCE_MAP.with(|sm| {
+ let sm = sm.borrow();
+ let fi = sm.fileinfo(*self);
+ fi.offset_line_column(self.hi as usize)
+ })
+ }
+
+ #[cfg(not(span_locations))]
+ pub fn join(&self, _other: Span) -> Option<Span> {
+ Some(Span {})
+ }
+
+ #[cfg(span_locations)]
+ pub fn join(&self, other: Span) -> Option<Span> {
+ #[cfg(fuzzing)]
+ return {
+ let _ = other;
+ None
+ };
+
+ #[cfg(not(fuzzing))]
+ SOURCE_MAP.with(|sm| {
+ let sm = sm.borrow();
+ // If `other` is not within the same FileInfo as us, return None.
+ if !sm.fileinfo(*self).span_within(other) {
+ return None;
+ }
+ Some(Span {
+ lo: cmp::min(self.lo, other.lo),
+ hi: cmp::max(self.hi, other.hi),
+ })
+ })
+ }
+
+ #[cfg(not(span_locations))]
+ pub fn source_text(&self) -> Option<String> {
+ None
+ }
+
+ #[cfg(span_locations)]
+ pub fn source_text(&self) -> Option<String> {
+ #[cfg(fuzzing)]
+ return None;
+
+ #[cfg(not(fuzzing))]
+ {
+ if self.is_call_site() {
+ None
+ } else {
+ Some(SOURCE_MAP.with(|sm| sm.borrow_mut().fileinfo_mut(*self).source_text(*self)))
+ }
+ }
+ }
+
+ #[cfg(not(span_locations))]
+ pub(crate) fn first_byte(self) -> Self {
+ self
+ }
+
+ #[cfg(span_locations)]
+ pub(crate) fn first_byte(self) -> Self {
+ Span {
+ lo: self.lo,
+ hi: cmp::min(self.lo.saturating_add(1), self.hi),
+ }
+ }
+
+ #[cfg(not(span_locations))]
+ pub(crate) fn last_byte(self) -> Self {
+ self
+ }
+
+ #[cfg(span_locations)]
+ pub(crate) fn last_byte(self) -> Self {
+ Span {
+ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
+ hi: self.hi,
+ }
+ }
+
+ #[cfg(span_locations)]
+ fn is_call_site(&self) -> bool {
+ self.lo == 0 && self.hi == 0
+ }
+}
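+
+// Illustrative sketch only, written against the published proc-macro2 crate
+// with its "span-locations" Cargo feature enabled (an assumption, not part of
+// this patch): outside of a procedural macro the fallback spans above are
+// used, so line/column and byte ranges refer to the parsed string itself.
```rust
use proc_macro2::TokenStream;

fn main() {
    let stream: TokenStream = "let x = 1;".parse().unwrap();
    let first = stream.into_iter().next().unwrap();
    let span = first.span();
    // "let" starts the parsed string, so its location is line 1, column 0.
    println!("start = {:?}", span.start());
    println!("bytes = {:?}", span.byte_range());
}
```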
+
+impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+}
+
+pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ #[cfg(span_locations)]
+ {
+ if span.is_call_site() {
+ return;
+ }
+ }
+
+ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+}
+
+#[derive(Clone)]
+pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+}
+
+impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
+ Group {
+ delimiter,
+ stream,
+ span: Span::call_site(),
+ }
+ }
+
+ pub fn delimiter(&self) -> Delimiter {
+ self.delimiter
+ }
+
+ pub fn stream(&self) -> TokenStream {
+ self.stream.clone()
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn span_open(&self) -> Span {
+ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+impl Display for Group {
+ // We attempt to match libproc_macro's formatting.
+ // Empty parens: ()
+ // Nonempty parens: (...)
+ // Empty brackets: []
+ // Nonempty brackets: [...]
+ // Empty braces: { }
+ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+ f.write_str(open)?;
+ Display::fmt(&self.stream, f)?;
+ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
+ f.write_str(" ")?;
+ }
+ f.write_str(close)?;
+
+ Ok(())
+ }
+}
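+
+// A hedged illustration of the delimiter formatting above, written against
+// the published proc-macro2 crate rather than this vendored copy: braces get
+// interior padding while the other delimiters hug their contents.
```rust
use proc_macro2::{Delimiter, Group, TokenStream};

fn main() {
    let inner: TokenStream = "a , b".parse().unwrap();
    let parens = Group::new(Delimiter::Parenthesis, inner.clone());
    let braces = Group::new(Delimiter::Brace, inner);

    assert_eq!(parens.to_string(), "(a , b)");
    assert_eq!(braces.to_string(), "{ a , b }");
}
```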
+
+impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+}
+
+#[derive(Clone)]
+pub(crate) struct Ident {
+ sym: Box<str>,
+ span: Span,
+ raw: bool,
+}
+
+impl Ident {
+ #[track_caller]
+ pub fn new_checked(string: &str, span: Span) -> Self {
+ validate_ident(string);
+ Ident::new_unchecked(string, span)
+ }
+
+ pub fn new_unchecked(string: &str, span: Span) -> Self {
+ Ident {
+ sym: Box::from(string),
+ span,
+ raw: false,
+ }
+ }
+
+ #[track_caller]
+ pub fn new_raw_checked(string: &str, span: Span) -> Self {
+ validate_ident_raw(string);
+ Ident::new_raw_unchecked(string, span)
+ }
+
+ pub fn new_raw_unchecked(string: &str, span: Span) -> Self {
+ Ident {
+ sym: Box::from(string),
+ span,
+ raw: true,
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+pub(crate) fn is_ident_start(c: char) -> bool {
+ c == '_' || unicode_ident::is_xid_start(c)
+}
+
+pub(crate) fn is_ident_continue(c: char) -> bool {
+ unicode_ident::is_xid_continue(c)
+}
+
+#[track_caller]
+fn validate_ident(string: &str) {
+ if string.is_empty() {
+ panic!("Ident is not allowed to be empty; use Option<Ident>");
+ }
+
+ if string.bytes().all(|digit| b'0' <= digit && digit <= b'9') {
+ panic!("Ident cannot be a number; use Literal instead");
+ }
+
+ fn ident_ok(string: &str) -> bool {
+ let mut chars = string.chars();
+ let first = chars.next().unwrap();
+ if !is_ident_start(first) {
+ return false;
+ }
+ for ch in chars {
+ if !is_ident_continue(ch) {
+ return false;
+ }
+ }
+ true
+ }
+
+ if !ident_ok(string) {
+ panic!("{:?} is not a valid Ident", string);
+ }
+}
+
+#[track_caller]
+fn validate_ident_raw(string: &str) {
+ validate_ident(string);
+
+ match string {
+ "_" | "super" | "self" | "Self" | "crate" => {
+ panic!("`r#{}` cannot be a raw identifier", string);
+ }
+ _ => {}
+ }
+}
+
+impl PartialEq for Ident {
+ fn eq(&self, other: &Ident) -> bool {
+ self.sym == other.sym && self.raw == other.raw
+ }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+ T: ?Sized + AsRef<str>,
+{
+ fn eq(&self, other: &T) -> bool {
+ let other = other.as_ref();
+ if self.raw {
+ other.starts_with("r#") && *self.sym == other[2..]
+ } else {
+ *self.sym == *other
+ }
+ }
+}
+
+impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+ f.write_str("r#")?;
+ }
+ Display::fmt(&self.sym, f)
+ }
+}
+
+#[allow(clippy::missing_fields_in_debug)]
+impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+ debug.finish()
+ }
+
+ // Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+}
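+
+// A small sketch (published crate, illustrative only) of the raw-identifier
+// behaviour encoded above: string equality strips the `r#` prefix, while
+// Display keeps it.
```rust
use proc_macro2::{Ident, Span};

fn main() {
    let raw = Ident::new_raw("struct", Span::call_site());
    assert_eq!(raw.to_string(), "r#struct");
    assert!(raw == "r#struct");

    let plain = Ident::new("demo", Span::call_site());
    assert!(plain == "demo");
    assert!(plain != raw);
}
```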
+
+#[derive(Clone)]
+pub(crate) struct Literal {
+ pub(crate) repr: String,
+ span: Span,
+}
+
+macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(format!(concat!("{}", stringify!($kind)), n))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(n.to_string())
+ }
+ )*)
+}
+
+impl Literal {
+ pub(crate) fn _new(repr: String) -> Self {
+ Literal {
+ repr,
+ span: Span::call_site(),
+ }
+ }
+
+ pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self {
+ Literal::_new(repr.to_owned())
+ }
+
+ suffixed_numbers! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+
+ f32_suffixed => f32,
+ f64_suffixed => f64,
+ }
+
+ unsuffixed_numbers! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn string(string: &str) -> Literal {
+ let mut repr = String::with_capacity(string.len() + 2);
+ repr.push('"');
+ escape_utf8(string, &mut repr);
+ repr.push('"');
+ Literal::_new(repr)
+ }
+
+ pub fn character(ch: char) -> Literal {
+ let mut repr = String::new();
+ repr.push('\'');
+ if ch == '"' {
+ // escape_debug turns this into '\"' which is unnecessary.
+ repr.push(ch);
+ } else {
+ repr.extend(ch.escape_debug());
+ }
+ repr.push('\'');
+ Literal::_new(repr)
+ }
+
+ pub fn byte_character(byte: u8) -> Literal {
+ let mut repr = "b'".to_string();
+ #[allow(clippy::match_overlapping_arm)]
+ match byte {
+ b'\0' => repr.push_str(r"\0"),
+ b'\t' => repr.push_str(r"\t"),
+ b'\n' => repr.push_str(r"\n"),
+ b'\r' => repr.push_str(r"\r"),
+ b'\'' => repr.push_str(r"\'"),
+ b'\\' => repr.push_str(r"\\"),
+ b'\x20'..=b'\x7E' => repr.push(byte as char),
+ _ => {
+ let _ = write!(repr, r"\x{:02X}", byte);
+ }
+ }
+ repr.push('\'');
+ Literal::_new(repr)
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut repr = "b\"".to_string();
+ let mut bytes = bytes.iter();
+ while let Some(&b) = bytes.next() {
+ #[allow(clippy::match_overlapping_arm)]
+ match b {
+ b'\0' => repr.push_str(match bytes.as_slice().first() {
+ // circumvent clippy::octal_escapes lint
+ Some(b'0'..=b'7') => r"\x00",
+ _ => r"\0",
+ }),
+ b'\t' => repr.push_str(r"\t"),
+ b'\n' => repr.push_str(r"\n"),
+ b'\r' => repr.push_str(r"\r"),
+ b'"' => repr.push_str("\\\""),
+ b'\\' => repr.push_str(r"\\"),
+ b'\x20'..=b'\x7E' => repr.push(b as char),
+ _ => {
+ let _ = write!(repr, r"\x{:02X}", b);
+ }
+ }
+ }
+ repr.push('"');
+ Literal::_new(repr)
+ }
+
+ pub fn c_string(string: &CStr) -> Literal {
+ let mut repr = "c\"".to_string();
+ let mut bytes = string.to_bytes();
+ while !bytes.is_empty() {
+ let (valid, invalid) = match str::from_utf8(bytes) {
+ Ok(all_valid) => {
+ bytes = b"";
+ (all_valid, bytes)
+ }
+ Err(utf8_error) => {
+ let (valid, rest) = bytes.split_at(utf8_error.valid_up_to());
+ let valid = str::from_utf8(valid).unwrap();
+ let invalid = utf8_error
+ .error_len()
+ .map_or(rest, |error_len| &rest[..error_len]);
+ bytes = &bytes[valid.len() + invalid.len()..];
+ (valid, invalid)
+ }
+ };
+ escape_utf8(valid, &mut repr);
+ for &byte in invalid {
+ let _ = write!(repr, r"\x{:02X}", byte);
+ }
+ }
+ repr.push('"');
+ Literal::_new(repr)
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ #[cfg(not(span_locations))]
+ {
+ let _ = range;
+ None
+ }
+
+ #[cfg(span_locations)]
+ {
+ use core::ops::Bound;
+
+ let lo = match range.start_bound() {
+ Bound::Included(start) => {
+ let start = u32::try_from(*start).ok()?;
+ self.span.lo.checked_add(start)?
+ }
+ Bound::Excluded(start) => {
+ let start = u32::try_from(*start).ok()?;
+ self.span.lo.checked_add(start)?.checked_add(1)?
+ }
+ Bound::Unbounded => self.span.lo,
+ };
+ let hi = match range.end_bound() {
+ Bound::Included(end) => {
+ let end = u32::try_from(*end).ok()?;
+ self.span.lo.checked_add(end)?.checked_add(1)?
+ }
+ Bound::Excluded(end) => {
+ let end = u32::try_from(*end).ok()?;
+ self.span.lo.checked_add(end)?
+ }
+ Bound::Unbounded => self.span.hi,
+ };
+ if lo <= hi && hi <= self.span.hi {
+ Some(Span { lo, hi })
+ } else {
+ None
+ }
+ }
+ }
+}
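+
+// For reference, a sketch (published proc-macro2 crate, not this vendored
+// copy) of the textual representations the constructors above produce.
```rust
use proc_macro2::Literal;

fn main() {
    assert_eq!(Literal::u8_suffixed(1).to_string(), "1u8");
    assert_eq!(Literal::i32_unsuffixed(-5).to_string(), "-5");
    assert_eq!(Literal::f64_unsuffixed(2.0).to_string(), "2.0");
    // '"' is escaped inside string literals.
    assert_eq!(Literal::string("a\"b").to_string(), "\"a\\\"b\"");
    // Control bytes use the short escapes from byte_character() above.
    assert_eq!(Literal::byte_character(b'\t').to_string(), r"b'\t'");
}
```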
+
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(repr: &str) -> Result<Self, Self::Err> {
+ let mut cursor = get_cursor(repr);
+ #[cfg(span_locations)]
+ let lo = cursor.off;
+
+ let negative = cursor.starts_with_char('-');
+ if negative {
+ cursor = cursor.advance(1);
+ if !cursor.starts_with_fn(|ch| ch.is_ascii_digit()) {
+ return Err(LexError::call_site());
+ }
+ }
+
+ if let Ok((rest, mut literal)) = parse::literal(cursor) {
+ if rest.is_empty() {
+ if negative {
+ literal.repr.insert(0, '-');
+ }
+ literal.span = Span {
+ #[cfg(span_locations)]
+ lo,
+ #[cfg(span_locations)]
+ hi: rest.off,
+ };
+ return Ok(literal);
+ }
+ }
+ Err(LexError::call_site())
+ }
+}
+
+impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.repr, f)
+ }
+}
+
+impl Debug for Literal {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Literal");
+ debug.field("lit", &format_args!("{}", self.repr));
+ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+}
+
+fn escape_utf8(string: &str, repr: &mut String) {
+ let mut chars = string.chars();
+ while let Some(ch) = chars.next() {
+ if ch == '\0' {
+ repr.push_str(
+ if chars
+ .as_str()
+ .starts_with(|next| '0' <= next && next <= '7')
+ {
+ // circumvent clippy::octal_escapes lint
+ r"\x00"
+ } else {
+ r"\0"
+ },
+ );
+ } else if ch == '\'' {
+ // escape_debug turns this into "\'" which is unnecessary.
+ repr.push(ch);
+ } else {
+ repr.extend(ch.escape_debug());
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/lib.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/lib.rs
new file mode 100644
index 0000000000..d7bfa50f4f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/lib.rs
@@ -0,0 +1,1369 @@
+//! [![github]](https://github.com/dtolnay/proc-macro2) [![crates-io]](https://crates.io/crates/proc-macro2) [![docs-rs]](crate)
+//!
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
+//!
+//! <br>
+//!
+//! A wrapper around the procedural macro API of the compiler's [`proc_macro`]
+//! crate. This library serves two purposes:
+//!
+//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
+//!
+//! - **Bring proc-macro-like functionality to other contexts like build.rs and
+//! main.rs.** Types from `proc_macro` are entirely specific to procedural
+//! macros and cannot ever exist in code outside of a procedural macro.
+//! Meanwhile `proc_macro2` types may exist anywhere including non-macro code.
+//! By developing foundational libraries like [syn] and [quote] against
+//! `proc_macro2` rather than `proc_macro`, the procedural macro ecosystem
+//! becomes easily applicable to many other use cases and we avoid
+//! reimplementing non-macro equivalents of those libraries.
+//!
+//! - **Make procedural macros unit testable.** As a consequence of being
+//! specific to procedural macros, nothing that uses `proc_macro` can be
+//! executed from a unit test. In order for helper libraries or components of
+//! a macro to be testable in isolation, they must be implemented using
+//! `proc_macro2`.
+//!
+//! [syn]: https://github.com/dtolnay/syn
+//! [quote]: https://github.com/dtolnay/quote
+//!
+//! # Usage
+//!
+//! The skeleton of a typical procedural macro typically looks like this:
+//!
+//! ```
+//! extern crate proc_macro;
+//!
+//! # const IGNORE: &str = stringify! {
+//! #[proc_macro_derive(MyDerive)]
+//! # };
+//! # #[cfg(wrap_proc_macro)]
+//! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+//! let input = proc_macro2::TokenStream::from(input);
+//!
+//! let output: proc_macro2::TokenStream = {
+//! /* transform input */
+//! # input
+//! };
+//!
+//! proc_macro::TokenStream::from(output)
+//! }
+//! ```
+//!
+//! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to
+//! propagate parse errors correctly back to the compiler when parsing fails.
+//!
+//! [`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html
+//!
+//! # Unstable features
+//!
+//! The default feature set of proc-macro2 tracks the most recent stable
+//! compiler API. Functionality in `proc_macro` that is not yet stable is not
+//! exposed by proc-macro2 by default.
+//!
+//! To opt into the additional APIs available in the most recent nightly
+//! compiler, the `procmacro2_semver_exempt` config flag must be passed to
+//! rustc. We will polyfill those nightly-only APIs back to Rust 1.56.0. As
+//! these are unstable APIs that track the nightly compiler, minor versions of
+//! proc-macro2 may make breaking changes to them at any time.
+//!
+//! ```sh
+//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
+//! ```
+//!
+//! Note that this must not only be done for your crate, but for any crate that
+//! depends on your crate. This infectious nature is intentional, as it serves
+//! as a reminder that you are outside of the normal semver guarantees.
+//!
+//! Semver exempt methods are marked as such in the proc-macro2 documentation.
+//!
+//! # Thread-Safety
+//!
+//! Most types in this crate are `!Sync` because the underlying compiler
+//! types make use of thread-local memory, meaning they cannot be accessed from
+//! a different thread.
+
+// Proc-macro2 types in rustdoc of other crates get linked to here.
+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.84")]
+#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+#![cfg_attr(super_unstable, feature(proc_macro_def_site))]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![deny(unsafe_op_in_unsafe_fn)]
+#![allow(
+ clippy::cast_lossless,
+ clippy::cast_possible_truncation,
+ clippy::checked_conversions,
+ clippy::doc_markdown,
+ clippy::incompatible_msrv,
+ clippy::items_after_statements,
+ clippy::iter_without_into_iter,
+ clippy::let_underscore_untyped,
+ clippy::manual_assert,
+ clippy::manual_range_contains,
+ clippy::missing_safety_doc,
+ clippy::must_use_candidate,
+ clippy::needless_doctest_main,
+ clippy::new_without_default,
+ clippy::return_self_not_must_use,
+ clippy::shadow_unrelated,
+ clippy::trivially_copy_pass_by_ref,
+ clippy::unnecessary_wraps,
+ clippy::unused_self,
+ clippy::used_underscore_binding,
+ clippy::vec_init_then_push
+)]
+
+#[cfg(all(procmacro2_semver_exempt, wrap_proc_macro, not(super_unstable)))]
+compile_error! {"\
+ Something is not right. If you've tried to turn on \
+ procmacro2_semver_exempt, you need to ensure that it \
+ is turned on for the compilation of the proc-macro2 \
+ build script as well.
+"}
+
+#[cfg(all(
+ procmacro2_nightly_testing,
+ feature = "proc-macro",
+ not(proc_macro_span)
+))]
+compile_error! {"\
+ Build script probe failed to compile.
+"}
+
+extern crate alloc;
+
+#[cfg(feature = "proc-macro")]
+extern crate proc_macro;
+
+mod marker;
+mod parse;
+mod rcvec;
+
+#[cfg(wrap_proc_macro)]
+mod detection;
+
+// Public for proc_macro2::fallback::force() and unforce(), but those are quite
+// a niche use case so we omit it from rustdoc.
+#[doc(hidden)]
+pub mod fallback;
+
+pub mod extra;
+
+#[cfg(not(wrap_proc_macro))]
+use crate::fallback as imp;
+#[path = "wrapper.rs"]
+#[cfg(wrap_proc_macro)]
+mod imp;
+
+#[cfg(span_locations)]
+mod location;
+
+use crate::extra::DelimSpan;
+use crate::marker::{ProcMacroAutoTraits, MARKER};
+use core::cmp::Ordering;
+use core::fmt::{self, Debug, Display};
+use core::hash::{Hash, Hasher};
+#[cfg(span_locations)]
+use core::ops::Range;
+use core::ops::RangeBounds;
+use core::str::FromStr;
+use std::error::Error;
+use std::ffi::CStr;
+#[cfg(procmacro2_semver_exempt)]
+use std::path::PathBuf;
+
+#[cfg(span_locations)]
+#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
+pub use crate::location::LineColumn;
+
+/// An abstract stream of tokens, or more concretely a sequence of token trees.
+///
+/// This type provides interfaces for iterating over token trees and for
+/// collecting token trees into one stream.
+///
+/// Token stream is both the input and output of `#[proc_macro]`,
+/// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream {
+ inner: imp::TokenStream,
+ _marker: ProcMacroAutoTraits,
+}
+
+/// Error returned from `TokenStream::from_str`.
+pub struct LexError {
+ inner: imp::LexError,
+ _marker: ProcMacroAutoTraits,
+}
+
+impl TokenStream {
+ fn _new(inner: imp::TokenStream) -> Self {
+ TokenStream {
+ inner,
+ _marker: MARKER,
+ }
+ }
+
+ fn _new_fallback(inner: fallback::TokenStream) -> Self {
+ TokenStream {
+ inner: inner.into(),
+ _marker: MARKER,
+ }
+ }
+
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> Self {
+ TokenStream::_new(imp::TokenStream::new())
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+/// `TokenStream::default()` returns an empty stream,
+/// i.e. this is equivalent with `TokenStream::new()`.
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token
+/// stream.
+///
+/// May fail for a number of reasons, for example, if the string contains
+/// unbalanced delimiters or characters not existing in the language.
+///
+/// NOTE: Some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let e = src.parse().map_err(|e| LexError {
+ inner: e,
+ _marker: MARKER,
+ })?;
+ Ok(TokenStream::_new(e))
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+#[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))]
+impl From<proc_macro::TokenStream> for TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> Self {
+ TokenStream::_new(inner.into())
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+#[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))]
+impl From<TokenStream> for proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> Self {
+ inner.inner.into()
+ }
+}
+
+impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> Self {
+ TokenStream::_new(imp::TokenStream::from(token))
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+ self.inner.extend(streams);
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ self.inner
+ .extend(streams.into_iter().map(|stream| stream.inner));
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+ TokenStream::_new(streams.into_iter().collect())
+ }
+}
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly
+/// convertible back into the same token stream (modulo spans), except for
+/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+/// numeric literals.
+impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.inner, f)
+ }
+}
+
+/// Prints token in a form convenient for debugging.
+impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.inner, f)
+ }
+}
+
+impl LexError {
+ pub fn span(&self) -> Span {
+ Span::_new(self.inner.span())
+ }
+}
+
+impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.inner, f)
+ }
+}
+
+impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.inner, f)
+ }
+}
+
+impl Error for LexError {}
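+
+// An illustrative sketch (published crate) of the failure path above:
+// strings with unbalanced delimiters cannot be tokenized, so parsing a
+// TokenStream reports a LexError instead of panicking.
```rust
use proc_macro2::TokenStream;

fn main() {
    let result = "fn broken( {".parse::<TokenStream>();
    assert!(result.is_err());
}
```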
+
+/// The source file of a given `Span`.
+///
+/// This type is semver exempt and not exposed by default.
+#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
+#[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))]
+#[derive(Clone, PartialEq, Eq)]
+pub struct SourceFile {
+ inner: imp::SourceFile,
+ _marker: ProcMacroAutoTraits,
+}
+
+#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
+impl SourceFile {
+ fn _new(inner: imp::SourceFile) -> Self {
+ SourceFile {
+ inner,
+ _marker: MARKER,
+ }
+ }
+
+ /// Get the path to this source file.
+ ///
+ /// ### Note
+ ///
+ /// If the code span associated with this `SourceFile` was generated by an
+ /// external macro, this may not be an actual path on the filesystem. Use
+ /// [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if
+ /// `--remap-path-prefix` was passed on the command line, the path as given
+ /// may not actually be valid.
+ ///
+ /// [`is_real`]: #method.is_real
+ pub fn path(&self) -> PathBuf {
+ self.inner.path()
+ }
+
+ /// Returns `true` if this source file is a real source file, and not
+ /// generated by an external macro's expansion.
+ pub fn is_real(&self) -> bool {
+ self.inner.is_real()
+ }
+}
+
+#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
+impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.inner, f)
+ }
+}
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span {
+ inner: imp::Span,
+ _marker: ProcMacroAutoTraits,
+}
+
+impl Span {
+ fn _new(inner: imp::Span) -> Self {
+ Span {
+ inner,
+ _marker: MARKER,
+ }
+ }
+
+ fn _new_fallback(inner: fallback::Span) -> Self {
+ Span {
+ inner: inner.into(),
+ _marker: MARKER,
+ }
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ ///
+ /// Identifiers created with this span will be resolved as if they were
+ /// written directly at the macro call location (call-site hygiene) and
+ /// other code at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Self {
+ Span::_new(imp::Span::call_site())
+ }
+
+ /// The span located at the invocation of the procedural macro, but with
+ /// local variables, labels, and `$crate` resolved at the definition site
+ /// of the macro. This is the same hygiene behavior as `macro_rules`.
+ pub fn mixed_site() -> Self {
+ Span::_new(imp::Span::mixed_site())
+ }
+
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))]
+ pub fn def_site() -> Self {
+ Span::_new(imp::Span::def_site())
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+
+ /// Convert `proc_macro2::Span` to `proc_macro::Span`.
+ ///
+ /// This method is available when building with a nightly compiler, or when
+ /// building with rustc 1.29+ *without* semver exempt features.
+ ///
+ /// # Panics
+ ///
+ /// Panics if called from outside of a procedural macro. Unlike
+ /// `proc_macro2::Span`, the `proc_macro::Span` type can only exist within
+ /// the context of a procedural macro invocation.
+ #[cfg(wrap_proc_macro)]
+ pub fn unwrap(self) -> proc_macro::Span {
+ self.inner.unwrap()
+ }
+
+ // Soft deprecated. Please use Span::unwrap.
+ #[cfg(wrap_proc_macro)]
+ #[doc(hidden)]
+ pub fn unstable(self) -> proc_macro::Span {
+ self.unwrap()
+ }
+
+ /// The original source file into which this span points.
+ ///
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
+ #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))]
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile::_new(self.inner.source_file())
+ }
+
+ /// Returns the span's byte position range in the source file.
+ ///
+ /// This method requires the `"span-locations"` feature to be enabled.
+ ///
+ /// When executing in a procedural macro context, the returned range is only
+ /// accurate if compiled with a nightly toolchain. The stable toolchain does
+ /// not have this information available. When executing outside of a
+ /// procedural macro, such as main.rs or build.rs, the byte range is always
+ /// accurate regardless of toolchain.
+ #[cfg(span_locations)]
+ #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
+ pub fn byte_range(&self) -> Range<usize> {
+ self.inner.byte_range()
+ }
+
+ /// Get the starting line/column in the source file for this span.
+ ///
+ /// This method requires the `"span-locations"` feature to be enabled.
+ ///
+ /// When executing in a procedural macro context, the returned line/column
+ /// are only meaningful if compiled with a nightly toolchain. The stable
+ /// toolchain does not have this information available. When executing
+ /// outside of a procedural macro, such as main.rs or build.rs, the
+ /// line/column are always meaningful regardless of toolchain.
+ #[cfg(span_locations)]
+ #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
+ pub fn start(&self) -> LineColumn {
+ self.inner.start()
+ }
+
+ /// Get the ending line/column in the source file for this span.
+ ///
+ /// This method requires the `"span-locations"` feature to be enabled.
+ ///
+ /// When executing in a procedural macro context, the returned line/column
+ /// are only meaningful if compiled with a nightly toolchain. The stable
+ /// toolchain does not have this information available. When executing
+ /// outside of a procedural macro, such as main.rs or build.rs, the
+ /// line/column are always meaningful regardless of toolchain.
+ #[cfg(span_locations)]
+ #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
+ pub fn end(&self) -> LineColumn {
+ self.inner.end()
+ }
+
+ /// Create a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ ///
+ /// Warning: the underlying [`proc_macro::Span::join`] method is
+ /// nightly-only. When called from within a procedural macro not using a
+ /// nightly compiler, this method will always return `None`.
+ ///
+ /// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.inner.join(other.inner).map(Span::_new)
+ }
+
+ /// Compares two spans to see if they're equal.
+ ///
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))]
+ pub fn eq(&self, other: &Span) -> bool {
+ self.inner.eq(&other.inner)
+ }
+
+ /// Returns the source text behind a span. This preserves the original
+ /// source code, including spaces and comments. It only returns a result if
+ /// the span corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens
+ /// and not on this source text. The result of this function is a best
+ /// effort to be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.inner.source_text()
+ }
+}
+
+/// Prints a span in a form convenient for debugging.
+impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.inner, f)
+ }
+}
+
+/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match self {
+ TokenTree::Group(t) => t.span(),
+ TokenTree::Ident(t) => t.span(),
+ TokenTree::Punct(t) => t.span(),
+ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match self {
+ TokenTree::Group(t) => t.set_span(span),
+ TokenTree::Ident(t) => t.set_span(span),
+ TokenTree::Punct(t) => t.set_span(span),
+ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> Self {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> Self {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> Self {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> Self {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly
+/// convertible back into the same token tree (modulo spans), except for
+/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+/// numeric literals.
+impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ TokenTree::Group(t) => Display::fmt(t, f),
+ TokenTree::Ident(t) => Display::fmt(t, f),
+ TokenTree::Punct(t) => Display::fmt(t, f),
+ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match self {
+ TokenTree::Group(t) => Debug::fmt(t, f),
+ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+ TokenTree::Punct(t) => Debug::fmt(t, f),
+ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by
+/// `Delimiter`s.
+#[derive(Clone)]
+pub struct Group {
+ inner: imp::Group,
+}
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `∅ ... ∅`
+ ///
+ /// An invisible delimiter, that may, for example, appear around tokens
+ /// coming from a "macro variable" `$var`. It is important to preserve
+ /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
+ /// Invisible delimiters may not survive roundtrip of a token stream through
+ /// a string.
+ ///
+ /// <div class="warning">
+ ///
+ /// Note: rustc currently can ignore the grouping of tokens delimited by `None` in the output
+ /// of a proc_macro. Only `None`-delimited groups created by a macro_rules macro in the input
+ /// of a proc_macro macro are preserved, and only in very specific circumstances.
+ /// Any `None`-delimited groups (re)created by a proc_macro will therefore not preserve
+ /// operator priorities as indicated above. The other `Delimiter` variants should be used
+ /// instead in this context. This is a rustc bug. For details, see
+ /// [rust-lang/rust#67062](https://github.com/rust-lang/rust/issues/67062).
+ ///
+ /// </div>
+ None,
+}
+
+impl Group {
+ fn _new(inner: imp::Group) -> Self {
+ Group { inner }
+ }
+
+ fn _new_fallback(inner: fallback::Group) -> Self {
+ Group {
+ inner: inner.into(),
+ }
+ }
+
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
+ Group {
+ inner: imp::Group::new(delimiter, stream.inner),
+ }
+ }
+
+ /// Returns the punctuation used as the delimiter for this group: a set of
+ /// parentheses, square brackets, or curly braces.
+ pub fn delimiter(&self) -> Delimiter {
+ self.inner.delimiter()
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream::_new(self.inner.stream())
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span::_new(self.inner.span())
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span::_new(self.inner.span_open())
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span::_new(self.inner.span_close())
+ }
+
+ /// Returns an object that holds this group's `span_open()` and
+ /// `span_close()` together (in a more compact representation than holding
+ /// those 2 spans individually).
+ pub fn delim_span(&self) -> DelimSpan {
+ DelimSpan::new(&self.inner)
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.inner.set_span(span.inner);
+ }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.inner, formatter)
+ }
+}
+
+impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.inner, formatter)
+ }
+}
+
+/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
+///
+/// Multicharacter operators like `+=` are represented as two instances of
+/// `Punct` with different forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct {
+ ch: char,
+ spacing: Spacing,
+ span: Span,
+}
+
+/// Whether a `Punct` is followed immediately by another `Punct` or followed by
+/// another token or whitespace.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum Spacing {
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
+ Alone,
+ /// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form
+ /// lifetimes `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ ///
+ /// The `ch` argument must be a valid punctuation character permitted by the
+ /// language, otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+ pub fn new(ch: char, spacing: Spacing) -> Self {
+ Punct {
+ ch,
+ spacing,
+ span: Span::call_site(),
+ }
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.ch
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether
+ /// it's immediately followed by another `Punct` in the token stream, so
+ /// they can potentially be combined into a multicharacter operator
+ /// (`Joint`), or it's followed by some other token or whitespace (`Alone`)
+ /// so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ self.spacing
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+/// Prints the punctuation character as a string that should be losslessly
+/// convertible back into the same character.
+impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.ch, f)
+ }
+}
+
+impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+ debug.field("char", &self.ch);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+ }
+}
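+
+// A minimal sketch of the `Spacing` semantics described above, assuming the
+// crate is consumed as the published proc-macro2 package: `+=` lexes into two
+// Puncts, the first Joint (glued to the next punct) and the second Alone.
```rust
use proc_macro2::{Spacing, TokenStream, TokenTree};

fn main() {
    let stream: TokenStream = "+=".parse().unwrap();
    let spacings: Vec<Spacing> = stream
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Punct(p) => p.spacing(),
            other => unreachable!("unexpected token: {}", other),
        })
        .collect();
    assert_eq!(spacings, [Spacing::Joint, Spacing::Alone]);
}
```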
+
+/// A word of Rust code, which may be a keyword or legal variable name.
+///
+/// which has the XID_Start property and the rest of which have the XID_Continue
XID_Continue
+/// property.
+///
+/// - The empty string is not an identifier. Use `Option<Ident>`.
+/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
+///
+/// An identifier constructed with `Ident::new` is permitted to be a Rust
+/// keyword, though parsing one through its [`Parse`] implementation rejects
+/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
+/// behaviour of `Ident::new`.
+///
+/// [`Parse`]: https://docs.rs/syn/2.0/syn/parse/trait.Parse.html
+///
+/// # Examples
+///
+/// A new ident can be created from a string using the `Ident::new` function.
+/// A span must be provided explicitly which governs the name resolution
+/// behavior of the resulting identifier.
+///
+/// ```
+/// use proc_macro2::{Ident, Span};
+///
+/// fn main() {
+/// let call_ident = Ident::new("calligraphy", Span::call_site());
+///
+/// println!("{}", call_ident);
+/// }
+/// ```
+///
+/// An ident can be interpolated into a token stream using the `quote!` macro.
+///
+/// ```
+/// use proc_macro2::{Ident, Span};
+/// use quote::quote;
+///
+/// fn main() {
+/// let ident = Ident::new("demo", Span::call_site());
+///
+/// // Create a variable binding whose name is this ident.
+/// let expanded = quote! { let #ident = 10; };
+///
+/// // Create a variable binding with a slightly different name.
+/// let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site());
+/// let expanded = quote! { let #temp_ident = 10; };
+/// }
+/// ```
+///
+/// A string representation of the ident is available through the `to_string()`
+/// method.
+///
+/// ```
+/// # use proc_macro2::{Ident, Span};
+/// #
+/// # let ident = Ident::new("another_identifier", Span::call_site());
+/// #
+/// // Examine the ident as a string.
+/// let ident_string = ident.to_string();
+/// if ident_string.len() > 60 {
+/// println!("Very long identifier: {}", ident_string)
+/// }
+/// ```
+#[derive(Clone)]
+pub struct Ident {
+ inner: imp::Ident,
+ _marker: ProcMacroAutoTraits,
+}
+
+impl Ident {
+ fn _new(inner: imp::Ident) -> Self {
+ Ident {
+ inner,
+ _marker: MARKER,
+ }
+ }
+
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ ///
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language, otherwise the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site"
+ /// hygiene meaning that identifiers created with this span will be resolved
+ /// as if they were written directly at the location of the macro call, and
+ /// other code at the macro call site will be able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow to opt-in to
+ /// "definition-site" hygiene meaning that identifiers created with this
+ /// span will be resolved at the location of the macro definition and other
+ /// code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the input string is neither a keyword nor a legal variable
+ /// name. If you are not sure whether the string contains an identifier and
+ /// need to handle an error case, use
+ /// <a href="https://docs.rs/syn/2.0/syn/fn.parse_str.html"><code
+ /// style="padding-right:0;">syn::parse_str</code></a><code
+ /// style="padding-left:0;">::<Ident></code>
+ /// rather than `Ident::new`.
+ #[track_caller]
+ pub fn new(string: &str, span: Span) -> Self {
+ Ident::_new(imp::Ident::new_checked(string, span.inner))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). The
+ /// `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path
+ /// segments (e.g. `self`, `super`) are not supported, and will cause a
+ /// panic.
+ #[track_caller]
+ pub fn new_raw(string: &str, span: Span) -> Self {
+ Ident::_new(imp::Ident::new_raw_checked(string, span.inner))
+ }
+
+ /// Returns the span of this `Ident`.
+ pub fn span(&self) -> Span {
+ Span::_new(self.inner.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene
+ /// context.
+ pub fn set_span(&mut self, span: Span) {
+ self.inner.set_span(span.inner);
+ }
+}
+
+impl PartialEq for Ident {
+ fn eq(&self, other: &Ident) -> bool {
+ self.inner == other.inner
+ }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+ T: ?Sized + AsRef<str>,
+{
+ fn eq(&self, other: &T) -> bool {
+ self.inner == other
+ }
+}
+
+impl Eq for Ident {}
+
+impl PartialOrd for Ident {
+ fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for Ident {
+ fn cmp(&self, other: &Ident) -> Ordering {
+ self.to_string().cmp(&other.to_string())
+ }
+}
+
+impl Hash for Ident {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ self.to_string().hash(hasher);
+ }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible back
+/// into the same identifier.
+impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.inner, f)
+ }
+}
+
+impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.inner, f)
+ }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
+/// byte character (`b'a'`), an integer or floating point number with or without
+/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+///
+/// Boolean literals like `true` and `false` do not belong here, they are
+/// `Ident`s.
+#[derive(Clone)]
+pub struct Literal {
+ inner: imp::Literal,
+ _marker: ProcMacroAutoTraits,
+}
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integral is
+ /// also suffixed at the end. Literals created from negative numbers
may
+ /// not survive roundtrips through `TokenStream` or strings and may be
+ /// broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(imp::Literal::$name(n))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers
+ /// may not survive roundtrips through `TokenStream` or strings and may
+ /// be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(imp::Literal::$name(n))
+ }
+ )*)
+}
+
+impl Literal {
+ fn _new(inner: imp::Literal) -> Self {
+ Literal {
+ inner,
+ _marker: MARKER,
+ }
+ }
+
+ fn _new_fallback(inner: fallback::Literal) -> Self {
+ Literal {
+ inner: inner.into(),
+ _marker: MARKER,
+ }
+ }
+
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers may not survive round-trips
+ /// through `TokenStream` or strings and may be broken into two tokens (`-`
+ /// and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for example
+ /// if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f64_unsuffixed(f))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler. Literals created from negative numbers may not survive
+ /// round-trips through `TokenStream` or strings and may be broken into two
+ /// tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for example
+ /// if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(f: f64) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f64_suffixed(f))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers may not survive round-trips
+ /// through `TokenStream` or strings and may be broken into two tokens (`-`
+ /// and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for example
+ /// if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f32_unsuffixed(f))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler. Literals created from negative numbers may not survive
+ /// round-trips through `TokenStream` or strings and may be broken into two
+ /// tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for example
+ /// if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(f: f32) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f32_suffixed(f))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal::_new(imp::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal::_new(imp::Literal::character(ch))
+ }
+
+ /// Byte character literal.
+ pub fn byte_character(byte: u8) -> Literal {
+ Literal::_new(imp::Literal::byte_character(byte))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal::_new(imp::Literal::byte_string(bytes))
+ }
+
+ /// C string literal.
+ pub fn c_string(string: &CStr) -> Literal {
+ Literal::_new(imp::Literal::c_string(string))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span::_new(self.inner.span())
+ }
+
+ /// Configures the span associated for this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.inner.set_span(span.inner);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only
+ /// the source bytes in range `range`. Returns `None` if the would-be
+ /// trimmed span is outside the bounds of `self`.
+ ///
+ /// Warning: the underlying [`proc_macro::Literal::subspan`] method is
+ /// nightly-only. When called from within a procedural macro not using a
+ /// nightly compiler, this method will always return `None`.
+ ///
+    /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.inner.subspan(range).map(Span::_new)
+ }
+
+ // Intended for the `quote!` macro to use when constructing a proc-macro2
+ // token out of a macro_rules $:literal token, which is already known to be
+ // a valid literal. This avoids reparsing/validating the literal's string
+ // representation. This is not public API other than for quote.
+ #[doc(hidden)]
+ pub unsafe fn from_str_unchecked(repr: &str) -> Self {
+ Literal::_new(unsafe { imp::Literal::from_str_unchecked(repr) })
+ }
+}
+
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(repr: &str) -> Result<Self, LexError> {
+ repr.parse().map(Literal::_new).map_err(|inner| LexError {
+ inner,
+ _marker: MARKER,
+ })
+ }
+}
+
+impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.inner, f)
+ }
+}
+
+impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.inner, f)
+ }
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use crate::marker::{ProcMacroAutoTraits, MARKER};
+ use crate::{imp, TokenTree};
+ use core::fmt::{self, Debug};
+
+ pub use crate::TokenStream;
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+ /// The iteration is "shallow", e.g. the iterator doesn't recurse into
+ /// delimited groups, and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+ _marker: ProcMacroAutoTraits,
+ }
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+ }
+
+ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter {
+ inner: self.inner.into_iter(),
+ _marker: MARKER,
+ }
+ }
+ }
+}
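A quick illustration of the constructors and the shallow iterator documented above (a sketch for review purposes, not part of the vendored sources; it assumes an ordinary non-proc-macro context, where the fallback implementation runs):

// Sketch only: suffixed vs. unsuffixed literals, and shallow TokenStream iteration.
use proc_macro2::{Literal, TokenStream, TokenTree};

fn main() {
    // Unsuffixed: the numeric type is left for the compiler to infer later.
    assert_eq!(Literal::f64_unsuffixed(1.5).to_string(), "1.5");
    // Suffixed: the token carries its type, so it is always an f64.
    assert_eq!(Literal::f64_suffixed(1.5).to_string(), "1.5f64");

    // Iteration is shallow: the bracketed group comes back as a single tree.
    let stream: TokenStream = "a + [b, c]".parse().unwrap();
    let trees: Vec<TokenTree> = stream.into_iter().collect();
    assert_eq!(trees.len(), 3); // Ident(a), Punct(+), Group([b, c])
}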
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/location.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/location.rs
new file mode 100644
index 0000000000..7190e2d052
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/location.rs
@@ -0,0 +1,29 @@
+use core::cmp::Ordering;
+
+/// A line-column pair representing the start or end of a `Span`.
+///
+/// This type is semver exempt and not exposed by default.
+#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends
+ /// (inclusive).
+ pub line: usize,
+ /// The 0-indexed column (in UTF-8 characters) in the source file on which
+ /// the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line
+ .cmp(&other.line)
+ .then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
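The Ord impl above orders positions line-first, then column. A minimal standalone sketch of that comparison, using a local stand-in for the type since LineColumn is only exposed with the span-locations feature:

use core::cmp::Ordering;

// Local stand-in for proc_macro2::LineColumn, compared line-first, then column.
#[derive(Copy, Clone, PartialEq, Eq)]
struct LineColumn { line: usize, column: usize }

fn cmp(a: LineColumn, b: LineColumn) -> Ordering {
    a.line.cmp(&b.line).then(a.column.cmp(&b.column))
}

fn main() {
    let start = LineColumn { line: 3, column: 0 };
    let end = LineColumn { line: 3, column: 7 };
    assert_eq!(cmp(start, end), Ordering::Less); // same line, earlier column
    assert_eq!(cmp(end, LineColumn { line: 4, column: 0 }), Ordering::Less);
}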
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/marker.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/marker.rs
new file mode 100644
index 0000000000..23b94ce6fa
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/marker.rs
@@ -0,0 +1,17 @@
+use alloc::rc::Rc;
+use core::marker::PhantomData;
+use core::panic::{RefUnwindSafe, UnwindSafe};
+
+// Zero sized marker with the correct set of autotrait impls we want all proc
+// macro types to have.
+#[derive(Copy, Clone)]
+#[cfg_attr(
+ all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)),
+ derive(PartialEq, Eq)
+)]
+pub(crate) struct ProcMacroAutoTraits(PhantomData<Rc<()>>);
+
+pub(crate) const MARKER: ProcMacroAutoTraits = ProcMacroAutoTraits(PhantomData);
+
+impl UnwindSafe for ProcMacroAutoTraits {}
+impl RefUnwindSafe for ProcMacroAutoTraits {}
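The marker relies on PhantomData<Rc<()>> to opt every public type out of Send and Sync without storing any data. A minimal sketch of the same trick in isolation:

use std::marker::PhantomData;
use std::rc::Rc;

// Zero-sized field whose only job is to make the containing type !Send and !Sync,
// because Rc<()> implements neither auto trait.
struct NotThreadSafe {
    _marker: PhantomData<Rc<()>>,
}

fn main() {
    let value = NotThreadSafe { _marker: PhantomData };
    let _ = &value;
    // fn require_send<T: Send>(_: T) {}
    // require_send(value); // does not compile: Rc<()> is neither Send nor Sync
}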
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/parse.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/parse.rs
new file mode 100644
index 0000000000..07239bc3ad
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/parse.rs
@@ -0,0 +1,996 @@
+use crate::fallback::{
+    self, is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
+ TokenStreamBuilder,
+};
+use crate::{Delimiter, Punct, Spacing, TokenTree};
+use core::char;
+use core::str::{Bytes, CharIndices, Chars};
+
+#[derive(Copy, Clone, Eq, PartialEq)]
+pub(crate) struct Cursor<'a> {
+ pub rest: &'a str,
+ #[cfg(span_locations)]
+ pub off: u32,
+}
+
+impl<'a> Cursor<'a> {
+ pub fn advance(&self, bytes: usize) -> Cursor<'a> {
+ let (_front, rest) = self.rest.split_at(bytes);
+ Cursor {
+ rest,
+ #[cfg(span_locations)]
+ off: self.off + _front.chars().count() as u32,
+ }
+ }
+
+ pub fn starts_with(&self, s: &str) -> bool {
+ self.rest.starts_with(s)
+ }
+
+ pub fn starts_with_char(&self, ch: char) -> bool {
+ self.rest.starts_with(ch)
+ }
+
+ pub fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool
+ where
+ Pattern: FnMut(char) -> bool,
+ {
+ self.rest.starts_with(f)
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.rest.is_empty()
+ }
+
+ fn len(&self) -> usize {
+ self.rest.len()
+ }
+
+ fn as_bytes(&self) -> &'a [u8] {
+ self.rest.as_bytes()
+ }
+
+ fn bytes(&self) -> Bytes<'a> {
+ self.rest.bytes()
+ }
+
+ fn chars(&self) -> Chars<'a> {
+ self.rest.chars()
+ }
+
+ fn char_indices(&self) -> CharIndices<'a> {
+ self.rest.char_indices()
+ }
+
+ fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
+ if self.starts_with(tag) {
+ Ok(self.advance(tag.len()))
+ } else {
+ Err(Reject)
+ }
+ }
+}
+
+pub(crate) struct Reject;
+type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;
+
+fn skip_whitespace(input: Cursor) -> Cursor {
+ let mut s = input;
+
+ while !s.is_empty() {
+ let byte = s.as_bytes()[0];
+ if byte == b'/' {
+ if s.starts_with("//")
+ && (!s.starts_with("///") || s.starts_with("////"))
+ && !s.starts_with("//!")
+ {
+ let (cursor, _) = take_until_newline_or_eof(s);
+ s = cursor;
+ continue;
+ } else if s.starts_with("/**/") {
+ s = s.advance(4);
+ continue;
+ } else if s.starts_with("/*")
+ && (!s.starts_with("/**") || s.starts_with("/***"))
+ && !s.starts_with("/*!")
+ {
+ match block_comment(s) {
+ Ok((rest, _)) => {
+ s = rest;
+ continue;
+ }
+ Err(Reject) => return s,
+ }
+ }
+ }
+ match byte {
+ b' ' | 0x09..=0x0d => {
+ s = s.advance(1);
+ continue;
+ }
+ b if b.is_ascii() => {}
+ _ => {
+ let ch = s.chars().next().unwrap();
+ if is_whitespace(ch) {
+ s = s.advance(ch.len_utf8());
+ continue;
+ }
+ }
+ }
+ return s;
+ }
+ s
+}
+
+fn block_comment(input: Cursor) -> PResult<&str> {
+ if !input.starts_with("/*") {
+ return Err(Reject);
+ }
+
+ let mut depth = 0usize;
+ let bytes = input.as_bytes();
+ let mut i = 0usize;
+ let upper = bytes.len() - 1;
+
+ while i < upper {
+ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+ depth += 1;
+ i += 1; // eat '*'
+ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+ depth -= 1;
+ if depth == 0 {
+ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+ }
+ i += 1; // eat '/'
+ }
+ i += 1;
+ }
+
+ Err(Reject)
+}
+
+fn is_whitespace(ch: char) -> bool {
+ // Rust treats left-to-right mark and right-to-left mark as whitespace
+ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+}
+
+fn word_break(input: Cursor) -> Result<Cursor, Reject> {
+ match input.chars().next() {
+ Some(ch) if is_ident_continue(ch) => Err(Reject),
+ Some(_) | None => Ok(input),
+ }
+}
+
+// Rustc's representation of a macro expansion error in expression position or
+// type position.
+const ERROR: &str = "(/*ERROR*/)";
+
+pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
+ let mut trees = TokenStreamBuilder::new();
+ let mut stack = Vec::new();
+
+ loop {
+ input = skip_whitespace(input);
+
+ if let Ok((rest, ())) = doc_comment(input, &mut trees) {
+ input = rest;
+ continue;
+ }
+
+ #[cfg(span_locations)]
+ let lo = input.off;
+
+ let first = match input.bytes().next() {
+ Some(first) => first,
+ None => match stack.last() {
+ None => return Ok(trees.build()),
+ #[cfg(span_locations)]
+ Some((lo, _frame)) => {
+ return Err(LexError {
+ span: Span { lo: *lo, hi: *lo },
+ })
+ }
+ #[cfg(not(span_locations))]
+ Some(_frame) => return Err(LexError { span: Span {} }),
+ },
+ };
+
+ if let Some(open_delimiter) = match first {
+ b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis),
+ b'[' => Some(Delimiter::Bracket),
+ b'{' => Some(Delimiter::Brace),
+ _ => None,
+ } {
+ input = input.advance(1);
+ let frame = (open_delimiter, trees);
+ #[cfg(span_locations)]
+ let frame = (lo, frame);
+ stack.push(frame);
+ trees = TokenStreamBuilder::new();
+ } else if let Some(close_delimiter) = match first {
+ b')' => Some(Delimiter::Parenthesis),
+ b']' => Some(Delimiter::Bracket),
+ b'}' => Some(Delimiter::Brace),
+ _ => None,
+ } {
+ let frame = match stack.pop() {
+ Some(frame) => frame,
+ None => return Err(lex_error(input)),
+ };
+ #[cfg(span_locations)]
+ let (lo, frame) = frame;
+ let (open_delimiter, outer) = frame;
+ if open_delimiter != close_delimiter {
+ return Err(lex_error(input));
+ }
+ input = input.advance(1);
+ let mut g = Group::new(open_delimiter, trees.build());
+ g.set_span(Span {
+ #[cfg(span_locations)]
+ lo,
+ #[cfg(span_locations)]
+ hi: input.off,
+ });
+ trees = outer;
+
+            trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g)));
+ } else {
+ let (rest, mut tt) = match leaf_token(input) {
+ Ok((rest, tt)) => (rest, tt),
+ Err(Reject) => return Err(lex_error(input)),
+ };
+ tt.set_span(crate::Span::_new_fallback(Span {
+ #[cfg(span_locations)]
+ lo,
+ #[cfg(span_locations)]
+ hi: rest.off,
+ }));
+ trees.push_token_from_parser(tt);
+ input = rest;
+ }
+ }
+}
+
+fn lex_error(cursor: Cursor) -> LexError {
+ #[cfg(not(span_locations))]
+ let _ = cursor;
+ LexError {
+ span: Span {
+ #[cfg(span_locations)]
+ lo: cursor.off,
+ #[cfg(span_locations)]
+ hi: cursor.off,
+ },
+ }
+}
+
+fn leaf_token(input: Cursor) -> PResult<TokenTree> {
+ if let Ok((input, l)) = literal(input) {
+ // must be parsed before ident
+ Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l))))
+ } else if let Ok((input, p)) = punct(input) {
+ Ok((input, TokenTree::Punct(p)))
+ } else if let Ok((input, i)) = ident(input) {
+ Ok((input, TokenTree::Ident(i)))
+ } else if input.starts_with(ERROR) {
+ let rest = input.advance(ERROR.len());
+        let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned()));
+ Ok((rest, TokenTree::Literal(repr)))
+ } else {
+ Err(Reject)
+ }
+}
+
+fn ident(input: Cursor) -> PResult<crate::Ident> {
+ if [
+        "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#",
+ ]
+ .iter()
+ .any(|prefix| input.starts_with(prefix))
+ {
+ Err(Reject)
+ } else {
+ ident_any(input)
+ }
+}
+
+fn ident_any(input: Cursor) -> PResult<crate::Ident> {
+ let raw = input.starts_with("r#");
+ let rest = input.advance((raw as usize) << 1);
+
+ let (rest, sym) = ident_not_raw(rest)?;
+
+ if !raw {
+ let ident = crate::Ident::_new(crate::imp::Ident::new_unchecked(
+ sym,
+ fallback::Span::call_site(),
+ ));
+ return Ok((rest, ident));
+ }
+
+ match sym {
+ "_" | "super" | "self" | "Self" | "crate" => return Err(Reject),
+ _ => {}
+ }
+
+ let ident = crate::Ident::_new(crate::imp::Ident::new_raw_unchecked(
+ sym,
+ fallback::Span::call_site(),
+ ));
+ Ok((rest, ident))
+}
+
+fn ident_not_raw(input: Cursor) -> PResult<&str> {
+ let mut chars = input.char_indices();
+
+ match chars.next() {
+ Some((_, ch)) if is_ident_start(ch) => {}
+ _ => return Err(Reject),
+ }
+
+ let mut end = input.len();
+ for (i, ch) in chars {
+ if !is_ident_continue(ch) {
+ end = i;
+ break;
+ }
+ }
+
+ Ok((input.advance(end), &input.rest[..end]))
+}
+
+pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
+ let rest = literal_nocapture(input)?;
+ let end = input.len() - rest.len();
+ Ok((rest, Literal::_new(input.rest[..end].to_string())))
+}
+
+fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
+ if let Ok(ok) = string(input) {
+ Ok(ok)
+ } else if let Ok(ok) = byte_string(input) {
+ Ok(ok)
+ } else if let Ok(ok) = c_string(input) {
+ Ok(ok)
+ } else if let Ok(ok) = byte(input) {
+ Ok(ok)
+ } else if let Ok(ok) = character(input) {
+ Ok(ok)
+ } else if let Ok(ok) = float(input) {
+ Ok(ok)
+ } else if let Ok(ok) = int(input) {
+ Ok(ok)
+ } else {
+ Err(Reject)
+ }
+}
+
+fn literal_suffix(input: Cursor) -> Cursor {
+ match ident_not_raw(input) {
+ Ok((input, _)) => input,
+ Err(Reject) => input,
+ }
+}
+
+fn string(input: Cursor) -> Result<Cursor, Reject> {
+ if let Ok(input) = input.parse("\"") {
+ cooked_string(input)
+ } else if let Ok(input) = input.parse("r") {
+ raw_string(input)
+ } else {
+ Err(Reject)
+ }
+}
+
+fn cooked_string(mut input: Cursor) -> Result<Cursor, Reject> {
+ let mut chars = input.char_indices();
+
+ while let Some((i, ch)) = chars.next() {
+ match ch {
+ '"' => {
+ let input = input.advance(i + 1);
+ return Ok(literal_suffix(input));
+ }
+ '\r' => match chars.next() {
+ Some((_, '\n')) => {}
+ _ => break,
+ },
+ '\\' => match chars.next() {
+ Some((_, 'x')) => {
+ backslash_x_char(&mut chars)?;
+ }
+ Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {}
+ Some((_, 'u')) => {
+ backslash_u(&mut chars)?;
+ }
+ Some((newline, ch @ ('\n' | '\r'))) => {
+ input = input.advance(newline + 1);
+ trailing_backslash(&mut input, ch as u8)?;
+ chars = input.char_indices();
+ }
+ _ => break,
+ },
+ _ch => {}
+ }
+ }
+ Err(Reject)
+}
+
+fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
+ let (input, delimiter) = delimiter_of_raw_string(input)?;
+ let mut bytes = input.bytes().enumerate();
+ while let Some((i, byte)) = bytes.next() {
+ match byte {
+ b'"' if input.rest[i + 1..].starts_with(delimiter) => {
+ let rest = input.advance(i + 1 + delimiter.len());
+ return Ok(literal_suffix(rest));
+ }
+ b'\r' => match bytes.next() {
+ Some((_, b'\n')) => {}
+ _ => break,
+ },
+ _ => {}
+ }
+ }
+ Err(Reject)
+}
+
+fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
+ if let Ok(input) = input.parse("b\"") {
+ cooked_byte_string(input)
+ } else if let Ok(input) = input.parse("br") {
+ raw_byte_string(input)
+ } else {
+ Err(Reject)
+ }
+}
+
+fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
+ let mut bytes = input.bytes().enumerate();
+ while let Some((offset, b)) = bytes.next() {
+ match b {
+ b'"' => {
+ let input = input.advance(offset + 1);
+ return Ok(literal_suffix(input));
+ }
+ b'\r' => match bytes.next() {
+ Some((_, b'\n')) => {}
+ _ => break,
+ },
+ b'\\' => match bytes.next() {
+ Some((_, b'x')) => {
+ backslash_x_byte(&mut bytes)?;
+ }
+                Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {}
+ Some((newline, b @ (b'\n' | b'\r'))) => {
+ input = input.advance(newline + 1);
+ trailing_backslash(&mut input, b)?;
+ bytes = input.bytes().enumerate();
+ }
+ _ => break,
+ },
+ b if b.is_ascii() => {}
+ _ => break,
+ }
+ }
+ Err(Reject)
+}
+
+fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> {
+ for (i, byte) in input.bytes().enumerate() {
+ match byte {
+ b'"' => {
+ if i > 255 {
+ // https://github.com/rust-lang/rust/pull/95251
+ return Err(Reject);
+ }
+ return Ok((input.advance(i + 1), &input.rest[..i]));
+ }
+ b'#' => {}
+ _ => break,
+ }
+ }
+ Err(Reject)
+}
+
+fn raw_byte_string(input: Cursor) -> Result<Cursor, Reject> {
+ let (input, delimiter) = delimiter_of_raw_string(input)?;
+ let mut bytes = input.bytes().enumerate();
+ while let Some((i, byte)) = bytes.next() {
+ match byte {
+ b'"' if input.rest[i + 1..].starts_with(delimiter) => {
+ let rest = input.advance(i + 1 + delimiter.len());
+ return Ok(literal_suffix(rest));
+ }
+ b'\r' => match bytes.next() {
+ Some((_, b'\n')) => {}
+ _ => break,
+ },
+ other => {
+ if !other.is_ascii() {
+ break;
+ }
+ }
+ }
+ }
+ Err(Reject)
+}
+
+fn c_string(input: Cursor) -> Result<Cursor, Reject> {
+ if let Ok(input) = input.parse("c\"") {
+ cooked_c_string(input)
+ } else if let Ok(input) = input.parse("cr") {
+ raw_c_string(input)
+ } else {
+ Err(Reject)
+ }
+}
+
+fn raw_c_string(input: Cursor) -> Result<Cursor, Reject> {
+ let (input, delimiter) = delimiter_of_raw_string(input)?;
+ let mut bytes = input.bytes().enumerate();
+ while let Some((i, byte)) = bytes.next() {
+ match byte {
+ b'"' if input.rest[i + 1..].starts_with(delimiter) => {
+ let rest = input.advance(i + 1 + delimiter.len());
+ return Ok(literal_suffix(rest));
+ }
+ b'\r' => match bytes.next() {
+ Some((_, b'\n')) => {}
+ _ => break,
+ },
+ b'\0' => break,
+ _ => {}
+ }
+ }
+ Err(Reject)
+}
+
+fn cooked_c_string(mut input: Cursor) -> Result<Cursor, Reject> {
+ let mut chars = input.char_indices();
+
+ while let Some((i, ch)) = chars.next() {
+ match ch {
+ '"' => {
+ let input = input.advance(i + 1);
+ return Ok(literal_suffix(input));
+ }
+ '\r' => match chars.next() {
+ Some((_, '\n')) => {}
+ _ => break,
+ },
+ '\\' => match chars.next() {
+ Some((_, 'x')) => {
+ backslash_x_nonzero(&mut chars)?;
+ }
+ Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {}
+ Some((_, 'u')) => {
+ if backslash_u(&mut chars)? == '\0' {
+ break;
+ }
+ }
+ Some((newline, ch @ ('\n' | '\r'))) => {
+ input = input.advance(newline + 1);
+ trailing_backslash(&mut input, ch as u8)?;
+ chars = input.char_indices();
+ }
+ _ => break,
+ },
+ '\0' => break,
+ _ch => {}
+ }
+ }
+ Err(Reject)
+}
+
+fn byte(input: Cursor) -> Result<Cursor, Reject> {
+ let input = input.parse("b'")?;
+ let mut bytes = input.bytes().enumerate();
+ let ok = match bytes.next().map(|(_, b)| b) {
+ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+ Some(b'x') => backslash_x_byte(&mut bytes).is_ok(),
+ Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true,
+ _ => false,
+ },
+ b => b.is_some(),
+ };
+ if !ok {
+ return Err(Reject);
+ }
+ let (offset, _) = bytes.next().ok_or(Reject)?;
+ if !input.chars().as_str().is_char_boundary(offset) {
+ return Err(Reject);
+ }
+ let input = input.advance(offset).parse("'")?;
+ Ok(literal_suffix(input))
+}
+
+fn character(input: Cursor) -> Result<Cursor, Reject> {
+ let input = input.parse("'")?;
+ let mut chars = input.char_indices();
+ let ok = match chars.next().map(|(_, ch)| ch) {
+ Some('\\') => match chars.next().map(|(_, ch)| ch) {
+ Some('x') => backslash_x_char(&mut chars).is_ok(),
+ Some('u') => backslash_u(&mut chars).is_ok(),
+ Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true,
+ _ => false,
+ },
+ ch => ch.is_some(),
+ };
+ if !ok {
+ return Err(Reject);
+ }
+ let (idx, _) = chars.next().ok_or(Reject)?;
+ let input = input.advance(idx).parse("'")?;
+ Ok(literal_suffix(input))
+}
+
+macro_rules! next_ch {
+ ($chars:ident @ $pat:pat) => {
+ match $chars.next() {
+ Some((_, ch)) => match ch {
+ $pat => ch,
+ _ => return Err(Reject),
+ },
+ None => return Err(Reject),
+ }
+ };
+}
+
+fn backslash_x_char<I>(chars: &mut I) -> Result<(), Reject>
+where
+ I: Iterator<Item = (usize, char)>,
+{
+ next_ch!(chars @ '0'..='7');
+ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+ Ok(())
+}
+
+fn backslash_x_byte<I>(chars: &mut I) -> Result<(), Reject>
+where
+ I: Iterator<Item = (usize, u8)>,
+{
+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+ Ok(())
+}
+
+fn backslash_x_nonzero<I>(chars: &mut I) -> Result<(), Reject>
+where
+ I: Iterator<Item = (usize, char)>,
+{
+ let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+ let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+ if first == '0' && second == '0' {
+ Err(Reject)
+ } else {
+ Ok(())
+ }
+}
+
+fn backslash_u<I>(chars: &mut I) -> Result<char, Reject>
+where
+ I: Iterator<Item = (usize, char)>,
+{
+ next_ch!(chars @ '{');
+ let mut value = 0;
+ let mut len = 0;
+ for (_, ch) in chars {
+ let digit = match ch {
+ '0'..='9' => ch as u8 - b'0',
+ 'a'..='f' => 10 + ch as u8 - b'a',
+ 'A'..='F' => 10 + ch as u8 - b'A',
+ '_' if len > 0 => continue,
+ '}' if len > 0 => return char::from_u32(value).ok_or(Reject),
+ _ => break,
+ };
+ if len == 6 {
+ break;
+ }
+ value *= 0x10;
+ value += u32::from(digit);
+ len += 1;
+ }
+ Err(Reject)
+}
+
+fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> {
+ let mut whitespace = input.bytes().enumerate();
+ loop {
+        if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') {
+ return Err(Reject);
+ }
+ match whitespace.next() {
+ Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => {
+ last = b;
+ }
+ Some((offset, _)) => {
+ *input = input.advance(offset);
+ return Ok(());
+ }
+ None => return Err(Reject),
+ }
+ }
+}
+
+fn float(input: Cursor) -> Result<Cursor, Reject> {
+ let mut rest = float_digits(input)?;
+ if let Some(ch) = rest.chars().next() {
+ if is_ident_start(ch) {
+ rest = ident_not_raw(rest)?.0;
+ }
+ }
+ word_break(rest)
+}
+
+fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
+ let mut chars = input.chars().peekable();
+ match chars.next() {
+ Some(ch) if '0' <= ch && ch <= '9' => {}
+ _ => return Err(Reject),
+ }
+
+ let mut len = 1;
+ let mut has_dot = false;
+ let mut has_exp = false;
+ while let Some(&ch) = chars.peek() {
+ match ch {
+ '0'..='9' | '_' => {
+ chars.next();
+ len += 1;
+ }
+ '.' => {
+ if has_dot {
+ break;
+ }
+ chars.next();
+ if chars
+ .peek()
+ .map_or(false, |&ch| ch == '.' || is_ident_start(ch))
+ {
+ return Err(Reject);
+ }
+ len += 1;
+ has_dot = true;
+ }
+ 'e' | 'E' => {
+ chars.next();
+ len += 1;
+ has_exp = true;
+ break;
+ }
+ _ => break,
+ }
+ }
+
+ if !(has_dot || has_exp) {
+ return Err(Reject);
+ }
+
+ if has_exp {
+ let token_before_exp = if has_dot {
+ Ok(input.advance(len - 1))
+ } else {
+ Err(Reject)
+ };
+ let mut has_sign = false;
+ let mut has_exp_value = false;
+ while let Some(&ch) = chars.peek() {
+ match ch {
+ '+' | '-' => {
+ if has_exp_value {
+ break;
+ }
+ if has_sign {
+ return token_before_exp;
+ }
+ chars.next();
+ len += 1;
+ has_sign = true;
+ }
+ '0'..='9' => {
+ chars.next();
+ len += 1;
+ has_exp_value = true;
+ }
+ '_' => {
+ chars.next();
+ len += 1;
+ }
+ _ => break,
+ }
+ }
+ if !has_exp_value {
+ return token_before_exp;
+ }
+ }
+
+ Ok(input.advance(len))
+}
+
+fn int(input: Cursor) -> Result<Cursor, Reject> {
+ let mut rest = digits(input)?;
+ if let Some(ch) = rest.chars().next() {
+ if is_ident_start(ch) {
+ rest = ident_not_raw(rest)?.0;
+ }
+ }
+ word_break(rest)
+}
+
+fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
+ let base = if input.starts_with("0x") {
+ input = input.advance(2);
+ 16
+ } else if input.starts_with("0o") {
+ input = input.advance(2);
+ 8
+ } else if input.starts_with("0b") {
+ input = input.advance(2);
+ 2
+ } else {
+ 10
+ };
+
+ let mut len = 0;
+ let mut empty = true;
+ for b in input.bytes() {
+ match b {
+ b'0'..=b'9' => {
+ let digit = (b - b'0') as u64;
+ if digit >= base {
+ return Err(Reject);
+ }
+ }
+ b'a'..=b'f' => {
+ let digit = 10 + (b - b'a') as u64;
+ if digit >= base {
+ break;
+ }
+ }
+ b'A'..=b'F' => {
+ let digit = 10 + (b - b'A') as u64;
+ if digit >= base {
+ break;
+ }
+ }
+ b'_' => {
+ if empty && base == 10 {
+ return Err(Reject);
+ }
+ len += 1;
+ continue;
+ }
+ _ => break,
+ };
+ len += 1;
+ empty = false;
+ }
+ if empty {
+ Err(Reject)
+ } else {
+ Ok(input.advance(len))
+ }
+}
+
+fn punct(input: Cursor) -> PResult<Punct> {
+ let (rest, ch) = punct_char(input)?;
+ if ch == '\'' {
+ if ident_any(rest)?.0.starts_with_char('\'') {
+ Err(Reject)
+ } else {
+ Ok((rest, Punct::new('\'', Spacing::Joint)))
+ }
+ } else {
+ let kind = match punct_char(rest) {
+ Ok(_) => Spacing::Joint,
+ Err(Reject) => Spacing::Alone,
+ };
+ Ok((rest, Punct::new(ch, kind)))
+ }
+}
+
+fn punct_char(input: Cursor) -> PResult<char> {
+ if input.starts_with("//") || input.starts_with("/*") {
+ // Do not accept `/` of a comment as a punct.
+ return Err(Reject);
+ }
+
+ let mut chars = input.chars();
+ let first = match chars.next() {
+ Some(ch) => ch,
+ None => {
+ return Err(Reject);
+ }
+ };
+ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+ if recognized.contains(first) {
+ Ok((input.advance(first.len_utf8()), first))
+ } else {
+ Err(Reject)
+ }
+}
+
+fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> {
+ #[cfg(span_locations)]
+ let lo = input.off;
+ let (rest, (comment, inner)) = doc_comment_contents(input)?;
+ let fallback_span = Span {
+ #[cfg(span_locations)]
+ lo,
+ #[cfg(span_locations)]
+ hi: rest.off,
+ };
+ let span = crate::Span::_new_fallback(fallback_span);
+
+ let mut scan_for_bare_cr = comment;
+ while let Some(cr) = scan_for_bare_cr.find('\r') {
+ let rest = &scan_for_bare_cr[cr + 1..];
+ if !rest.starts_with('\n') {
+ return Err(Reject);
+ }
+ scan_for_bare_cr = rest;
+ }
+
+ let mut pound = Punct::new('#', Spacing::Alone);
+ pound.set_span(span);
+ trees.push_token_from_parser(TokenTree::Punct(pound));
+
+ if inner {
+ let mut bang = Punct::new('!', Spacing::Alone);
+ bang.set_span(span);
+ trees.push_token_from_parser(TokenTree::Punct(bang));
+ }
+
+    let doc_ident = crate::Ident::_new(crate::imp::Ident::new_unchecked("doc", fallback_span));
+ let mut equal = Punct::new('=', Spacing::Alone);
+ equal.set_span(span);
+ let mut literal = crate::Literal::string(comment);
+ literal.set_span(span);
+ let mut bracketed = TokenStreamBuilder::with_capacity(3);
+ bracketed.push_token_from_parser(TokenTree::Ident(doc_ident));
+ bracketed.push_token_from_parser(TokenTree::Punct(equal));
+ bracketed.push_token_from_parser(TokenTree::Literal(literal));
+ let group = Group::new(Delimiter::Bracket, bracketed.build());
+ let mut group = crate::Group::_new_fallback(group);
+ group.set_span(span);
+ trees.push_token_from_parser(TokenTree::Group(group));
+
+ Ok((rest, ()))
+}
+
+fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
+ if input.starts_with("//!") {
+ let input = input.advance(3);
+ let (input, s) = take_until_newline_or_eof(input);
+ Ok((input, (s, true)))
+ } else if input.starts_with("/*!") {
+ let (input, s) = block_comment(input)?;
+ Ok((input, (&s[3..s.len() - 2], true)))
+ } else if input.starts_with("///") {
+ let input = input.advance(3);
+ if input.starts_with_char('/') {
+ return Err(Reject);
+ }
+ let (input, s) = take_until_newline_or_eof(input);
+ Ok((input, (s, false)))
+ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
+ let (input, s) = block_comment(input)?;
+ Ok((input, (&s[3..s.len() - 2], false)))
+ } else {
+ Err(Reject)
+ }
+}
+
+fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
+ let chars = input.char_indices();
+
+ for (i, ch) in chars {
+ if ch == '\n' {
+ return (input.advance(i), &input.rest[..i]);
+ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
+ return (input.advance(i + 1), &input.rest[..i]);
+ }
+ }
+
+ (input.advance(input.len()), input.rest)
+}
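For orientation when reading this lexer: doc comments are not discarded as whitespace but re-emitted as attribute tokens, and malformed input surfaces as a LexError. A rough sketch of the observable behavior, assuming a non-proc-macro context so this fallback parser is the one running:

use proc_macro2::TokenStream;

fn main() {
    // "/// hello" becomes the two trees of an outer attribute: `#` and `[doc = " hello"]`.
    let ts: TokenStream = "/// hello".parse().unwrap();
    assert_eq!(ts.into_iter().count(), 2);

    // An unterminated block comment is rejected with a LexError.
    assert!("/*/".parse::<TokenStream>().is_err());
}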
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/rcvec.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/rcvec.rs
new file mode 100644
index 0000000000..37955afb11
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/rcvec.rs
@@ -0,0 +1,145 @@
+use alloc::rc::Rc;
+use alloc::vec;
+use core::mem;
+use core::panic::RefUnwindSafe;
+use core::slice;
+
+pub(crate) struct RcVec<T> {
+ inner: Rc<Vec<T>>,
+}
+
+pub(crate) struct RcVecBuilder<T> {
+ inner: Vec<T>,
+}
+
+pub(crate) struct RcVecMut<'a, T> {
+ inner: &'a mut Vec<T>,
+}
+
+#[derive(Clone)]
+pub(crate) struct RcVecIntoIter<T> {
+ inner: vec::IntoIter<T>,
+}
+
+impl<T> RcVec<T> {
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ pub fn len(&self) -> usize {
+ self.inner.len()
+ }
+
+ pub fn iter(&self) -> slice::Iter<T> {
+ self.inner.iter()
+ }
+
+ pub fn make_mut(&mut self) -> RcVecMut<T>
+ where
+ T: Clone,
+ {
+ RcVecMut {
+ inner: Rc::make_mut(&mut self.inner),
+ }
+ }
+
+ pub fn get_mut(&mut self) -> Option<RcVecMut<T>> {
+ let inner = Rc::get_mut(&mut self.inner)?;
+ Some(RcVecMut { inner })
+ }
+
+ pub fn make_owned(mut self) -> RcVecBuilder<T>
+ where
+ T: Clone,
+ {
+ let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) {
+ mem::take(owned)
+ } else {
+ Vec::clone(&self.inner)
+ };
+ RcVecBuilder { inner: vec }
+ }
+}
+
+impl<T> RcVecBuilder<T> {
+ pub fn new() -> Self {
+ RcVecBuilder { inner: Vec::new() }
+ }
+
+ pub fn with_capacity(cap: usize) -> Self {
+ RcVecBuilder {
+ inner: Vec::with_capacity(cap),
+ }
+ }
+
+ pub fn push(&mut self, element: T) {
+ self.inner.push(element);
+ }
+
+ pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
+ self.inner.extend(iter);
+ }
+
+ pub fn as_mut(&mut self) -> RcVecMut<T> {
+ RcVecMut {
+ inner: &mut self.inner,
+ }
+ }
+
+ pub fn build(self) -> RcVec<T> {
+ RcVec {
+ inner: Rc::new(self.inner),
+ }
+ }
+}
+
+impl<'a, T> RcVecMut<'a, T> {
+ pub fn push(&mut self, element: T) {
+ self.inner.push(element);
+ }
+
+ pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
+ self.inner.extend(iter);
+ }
+
+ pub fn pop(&mut self) -> Option<T> {
+ self.inner.pop()
+ }
+
+ pub fn as_mut(&mut self) -> RcVecMut<T> {
+ RcVecMut { inner: self.inner }
+ }
+}
+
+impl<T> Clone for RcVec<T> {
+ fn clone(&self) -> Self {
+ RcVec {
+ inner: Rc::clone(&self.inner),
+ }
+ }
+}
+
+impl<T> IntoIterator for RcVecBuilder<T> {
+ type Item = T;
+ type IntoIter = RcVecIntoIter<T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ RcVecIntoIter {
+ inner: self.inner.into_iter(),
+ }
+ }
+}
+
+impl<T> Iterator for RcVecIntoIter<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<T> RefUnwindSafe for RcVec<T> where T: RefUnwindSafe {}
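RcVec is a small clone-on-write wrapper; the core mechanism is Rc::make_mut, which deep-clones the inner Vec only when the Rc is shared. A standalone sketch of that pattern:

use std::rc::Rc;

fn main() {
    let original: Rc<Vec<u32>> = Rc::new(vec![1, 2, 3]);
    let mut copy = Rc::clone(&original); // cheap: both handles share one Vec

    // The Rc is shared, so make_mut clones the Vec before handing out &mut.
    Rc::make_mut(&mut copy).push(4);

    assert_eq!(*original, vec![1, 2, 3]); // the shared original is untouched
    assert_eq!(*copy, vec![1, 2, 3, 4]);
}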
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/src/wrapper.rs b/rust/hw/char/pl011/vendor/proc-macro2/src/wrapper.rs
new file mode 100644
index 0000000000..87e348dbb3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/src/wrapper.rs
@@ -0,0 +1,993 @@
+use crate::detection::inside_proc_macro;
+#[cfg(span_locations)]
+use crate::location::LineColumn;
+use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+use core::fmt::{self, Debug, Display};
+#[cfg(span_locations)]
+use core::ops::Range;
+use core::ops::RangeBounds;
+use core::str::FromStr;
+use std::ffi::CStr;
+use std::panic;
+#[cfg(super_unstable)]
+use std::path::PathBuf;
+
+#[derive(Clone)]
+pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+}
+
+// Work around https://github.com/rust-lang/rust/issues/65080.
+// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
+// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+// late as possible to batch together consecutive uses of the Extend impl.
+#[derive(Clone)]
+pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+}
+
+pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+
+ // Rustc was supposed to return a LexError, but it panicked instead.
+ // https://github.com/rust-lang/rust/issues/58736
+ CompilerPanic,
+}
+
+#[cold]
+fn mismatch(line: u32) -> ! {
+ #[cfg(procmacro2_backtrace)]
+ {
+ let backtrace = std::backtrace::Backtrace::force_capture();
+ panic!("compiler/fallback mismatch #{}\n\n{}", line, backtrace)
+ }
+ #[cfg(not(procmacro2_backtrace))]
+ {
+ panic!("compiler/fallback mismatch #{}", line)
+ }
+}
+
+impl DeferredTokenStream {
+ fn new(stream: proc_macro::TokenStream) -> Self {
+ DeferredTokenStream {
+ stream,
+ extra: Vec::new(),
+ }
+ }
+
+ fn is_empty(&self) -> bool {
+ self.stream.is_empty() && self.extra.is_empty()
+ }
+
+ fn evaluate_now(&mut self) {
+        // If-check provides a fast short circuit for the common case of `extra`
+ // being empty, which saves a round trip over the proc macro bridge.
+ // Improves macro expansion time in winrt by 6% in debug mode.
+ if !self.extra.is_empty() {
+ self.stream.extend(self.extra.drain(..));
+ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+ self.evaluate_now();
+ self.stream
+ }
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ if inside_proc_macro() {
+            TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ match self {
+ TokenStream::Compiler(tts) => tts.is_empty(),
+ TokenStream::Fallback(tts) => tts.is_empty(),
+ }
+ }
+
+ fn unwrap_nightly(self) -> proc_macro::TokenStream {
+ match self {
+ TokenStream::Compiler(s) => s.into_token_stream(),
+ TokenStream::Fallback(_) => mismatch(line!()),
+ }
+ }
+
+ fn unwrap_stable(self) -> fallback::TokenStream {
+ match self {
+ TokenStream::Compiler(_) => mismatch(line!()),
+ TokenStream::Fallback(s) => s,
+ }
+ }
+}
+
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+ }
+ }
+}
+
+// Work around https://github.com/rust-lang/rust/issues/58736.
+fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
+    let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler));
+ result.unwrap_or_else(|_| Err(LexError::CompilerPanic))
+}
+
+impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+            TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
+ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+}
+
+impl From<proc_macro::TokenStream> for TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> Self {
+ TokenStream::Compiler(DeferredTokenStream::new(inner))
+ }
+}
+
+impl From<TokenStream> for proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> Self {
+ match inner {
+ TokenStream::Compiler(inner) => inner.into_token_stream(),
+ TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
+ }
+ }
+}
+
+impl From<fallback::TokenStream> for TokenStream {
+ fn from(inner: fallback::TokenStream) -> Self {
+ TokenStream::Fallback(inner)
+ }
+}
+
+// Assumes inside_proc_macro().
+fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Punct(tt) => {
+ let spacing = match tt.spacing() {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+ let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
+ punct.set_span(tt.span().inner.unwrap_nightly());
+ punct.into()
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+ }
+}
+
+impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> Self {
+ if inside_proc_macro() {
+            TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+ }
+ }
+}
+
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+ } else {
+ TokenStream::Fallback(trees.into_iter().collect())
+ }
+ }
+}
+
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+ Some(TokenStream::Compiler(mut first)) => {
+ first.evaluate_now();
+ first.stream.extend(streams.map(|s| match s {
+ TokenStream::Compiler(s) => s.into_token_stream(),
+ TokenStream::Fallback(_) => mismatch(line!()),
+ }));
+ TokenStream::Compiler(first)
+ }
+ Some(TokenStream::Fallback(mut first)) => {
+ first.extend(streams.map(|s| match s {
+ TokenStream::Fallback(s) => s,
+ TokenStream::Compiler(_) => mismatch(line!()),
+ }));
+ TokenStream::Fallback(first)
+ }
+ None => TokenStream::new(),
+ }
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+ for token in stream {
+ tts.extra.push(into_compiler_token(token));
+ }
+ }
+ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+                    .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+                tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+}
+
+impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+            TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
+ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+}
+
+impl LexError {
+ pub(crate) fn span(&self) -> Span {
+ match self {
+            LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(),
+ LexError::Fallback(e) => Span::Fallback(e.span()),
+ }
+ }
+}
+
+impl From<proc_macro::LexError> for LexError {
+ fn from(e: proc_macro::LexError) -> Self {
+ LexError::Compiler(e)
+ }
+}
+
+impl From<fallback::LexError> for LexError {
+ fn from(e: fallback::LexError) -> Self {
+ LexError::Fallback(e)
+ }
+}
+
+impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ LexError::Compiler(e) => Debug::fmt(e, f),
+ LexError::Fallback(e) => Debug::fmt(e, f),
+ LexError::CompilerPanic => {
+ let fallback = fallback::LexError::call_site();
+ Debug::fmt(&fallback, f)
+ }
+ }
+ }
+}
+
+impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ LexError::Compiler(e) => Display::fmt(e, f),
+ LexError::Fallback(e) => Display::fmt(e, f),
+ LexError::CompilerPanic => {
+ let fallback = fallback::LexError::call_site();
+ Display::fmt(&fallback, f)
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+}
+
+impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+ fn into_iter(self) -> TokenTreeIter {
+ match self {
+ TokenStream::Compiler(tts) => {
+ TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
+ }
+            TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
+ }
+ }
+}
+
+impl Iterator for TokenTreeIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ let token = match self {
+ TokenTreeIter::Compiler(iter) => iter.next()?,
+ TokenTreeIter::Fallback(iter) => return iter.next(),
+ };
+ Some(match token {
+            proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(),
+ proc_macro::TokenTree::Punct(tt) => {
+ let spacing = match tt.spacing() {
+ proc_macro::Spacing::Joint => Spacing::Joint,
+ proc_macro::Spacing::Alone => Spacing::Alone,
+ };
+ let mut o = Punct::new(tt.as_char(), spacing);
+ o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
+ o.into()
+ }
+            proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(),
+            proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(),
+ })
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ match self {
+ TokenTreeIter::Compiler(tts) => tts.size_hint(),
+ TokenTreeIter::Fallback(tts) => tts.size_hint(),
+ }
+ }
+}
+
+#[derive(Clone, PartialEq, Eq)]
+#[cfg(super_unstable)]
+pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+}
+
+#[cfg(super_unstable)]
+impl SourceFile {
+ fn nightly(sf: proc_macro::SourceFile) -> Self {
+ SourceFile::Compiler(sf)
+ }
+
+ /// Get the path to this source file as a string.
+ pub fn path(&self) -> PathBuf {
+ match self {
+ SourceFile::Compiler(a) => a.path(),
+ SourceFile::Fallback(a) => a.path(),
+ }
+ }
+
+ pub fn is_real(&self) -> bool {
+ match self {
+ SourceFile::Compiler(a) => a.is_real(),
+ SourceFile::Fallback(a) => a.is_real(),
+ }
+ }
+}
+
+#[cfg(super_unstable)]
+impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ SourceFile::Compiler(a) => Debug::fmt(a, f),
+ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+}
+
+#[derive(Copy, Clone)]
+pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+}
+
+impl Span {
+ pub fn call_site() -> Self {
+ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
+ pub fn mixed_site() -> Self {
+ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::mixed_site())
+ } else {
+ Span::Fallback(fallback::Span::mixed_site())
+ }
+ }
+
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Self {
+ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
+            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
+            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
+ (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
+ }
+ }
+
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
+            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
+            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
+ (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
+ }
+ }
+
+ pub fn unwrap(self) -> proc_macro::Span {
+ match self {
+ Span::Compiler(s) => s,
+            Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
+ }
+ }
+
+ #[cfg(super_unstable)]
+ pub fn source_file(&self) -> SourceFile {
+ match self {
+ Span::Compiler(s) => SourceFile::nightly(s.source_file()),
+ Span::Fallback(s) => SourceFile::Fallback(s.source_file()),
+ }
+ }
+
+ #[cfg(span_locations)]
+ pub fn byte_range(&self) -> Range<usize> {
+ match self {
+ #[cfg(proc_macro_span)]
+ Span::Compiler(s) => s.byte_range(),
+ #[cfg(not(proc_macro_span))]
+ Span::Compiler(_) => 0..0,
+ Span::Fallback(s) => s.byte_range(),
+ }
+ }
+
+ #[cfg(span_locations)]
+ pub fn start(&self) -> LineColumn {
+ match self {
+ Span::Compiler(_) => LineColumn { line: 0, column: 0 },
+ Span::Fallback(s) => s.start(),
+ }
+ }
+
+ #[cfg(span_locations)]
+ pub fn end(&self) -> LineColumn {
+ match self {
+ Span::Compiler(_) => LineColumn { line: 0, column: 0 },
+ Span::Fallback(s) => s.end(),
+ }
+ }
+
+ pub fn join(&self, other: Span) -> Option<Span> {
+ let ret = match (self, other) {
+ #[cfg(proc_macro_span)]
+            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
+            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
+ _ => return None,
+ };
+ Some(ret)
+ }
+
+ #[cfg(super_unstable)]
+ pub fn eq(&self, other: &Span) -> bool {
+ match (self, other) {
+ (Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
+ (Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
+ _ => false,
+ }
+ }
+
+ pub fn source_text(&self) -> Option<String> {
+ match self {
+ #[cfg(not(no_source_text))]
+ Span::Compiler(s) => s.source_text(),
+ #[cfg(no_source_text)]
+ Span::Compiler(_) => None,
+ Span::Fallback(s) => s.source_text(),
+ }
+ }
+
+ fn unwrap_nightly(self) -> proc_macro::Span {
+ match self {
+ Span::Compiler(s) => s,
+ Span::Fallback(_) => mismatch(line!()),
+ }
+ }
+}
+
+impl From<proc_macro::Span> for crate::Span {
+ fn from(proc_span: proc_macro::Span) -> Self {
+ crate::Span::_new(Span::Compiler(proc_span))
+ }
+}
+
+impl From<fallback::Span> for Span {
+ fn from(inner: fallback::Span) -> Self {
+ Span::Fallback(inner)
+ }
+}
+
+impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Span::Compiler(s) => Debug::fmt(s, f),
+ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+}
+
+pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+ }
+        Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
+ }
+}
+
+#[derive(Clone)]
+pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+}
+
+impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
+ match stream {
+ TokenStream::Compiler(tts) => {
+ let delimiter = match delimiter {
+                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
+ Delimiter::Bracket => proc_macro::Delimiter::Bracket,
+ Delimiter::Brace => proc_macro::Delimiter::Brace,
+ Delimiter::None => proc_macro::Delimiter::None,
+ };
+                Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
+ }
+ TokenStream::Fallback(stream) => {
+ Group::Fallback(fallback::Group::new(delimiter, stream))
+ }
+ }
+ }
+
+ pub fn delimiter(&self) -> Delimiter {
+ match self {
+ Group::Compiler(g) => match g.delimiter() {
+ proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
+ proc_macro::Delimiter::Bracket => Delimiter::Bracket,
+ proc_macro::Delimiter::Brace => Delimiter::Brace,
+ proc_macro::Delimiter::None => Delimiter::None,
+ },
+ Group::Fallback(g) => g.delimiter(),
+ }
+ }
+
+ pub fn stream(&self) -> TokenStream {
+ match self {
+            Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
+ Group::Fallback(g) => TokenStream::Fallback(g.stream()),
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ match self {
+ Group::Compiler(g) => Span::Compiler(g.span()),
+ Group::Fallback(g) => Span::Fallback(g.span()),
+ }
+ }
+
+ pub fn span_open(&self) -> Span {
+ match self {
+ Group::Compiler(g) => Span::Compiler(g.span_open()),
+ Group::Fallback(g) => Span::Fallback(g.span_open()),
+ }
+ }
+
+ pub fn span_close(&self) -> Span {
+ match self {
+ Group::Compiler(g) => Span::Compiler(g.span_close()),
+ Group::Fallback(g) => Span::Fallback(g.span_close()),
+ }
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ match (self, span) {
+ (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
+ (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
+ (Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
+ (Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
+ }
+ }
+
+ fn unwrap_nightly(self) -> proc_macro::Group {
+ match self {
+ Group::Compiler(g) => g,
+ Group::Fallback(_) => mismatch(line!()),
+ }
+ }
+}
+
+impl From<fallback::Group> for Group {
+ fn from(g: fallback::Group) -> Self {
+ Group::Fallback(g)
+ }
+}
+
+impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Group::Compiler(group) => Display::fmt(group, formatter),
+ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+}
+
+impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Group::Compiler(group) => Debug::fmt(group, formatter),
+ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+}
+
+#[derive(Clone)]
+pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+}
+
+impl Ident {
+ #[track_caller]
+ pub fn new_checked(string: &str, span: Span) -> Self {
+ match span {
+            Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
+            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)),
+ }
+ }
+
+ pub fn new_unchecked(string: &str, span: fallback::Span) -> Self {
+ Ident::Fallback(fallback::Ident::new_unchecked(string, span))
+ }
+
+ #[track_caller]
+ pub fn new_raw_checked(string: &str, span: Span) -> Self {
+ match span {
+            Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)),
+            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)),
+ }
+ }
+
+ pub fn new_raw_unchecked(string: &str, span: fallback::Span) -> Self {
+ Ident::Fallback(fallback::Ident::new_raw_unchecked(string, span))
+ }
+
+ pub fn span(&self) -> Span {
+ match self {
+ Ident::Compiler(t) => Span::Compiler(t.span()),
+ Ident::Fallback(t) => Span::Fallback(t.span()),
+ }
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ match (self, span) {
+ (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
+ (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
+ (Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
+ (Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
+ }
+ }
+
+ fn unwrap_nightly(self) -> proc_macro::Ident {
+ match self {
+ Ident::Compiler(s) => s,
+ Ident::Fallback(_) => mismatch(line!()),
+ }
+ }
+}
+
+impl PartialEq for Ident {
+ fn eq(&self, other: &Ident) -> bool {
+ match (self, other) {
+            (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
+ (Ident::Fallback(t), Ident::Fallback(o)) => t == o,
+ (Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()),
+ (Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()),
+ }
+ }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+ T: ?Sized + AsRef<str>,
+{
+ fn eq(&self, other: &T) -> bool {
+ let other = other.as_ref();
+ match self {
+ Ident::Compiler(t) => t.to_string() == other,
+ Ident::Fallback(t) => t == other,
+ }
+ }
+}
+
+impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Ident::Compiler(t) => Display::fmt(t, f),
+ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+}
+
+impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Ident::Compiler(t) => Debug::fmt(t, f),
+ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+}
+
+#[derive(Clone)]
+pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+}
+
+macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+ }
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+ }
+ }
+ )*)
+}
+
+impl Literal {
+ pub unsafe fn from_str_unchecked(repr: &str) -> Self {
+ if inside_proc_macro() {
+            Literal::Compiler(proc_macro::Literal::from_str(repr).expect("invalid literal"))
+ } else {
+            Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) })
+ }
+ }
+
+ suffixed_numbers! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+
+ f32_suffixed => f32,
+ f64_suffixed => f64,
+ }
+
+ unsuffixed_integers! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+ }
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+ }
+ }
+
+ pub fn string(string: &str) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(string))
+ } else {
+ Literal::Fallback(fallback::Literal::string(string))
+ }
+ }
+
+ pub fn character(ch: char) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(ch))
+ } else {
+ Literal::Fallback(fallback::Literal::character(ch))
+ }
+ }
+
+ pub fn byte_character(byte: u8) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler({
+ #[cfg(not(no_literal_byte_character))]
+ {
+ proc_macro::Literal::byte_character(byte)
+ }
+
+ #[cfg(no_literal_byte_character)]
+ {
+ let fallback = fallback::Literal::byte_character(byte);
+ fallback.repr.parse::<proc_macro::Literal>().unwrap()
+ }
+ })
+ } else {
+ Literal::Fallback(fallback::Literal::byte_character(byte))
+ }
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+ }
+ }
+
+ pub fn c_string(string: &CStr) -> Literal {
+ if inside_proc_macro() {
+ Literal::Compiler({
+ #[cfg(not(no_literal_c_string))]
+ {
+ proc_macro::Literal::c_string(string)
+ }
+
+ #[cfg(no_literal_c_string)]
+ {
+ let fallback = fallback::Literal::c_string(string);
+ fallback.repr.parse::<proc_macro::Literal>().unwrap()
+ }
+ })
+ } else {
+ Literal::Fallback(fallback::Literal::c_string(string))
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ match self {
+ Literal::Compiler(lit) => Span::Compiler(lit.span()),
+ Literal::Fallback(lit) => Span::Fallback(lit.span()),
+ }
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ match (self, span) {
+ (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
+ (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
+ (Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
+ (Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
+ }
+ }
+
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ match self {
+ #[cfg(proc_macro_span)]
+ Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler),
+ #[cfg(not(proc_macro_span))]
+ Literal::Compiler(_lit) => None,
+ Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
+ }
+ }
+
+ fn unwrap_nightly(self) -> proc_macro::Literal {
+ match self {
+ Literal::Compiler(s) => s,
+ Literal::Fallback(_) => mismatch(line!()),
+ }
+ }
+}
+
+impl From<fallback::Literal> for Literal {
+ fn from(s: fallback::Literal) -> Self {
+ Literal::Fallback(s)
+ }
+}
+
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(repr: &str) -> Result<Self, Self::Err> {
+ if inside_proc_macro() {
+ let literal = proc_macro::Literal::from_str(repr)?;
+ Ok(Literal::Compiler(literal))
+ } else {
+ let literal = fallback::Literal::from_str(repr)?;
+ Ok(Literal::Fallback(literal))
+ }
+ }
+}
+
+impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Literal::Compiler(t) => Display::fmt(t, f),
+ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+}
+
+impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Literal::Compiler(t) => Debug::fmt(t, f),
+ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+}
+
+#[cfg(span_locations)]
+pub(crate) fn invalidate_current_thread_spans() {
+ if inside_proc_macro() {
+ panic!(
+            "proc_macro2::extra::invalidate_current_thread_spans is not available in procedural macros"
+ );
+ } else {
+ crate::fallback::invalidate_current_thread_spans();
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/tests/comments.rs b/rust/hw/char/pl011/vendor/proc-macro2/tests/comments.rs
new file mode 100644
index 0000000000..4f7236dea9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/tests/comments.rs
@@ -0,0 +1,105 @@
+#![allow(clippy::assertions_on_result_states)]
+
+use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
+
+// #[doc = "..."] -> "..."
+fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal {
+ lit_of_doc_comment(tokens, false)
+}
+
+// #![doc = "..."] -> "..."
+fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal {
+ lit_of_doc_comment(tokens, true)
+}
+
+fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal {
+ let mut iter = tokens.clone().into_iter();
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '#');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ if inner {
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '!');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ }
+ iter = match iter.next().unwrap() {
+ TokenTree::Group(group) => {
+ assert_eq!(group.delimiter(), Delimiter::Bracket);
+ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
+ group.stream().into_iter()
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ };
+ match iter.next().unwrap() {
+ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '=');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ match iter.next().unwrap() {
+ TokenTree::Literal(literal) => {
+ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
+ literal
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+}
+
+#[test]
+fn closed_immediately() {
+ let stream = "/**/".parse::<TokenStream>().unwrap();
+ let tokens = stream.into_iter().collect::<Vec<_>>();
+ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+}
+
+#[test]
+fn incomplete() {
+ assert!("/*/".parse::<TokenStream>().is_err());
+}
+
+#[test]
+fn lit() {
+ let stream = "/// doc".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(&stream);
+ assert_eq!(lit.to_string(), "\" doc\"");
+
+ let stream = "//! doc".parse::<TokenStream>().unwrap();
+ let lit = lit_of_inner_doc_comment(&stream);
+ assert_eq!(lit.to_string(), "\" doc\"");
+
+ let stream = "/** doc */".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(&stream);
+ assert_eq!(lit.to_string(), "\" doc \"");
+
+ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
+ let lit = lit_of_inner_doc_comment(&stream);
+ assert_eq!(lit.to_string(), "\" doc \"");
+}
+
+#[test]
+fn carriage_return() {
+ let stream = "///\r\n".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(&stream);
+ assert_eq!(lit.to_string(), "\"\"");
+
+ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(&stream);
+ assert_eq!(lit.to_string(), "\"\\r\\n\"");
+
+ "///\r".parse::<TokenStream>().unwrap_err();
+ "///\r \n".parse::<TokenStream>().unwrap_err();
+ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/tests/features.rs b/rust/hw/char/pl011/vendor/proc-macro2/tests/features.rs
new file mode 100644
index 0000000000..073f6e60fb
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/tests/features.rs
@@ -0,0 +1,8 @@
+#[test]
+#[ignore]
+fn make_sure_no_proc_macro() {
+ assert!(
+ !cfg!(feature = "proc-macro"),
+ "still compiled with proc_macro?"
+ );
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/tests/marker.rs b/rust/hw/char/pl011/vendor/proc-macro2/tests/marker.rs
new file mode 100644
index 0000000000..99f64c068f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/tests/marker.rs
@@ -0,0 +1,100 @@
+#![allow(clippy::extra_unused_type_parameters)]
+
+use proc_macro2::{
+    Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
+};
+
+macro_rules! assert_impl {
+ ($ty:ident is $($marker:ident) and +) => {
+ #[test]
+ #[allow(non_snake_case)]
+ fn $ty() {
+ fn assert_implemented<T: $($marker +)+>() {}
+ assert_implemented::<$ty>();
+ }
+ };
+
+ ($ty:ident is not $($marker:ident) or +) => {
+ #[test]
+ #[allow(non_snake_case)]
+ fn $ty() {
+ $(
+ {
+ // Implemented for types that implement $marker.
+ #[allow(dead_code)]
+ trait IsNotImplemented {
+ fn assert_not_implemented() {}
+ }
+ impl<T: $marker> IsNotImplemented for T {}
+
+ // Implemented for the type being tested.
+ trait IsImplemented {
+ fn assert_not_implemented() {}
+ }
+ impl IsImplemented for $ty {}
+
+ // If $ty does not implement $marker, there is no ambiguity
+ // in the following trait method call.
+ <$ty>::assert_not_implemented();
+ }
+ )+
+ }
+ };
+}
+
+assert_impl!(Delimiter is Send and Sync);
+assert_impl!(Spacing is Send and Sync);
+
+assert_impl!(Group is not Send or Sync);
+assert_impl!(Ident is not Send or Sync);
+assert_impl!(LexError is not Send or Sync);
+assert_impl!(Literal is not Send or Sync);
+assert_impl!(Punct is not Send or Sync);
+assert_impl!(Span is not Send or Sync);
+assert_impl!(TokenStream is not Send or Sync);
+assert_impl!(TokenTree is not Send or Sync);
+
+#[cfg(procmacro2_semver_exempt)]
+mod semver_exempt {
+ use proc_macro2::{LineColumn, SourceFile};
+
+ assert_impl!(LineColumn is Send and Sync);
+
+ assert_impl!(SourceFile is not Send or Sync);
+}
+
+mod unwind_safe {
+ use proc_macro2::{
+        Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
+ };
+ #[cfg(procmacro2_semver_exempt)]
+ use proc_macro2::{LineColumn, SourceFile};
+ use std::panic::{RefUnwindSafe, UnwindSafe};
+
+ macro_rules! assert_unwind_safe {
+ ($($types:ident)*) => {
+ $(
+ assert_impl!($types is UnwindSafe and RefUnwindSafe);
+ )*
+ };
+ }
+
+ assert_unwind_safe! {
+ Delimiter
+ Group
+ Ident
+ LexError
+ Literal
+ Punct
+ Spacing
+ Span
+ TokenStream
+ TokenTree
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ assert_unwind_safe! {
+ LineColumn
+ SourceFile
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/tests/test.rs b/rust/hw/char/pl011/vendor/proc-macro2/tests/test.rs
new file mode 100644
index 0000000000..0d7c88d3df
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/tests/test.rs
@@ -0,0 +1,905 @@
+#![allow(
+ clippy::assertions_on_result_states,
+ clippy::items_after_statements,
+ clippy::needless_pass_by_value,
+ clippy::needless_raw_string_hashes,
+ clippy::non_ascii_literal,
+ clippy::octal_escapes
+)]
+
+use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::ffi::CStr;
+use std::iter;
+use std::str::{self, FromStr};
+
+#[test]
+fn idents() {
+ assert_eq!(
+ Ident::new("String", Span::call_site()).to_string(),
+ "String"
+ );
+ assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
+ assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
+}
+
+#[test]
+fn raw_idents() {
+ assert_eq!(
+ Ident::new_raw("String", Span::call_site()).to_string(),
+ "r#String"
+ );
+ assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
+}
+
+#[test]
+#[should_panic(expected = "`r#_` cannot be a raw identifier")]
+fn ident_raw_underscore() {
+ Ident::new_raw("_", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "`r#super` cannot be a raw identifier")]
+fn ident_raw_reserved() {
+ Ident::new_raw("super", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "Ident is not allowed to be empty; use
Option<Ident>")]
+fn ident_empty() {
+ Ident::new("", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
+fn ident_number() {
+ Ident::new("255", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "\"a#\" is not a valid Ident")]
+fn ident_invalid() {
+ Ident::new("a#", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "not a valid Ident")]
+fn raw_ident_empty() {
+ Ident::new("r#", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "not a valid Ident")]
+fn raw_ident_number() {
+ Ident::new("r#255", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
+fn raw_ident_invalid() {
+ Ident::new("r#a#", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "not a valid Ident")]
+fn lifetime_empty() {
+ Ident::new("'", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "not a valid Ident")]
+fn lifetime_number() {
+ Ident::new("'255", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = r#""'a#" is not a valid Ident"#)]
+fn lifetime_invalid() {
+ Ident::new("'a#", Span::call_site());
+}
+
+#[test]
+fn literal_string() {
+ #[track_caller]
+ fn assert(literal: Literal, expected: &str) {
+ assert_eq!(literal.to_string(), expected.trim());
+ }
+
+ assert(Literal::string(""), r#" "" "#);
+ assert(Literal::string("aA"), r#" "aA" "#);
+ assert(Literal::string("\t"), r#" "\t" "#);
+ assert(Literal::string("❤"), r#" "❤" "#);
+ assert(Literal::string("'"), r#" "'" "#);
+ assert(Literal::string("\""), r#" "\"" "#);
+ assert(Literal::string("\0"), r#" "\0" "#);
+ assert(Literal::string("\u{1}"), r#" "\u{1}" "#);
+ assert(
+ Literal::string("a\00b\07c\08d\0e\0"),
+ r#" "a\x000b\x007c\08d\0e\0" "#,
+ );
+
+ "\"\\\r\n x\"".parse::<TokenStream>().unwrap();
+ "\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err();
+}
+
+#[test]
+fn literal_raw_string() {
+ "r\"\r\n\"".parse::<TokenStream>().unwrap();
+
+ fn raw_string_literal_with_hashes(n: usize) -> String {
+ let mut literal = String::new();
+ literal.push('r');
+ literal.extend(iter::repeat('#').take(n));
+ literal.push('"');
+ literal.push('"');
+ literal.extend(iter::repeat('#').take(n));
+ literal
+ }
+
+ raw_string_literal_with_hashes(255)
+ .parse::<TokenStream>()
+ .unwrap();
+
+ // https://github.com/rust-lang/rust/pull/95251
+ raw_string_literal_with_hashes(256)
+ .parse::<TokenStream>()
+ .unwrap_err();
+}
+
+#[test]
+fn literal_byte_character() {
+ #[track_caller]
+ fn assert(literal: Literal, expected: &str) {
+ assert_eq!(literal.to_string(), expected.trim());
+ }
+
+ assert(Literal::byte_character(b'a'), r#" b'a' "#);
+ assert(Literal::byte_character(b'\0'), r#" b'\0' "#);
+ assert(Literal::byte_character(b'\t'), r#" b'\t' "#);
+ assert(Literal::byte_character(b'\n'), r#" b'\n' "#);
+ assert(Literal::byte_character(b'\r'), r#" b'\r' "#);
+ assert(Literal::byte_character(b'\''), r#" b'\'' "#);
+ assert(Literal::byte_character(b'\\'), r#" b'\\' "#);
+ assert(Literal::byte_character(b'\x1f'), r#" b'\x1F' "#);
+ assert(Literal::byte_character(b'"'), r#" b'"' "#);
+}
+
+#[test]
+fn literal_byte_string() {
+ #[track_caller]
+ fn assert(literal: Literal, expected: &str) {
+ assert_eq!(literal.to_string(), expected.trim());
+ }
+
+ assert(Literal::byte_string(b""), r#" b"" "#);
+ assert(Literal::byte_string(b"\0"), r#" b"\0" "#);
+ assert(Literal::byte_string(b"\t"), r#" b"\t" "#);
+ assert(Literal::byte_string(b"\n"), r#" b"\n" "#);
+ assert(Literal::byte_string(b"\r"), r#" b"\r" "#);
+ assert(Literal::byte_string(b"\""), r#" b"\"" "#);
+ assert(Literal::byte_string(b"\\"), r#" b"\\" "#);
+ assert(Literal::byte_string(b"\x1f"), r#" b"\x1F" "#);
+ assert(Literal::byte_string(b"'"), r#" b"'" "#);
+ assert(
+ Literal::byte_string(b"a\00b\07c\08d\0e\0"),
+ r#" b"a\x000b\x007c\08d\0e\0" "#,
+ );
+
+ "b\"\\\r\n x\"".parse::<TokenStream>().unwrap();
+ "b\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err();
+ "b\"\\\r\n \u{a0}x\"".parse::<TokenStream>().unwrap_err();
+ "br\"\u{a0}\"".parse::<TokenStream>().unwrap_err();
+}
+
+#[test]
+fn literal_c_string() {
+ #[track_caller]
+ fn assert(literal: Literal, expected: &str) {
+ assert_eq!(literal.to_string(), expected.trim());
+ }
+
+ assert(Literal::c_string(<&CStr>::default()), r#" c"" "#);
+ assert(
+ Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()),
+ r#" c"aA" "#,
+ );
+ assert(
+ Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()),
+ r#" c"aA" "#,
+ );
+ assert(
+ Literal::c_string(CStr::from_bytes_with_nul(b"\t\0").unwrap()),
+ r#" c"\t" "#,
+ );
+ assert(
+        Literal::c_string(CStr::from_bytes_with_nul(b"\xE2\x9D\xA4\0").unwrap()),
+ r#" c"❤" "#,
+ );
+ assert(
+ Literal::c_string(CStr::from_bytes_with_nul(b"'\0").unwrap()),
+ r#" c"'" "#,
+ );
+ assert(
+ Literal::c_string(CStr::from_bytes_with_nul(b"\"\0").unwrap()),
+ r#" c"\"" "#,
+ );
+ assert(
+        Literal::c_string(CStr::from_bytes_with_nul(b"\x7F\xFF\xFE\xCC\xB3\0").unwrap()),
+ r#" c"\u{7f}\xFF\xFE\u{333}" "#,
+ );
+
+ let strings = r###"
+ c"hello\x80我叫\u{1F980}" // from the RFC
+ cr"\"
+ cr##"Hello "world"!"##
+ c"\t\n\r\"\\"
+ "###;
+
+ let mut tokens = strings.parse::<TokenStream>().unwrap().into_iter();
+
+ for expected in &[
+ r#"c"hello\x80我叫\u{1F980}""#,
+ r#"cr"\""#,
+ r###"cr##"Hello "world"!"##"###,
+ r#"c"\t\n\r\"\\""#,
+ ] {
+ match tokens.next().unwrap() {
+ TokenTree::Literal(literal) => {
+ assert_eq!(literal.to_string(), *expected);
+ }
+ unexpected => panic!("unexpected token: {:?}", unexpected),
+ }
+ }
+
+ if let Some(unexpected) = tokens.next() {
+ panic!("unexpected token: {:?}", unexpected);
+ }
+
+ for invalid in &[r#"c"\0""#, r#"c"\x00""#, r#"c"\u{0}""#, "c\"\0\""] {
+ if let Ok(unexpected) = invalid.parse::<TokenStream>() {
+ panic!("unexpected token: {:?}", unexpected);
+ }
+ }
+}
+
+#[test]
+fn literal_character() {
+ #[track_caller]
+ fn assert(literal: Literal, expected: &str) {
+ assert_eq!(literal.to_string(), expected.trim());
+ }
+
+ assert(Literal::character('a'), r#" 'a' "#);
+ assert(Literal::character('\t'), r#" '\t' "#);
+ assert(Literal::character('❤'), r#" '❤' "#);
+ assert(Literal::character('\''), r#" '\'' "#);
+ assert(Literal::character('"'), r#" '"' "#);
+ assert(Literal::character('\0'), r#" '\0' "#);
+ assert(Literal::character('\u{1}'), r#" '\u{1}' "#);
+}
+
+#[test]
+fn literal_integer() {
+ #[track_caller]
+ fn assert(literal: Literal, expected: &str) {
+ assert_eq!(literal.to_string(), expected);
+ }
+
+ assert(Literal::u8_suffixed(10), "10u8");
+ assert(Literal::u16_suffixed(10), "10u16");
+ assert(Literal::u32_suffixed(10), "10u32");
+ assert(Literal::u64_suffixed(10), "10u64");
+ assert(Literal::u128_suffixed(10), "10u128");
+ assert(Literal::usize_suffixed(10), "10usize");
+
+ assert(Literal::i8_suffixed(10), "10i8");
+ assert(Literal::i16_suffixed(10), "10i16");
+ assert(Literal::i32_suffixed(10), "10i32");
+ assert(Literal::i64_suffixed(10), "10i64");
+ assert(Literal::i128_suffixed(10), "10i128");
+ assert(Literal::isize_suffixed(10), "10isize");
+
+ assert(Literal::u8_unsuffixed(10), "10");
+ assert(Literal::u16_unsuffixed(10), "10");
+ assert(Literal::u32_unsuffixed(10), "10");
+ assert(Literal::u64_unsuffixed(10), "10");
+ assert(Literal::u128_unsuffixed(10), "10");
+ assert(Literal::usize_unsuffixed(10), "10");
+
+ assert(Literal::i8_unsuffixed(10), "10");
+ assert(Literal::i16_unsuffixed(10), "10");
+ assert(Literal::i32_unsuffixed(10), "10");
+ assert(Literal::i64_unsuffixed(10), "10");
+ assert(Literal::i128_unsuffixed(10), "10");
+ assert(Literal::isize_unsuffixed(10), "10");
+
+ assert(Literal::i32_suffixed(-10), "-10i32");
+ assert(Literal::i32_unsuffixed(-10), "-10");
+}
+
+#[test]
+fn literal_float() {
+ #[track_caller]
+ fn assert(literal: Literal, expected: &str) {
+ assert_eq!(literal.to_string(), expected);
+ }
+
+ assert(Literal::f32_suffixed(10.0), "10f32");
+ assert(Literal::f32_suffixed(-10.0), "-10f32");
+ assert(Literal::f64_suffixed(10.0), "10f64");
+ assert(Literal::f64_suffixed(-10.0), "-10f64");
+
+ assert(Literal::f32_unsuffixed(10.0), "10.0");
+ assert(Literal::f32_unsuffixed(-10.0), "-10.0");
+ assert(Literal::f64_unsuffixed(10.0), "10.0");
+ assert(Literal::f64_unsuffixed(-10.0), "-10.0");
+
+ assert(
+ Literal::f64_unsuffixed(1e100),
+        "10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.0",
+ );
+}
+
+#[test]
+fn literal_suffix() {
+ fn token_count(p: &str) -> usize {
+ p.parse::<TokenStream>().unwrap().into_iter().count()
+ }
+
+ assert_eq!(token_count("999u256"), 1);
+ assert_eq!(token_count("999r#u256"), 3);
+ assert_eq!(token_count("1."), 1);
+ assert_eq!(token_count("1.f32"), 3);
+ assert_eq!(token_count("1.0_0"), 1);
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
+ assert_eq!(token_count("r\"\"r"), 1);
+ assert_eq!(token_count("r#\"\"#r"), 1);
+ assert_eq!(token_count("b\"\"b"), 1);
+ assert_eq!(token_count("br\"\"br"), 1);
+ assert_eq!(token_count("br#\"\"#br"), 1);
+ assert_eq!(token_count("c\"\"c"), 1);
+ assert_eq!(token_count("cr\"\"cr"), 1);
+ assert_eq!(token_count("cr#\"\"#cr"), 1);
+ assert_eq!(token_count("'c'c"), 1);
+ assert_eq!(token_count("b'b'b"), 1);
+ assert_eq!(token_count("0E"), 1);
+ assert_eq!(token_count("0o0A"), 1);
+ assert_eq!(token_count("0E--0"), 4);
+ assert_eq!(token_count("0.0ECMA"), 1);
+}
+
+#[test]
+fn literal_iter_negative() {
+ let negative_literal = Literal::i32_suffixed(-3);
+ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
+ let mut iter = tokens.into_iter();
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '-');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ unexpected => panic!("unexpected token {:?}", unexpected),
+ }
+ match iter.next().unwrap() {
+ TokenTree::Literal(literal) => {
+ assert_eq!(literal.to_string(), "3i32");
+ }
+ unexpected => panic!("unexpected token {:?}", unexpected),
+ }
+ assert!(iter.next().is_none());
+}
+
+#[test]
+fn literal_parse() {
+ assert!("1".parse::<Literal>().is_ok());
+ assert!("-1".parse::<Literal>().is_ok());
+ assert!("-1u12".parse::<Literal>().is_ok());
+ assert!("1.0".parse::<Literal>().is_ok());
+ assert!("-1.0".parse::<Literal>().is_ok());
+ assert!("-1.0f12".parse::<Literal>().is_ok());
+ assert!("'a'".parse::<Literal>().is_ok());
+ assert!("\"\n\"".parse::<Literal>().is_ok());
+ assert!("0 1".parse::<Literal>().is_err());
+ assert!(" 0".parse::<Literal>().is_err());
+ assert!("0 ".parse::<Literal>().is_err());
+ assert!("/* comment */0".parse::<Literal>().is_err());
+ assert!("0/* comment */".parse::<Literal>().is_err());
+ assert!("0// comment".parse::<Literal>().is_err());
+ assert!("- 1".parse::<Literal>().is_err());
+ assert!("- 1.0".parse::<Literal>().is_err());
+ assert!("-\"\"".parse::<Literal>().is_err());
+}
+
+#[test]
+fn literal_span() {
+ let positive = "0.1".parse::<Literal>().unwrap();
+ let negative = "-0.1".parse::<Literal>().unwrap();
+ let subspan = positive.subspan(1..2);
+
+ #[cfg(not(span_locations))]
+ {
+ let _ = negative;
+ assert!(subspan.is_none());
+ }
+
+ #[cfg(span_locations)]
+ {
+ assert_eq!(positive.span().start().column, 0);
+ assert_eq!(positive.span().end().column, 3);
+ assert_eq!(negative.span().start().column, 0);
+ assert_eq!(negative.span().end().column, 4);
+ assert_eq!(subspan.unwrap().source_text().unwrap(), ".");
+ }
+
+ assert!(positive.subspan(1..4).is_none());
+}
+
+#[cfg(span_locations)]
+#[test]
+fn source_text() {
+ let input = " 𓀕 a z ";
+ let mut tokens = input
+ .parse::<proc_macro2::TokenStream>()
+ .unwrap()
+ .into_iter();
+
+ let first = tokens.next().unwrap();
+ assert_eq!("𓀕", first.span().source_text().unwrap());
+
+ let second = tokens.next().unwrap();
+ let third = tokens.next().unwrap();
+ assert_eq!("z", third.span().source_text().unwrap());
+ assert_eq!("a", second.span().source_text().unwrap());
+}
+
+#[test]
+fn roundtrip() {
+ fn roundtrip(p: &str) {
+ println!("parse: {}", p);
+ let s = p.parse::<TokenStream>().unwrap().to_string();
+ println!("first: {}", s);
+ let s2 = s.parse::<TokenStream>().unwrap().to_string();
+ assert_eq!(s, s2);
+ }
+ roundtrip("a");
+ roundtrip("<<");
+ roundtrip("<<=");
+ roundtrip(
+ "
+ 1
+ 1.0
+ 1f32
+ 2f64
+ 1usize
+ 4isize
+ 4e10
+ 1_000
+ 1_0i32
+ 8u8
+ 9
+ 0
+ 0xffffffffffffffffffffffffffffffff
+ 1x
+ 1u80
+ 1f320
+ ",
+ );
+ roundtrip("'a");
+ roundtrip("'_");
+ roundtrip("'static");
+ roundtrip(r"'\u{10__FFFF}'");
+ roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
+}
+
+#[test]
+fn fail() {
+ fn fail(p: &str) {
+ if let Ok(s) = p.parse::<TokenStream>() {
+ panic!("should have failed to parse: {}\n{:#?}", p, s);
+ }
+ }
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
+ fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most
6 hex digits)
+ fail("\"\\u{999999}\""); // outside of valid range of char
+ fail("\"\\u{_0}\""); // leading underscore
+ fail("\"\\u{}\""); // empty
+ fail("b\"\r\""); // bare carriage return in byte string
+ fail("r\"\r\""); // bare carriage return in raw string
+ fail("\"\\\r \""); // backslash carriage return
+ fail("'aa'aa");
+ fail("br##\"\"#");
+ fail("cr##\"\"#");
+ fail("\"\\\n\u{85}\r\"");
+}
+
+#[cfg(span_locations)]
+#[test]
+fn span_test() {
+ check_spans(
+ "\
+/// This is a document comment
+testing 123
+{
+ testing 234
+}",
+ &[
+ (1, 0, 1, 30), // #
+ (1, 0, 1, 30), // [ ... ]
+ (1, 0, 1, 30), // doc
+ (1, 0, 1, 30), // =
+ (1, 0, 1, 30), // "This is..."
+ (2, 0, 2, 7), // testing
+ (2, 8, 2, 11), // 123
+ (3, 0, 5, 1), // { ... }
+ (4, 2, 4, 9), // testing
+ (4, 10, 4, 13), // 234
+ ],
+ );
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[test]
+fn default_span() {
+ let start = Span::call_site().start();
+ assert_eq!(start.line, 1);
+ assert_eq!(start.column, 0);
+ let end = Span::call_site().end();
+ assert_eq!(end.line, 1);
+ assert_eq!(end.column, 0);
+ let source_file = Span::call_site().source_file();
+ assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
+ assert!(!source_file.is_real());
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[test]
+fn span_join() {
+ let source1 = "aaa\nbbb"
+ .parse::<TokenStream>()
+ .unwrap()
+ .into_iter()
+ .collect::<Vec<_>>();
+ let source2 = "ccc\nddd"
+ .parse::<TokenStream>()
+ .unwrap()
+ .into_iter()
+ .collect::<Vec<_>>();
+
+    assert!(source1[0].span().source_file() != source2[0].span().source_file());
+ assert_eq!(
+ source1[0].span().source_file(),
+ source1[1].span().source_file()
+ );
+
+ let joined1 = source1[0].span().join(source1[1].span());
+ let joined2 = source1[0].span().join(source2[0].span());
+ assert!(joined1.is_some());
+ assert!(joined2.is_none());
+
+ let start = joined1.unwrap().start();
+ let end = joined1.unwrap().end();
+ assert_eq!(start.line, 1);
+ assert_eq!(start.column, 0);
+ assert_eq!(end.line, 2);
+ assert_eq!(end.column, 3);
+
+ assert_eq!(
+ joined1.unwrap().source_file(),
+ source1[0].span().source_file()
+ );
+}
+
+#[test]
+fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+ assert!(s.parse::<TokenStream>().is_err());
+}
+
+#[test]
+fn punct_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+ assert_eq!(tt.as_char(), '~');
+ assert_eq!(tt.spacing(), Spacing::Alone);
+ }
+ wrong => panic!("wrong token {:?}", wrong),
+ }
+}
+
+#[test]
+fn joint_last_token() {
+ // This test verifies that we match the behavior of libproc_macro *not* in
+ // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
+ // behavior was temporarily broken.
+ // See https://github.com/rust-lang/rust/issues/76399
+
+ let joint_punct = Punct::new(':', Spacing::Joint);
+ let stream = TokenStream::from(TokenTree::Punct(joint_punct));
+ let punct = match stream.into_iter().next().unwrap() {
+ TokenTree::Punct(punct) => punct,
+ _ => unreachable!(),
+ };
+ assert_eq!(punct.spacing(), Spacing::Joint);
+}
+
+#[test]
+fn raw_identifier() {
+ let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
+ wrong => panic!("wrong token {:?}", wrong),
+ }
+ assert!(tts.next().is_none());
+}
+
+#[test]
+fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+ #[cfg(span_locations)]
+ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+}
+
+#[test]
+fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+ #[cfg(not(span_locations))]
+ let expected = "\
+TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ },
+ Punct {
+ char: '+',
+ spacing: Alone,
+ },
+ Literal {
+ lit: 1,
+ },
+ ],
+ },
+]\
+ ";
+
+ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a
+ },
+ Punct {
+ char: '+',
+ spacing: Alone
+ },
+ Literal {
+ lit: 1
+ }
+ ]
+ }
+]\
+ ";
+
+ #[cfg(span_locations)]
+ let expected = "\
+TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ span: bytes(2..3),
+ },
+ Punct {
+ char: '+',
+ spacing: Alone,
+ span: bytes(4..5),
+ },
+ Literal {
+ lit: 1,
+ span: bytes(6..7),
+ },
+ ],
+ span: bytes(1..8),
+ },
+]\
+ ";
+
+ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ span: bytes(2..3)
+ },
+ Punct {
+ char: '+',
+ spacing: Alone,
+ span: bytes(4..5)
+ },
+ Literal {
+ lit: 1,
+ span: bytes(6..7)
+ }
+ ],
+ span: bytes(1..8)
+ }
+]\
+ ";
+
+ let actual = format!("{:#?}", tts);
+ if actual.ends_with(",\n]") {
+ assert_eq!(expected, actual);
+ } else {
+ assert_eq!(expected_before_trailing_commas, actual);
+ }
+}
+
+#[test]
+fn default_tokenstream_is_empty() {
+ let default_token_stream = <TokenStream as Default>::default();
+
+ assert!(default_token_stream.is_empty());
+}
+
+#[test]
+fn tokenstream_size_hint() {
+ let tokens = "a b (c d) e".parse::<TokenStream>().unwrap();
+
+ assert_eq!(tokens.into_iter().size_hint(), (4, Some(4)));
+}
+
+#[test]
+fn tuple_indexing() {
+    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
+ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
+ assert_eq!("tuple", tokens.next().unwrap().to_string());
+ assert_eq!(".", tokens.next().unwrap().to_string());
+ assert_eq!("0.0", tokens.next().unwrap().to_string());
+ assert!(tokens.next().is_none());
+}
+
+#[cfg(span_locations)]
+#[test]
+fn non_ascii_tokens() {
+ check_spans("// abc", &[]);
+ check_spans("// ábc", &[]);
+ check_spans("// abc x", &[]);
+ check_spans("// ábc x", &[]);
+ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
+ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
+ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
+ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
+ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
+ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
+ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
+ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
+ check_spans(r##"r#"abc"#"##, &[(1, 0, 1, 8)]);
+ check_spans(r##"r#"ábc"#"##, &[(1, 0, 1, 8)]);
+ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
+ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
+ check_spans("'a'", &[(1, 0, 1, 3)]);
+ check_spans("'á'", &[(1, 0, 1, 3)]);
+ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
+ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
+ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
+ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
+ check_spans("abc", &[(1, 0, 1, 3)]);
+ check_spans("ábc", &[(1, 0, 1, 3)]);
+ check_spans("ábć", &[(1, 0, 1, 3)]);
+ check_spans("abc// foo", &[(1, 0, 1, 3)]);
+ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
+ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
+ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
+}
+
+#[cfg(span_locations)]
+fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+ let ts = p.parse::<TokenStream>().unwrap();
+ check_spans_internal(ts, &mut lines);
+ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
+}
+
+#[cfg(span_locations)]
+fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+ for i in ts {
+        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+ *lines = rest;
+
+ let start = i.span().start();
+ assert_eq!(start.line, sline, "sline did not match for {}", i);
+ assert_eq!(start.column, scol, "scol did not match for {}", i);
+
+ let end = i.span().end();
+ assert_eq!(end.line, eline, "eline did not match for {}", i);
+ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+
+ if let TokenTree::Group(g) = i {
+ check_spans_internal(g.stream().clone(), lines);
+ }
+ }
+ }
+}
+
+#[test]
+fn whitespace() {
+ // space, horizontal tab, vertical tab, form feed, carriage return, line
+ // feed, non-breaking space, left-to-right mark, right-to-left mark
+ let various_spaces = " \t\u{b}\u{c}\r\n\u{a0}\u{200e}\u{200f}";
+ let tokens = various_spaces.parse::<TokenStream>().unwrap();
+ assert_eq!(tokens.into_iter().count(), 0);
+
+ let lone_carriage_returns = " \r \r\r\n ";
+ lone_carriage_returns.parse::<TokenStream>().unwrap();
+}
+
+#[test]
+fn byte_order_mark() {
+ let string = "\u{feff}foo";
+ let tokens = string.parse::<TokenStream>().unwrap();
+ match tokens.into_iter().next().unwrap() {
+ TokenTree::Ident(ident) => assert_eq!(ident, "foo"),
+ _ => unreachable!(),
+ }
+
+ let string = "foo\u{feff}";
+ string.parse::<TokenStream>().unwrap_err();
+}
+
+#[cfg(span_locations)]
+fn create_span() -> proc_macro2::Span {
+ let tts: TokenStream = "1".parse().unwrap();
+ match tts.into_iter().next().unwrap() {
+ TokenTree::Literal(literal) => literal.span(),
+ _ => unreachable!(),
+ }
+}
+
+#[cfg(span_locations)]
+#[test]
+fn test_invalidate_current_thread_spans() {
+ let actual = format!("{:#?}", create_span());
+ assert_eq!(actual, "bytes(1..2)");
+ let actual = format!("{:#?}", create_span());
+ assert_eq!(actual, "bytes(3..4)");
+
+ proc_macro2::extra::invalidate_current_thread_spans();
+
+ let actual = format!("{:#?}", create_span());
+ // Test that span offsets have been reset after the call
+ // to invalidate_current_thread_spans()
+ assert_eq!(actual, "bytes(1..2)");
+}
+
+#[cfg(span_locations)]
+#[test]
+#[should_panic(expected = "Invalid span with no related FileInfo!")]
+fn test_use_span_after_invalidation() {
+ let span = create_span();
+
+ proc_macro2::extra::invalidate_current_thread_spans();
+
+ span.source_text();
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/tests/test_fmt.rs b/rust/hw/char/pl011/vendor/proc-macro2/tests/test_fmt.rs
new file mode 100644
index 0000000000..86a4c38763
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/tests/test_fmt.rs
@@ -0,0 +1,28 @@
+#![allow(clippy::from_iter_instead_of_collect)]
+
+use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+use std::iter;
+
+#[test]
+fn test_fmt_group() {
+ let ident = Ident::new("x", Span::call_site());
+ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
+ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
+ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
+ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
+ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
+ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
+ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
+ let none_empty = Group::new(Delimiter::None, TokenStream::new());
+ let none_nonempty = Group::new(Delimiter::None, inner);
+
+ // Matches libproc_macro.
+ assert_eq!("()", parens_empty.to_string());
+ assert_eq!("(x)", parens_nonempty.to_string());
+ assert_eq!("[]", brackets_empty.to_string());
+ assert_eq!("[x]", brackets_nonempty.to_string());
+ assert_eq!("{ }", braces_empty.to_string());
+ assert_eq!("{ x }", braces_nonempty.to_string());
+ assert_eq!("", none_empty.to_string());
+ assert_eq!("x", none_nonempty.to_string());
+}
diff --git a/rust/hw/char/pl011/vendor/proc-macro2/tests/test_size.rs b/rust/hw/char/pl011/vendor/proc-macro2/tests/test_size.rs
new file mode 100644
index 0000000000..7b0739023a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/proc-macro2/tests/test_size.rs
@@ -0,0 +1,73 @@
+#![cfg(not(randomize_layout))]
+
+extern crate proc_macro;
+
+use std::mem;
+
+#[rustversion::attr(before(1.64), ignore)]
+#[test]
+fn test_proc_macro_size() {
+ assert_eq!(mem::size_of::<proc_macro::Span>(), 4);
+ assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4);
+ assert_eq!(mem::size_of::<proc_macro::Group>(), 20);
+ assert_eq!(mem::size_of::<proc_macro::Ident>(), 12);
+ assert_eq!(mem::size_of::<proc_macro::Punct>(), 8);
+ assert_eq!(mem::size_of::<proc_macro::Literal>(), 16);
+ assert_eq!(mem::size_of::<proc_macro::TokenStream>(), 4);
+}
+
+#[cfg_attr(not(all(not(wrap_proc_macro), not(span_locations))), ignore)]
+#[test]
+fn test_proc_macro2_fallback_size_without_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 0);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 1);
+ assert_eq!(mem::size_of::<proc_macro2::Group>(), 16);
+ assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24);
+ assert_eq!(mem::size_of::<proc_macro2::Punct>(), 8);
+ assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24);
+ assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8);
+}
+
+#[cfg_attr(not(all(not(wrap_proc_macro), span_locations)), ignore)]
+#[test]
+fn test_proc_macro2_fallback_size_with_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 8);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
+ assert_eq!(mem::size_of::<proc_macro2::Group>(), 24);
+ assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32);
+ assert_eq!(mem::size_of::<proc_macro2::Punct>(), 16);
+ assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32);
+ assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8);
+}
+
+#[rustversion::attr(before(1.71), ignore)]
+#[rustversion::attr(
+ since(1.71),
+ cfg_attr(not(all(wrap_proc_macro, not(span_locations))), ignore)
+)]
+#[test]
+fn test_proc_macro2_wrapper_size_without_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 4);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8);
+ assert_eq!(mem::size_of::<proc_macro2::Group>(), 24);
+ assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24);
+ assert_eq!(mem::size_of::<proc_macro2::Punct>(), 12);
+ assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24);
+ assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32);
+}
+
+#[rustversion::attr(before(1.65), ignore)]
+#[rustversion::attr(
+ since(1.65),
+ cfg_attr(not(all(wrap_proc_macro, span_locations)), ignore)
+)]
+#[test]
+fn test_proc_macro2_wrapper_size_with_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 12);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
+ assert_eq!(mem::size_of::<proc_macro2::Group>(), 32);
+ assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32);
+ assert_eq!(mem::size_of::<proc_macro2::Punct>(), 20);
+ assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32);
+ assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32);
+}
diff --git a/rust/hw/char/pl011/vendor/quote/.cargo-checksum.json b/rust/hw/char/pl011/vendor/quote/.cargo-checksum.json
new file mode 100644
index 0000000000..dcfc52a21e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"0a98ab1241e7b64caa29c6ff868e2e96e0f74c1ef8b265727f1863a960fa322c","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"626e7079eab0baacf0fcaf3e244f407b2014ebaeca45905d72e8fb8bed18aaea","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/ext.rs":"9881576cac3e476a4bf04f9b601cf9a53b79399fb0ca9634e8b861ac91709843","src/format.rs":"c595015418f35e6992e710441b9999f09b2afe4678b138039d670d100c0bdd86","src/ident_fragment.rs":"0b3e6c2129e55910fd2d240e1e7efba6f1796801d24352d1c0bfbceb0e8b678f","src/lib.rs":"abbc178821e46d0bcd224904a7542ac4582d189f57cd4daf02a54fd772e52a55","src/runtime.rs":"7f37326edaeac2c42ed806b447eeba12e36dd4b1bc25fbf52f8eb23140f3be7a","src/spanned.rs":"3ccf5120593f35787442c0a37d243e802c5262e7f8b35aed503873008ec035c5","src/to_tokens.rs":"1c76311fcc82098e630056d71fd6f3929194ee31b0840e2aa643ed7e78026e3e","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"3be80741f84a707376c230d9cf70ce9537caa359691d8d4c34968e28175e4ad7","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated-dup.stderr":"90a4bdb9267535f5d2785940148338d6b7d905548051d2c9c5dcbd58f2c11d8e","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-interpolated.stderr":"ae7c2739554c862b331705e82781aa4687a4375210cef6ae899a4be4a4ec2d97","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter-separated.stderr":"03fd560979ebcd5aa6f83858bc2c3c01ba6546c16335101275505304895c1ae9","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/does-not-have-iter.stderr":"d6da483c29e232ced72059bbdf05d31afb1df9e02954edaa9cfaea1ec6df72dc","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-quotable.stderr":"1b5ad13712a35f2f25a159c003956762941b111d540b20ad6a258cdb079a9c95","tests/ui/not-repeatable.rs":"a4b115c04e4e41049a05f5b69450503fbffeba031218b4189cb931839f7f9a9c","tests/ui/not-repeatable.stderr":"bbfb702638374001061251f81d63476851ac28ed743f13db9d65e30dd9bdcf52","tests/ui/wrong-type-span.rs":"6195e35ea844c0c52ba1cff5d790c3a371af6915d137d377834ad984229ef9ea","tests/ui/wrong-type-span.stderr":"cad072e40e0ecc04f375122ae41aede2f0da2a9244492b3fcf70249e59d1b128"},"package":"0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/quote/Cargo.toml b/rust/hw/char/pl011/vendor/quote/Cargo.toml
new file mode 100644
index 0000000000..5b521762bc
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/Cargo.toml
@@ -0,0 +1,50 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+rust-version = "1.56"
+name = "quote"
+version = "1.0.36"
+authors = ["David Tolnay <dtolnay@gmail.com>"]
+autobenches = false
+description = "Quasi-quoting macro quote!(...)"
+documentation = "https://docs.rs/quote/"
+readme = "README.md"
+keywords = [
+ "macros",
+ "syn",
+]
+categories = ["development-tools::procedural-macro-helpers"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/quote"
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[lib]
+doc-scrape-examples = false
+
+[dependencies.proc-macro2]
+version = "1.0.74"
+default-features = false
+
+[dev-dependencies.rustversion]
+version = "1.0"
+
+[dev-dependencies.trybuild]
+version = "1.0.66"
+features = ["diff"]
+
+[features]
+default = ["proc-macro"]
+proc-macro = ["proc-macro2/proc-macro"]
diff --git a/rust/hw/char/pl011/vendor/quote/LICENSE-APACHE b/rust/hw/char/pl011/vendor/quote/LICENSE-APACHE
new file mode 100644
index 0000000000..1b5ec8b78e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/LICENSE-APACHE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
diff --git a/rust/hw/char/pl011/vendor/quote/LICENSE-MIT b/rust/hw/char/pl011/vendor/quote/LICENSE-MIT
new file mode 100644
index 0000000000..31aa79387f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/quote/README.md b/rust/hw/char/pl011/vendor/quote/README.md
new file mode 100644
index 0000000000..bfc91a9753
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/README.md
@@ -0,0 +1,272 @@
+Rust Quasi-Quoting
+==================
+
+[<img alt="github"
src="https://img.shields.io/badge/github-dtolnay/quote-8da0cb?style=for-the-badge&labelColor=555555&logo=github"
height="20">](https://github.com/dtolnay/quote)
+[<img alt="crates.io"
src="https://img.shields.io/crates/v/quote.svg?style=for-the-badge&color=fc8d62&logo=rust"
height="20">](https://crates.io/crates/quote)
+[<img alt="docs.rs"
src="https://img.shields.io/badge/docs.rs-quote-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs"
height="20">](https://docs.rs/quote)
+[<img alt="build status"
src="https://img.shields.io/github/actions/workflow/status/dtolnay/quote/ci.yml?branch=master&style=for-the-badge"
height="20">](https://github.com/dtolnay/quote/actions?query=branch%3Amaster)
+
+This crate provides the [`quote!`] macro for turning Rust syntax tree data
+structures into tokens of source code.
+
+[`quote!`]: https://docs.rs/quote/1.0/quote/macro.quote.html
+
+Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
+Rust code to determine how to manipulate those tokens, and produce a stream of
+tokens to hand back to the compiler to compile into the caller's crate.
+Quasi-quoting is a solution to one piece of that — producing tokens to
+return to the compiler.
+
+The idea of quasi-quoting is that we write *code* that we treat as *data*.
+Within the `quote!` macro, we can write what looks like code to our text editor
+or IDE. We get all the benefits of the editor's brace matching, syntax
+highlighting, indentation, and maybe autocompletion. But rather than compiling
+that as code into the current crate, we can treat it as data, pass it around,
+mutate it, and eventually hand it back to the compiler as tokens to compile into
+the macro caller's crate.
+
+This crate is motivated by the procedural macro use case, but is a
+general-purpose Rust quasi-quoting library and is not specific to procedural
+macros.
+
+```toml
+[dependencies]
+quote = "1.0"
+```
+
+*Version requirement: Quote supports rustc 1.56 and up.*<br>
+[*Release notes*](https://github.com/dtolnay/quote/releases)
+
+<br>
+
+## Syntax
+
+The quote crate provides a [`quote!`] macro within which you can write Rust code
+that gets packaged into a [`TokenStream`] and can be treated as data. You should
+think of `TokenStream` as representing a fragment of Rust source code.
+
+[`TokenStream`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.TokenStream.html
+
+Within the `quote!` macro, interpolation is done with `#var`. Any type
+implementing the [`quote::ToTokens`] trait can be interpolated. This includes
+most Rust primitive types as well as most of the syntax tree types from [`syn`].
+
+[`quote::ToTokens`]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+[`syn`]: https://github.com/dtolnay/syn
+
+```rust
+let tokens = quote! {
+ struct SerializeWith #generics #where_clause {
+ value: &'a #field_ty,
+ phantom: core::marker::PhantomData<#item_ty>,
+ }
+
+ impl #generics serde::Serialize for SerializeWith #generics #where_clause {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ #path(self.value, serializer)
+ }
+ }
+
+ SerializeWith {
+ value: #value,
+ phantom: core::marker::PhantomData::<#item_ty>,
+ }
+};
+```
+
+<br>
+
+## Repetition
+
+Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. This
+iterates through the elements of any variable interpolated within the repetition
+and inserts a copy of the repetition body for each one. The variables in an
+interpolation may be anything that implements `IntoIterator`, including `Vec` or
+a pre-existing iterator.
+
+- `#(#var)*` — no separators
+- `#(#var),*` — the character before the asterisk is used as a separator
+- `#( struct #var; )*` — the repetition can contain other things
+- `#( #k => println!("{}", #v), )*` — even multiple interpolations
+
+Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter
+does not produce a trailing comma. This matches the behavior of delimiters in
+`macro_rules!`.
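+
+As an illustrative sketch (the `fields` and `Point` names here are hypothetical,
+not taken from any real macro), a comma-separated repetition might be used like
+this:
+
+```rust
+use quote::{format_ident, quote};
+
+// Hypothetical identifiers built with `format_ident!`.
+let fields = vec![format_ident!("x"), format_ident!("y")];
+
+// `#(#fields: f64),*` repeats the body once per element of `fields`,
+// separating the copies with commas and adding no trailing comma.
+let tokens = quote! {
+    struct Point { #(#fields: f64),* }
+};
+// The expansion is token-for-token equivalent to: struct Point { x: f64, y: f64 }
+```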
+
+<br>
+
+## Returning tokens to the compiler
+
+The `quote!` macro evaluates to an expression of type
+`proc_macro2::TokenStream`. Meanwhile Rust procedural macros are expected to
+return the type `proc_macro::TokenStream`.
+
+The difference between the two types is that `proc_macro` types are entirely
+specific to procedural macros and cannot ever exist in code outside of a
+procedural macro, while `proc_macro2` types may exist anywhere including tests
+and non-macro code like main.rs and build.rs. This is why even the procedural
+macro ecosystem is largely built around `proc_macro2`, because that ensures the
+libraries are unit testable and accessible in non-macro contexts.
+
+There is a [`From`]-conversion in both directions so returning the output of
+`quote!` from a procedural macro usually looks like `tokens.into()` or
+`proc_macro::TokenStream::from(tokens)`.
+
+[`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
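+
+A minimal sketch of that boundary (assuming a proc-macro crate; the derive name
+and generated body are hypothetical):
+
+```rust
+use quote::quote;
+
+#[proc_macro_derive(HeapSize)]
+pub fn derive_heap_size(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let _ = input;
+    // `quote!` yields a proc_macro2::TokenStream...
+    let tokens = quote! { /* generated items */ };
+    // ...which is converted back into the compiler's type at the boundary.
+    proc_macro::TokenStream::from(tokens)
+}
+```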
+
+<br>
+
+## Examples
+
+### Combining quoted fragments
+
+Usually you don't end up constructing an entire final `TokenStream` in one
+piece. Different parts may come from different helper functions. The tokens
+produced by `quote!` themselves implement `ToTokens` and so can be interpolated
+into later `quote!` invocations to build up a final result.
+
+```rust
+let type_definition = quote! {...};
+let methods = quote! {...};
+
+let tokens = quote! {
+ #type_definition
+ #methods
+};
+```
+
+### Constructing identifiers
+
+Suppose we have an identifier `ident` which came from somewhere in a macro
+input and we need to modify it in some way for the macro output. Let's consider
+prepending the identifier with an underscore.
+
+Simply interpolating the identifier next to an underscore will not have the
+behavior of concatenating them. The underscore and the identifier will continue
+to be two separate tokens as if you had written `_ x`.
+
+```rust
+// incorrect
+quote! {
+ let mut _#ident = 0;
+}
+```
+
+The solution is to build a new identifier token with the correct value. As this
+is such a common case, the `format_ident!` macro provides a convenient utility
+for doing so correctly.
+
+```rust
+let varname = format_ident!("_{}", ident);
+quote! {
+ let mut #varname = 0;
+}
+```
+
+Alternatively, the APIs provided by Syn and proc-macro2 can be used to directly
+build the identifier. This is roughly equivalent to the above, but will not
+handle `ident` being a raw identifier.
+
+```rust
+let concatenated = format!("_{}", ident);
+let varname = syn::Ident::new(&concatenated, ident.span());
+quote! {
+ let mut #varname = 0;
+}
+```
+
+### Making method calls
+
+Let's say our macro requires some type specified in the macro input to have a
+constructor called `new`. We have the type in a variable called `field_type` of
+type `syn::Type` and want to invoke the constructor.
+
+```rust
+// incorrect
+quote! {
+ let value = #field_type::new();
+}
+```
+
+This works only sometimes. If `field_type` is `String`, the expanded code
+contains `String::new()` which is fine. But if `field_type` is something like
+`Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid syntax.
+Ordinarily in handwritten Rust we would write `Vec::<i32>::new()` but for macros
+often the following is more convenient.
+
+```rust
+quote! {
+ let value = <#field_type>::new();
+}
+```
+
+This expands to `<Vec<i32>>::new()` which behaves correctly.
+
+A similar pattern is appropriate for trait methods.
+
+```rust
+quote! {
+ let value = <#field_type as core::default::Default>::default();
+}
+```
+
+<br>
+
+## Hygiene
+
+Any interpolated tokens preserve the `Span` information provided by their
+`ToTokens` implementation. Tokens that originate within a `quote!` invocation
+are spanned with [`Span::call_site()`].
+
+[`Span::call_site()`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html#method.call_site
+
+A different span can be provided explicitly through the [`quote_spanned!`]
+macro.
+
+[`quote_spanned!`]: https://docs.rs/quote/1.0/quote/macro.quote_spanned.html
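+
+A short sketch of providing an explicit span, mirroring the `quote_spanned!`
+documentation (here `ty` is assumed to be a `syn::Type` taken from the macro
+input):
+
+```rust
+use quote::quote_spanned;
+use syn::spanned::Spanned;
+
+let ty_span = ty.span();
+let assert_sync = quote_spanned! {ty_span=>
+    // Compile errors from this line point at the user's type, not the macro.
+    struct _AssertSync where #ty: Sync;
+};
+```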
+
+<br>
+
+## Non-macro code generators
+
+When using `quote` in a build.rs or main.rs and writing the output out to a
+file, consider having the code generator pass the tokens through [prettyplease]
+before writing. This way if an error occurs in the generated code it is
+convenient for a human to read and debug.
+
+Be aware that no kind of hygiene or span information is retained when tokens are
+written to a file; the conversion from tokens to source code is lossy.
+
+Example usage in build.rs:
+
+```rust
+let output = quote! { ... };
+let syntax_tree = syn::parse2(output).unwrap();
+let formatted = prettyplease::unparse(&syntax_tree);
+
+let out_dir = env::var_os("OUT_DIR").unwrap();
+let dest_path = Path::new(&out_dir).join("out.rs");
+fs::write(dest_path, formatted).unwrap();
+```
+
+[prettyplease]: https://github.com/dtolnay/prettyplease
+
+<br>
+
+#### License
+
+<sup>
+Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+</sup>
+
+<br>
+
+<sub>
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
+</sub>
diff --git a/rust/hw/char/pl011/vendor/quote/meson.build b/rust/hw/char/pl011/vendor/quote/meson.build
new file mode 100644
index 0000000000..11b83932f6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/meson.build
@@ -0,0 +1,17 @@
+_quote_rs = static_library(
+ 'quote',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2021',
+ '--cfg', 'feature="proc-macro"',
+ ],
+ dependencies: [
+ dep_proc_macro2,
+ ],
+)
+
+dep_quote = declare_dependency(
+ link_with: _quote_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/quote/rust-toolchain.toml b/rust/hw/char/pl011/vendor/quote/rust-toolchain.toml
new file mode 100644
index 0000000000..20fe888c30
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/rust-toolchain.toml
@@ -0,0 +1,2 @@
+[toolchain]
+components = ["rust-src"]
diff --git a/rust/hw/char/pl011/vendor/quote/src/ext.rs b/rust/hw/char/pl011/vendor/quote/src/ext.rs
new file mode 100644
index 0000000000..92c2315b18
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/src/ext.rs
@@ -0,0 +1,110 @@
+use super::ToTokens;
+use core::iter;
+use proc_macro2::{TokenStream, TokenTree};
+
+/// TokenStream extension trait with methods for appending tokens.
+///
+/// This trait is sealed and cannot be implemented outside of the `quote` crate.
+pub trait TokenStreamExt: private::Sealed {
+ /// For use by `ToTokens` implementations.
+ ///
+ /// Appends the token specified to this list of tokens.
+ fn append<U>(&mut self, token: U)
+ where
+ U: Into<TokenTree>;
+
+ /// For use by `ToTokens` implementations.
+ ///
+ /// ```
+ /// # use quote::{quote, TokenStreamExt, ToTokens};
+ /// # use proc_macro2::TokenStream;
+ /// #
+ /// struct X;
+ ///
+ /// impl ToTokens for X {
+ /// fn to_tokens(&self, tokens: &mut TokenStream) {
+ /// tokens.append_all(&[true, false]);
+ /// }
+ /// }
+ ///
+ /// let tokens = quote!(#X);
+ /// assert_eq!(tokens.to_string(), "true false");
+ /// ```
+ fn append_all<I>(&mut self, iter: I)
+ where
+ I: IntoIterator,
+ I::Item: ToTokens;
+
+ /// For use by `ToTokens` implementations.
+ ///
+ /// Appends all of the items in the iterator `I`, separated by the tokens
+ /// `U`.
+ fn append_separated<I, U>(&mut self, iter: I, op: U)
+ where
+ I: IntoIterator,
+ I::Item: ToTokens,
+ U: ToTokens;
+
+ /// For use by `ToTokens` implementations.
+ ///
+ /// Appends all tokens in the iterator `I`, appending `U` after each
+ /// element, including after the last element of the iterator.
+ fn append_terminated<I, U>(&mut self, iter: I, term: U)
+ where
+ I: IntoIterator,
+ I::Item: ToTokens,
+ U: ToTokens;
+}
+
+impl TokenStreamExt for TokenStream {
+ fn append<U>(&mut self, token: U)
+ where
+ U: Into<TokenTree>,
+ {
+ self.extend(iter::once(token.into()));
+ }
+
+ fn append_all<I>(&mut self, iter: I)
+ where
+ I: IntoIterator,
+ I::Item: ToTokens,
+ {
+ for token in iter {
+ token.to_tokens(self);
+ }
+ }
+
+ fn append_separated<I, U>(&mut self, iter: I, op: U)
+ where
+ I: IntoIterator,
+ I::Item: ToTokens,
+ U: ToTokens,
+ {
+ for (i, token) in iter.into_iter().enumerate() {
+ if i > 0 {
+ op.to_tokens(self);
+ }
+ token.to_tokens(self);
+ }
+ }
+
+ fn append_terminated<I, U>(&mut self, iter: I, term: U)
+ where
+ I: IntoIterator,
+ I::Item: ToTokens,
+ U: ToTokens,
+ {
+ for token in iter {
+ token.to_tokens(self);
+ term.to_tokens(self);
+ }
+ }
+}
+
+mod private {
+ use proc_macro2::TokenStream;
+
+ pub trait Sealed {}
+
+ impl Sealed for TokenStream {}
+}
diff --git a/rust/hw/char/pl011/vendor/quote/src/format.rs b/rust/hw/char/pl011/vendor/quote/src/format.rs
new file mode 100644
index 0000000000..3cddbd2819
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/src/format.rs
@@ -0,0 +1,168 @@
+/// Formatting macro for constructing `Ident`s.
+///
+/// <br>
+///
+/// # Syntax
+///
+/// Syntax is copied from the [`format!`] macro, supporting both positional and
+/// named arguments.
+///
+/// Only a limited set of formatting traits are supported. The current mapping
+/// of format types to traits is:
+///
+/// * `{}` ⇒ [`IdentFragment`]
+/// * `{:o}` ⇒ [`Octal`](std::fmt::Octal)
+/// * `{:x}` ⇒ [`LowerHex`](std::fmt::LowerHex)
+/// * `{:X}` ⇒ [`UpperHex`](std::fmt::UpperHex)
+/// * `{:b}` ⇒ [`Binary`](std::fmt::Binary)
+///
+/// See [`std::fmt`] for more information.
+///
+/// <br>
+///
+/// # IdentFragment
+///
+/// Unlike `format!`, this macro uses the [`IdentFragment`] formatting trait by
+/// default. This trait is like `Display`, with a few differences:
+///
+/// * `IdentFragment` is only implemented for a limited set of types, such as
+/// unsigned integers and strings.
+/// * [`Ident`] arguments will have their `r#` prefixes stripped, if present.
+///
+/// [`IdentFragment`]: crate::IdentFragment
+/// [`Ident`]: proc_macro2::Ident
+///
+/// <br>
+///
+/// # Hygiene
+///
+/// The [`Span`] of the first `Ident` argument is used as the span of the final
+/// identifier, falling back to [`Span::call_site`] when no identifiers are
+/// provided.
+///
+/// ```
+/// # use quote::format_ident;
+/// # let ident = format_ident!("Ident");
+/// // If `ident` is an Ident, the span of `my_ident` will be inherited from it.
+/// let my_ident = format_ident!("My{}{}", ident, "IsCool");
+/// assert_eq!(my_ident, "MyIdentIsCool");
+/// ```
+///
+/// Alternatively, the span can be overridden by passing the `span` named
+/// argument.
+///
+/// ```
+/// # use quote::format_ident;
+/// # const IGNORE_TOKENS: &'static str = stringify! {
+/// let my_span = /* ... */;
+/// # };
+/// # let my_span = proc_macro2::Span::call_site();
+/// format_ident!("MyIdent", span = my_span);
+/// ```
+///
+/// [`Span`]: proc_macro2::Span
+/// [`Span::call_site`]: proc_macro2::Span::call_site
+///
+/// <p><br></p>
+///
+/// # Panics
+///
+/// This method will panic if the resulting formatted string is not a valid
+/// identifier.
+///
+/// <br>
+///
+/// # Examples
+///
+/// Composing raw and non-raw identifiers:
+/// ```
+/// # use quote::format_ident;
+/// let my_ident = format_ident!("My{}", "Ident");
+/// assert_eq!(my_ident, "MyIdent");
+///
+/// let raw = format_ident!("r#Raw");
+/// assert_eq!(raw, "r#Raw");
+///
+/// let my_ident_raw = format_ident!("{}Is{}", my_ident, raw);
+/// assert_eq!(my_ident_raw, "MyIdentIsRaw");
+/// ```
+///
+/// Integer formatting options:
+/// ```
+/// # use quote::format_ident;
+/// let num: u32 = 10;
+///
+/// let decimal = format_ident!("Id_{}", num);
+/// assert_eq!(decimal, "Id_10");
+///
+/// let octal = format_ident!("Id_{:o}", num);
+/// assert_eq!(octal, "Id_12");
+///
+/// let binary = format_ident!("Id_{:b}", num);
+/// assert_eq!(binary, "Id_1010");
+///
+/// let lower_hex = format_ident!("Id_{:x}", num);
+/// assert_eq!(lower_hex, "Id_a");
+///
+/// let upper_hex = format_ident!("Id_{:X}", num);
+/// assert_eq!(upper_hex, "Id_A");
+/// ```
+#[macro_export]
+macro_rules! format_ident {
+ ($fmt:expr) => {
+ $crate::format_ident_impl!([
+ $crate::__private::Option::None,
+ $fmt
+ ])
+ };
+
+ ($fmt:expr, $($rest:tt)*) => {
+ $crate::format_ident_impl!([
+ $crate::__private::Option::None,
+ $fmt
+ ] $($rest)*)
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! format_ident_impl {
+ // Final state
+ ([$span:expr, $($fmt:tt)*]) => {
+ $crate::__private::mk_ident(
+ &$crate::__private::format!($($fmt)*),
+ $span,
+ )
+ };
+
+ // Span argument
+ ([$old:expr, $($fmt:tt)*] span = $span:expr) => {
+ $crate::format_ident_impl!([$old, $($fmt)*] span = $span,)
+ };
+ ([$old:expr, $($fmt:tt)*] span = $span:expr, $($rest:tt)*) => {
+ $crate::format_ident_impl!([
+ $crate::__private::Option::Some::<$crate::__private::Span>($span),
+ $($fmt)*
+ ] $($rest)*)
+ };
+
+ // Named argument
+ ([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr) => {
+ $crate::format_ident_impl!([$span, $($fmt)*] $name = $arg,)
+ };
+ ([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr, $($rest:tt)*) => {
+ match $crate::__private::IdentFragmentAdapter(&$arg) {
+ arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, $name = arg] $($rest)*),
+ }
+ };
+
+ // Positional argument
+ ([$span:expr, $($fmt:tt)*] $arg:expr) => {
+ $crate::format_ident_impl!([$span, $($fmt)*] $arg,)
+ };
+ ([$span:expr, $($fmt:tt)*] $arg:expr, $($rest:tt)*) => {
+ match $crate::__private::IdentFragmentAdapter(&$arg) {
+ arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, arg] $($rest)*),
+ }
+ };
+}
diff --git a/rust/hw/char/pl011/vendor/quote/src/ident_fragment.rs b/rust/hw/char/pl011/vendor/quote/src/ident_fragment.rs
new file mode 100644
index 0000000000..6c2a9a87ac
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/src/ident_fragment.rs
@@ -0,0 +1,88 @@
+use alloc::borrow::Cow;
+use core::fmt;
+use proc_macro2::{Ident, Span};
+
+/// Specialized formatting trait used by `format_ident!`.
+///
+/// [`Ident`] arguments formatted using this trait will have their `r#` prefix
+/// stripped, if present.
+///
+/// See [`format_ident!`] for more information.
+///
+/// [`format_ident!`]: crate::format_ident
+pub trait IdentFragment {
+ /// Format this value as an identifier fragment.
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result;
+
+ /// Span associated with this `IdentFragment`.
+ ///
+ /// If non-`None`, may be inherited by formatted identifiers.
+ fn span(&self) -> Option<Span> {
+ None
+ }
+}
+
+impl<T: IdentFragment + ?Sized> IdentFragment for &T {
+ fn span(&self) -> Option<Span> {
+ <T as IdentFragment>::span(*self)
+ }
+
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ IdentFragment::fmt(*self, f)
+ }
+}
+
+impl<T: IdentFragment + ?Sized> IdentFragment for &mut T {
+ fn span(&self) -> Option<Span> {
+ <T as IdentFragment>::span(*self)
+ }
+
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ IdentFragment::fmt(*self, f)
+ }
+}
+
+impl IdentFragment for Ident {
+ fn span(&self) -> Option<Span> {
+ Some(self.span())
+ }
+
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let id = self.to_string();
+ if let Some(id) = id.strip_prefix("r#") {
+ fmt::Display::fmt(id, f)
+ } else {
+ fmt::Display::fmt(&id[..], f)
+ }
+ }
+}
+
+impl<T> IdentFragment for Cow<'_, T>
+where
+ T: IdentFragment + ToOwned + ?Sized,
+{
+ fn span(&self) -> Option<Span> {
+ T::span(self)
+ }
+
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ T::fmt(self, f)
+ }
+}
+
+// Limited set of types which this is implemented for, as we want to avoid types
+// which will often include non-identifier characters in their `Display` impl.
+macro_rules! ident_fragment_display {
+ ($($T:ty),*) => {
+ $(
+ impl IdentFragment for $T {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(self, f)
+ }
+ }
+ )*
+ };
+}
+
+ident_fragment_display!(bool, str, String, char);
+ident_fragment_display!(u8, u16, u32, u64, u128, usize);
diff --git a/rust/hw/char/pl011/vendor/quote/src/lib.rs b/rust/hw/char/pl011/vendor/quote/src/lib.rs
new file mode 100644
index 0000000000..4d198cb2e7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/src/lib.rs
@@ -0,0 +1,1464 @@
+//! [![github]](https://github.com/dtolnay/quote) [![crates-io]](https://crates.io/crates/quote) [![docs-rs]](https://docs.rs/quote)
+//!
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
+//!
+//! <br>
+//!
+//! This crate provides the [`quote!`] macro for turning Rust syntax tree data
+//! structures into tokens of source code.
+//!
+//! [`quote!`]: macro.quote.html
+//!
+//! Procedural macros in Rust receive a stream of tokens as input, execute
+//! arbitrary Rust code to determine how to manipulate those tokens, and produce
+//! a stream of tokens to hand back to the compiler to compile into the caller's
+//! crate. Quasi-quoting is a solution to one piece of that — producing
+//! tokens to return to the compiler.
+//!
+//! The idea of quasi-quoting is that we write *code* that we treat as *data*.
+//! Within the `quote!` macro, we can write what looks like code to our text
+//! editor or IDE. We get all the benefits of the editor's brace matching,
+//! syntax highlighting, indentation, and maybe autocompletion. But rather than
+//! compiling that as code into the current crate, we can treat it as data, pass
+//! it around, mutate it, and eventually hand it back to the compiler as tokens
+//! to compile into the macro caller's crate.
+//!
+//! This crate is motivated by the procedural macro use case, but is a
+//! general-purpose Rust quasi-quoting library and is not specific to procedural
+//! macros.
+//!
+//! ```toml
+//! [dependencies]
+//! quote = "1.0"
+//! ```
+//!
+//! <br>
+//!
+//! # Example
+//!
+//! The following quasi-quoted block of code is something you might find in [a]
+//! procedural macro having to do with data structure serialization. The `#var`
+//! syntax performs interpolation of runtime variables into the quoted tokens.
+//! Check out the documentation of the [`quote!`] macro for more detail about
+//! the syntax. See also the [`quote_spanned!`] macro which is important for
+//! implementing hygienic procedural macros.
+//!
+//! [a]: https://serde.rs/
+//! [`quote_spanned!`]: macro.quote_spanned.html
+//!
+//! ```
+//! # use quote::quote;
+//! #
+//! # let generics = "";
+//! # let where_clause = "";
+//! # let field_ty = "";
+//! # let item_ty = "";
+//! # let path = "";
+//! # let value = "";
+//! #
+//! let tokens = quote! {
+//! struct SerializeWith #generics #where_clause {
+//! value: &'a #field_ty,
+//! phantom: core::marker::PhantomData<#item_ty>,
+//! }
+//!
+//! impl #generics serde::Serialize for SerializeWith #generics #where_clause {
+//! fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+//! where
+//! S: serde::Serializer,
+//! {
+//! #path(self.value, serializer)
+//! }
+//! }
+//!
+//! SerializeWith {
+//! value: #value,
+//! phantom: core::marker::PhantomData::<#item_ty>,
+//! }
+//! };
+//! ```
+//!
+//! <br>
+//!
+//! # Non-macro code generators
+//!
+//! When using `quote` in a build.rs or main.rs and writing the output out to a
+//! file, consider having the code generator pass the tokens through
+//! [prettyplease] before writing. This way if an error occurs in the generated
+//! code it is convenient for a human to read and debug.
+//!
+//! [prettyplease]: https://github.com/dtolnay/prettyplease
+
+// Quote types in rustdoc of other crates get linked to here.
+#![doc(html_root_url = "https://docs.rs/quote/1.0.36")]
+#![allow(
+ clippy::doc_markdown,
+ clippy::missing_errors_doc,
+ clippy::missing_panics_doc,
+ clippy::module_name_repetitions,
+ // false positive https://github.com/rust-lang/rust-clippy/issues/6983
+ clippy::wrong_self_convention,
+)]
+
+extern crate alloc;
+
+#[cfg(feature = "proc-macro")]
+extern crate proc_macro;
+
+mod ext;
+mod format;
+mod ident_fragment;
+mod to_tokens;
+
+// Not public API.
+#[doc(hidden)]
+#[path = "runtime.rs"]
+pub mod __private;
+
+pub use crate::ext::TokenStreamExt;
+pub use crate::ident_fragment::IdentFragment;
+pub use crate::to_tokens::ToTokens;
+
+// Not public API.
+#[doc(hidden)]
+pub mod spanned;
+
+macro_rules! __quote {
+ ($quote:item) => {
+ /// The whole point.
+ ///
+ /// Performs variable interpolation against the input and produces it as
+ /// [`proc_macro2::TokenStream`].
+ ///
+ /// Note: for returning tokens to the compiler in a procedural macro, use
+ /// `.into()` on the result to convert to [`proc_macro::TokenStream`].
+ ///
+ /// [`TokenStream`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.TokenStream.html
+ ///
+ /// <br>
+ ///
+ /// # Interpolation
+ ///
+ /// Variable interpolation is done with `#var` (similar to `$var` in
+ /// `macro_rules!` macros). This grabs the `var` variable that is currently in
+ /// scope and inserts it in that location in the output tokens. Any type
+ /// implementing the [`ToTokens`] trait can be interpolated. This includes most
+ /// Rust primitive types as well as most of the syntax tree types from the [Syn]
+ /// crate.
+ ///
+ /// [`ToTokens`]: trait.ToTokens.html
+ /// [Syn]: https://github.com/dtolnay/syn
+ ///
+ /// Repetition is done using `#(...)*` or `#(...),*` again similar to
+ /// `macro_rules!`. This iterates through the elements of any variable
+ /// interpolated within the repetition and inserts a copy of the repetition body
+ /// for each one. The variables in an interpolation may be a `Vec`, slice,
+ /// `BTreeSet`, or any `Iterator`.
+ ///
+ /// - `#(#var)*` — no separators
+ /// - `#(#var),*` — the character before the asterisk is used as a separator
+ /// - `#( struct #var; )*` — the repetition can contain other tokens
+ /// - `#( #k => println!("{}", #v), )*` — even multiple interpolations
+ ///
+ /// <br>
+ ///
+ /// # Hygiene
+ ///
+ /// Any interpolated tokens preserve the `Span` information provided by their
+ /// `ToTokens` implementation. Tokens that originate within the `quote!`
+ /// invocation are spanned with [`Span::call_site()`].
+ ///
+ /// [`Span::call_site()`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html#method.call_site
+ ///
+ /// A different span can be provided through the [`quote_spanned!`] macro.
+ ///
+ /// [`quote_spanned!`]: macro.quote_spanned.html
+ ///
+ /// <br>
+ ///
+ /// # Return type
+ ///
+ /// The macro evaluates to an expression of type `proc_macro2::TokenStream`.
+ /// Meanwhile Rust procedural macros are expected to return the type
+ /// `proc_macro::TokenStream`.
+ ///
+ /// The difference between the two types is that `proc_macro` types are entirely
+ /// specific to procedural macros and cannot ever exist in code outside of a
+ /// procedural macro, while `proc_macro2` types may exist anywhere including
+ /// tests and non-macro code like main.rs and build.rs. This is why even the
+ /// procedural macro ecosystem is largely built around `proc_macro2`, because
+ /// that ensures the libraries are unit testable and accessible in non-macro
+ /// contexts.
+ ///
+ /// There is a [`From`]-conversion in both directions so returning the output of
+ /// `quote!` from a procedural macro usually looks like `tokens.into()` or
+ /// `proc_macro::TokenStream::from(tokens)`.
+ ///
+ /// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
+ ///
+ /// <br>
+ ///
+ /// # Examples
+ ///
+ /// ### Procedural macro
+ ///
+ /// The structure of a basic procedural macro is as follows. Refer to the [Syn]
+ /// crate for further useful guidance on using `quote!` as part of a procedural
+ /// macro.
+ ///
+ /// [Syn]: https://github.com/dtolnay/syn
+ ///
+ /// ```
+ /// # #[cfg(any())]
+ /// extern crate proc_macro;
+ /// # extern crate proc_macro2;
+ ///
+ /// # #[cfg(any())]
+ /// use proc_macro::TokenStream;
+ /// # use proc_macro2::TokenStream;
+ /// use quote::quote;
+ ///
+ /// # const IGNORE_TOKENS: &'static str = stringify! {
+ /// #[proc_macro_derive(HeapSize)]
+ /// # };
+ /// pub fn derive_heap_size(input: TokenStream) -> TokenStream {
+ /// // Parse the input and figure out what implementation to generate...
+ /// # const IGNORE_TOKENS: &'static str = stringify! {
+ /// let name = /* ... */;
+ /// let expr = /* ... */;
+ /// # };
+ /// #
+ /// # let name = 0;
+ /// # let expr = 0;
+ ///
+ /// let expanded = quote! {
+ /// // The generated impl.
+ /// impl heapsize::HeapSize for #name {
+ /// fn heap_size_of_children(&self) -> usize {
+ /// #expr
+ /// }
+ /// }
+ /// };
+ ///
+ /// // Hand the output tokens back to the compiler.
+ /// TokenStream::from(expanded)
+ /// }
+ /// ```
+ ///
+ /// <p><br></p>
+ ///
+ /// ### Combining quoted fragments
+ ///
+ /// Usually you don't end up constructing an entire final `TokenStream` in one
+ /// piece. Different parts may come from different helper functions. The tokens
+ /// produced by `quote!` themselves implement `ToTokens` and so can be
+ /// interpolated into later `quote!` invocations to build up a final result.
+ ///
+ /// ```
+ /// # use quote::quote;
+ /// #
+ /// let type_definition = quote! {...};
+ /// let methods = quote! {...};
+ ///
+ /// let tokens = quote! {
+ /// #type_definition
+ /// #methods
+ /// };
+ /// ```
+ ///
+ /// <p><br></p>
+ ///
+ /// ### Constructing identifiers
+ ///
+ /// Suppose we have an identifier `ident` which came from somewhere in a macro
+ /// input and we need to modify it in some way for the macro output. Let's
+ /// consider prepending the identifier with an underscore.
+ ///
+ /// Simply interpolating the identifier next to an underscore will not have the
+ /// behavior of concatenating them. The underscore and the identifier will
+ /// continue to be two separate tokens as if you had written `_ x`.
+ ///
+ /// ```
+ /// # use proc_macro2::{self as syn, Span};
+ /// # use quote::quote;
+ /// #
+ /// # let ident = syn::Ident::new("i", Span::call_site());
+ /// #
+ /// // incorrect
+ /// quote! {
+ /// let mut _#ident = 0;
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// The solution is to build a new identifier token with the correct value. As
+ /// this is such a common case, the [`format_ident!`] macro provides a
+ /// convenient utility for doing so correctly.
+ ///
+ /// ```
+ /// # use proc_macro2::{Ident, Span};
+ /// # use quote::{format_ident, quote};
+ /// #
+ /// # let ident = Ident::new("i", Span::call_site());
+ /// #
+ /// let varname = format_ident!("_{}", ident);
+ /// quote! {
+ /// let mut #varname = 0;
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// Alternatively, the APIs provided by Syn and proc-macro2 can be used to
+ /// directly build the identifier. This is roughly equivalent to the above, but
+ /// will not handle `ident` being a raw identifier.
+ ///
+ /// ```
+ /// # use proc_macro2::{self as syn, Span};
+ /// # use quote::quote;
+ /// #
+ /// # let ident = syn::Ident::new("i", Span::call_site());
+ /// #
+ /// let concatenated = format!("_{}", ident);
+ /// let varname = syn::Ident::new(&concatenated, ident.span());
+ /// quote! {
+ /// let mut #varname = 0;
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// <p><br></p>
+ ///
+ /// ### Making method calls
+ ///
+ /// Let's say our macro requires some type specified in the macro input to have
+ /// a constructor called `new`. We have the type in a variable called
+ /// `field_type` of type `syn::Type` and want to invoke the constructor.
+ ///
+ /// ```
+ /// # use quote::quote;
+ /// #
+ /// # let field_type = quote!(...);
+ /// #
+ /// // incorrect
+ /// quote! {
+ /// let value = #field_type::new();
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// This works only sometimes. If `field_type` is `String`, the expanded code
+ /// contains `String::new()` which is fine. But if `field_type` is something
+ /// like `Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid
+ /// syntax. Ordinarily in handwritten Rust we would write `Vec::<i32>::new()`
+ /// but for macros often the following is more convenient.
+ ///
+ /// ```
+ /// # use quote::quote;
+ /// #
+ /// # let field_type = quote!(...);
+ /// #
+ /// quote! {
+ /// let value = <#field_type>::new();
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// This expands to `<Vec<i32>>::new()` which behaves correctly.
+ ///
+ /// A similar pattern is appropriate for trait methods.
+ ///
+ /// ```
+ /// # use quote::quote;
+ /// #
+ /// # let field_type = quote!(...);
+ /// #
+ /// quote! {
+ /// let value = <#field_type as core::default::Default>::default();
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// <p><br></p>
+ ///
+ /// ### Interpolating text inside of doc comments
+ ///
+ /// Neither doc comments nor string literals get interpolation behavior in
+ /// quote:
+ ///
+ /// ```compile_fail
+ /// quote! {
+ /// /// try to interpolate: #ident
+ /// ///
+ /// /// ...
+ /// }
+ /// ```
+ ///
+ /// ```compile_fail
+ /// quote! {
+ /// #[doc = "try to interpolate: #ident"]
+ /// }
+ /// ```
+ ///
+ /// Instead the best way to build doc comments that involve variables is by
+ /// formatting the doc string literal outside of quote.
+ ///
+ /// ```rust
+ /// # use proc_macro2::{Ident, Span};
+ /// # use quote::quote;
+ /// #
+ /// # const IGNORE: &str = stringify! {
+ /// let msg = format!(...);
+ /// # };
+ /// #
+ /// # let ident = Ident::new("var", Span::call_site());
+ /// # let msg = format!("try to interpolate: {}", ident);
+ /// quote! {
+ /// #[doc = #msg]
+ /// ///
+ /// /// ...
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// <p><br></p>
+ ///
+ /// ### Indexing into a tuple struct
+ ///
+ /// When interpolating indices of a tuple or tuple struct, we need them not to
+ /// appear suffixed as integer literals by interpolating them as [`syn::Index`]
+ /// instead.
+ ///
+ /// [`syn::Index`]: https://docs.rs/syn/2.0/syn/struct.Index.html
+ ///
+ /// ```compile_fail
+ /// let i = 0usize..self.fields.len();
+ ///
+ /// // expands to 0 + self.0usize.heap_size() + self.1usize.heap_size() + ...
+ /// // which is not valid syntax
+ /// quote! {
+ /// 0 #( + self.#i.heap_size() )*
+ /// }
+ /// ```
+ ///
+ /// ```
+ /// # use proc_macro2::{Ident, TokenStream};
+ /// # use quote::quote;
+ /// #
+ /// # mod syn {
+ /// # use proc_macro2::{Literal, TokenStream};
+ /// # use quote::{ToTokens, TokenStreamExt};
+ /// #
+ /// # pub struct Index(usize);
+ /// #
+ /// # impl From<usize> for Index {
+ /// # fn from(i: usize) -> Self {
+ /// # Index(i)
+ /// # }
+ /// # }
+ /// #
+ /// # impl ToTokens for Index {
+ /// # fn to_tokens(&self, tokens: &mut TokenStream) {
+ /// # tokens.append(Literal::usize_unsuffixed(self.0));
+ /// # }
+ /// # }
+ /// # }
+ /// #
+ /// # struct Struct {
+ /// # fields: Vec<Ident>,
+ /// # }
+ /// #
+ /// # impl Struct {
+ /// # fn example(&self) -> TokenStream {
+ /// let i = (0..self.fields.len()).map(syn::Index::from);
+ ///
+ /// // expands to 0 + self.0.heap_size() + self.1.heap_size() + ...
+ /// quote! {
+ /// 0 #( + self.#i.heap_size() )*
+ /// }
+ /// # }
+ /// # }
+ /// ```
+ $quote
+ };
+}
+
+#[cfg(doc)]
+__quote![
+ #[macro_export]
+ macro_rules! quote {
+ ($($tt:tt)*) => {
+ ...
+ };
+ }
+];
+
+#[cfg(not(doc))]
+__quote![
+ #[macro_export]
+ macro_rules! quote {
+ () => {
+ $crate::__private::TokenStream::new()
+ };
+
+ // Special case rule for a single tt, for performance.
+ ($tt:tt) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ $crate::quote_token!{$tt _s}
+ _s
+ }};
+
+ // Special case rules for two tts, for performance.
+ (# $var:ident) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ $crate::ToTokens::to_tokens(&$var, &mut _s);
+ _s
+ }};
+ ($tt1:tt $tt2:tt) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ $crate::quote_token!{$tt1 _s}
+ $crate::quote_token!{$tt2 _s}
+ _s
+ }};
+
+ // Rule for any other number of tokens.
+ ($($tt:tt)*) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ $crate::quote_each_token!{_s $($tt)*}
+ _s
+ }};
+ }
+];
+
+macro_rules! __quote_spanned {
+ ($quote_spanned:item) => {
+ /// Same as `quote!`, but applies a given span to all tokens originating within
+ /// the macro invocation.
+ ///
+ /// <br>
+ ///
+ /// # Syntax
+ ///
+ /// A span expression of type [`Span`], followed by `=>`, followed by the tokens
+ /// to quote. The span expression should be brief — use a variable for
+ /// anything more than a few characters. There should be no space before the
+ /// `=>` token.
+ ///
+ /// [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+ ///
+ /// ```
+ /// # use proc_macro2::Span;
+ /// # use quote::quote_spanned;
+ /// #
+ /// # const IGNORE_TOKENS: &'static str = stringify! {
+ /// let span = /* ... */;
+ /// # };
+ /// # let span = Span::call_site();
+ /// # let init = 0;
+ ///
+ /// // On one line, use parentheses.
+ /// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init)));
+ ///
+ /// // On multiple lines, place the span at the top and use braces.
+ /// let tokens = quote_spanned! {span=>
+ /// Box::into_raw(Box::new(#init))
+ /// };
+ /// ```
+ ///
+ /// The lack of space before the `=>` should look jarring to Rust programmers
+ /// and this is intentional. The formatting is designed to be visibly
+ /// off-balance and draw the eye a particular way, due to the span expression
+ /// being evaluated in the context of the procedural macro and the remaining
+ /// tokens being evaluated in the generated code.
+ ///
+ /// <br>
+ ///
+ /// # Hygiene
+ ///
+ /// Any interpolated tokens preserve the `Span` information provided by their
+ /// `ToTokens` implementation. Tokens that originate within the `quote_spanned!`
+ /// invocation are spanned with the given span argument.
+ ///
+ /// <br>
+ ///
+ /// # Example
+ ///
+ /// The following procedural macro code uses `quote_spanned!` to assert that a
+ /// particular Rust type implements the [`Sync`] trait so that references can be
+ /// safely shared between threads.
+ ///
+ /// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
+ ///
+ /// ```
+ /// # use quote::{quote_spanned, TokenStreamExt, ToTokens};
+ /// # use proc_macro2::{Span, TokenStream};
+ /// #
+ /// # struct Type;
+ /// #
+ /// # impl Type {
+ /// # fn span(&self) -> Span {
+ /// # Span::call_site()
+ /// # }
+ /// # }
+ /// #
+ /// # impl ToTokens for Type {
+ /// # fn to_tokens(&self, _tokens: &mut TokenStream) {}
+ /// # }
+ /// #
+ /// # let ty = Type;
+ /// # let call_site = Span::call_site();
+ /// #
+ /// let ty_span = ty.span();
+ /// let assert_sync = quote_spanned! {ty_span=>
+ /// struct _AssertSync where #ty: Sync;
+ /// };
+ /// ```
+ ///
+ /// If the assertion fails, the user will see an error like the following. The
+ /// input span of their type is highlighted in the error.
+ ///
+ /// ```text
+ /// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied
+ /// --> src/main.rs:10:21
+ /// |
+ /// 10 | static ref PTR: *const () = &();
+ /// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely
+ /// ```
+ ///
+ /// In this example it is important for the where-clause to be spanned with the
+ /// line/column information of the user's input type so that error messages are
+ /// placed appropriately by the compiler.
+ $quote_spanned
+ };
+}
+
+#[cfg(doc)]
+__quote_spanned![
+ #[macro_export]
+ macro_rules! quote_spanned {
+ ($span:expr=> $($tt:tt)*) => {
+ ...
+ };
+ }
+];
+
+#[cfg(not(doc))]
+__quote_spanned![
+ #[macro_export]
+ macro_rules! quote_spanned {
+ ($span:expr=>) => {{
+ let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
+ $crate::__private::TokenStream::new()
+ }};
+
+ // Special case rule for a single tt, for performance.
+ ($span:expr=> $tt:tt) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
+ $crate::quote_token_spanned!{$tt _s _span}
+ _s
+ }};
+
+ // Special case rules for two tts, for performance.
+ ($span:expr=> # $var:ident) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
+ $crate::ToTokens::to_tokens(&$var, &mut _s);
+ _s
+ }};
+ ($span:expr=> $tt1:tt $tt2:tt) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
+ $crate::quote_token_spanned!{$tt1 _s _span}
+ $crate::quote_token_spanned!{$tt2 _s _span}
+ _s
+ }};
+
+ // Rule for any other number of tokens.
+ ($span:expr=> $($tt:tt)*) => {{
+ let mut _s = $crate::__private::TokenStream::new();
+ let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
+ $crate::quote_each_token_spanned!{_s _span $($tt)*}
+ _s
+ }};
+ }
+];
+
+// Extract the names of all #metavariables and pass them to the $call macro.
+//
+// in: pounded_var_names!(then!(...) a #b c #( #d )* #e)
+// out: then!(... b);
+// then!(... d);
+// then!(... e);
+#[macro_export]
+#[doc(hidden)]
+macro_rules! pounded_var_names {
+ ($call:ident! $extra:tt $($tts:tt)*) => {
+ $crate::pounded_var_names_with_context!{$call! $extra
+ (@ $($tts)*)
+ ($($tts)* @)
+ }
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! pounded_var_names_with_context {
+ ($call:ident! $extra:tt ($($b1:tt)*) ($($curr:tt)*)) => {
+ $(
+ $crate::pounded_var_with_context!{$call! $extra $b1 $curr}
+ )*
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! pounded_var_with_context {
+ ($call:ident! $extra:tt $b1:tt ( $($inner:tt)* )) => {
+ $crate::pounded_var_names!{$call! $extra $($inner)*}
+ };
+
+ ($call:ident! $extra:tt $b1:tt [ $($inner:tt)* ]) => {
+ $crate::pounded_var_names!{$call! $extra $($inner)*}
+ };
+
+ ($call:ident! $extra:tt $b1:tt { $($inner:tt)* }) => {
+ $crate::pounded_var_names!{$call! $extra $($inner)*}
+ };
+
+ ($call:ident!($($extra:tt)*) # $var:ident) => {
+ $crate::$call!($($extra)* $var);
+ };
+
+ ($call:ident! $extra:tt $b1:tt $curr:tt) => {};
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_bind_into_iter {
+ ($has_iter:ident $var:ident) => {
+ // `mut` may be unused if $var occurs multiple times in the list.
+ #[allow(unused_mut)]
+ let (mut $var, i) = $var.quote_into_iter();
+ let $has_iter = $has_iter | i;
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_bind_next_or_break {
+ ($var:ident) => {
+ let $var = match $var.next() {
+ Some(_x) => $crate::__private::RepInterp(_x),
+ None => break,
+ };
+ };
+}
+
+// The obvious way to write this macro is as a tt muncher. This implementation
+// does something more complex for two reasons.
+//
+// - With a tt muncher it's easy to hit Rust's built-in recursion_limit, which
+// this implementation avoids because it isn't tail recursive.
+//
+// - Compile times for a tt muncher are quadratic relative to the length of
+// the input. This implementation is linear, so it will be faster
+// (potentially much faster) for big inputs. However, the constant factors
+// of this implementation are higher than that of a tt muncher, so it is
+// somewhat slower than a tt muncher if there are many invocations with
+// short inputs.
+//
+// An invocation like this:
+//
+// quote_each_token!(_s a b c d e f g h i j);
+//
+// expands to this:
+//
+// quote_tokens_with_context!(_s
+// (@ @ @ @ @ @ a b c d e f g h i j)
+// (@ @ @ @ @ a b c d e f g h i j @)
+// (@ @ @ @ a b c d e f g h i j @ @)
+// (@ @ @ (a) (b) (c) (d) (e) (f) (g) (h) (i) (j) @ @ @)
+// (@ @ a b c d e f g h i j @ @ @ @)
+// (@ a b c d e f g h i j @ @ @ @ @)
+// (a b c d e f g h i j @ @ @ @ @ @)
+// );
+//
+// which gets transposed and expanded to this:
+//
+// quote_token_with_context!(_s @ @ @ @ @ @ a);
+// quote_token_with_context!(_s @ @ @ @ @ a b);
+// quote_token_with_context!(_s @ @ @ @ a b c);
+// quote_token_with_context!(_s @ @ @ (a) b c d);
+// quote_token_with_context!(_s @ @ a (b) c d e);
+// quote_token_with_context!(_s @ a b (c) d e f);
+// quote_token_with_context!(_s a b c (d) e f g);
+// quote_token_with_context!(_s b c d (e) f g h);
+// quote_token_with_context!(_s c d e (f) g h i);
+// quote_token_with_context!(_s d e f (g) h i j);
+// quote_token_with_context!(_s e f g (h) i j @);
+// quote_token_with_context!(_s f g h (i) j @ @);
+// quote_token_with_context!(_s g h i (j) @ @ @);
+// quote_token_with_context!(_s h i j @ @ @ @);
+// quote_token_with_context!(_s i j @ @ @ @ @);
+// quote_token_with_context!(_s j @ @ @ @ @ @);
+//
+// Without having used muncher-style recursion, we get one invocation of
+// quote_token_with_context for each original tt, with three tts of context on
+// either side. This is enough for the longest possible interpolation form (a
+// repetition with separator, as in `# (#var) , *`) to be fully represented with
+// the first or last tt in the middle.
+//
+// The middle tt (surrounded by parentheses) is the tt being processed.
+//
+// - When it is a `#`, quote_token_with_context can do an interpolation. The
+// interpolation kind will depend on the three subsequent tts.
+//
+// - When it is within a later part of an interpolation, it can be ignored
+// because the interpolation has already been done.
+//
+// - When it is not part of an interpolation it can be pushed as a single
+// token into the output.
+//
+// - When the middle token is an unparenthesized `@`, that call is one of the
+// first 3 or last 3 calls of quote_token_with_context and does not
+// correspond to one of the original input tokens, so turns into nothing.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_each_token {
+ ($tokens:ident $($tts:tt)*) => {
+ $crate::quote_tokens_with_context!{$tokens
+ (@ @ @ @ @ @ $($tts)*)
+ (@ @ @ @ @ $($tts)* @)
+ (@ @ @ @ $($tts)* @ @)
+ (@ @ @ $(($tts))* @ @ @)
+ (@ @ $($tts)* @ @ @ @)
+ (@ $($tts)* @ @ @ @ @)
+ ($($tts)* @ @ @ @ @ @)
+ }
+ };
+}
+
+// See the explanation on quote_each_token.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_each_token_spanned {
+ ($tokens:ident $span:ident $($tts:tt)*) => {
+ $crate::quote_tokens_with_context_spanned!{$tokens $span
+ (@ @ @ @ @ @ $($tts)*)
+ (@ @ @ @ @ $($tts)* @)
+ (@ @ @ @ $($tts)* @ @)
+ (@ @ @ $(($tts))* @ @ @)
+ (@ @ $($tts)* @ @ @ @)
+ (@ $($tts)* @ @ @ @ @)
+ ($($tts)* @ @ @ @ @ @)
+ }
+ };
+}
+
+// See the explanation on quote_each_token.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_tokens_with_context {
+ ($tokens:ident
+ ($($b3:tt)*) ($($b2:tt)*) ($($b1:tt)*)
+ ($($curr:tt)*)
+ ($($a1:tt)*) ($($a2:tt)*) ($($a3:tt)*)
+ ) => {
+ $(
+ $crate::quote_token_with_context!{$tokens $b3 $b2 $b1 $curr $a1 $a2 $a3}
+ )*
+ };
+}
+
+// See the explanation on quote_each_token.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_tokens_with_context_spanned {
+ ($tokens:ident $span:ident
+ ($($b3:tt)*) ($($b2:tt)*) ($($b1:tt)*)
+ ($($curr:tt)*)
+ ($($a1:tt)*) ($($a2:tt)*) ($($a3:tt)*)
+ ) => {
+ $(
+ $crate::quote_token_with_context_spanned!{$tokens $span $b3 $b2 $b1 $curr $a1 $a2 $a3}
+ )*
+ };
+}
+
+// See the explanation on quote_each_token.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_token_with_context {
+ // Unparenthesized `@` indicates this call does not correspond to one of the
+ // original input tokens. Ignore it.
+ ($tokens:ident $b3:tt $b2:tt $b1:tt @ $a1:tt $a2:tt $a3:tt) => {};
+
+ // A repetition with no separator.
+ ($tokens:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) * $a3:tt) => {{
+ use $crate::__private::ext::*;
+ let has_iter = $crate::__private::ThereIsNoIteratorInRepetition;
+ $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*}
+ let _: $crate::__private::HasIterator = has_iter;
+ // This is `while true` instead of `loop` because if there are no
+ // iterators used inside of this repetition then the body would not
+ // contain any `break`, so the compiler would emit unreachable code
+ // warnings on anything below the loop. We use has_iter to detect and
+ // fail to compile when there are no iterators, so here we just work
+ // around the unneeded extra warning.
+ while true {
+ $crate::pounded_var_names!{quote_bind_next_or_break!() () $($inner)*}
+ $crate::quote_each_token!{$tokens $($inner)*}
+ }
+ }};
+ // ... and one step later.
+ ($tokens:ident $b3:tt $b2:tt # (( $($inner:tt)* )) * $a2:tt $a3:tt) => {};
+ // ... and one step later.
+ ($tokens:ident $b3:tt # ( $($inner:tt)* ) (*) $a1:tt $a2:tt $a3:tt) => {};
+
+ // A repetition with separator.
+ ($tokens:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) $sep:tt *) => {{
+ use $crate::__private::ext::*;
+ let mut _i = 0usize;
+ let has_iter = $crate::__private::ThereIsNoIteratorInRepetition;
+ $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*}
+ let _: $crate::__private::HasIterator = has_iter;
+ while true {
+ $crate::pounded_var_names!{quote_bind_next_or_break!() () $($inner)*}
+ if _i > 0 {
+ $crate::quote_token!{$sep $tokens}
+ }
+ _i += 1;
+ $crate::quote_each_token!{$tokens $($inner)*}
+ }
+ }};
+ // ... and one step later.
+ ($tokens:ident $b3:tt $b2:tt # (( $($inner:tt)* )) $sep:tt * $a3:tt) => {};
+ // ... and one step later.
+ ($tokens:ident $b3:tt # ( $($inner:tt)* ) ($sep:tt) * $a2:tt $a3:tt) => {};
+ // (A special case for `#(var)**`, where the first `*` is treated as the
+ // repetition symbol and the second `*` is treated as an ordinary token.)
+ ($tokens:ident # ( $($inner:tt)* ) * (*) $a1:tt $a2:tt $a3:tt) => {
+ // https://github.com/dtolnay/quote/issues/130
+ $crate::quote_token!{* $tokens}
+ };
+ // ... and one step later.
+ ($tokens:ident # ( $($inner:tt)* ) $sep:tt (*) $a1:tt $a2:tt $a3:tt) => {};
+
+ // A non-repetition interpolation.
+ ($tokens:ident $b3:tt $b2:tt $b1:tt (#) $var:ident $a2:tt $a3:tt) => {
+ $crate::ToTokens::to_tokens(&$var, &mut $tokens);
+ };
+ // ... and one step later.
+ ($tokens:ident $b3:tt $b2:tt # ($var:ident) $a1:tt $a2:tt $a3:tt) => {};
+
+ // An ordinary token, not part of any interpolation.
+ ($tokens:ident $b3:tt $b2:tt $b1:tt ($curr:tt) $a1:tt $a2:tt $a3:tt) => {
+ $crate::quote_token!{$curr $tokens}
+ };
+}
+
+// See the explanation on quote_each_token, and on the individual rules of
+// quote_token_with_context.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_token_with_context_spanned {
+ ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt @ $a1:tt $a2:tt $a3:tt) => {};
+
+ ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) * $a3:tt) => {{
+ use $crate::__private::ext::*;
+ let has_iter = $crate::__private::ThereIsNoIteratorInRepetition;
+ $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*}
+ let _: $crate::__private::HasIterator = has_iter;
+ while true {
+ $crate::pounded_var_names!{quote_bind_next_or_break!() () $($inner)*}
+ $crate::quote_each_token_spanned!{$tokens $span $($inner)*}
+ }
+ }};
+ ($tokens:ident $span:ident $b3:tt $b2:tt # (( $($inner:tt)* )) * $a2:tt $a3:tt) => {};
+ ($tokens:ident $span:ident $b3:tt # ( $($inner:tt)* ) (*) $a1:tt $a2:tt $a3:tt) => {};
+
+ ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) $sep:tt *) => {{
+ use $crate::__private::ext::*;
+ let mut _i = 0usize;
+ let has_iter = $crate::__private::ThereIsNoIteratorInRepetition;
+ $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*}
+ let _: $crate::__private::HasIterator = has_iter;
+ while true {
+ $crate::pounded_var_names!{quote_bind_next_or_break!() () $($inner)*}
+ if _i > 0 {
+ $crate::quote_token_spanned!{$sep $tokens $span}
+ }
+ _i += 1;
+ $crate::quote_each_token_spanned!{$tokens $span $($inner)*}
+ }
+ }};
+ ($tokens:ident $span:ident $b3:tt $b2:tt # (( $($inner:tt)* )) $sep:tt * $a3:tt) => {};
+ ($tokens:ident $span:ident $b3:tt # ( $($inner:tt)* ) ($sep:tt) * $a2:tt $a3:tt) => {};
+ ($tokens:ident $span:ident # ( $($inner:tt)* ) * (*) $a1:tt $a2:tt $a3:tt) => {
+ // https://github.com/dtolnay/quote/issues/130
+ $crate::quote_token_spanned!{* $tokens $span}
+ };
+ ($tokens:ident $span:ident # ( $($inner:tt)* ) $sep:tt (*) $a1:tt $a2:tt $a3:tt) => {};
+
+ ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) $var:ident $a2:tt $a3:tt) => {
+ $crate::ToTokens::to_tokens(&$var, &mut $tokens);
+ };
+ ($tokens:ident $span:ident $b3:tt $b2:tt # ($var:ident) $a1:tt $a2:tt $a3:tt) => {};
+
+ ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt ($curr:tt) $a1:tt $a2:tt $a3:tt) => {
+ $crate::quote_token_spanned!{$curr $tokens $span}
+ };
+}
+
+// These rules are ordered by approximate token frequency, at least for the
+// first 10 or so, to improve compile times. Having `ident` first is by far the
+// most important because it's typically 2-3x more common than the next most
+// common token.
+//
+// Separately, we put the token being matched in the very front so that failing
+// rules may fail to match as quickly as possible.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_token {
+ ($ident:ident $tokens:ident) => {
+ $crate::__private::push_ident(&mut $tokens, stringify!($ident));
+ };
+
+ (:: $tokens:ident) => {
+ $crate::__private::push_colon2(&mut $tokens);
+ };
+
+ (( $($inner:tt)* ) $tokens:ident) => {
+ $crate::__private::push_group(
+ &mut $tokens,
+ $crate::__private::Delimiter::Parenthesis,
+ $crate::quote!($($inner)*),
+ );
+ };
+
+ ([ $($inner:tt)* ] $tokens:ident) => {
+ $crate::__private::push_group(
+ &mut $tokens,
+ $crate::__private::Delimiter::Bracket,
+ $crate::quote!($($inner)*),
+ );
+ };
+
+ ({ $($inner:tt)* } $tokens:ident) => {
+ $crate::__private::push_group(
+ &mut $tokens,
+ $crate::__private::Delimiter::Brace,
+ $crate::quote!($($inner)*),
+ );
+ };
+
+ (# $tokens:ident) => {
+ $crate::__private::push_pound(&mut $tokens);
+ };
+
+ (, $tokens:ident) => {
+ $crate::__private::push_comma(&mut $tokens);
+ };
+
+ (. $tokens:ident) => {
+ $crate::__private::push_dot(&mut $tokens);
+ };
+
+ (; $tokens:ident) => {
+ $crate::__private::push_semi(&mut $tokens);
+ };
+
+ (: $tokens:ident) => {
+ $crate::__private::push_colon(&mut $tokens);
+ };
+
+ (+ $tokens:ident) => {
+ $crate::__private::push_add(&mut $tokens);
+ };
+
+ (+= $tokens:ident) => {
+ $crate::__private::push_add_eq(&mut $tokens);
+ };
+
+ (& $tokens:ident) => {
+ $crate::__private::push_and(&mut $tokens);
+ };
+
+ (&& $tokens:ident) => {
+ $crate::__private::push_and_and(&mut $tokens);
+ };
+
+ (&= $tokens:ident) => {
+ $crate::__private::push_and_eq(&mut $tokens);
+ };
+
+ (@ $tokens:ident) => {
+ $crate::__private::push_at(&mut $tokens);
+ };
+
+ (! $tokens:ident) => {
+ $crate::__private::push_bang(&mut $tokens);
+ };
+
+ (^ $tokens:ident) => {
+ $crate::__private::push_caret(&mut $tokens);
+ };
+
+ (^= $tokens:ident) => {
+ $crate::__private::push_caret_eq(&mut $tokens);
+ };
+
+ (/ $tokens:ident) => {
+ $crate::__private::push_div(&mut $tokens);
+ };
+
+ (/= $tokens:ident) => {
+ $crate::__private::push_div_eq(&mut $tokens);
+ };
+
+ (.. $tokens:ident) => {
+ $crate::__private::push_dot2(&mut $tokens);
+ };
+
+ (... $tokens:ident) => {
+ $crate::__private::push_dot3(&mut $tokens);
+ };
+
+ (..= $tokens:ident) => {
+ $crate::__private::push_dot_dot_eq(&mut $tokens);
+ };
+
+ (= $tokens:ident) => {
+ $crate::__private::push_eq(&mut $tokens);
+ };
+
+ (== $tokens:ident) => {
+ $crate::__private::push_eq_eq(&mut $tokens);
+ };
+
+ (>= $tokens:ident) => {
+ $crate::__private::push_ge(&mut $tokens);
+ };
+
+ (> $tokens:ident) => {
+ $crate::__private::push_gt(&mut $tokens);
+ };
+
+ (<= $tokens:ident) => {
+ $crate::__private::push_le(&mut $tokens);
+ };
+
+ (< $tokens:ident) => {
+ $crate::__private::push_lt(&mut $tokens);
+ };
+
+ (*= $tokens:ident) => {
+ $crate::__private::push_mul_eq(&mut $tokens);
+ };
+
+ (!= $tokens:ident) => {
+ $crate::__private::push_ne(&mut $tokens);
+ };
+
+ (| $tokens:ident) => {
+ $crate::__private::push_or(&mut $tokens);
+ };
+
+ (|= $tokens:ident) => {
+ $crate::__private::push_or_eq(&mut $tokens);
+ };
+
+ (|| $tokens:ident) => {
+ $crate::__private::push_or_or(&mut $tokens);
+ };
+
+ (? $tokens:ident) => {
+ $crate::__private::push_question(&mut $tokens);
+ };
+
+ (-> $tokens:ident) => {
+ $crate::__private::push_rarrow(&mut $tokens);
+ };
+
+ (<- $tokens:ident) => {
+ $crate::__private::push_larrow(&mut $tokens);
+ };
+
+ (% $tokens:ident) => {
+ $crate::__private::push_rem(&mut $tokens);
+ };
+
+ (%= $tokens:ident) => {
+ $crate::__private::push_rem_eq(&mut $tokens);
+ };
+
+ (=> $tokens:ident) => {
+ $crate::__private::push_fat_arrow(&mut $tokens);
+ };
+
+ (<< $tokens:ident) => {
+ $crate::__private::push_shl(&mut $tokens);
+ };
+
+ (<<= $tokens:ident) => {
+ $crate::__private::push_shl_eq(&mut $tokens);
+ };
+
+ (>> $tokens:ident) => {
+ $crate::__private::push_shr(&mut $tokens);
+ };
+
+ (>>= $tokens:ident) => {
+ $crate::__private::push_shr_eq(&mut $tokens);
+ };
+
+ (* $tokens:ident) => {
+ $crate::__private::push_star(&mut $tokens);
+ };
+
+ (- $tokens:ident) => {
+ $crate::__private::push_sub(&mut $tokens);
+ };
+
+ (-= $tokens:ident) => {
+ $crate::__private::push_sub_eq(&mut $tokens);
+ };
+
+ ($lifetime:lifetime $tokens:ident) => {
+ $crate::__private::push_lifetime(&mut $tokens, stringify!($lifetime));
+ };
+
+ (_ $tokens:ident) => {
+ $crate::__private::push_underscore(&mut $tokens);
+ };
+
+ ($other:tt $tokens:ident) => {
+ $crate::__private::parse(&mut $tokens, stringify!($other));
+ };
+}
+
+// See the comment above `quote_token!` about the rule ordering.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_token_spanned {
+ ($ident:ident $tokens:ident $span:ident) => {
+ $crate::__private::push_ident_spanned(&mut $tokens, $span, stringify!($ident));
+ };
+
+ (:: $tokens:ident $span:ident) => {
+ $crate::__private::push_colon2_spanned(&mut $tokens, $span);
+ };
+
+ (( $($inner:tt)* ) $tokens:ident $span:ident) => {
+ $crate::__private::push_group_spanned(
+ &mut $tokens,
+ $span,
+ $crate::__private::Delimiter::Parenthesis,
+ $crate::quote_spanned!($span=> $($inner)*),
+ );
+ };
+
+ ([ $($inner:tt)* ] $tokens:ident $span:ident) => {
+ $crate::__private::push_group_spanned(
+ &mut $tokens,
+ $span,
+ $crate::__private::Delimiter::Bracket,
+ $crate::quote_spanned!($span=> $($inner)*),
+ );
+ };
+
+ ({ $($inner:tt)* } $tokens:ident $span:ident) => {
+ $crate::__private::push_group_spanned(
+ &mut $tokens,
+ $span,
+ $crate::__private::Delimiter::Brace,
+ $crate::quote_spanned!($span=> $($inner)*),
+ );
+ };
+
+ (# $tokens:ident $span:ident) => {
+ $crate::__private::push_pound_spanned(&mut $tokens, $span);
+ };
+
+ (, $tokens:ident $span:ident) => {
+ $crate::__private::push_comma_spanned(&mut $tokens, $span);
+ };
+
+ (. $tokens:ident $span:ident) => {
+ $crate::__private::push_dot_spanned(&mut $tokens, $span);
+ };
+
+ (; $tokens:ident $span:ident) => {
+ $crate::__private::push_semi_spanned(&mut $tokens, $span);
+ };
+
+ (: $tokens:ident $span:ident) => {
+ $crate::__private::push_colon_spanned(&mut $tokens, $span);
+ };
+
+ (+ $tokens:ident $span:ident) => {
+ $crate::__private::push_add_spanned(&mut $tokens, $span);
+ };
+
+ (+= $tokens:ident $span:ident) => {
+ $crate::__private::push_add_eq_spanned(&mut $tokens, $span);
+ };
+
+ (& $tokens:ident $span:ident) => {
+ $crate::__private::push_and_spanned(&mut $tokens, $span);
+ };
+
+ (&& $tokens:ident $span:ident) => {
+ $crate::__private::push_and_and_spanned(&mut $tokens, $span);
+ };
+
+ (&= $tokens:ident $span:ident) => {
+ $crate::__private::push_and_eq_spanned(&mut $tokens, $span);
+ };
+
+ (@ $tokens:ident $span:ident) => {
+ $crate::__private::push_at_spanned(&mut $tokens, $span);
+ };
+
+ (! $tokens:ident $span:ident) => {
+ $crate::__private::push_bang_spanned(&mut $tokens, $span);
+ };
+
+ (^ $tokens:ident $span:ident) => {
+ $crate::__private::push_caret_spanned(&mut $tokens, $span);
+ };
+
+ (^= $tokens:ident $span:ident) => {
+ $crate::__private::push_caret_eq_spanned(&mut $tokens, $span);
+ };
+
+ (/ $tokens:ident $span:ident) => {
+ $crate::__private::push_div_spanned(&mut $tokens, $span);
+ };
+
+ (/= $tokens:ident $span:ident) => {
+ $crate::__private::push_div_eq_spanned(&mut $tokens, $span);
+ };
+
+ (.. $tokens:ident $span:ident) => {
+ $crate::__private::push_dot2_spanned(&mut $tokens, $span);
+ };
+
+ (... $tokens:ident $span:ident) => {
+ $crate::__private::push_dot3_spanned(&mut $tokens, $span);
+ };
+
+ (..= $tokens:ident $span:ident) => {
+ $crate::__private::push_dot_dot_eq_spanned(&mut $tokens, $span);
+ };
+
+ (= $tokens:ident $span:ident) => {
+ $crate::__private::push_eq_spanned(&mut $tokens, $span);
+ };
+
+ (== $tokens:ident $span:ident) => {
+ $crate::__private::push_eq_eq_spanned(&mut $tokens, $span);
+ };
+
+ (>= $tokens:ident $span:ident) => {
+ $crate::__private::push_ge_spanned(&mut $tokens, $span);
+ };
+
+ (> $tokens:ident $span:ident) => {
+ $crate::__private::push_gt_spanned(&mut $tokens, $span);
+ };
+
+ (<= $tokens:ident $span:ident) => {
+ $crate::__private::push_le_spanned(&mut $tokens, $span);
+ };
+
+ (< $tokens:ident $span:ident) => {
+ $crate::__private::push_lt_spanned(&mut $tokens, $span);
+ };
+
+ (*= $tokens:ident $span:ident) => {
+ $crate::__private::push_mul_eq_spanned(&mut $tokens, $span);
+ };
+
+ (!= $tokens:ident $span:ident) => {
+ $crate::__private::push_ne_spanned(&mut $tokens, $span);
+ };
+
+ (| $tokens:ident $span:ident) => {
+ $crate::__private::push_or_spanned(&mut $tokens, $span);
+ };
+
+ (|= $tokens:ident $span:ident) => {
+ $crate::__private::push_or_eq_spanned(&mut $tokens, $span);
+ };
+
+ (|| $tokens:ident $span:ident) => {
+ $crate::__private::push_or_or_spanned(&mut $tokens, $span);
+ };
+
+ (? $tokens:ident $span:ident) => {
+ $crate::__private::push_question_spanned(&mut $tokens, $span);
+ };
+
+ (-> $tokens:ident $span:ident) => {
+ $crate::__private::push_rarrow_spanned(&mut $tokens, $span);
+ };
+
+ (<- $tokens:ident $span:ident) => {
+ $crate::__private::push_larrow_spanned(&mut $tokens, $span);
+ };
+
+ (% $tokens:ident $span:ident) => {
+ $crate::__private::push_rem_spanned(&mut $tokens, $span);
+ };
+
+ (%= $tokens:ident $span:ident) => {
+ $crate::__private::push_rem_eq_spanned(&mut $tokens, $span);
+ };
+
+ (=> $tokens:ident $span:ident) => {
+ $crate::__private::push_fat_arrow_spanned(&mut $tokens, $span);
+ };
+
+ (<< $tokens:ident $span:ident) => {
+ $crate::__private::push_shl_spanned(&mut $tokens, $span);
+ };
+
+ (<<= $tokens:ident $span:ident) => {
+ $crate::__private::push_shl_eq_spanned(&mut $tokens, $span);
+ };
+
+ (>> $tokens:ident $span:ident) => {
+ $crate::__private::push_shr_spanned(&mut $tokens, $span);
+ };
+
+ (>>= $tokens:ident $span:ident) => {
+ $crate::__private::push_shr_eq_spanned(&mut $tokens, $span);
+ };
+
+ (* $tokens:ident $span:ident) => {
+ $crate::__private::push_star_spanned(&mut $tokens, $span);
+ };
+
+ (- $tokens:ident $span:ident) => {
+ $crate::__private::push_sub_spanned(&mut $tokens, $span);
+ };
+
+ (-= $tokens:ident $span:ident) => {
+ $crate::__private::push_sub_eq_spanned(&mut $tokens, $span);
+ };
+
+ ($lifetime:lifetime $tokens:ident $span:ident) => {
+        $crate::__private::push_lifetime_spanned(&mut $tokens, $span, stringify!($lifetime));
+ };
+
+ (_ $tokens:ident $span:ident) => {
+ $crate::__private::push_underscore_spanned(&mut $tokens, $span);
+ };
+
+ ($other:tt $tokens:ident $span:ident) => {
+        $crate::__private::parse_spanned(&mut $tokens, $span, stringify!($other));
+ };
+}
diff --git a/rust/hw/char/pl011/vendor/quote/src/runtime.rs b/rust/hw/char/pl011/vendor/quote/src/runtime.rs
new file mode 100644
index 0000000000..eff044a957
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/src/runtime.rs
@@ -0,0 +1,530 @@
+use self::get_span::{GetSpan, GetSpanBase, GetSpanInner};
+use crate::{IdentFragment, ToTokens, TokenStreamExt};
+use core::fmt;
+use core::iter;
+use core::ops::BitOr;
+use proc_macro2::{Group, Ident, Punct, Spacing, TokenTree};
+
+#[doc(hidden)]
+pub use alloc::format;
+#[doc(hidden)]
+pub use core::option::Option;
+
+#[doc(hidden)]
+pub type Delimiter = proc_macro2::Delimiter;
+#[doc(hidden)]
+pub type Span = proc_macro2::Span;
+#[doc(hidden)]
+pub type TokenStream = proc_macro2::TokenStream;
+
+#[doc(hidden)]
+pub struct HasIterator; // True
+#[doc(hidden)]
+pub struct ThereIsNoIteratorInRepetition; // False
+
+impl BitOr<ThereIsNoIteratorInRepetition> for ThereIsNoIteratorInRepetition {
+ type Output = ThereIsNoIteratorInRepetition;
+    fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> ThereIsNoIteratorInRepetition {
+ ThereIsNoIteratorInRepetition
+ }
+}
+
+impl BitOr<ThereIsNoIteratorInRepetition> for HasIterator {
+ type Output = HasIterator;
+ fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> HasIterator {
+ HasIterator
+ }
+}
+
+impl BitOr<HasIterator> for ThereIsNoIteratorInRepetition {
+ type Output = HasIterator;
+ fn bitor(self, _rhs: HasIterator) -> HasIterator {
+ HasIterator
+ }
+}
+
+impl BitOr<HasIterator> for HasIterator {
+ type Output = HasIterator;
+ fn bitor(self, _rhs: HasIterator) -> HasIterator {
+ HasIterator
+ }
+}
+
+/// Extension traits used by the implementation of `quote!`. These are defined
+/// in separate traits, rather than as a single trait due to ambiguity issues.
+///
+/// These traits expose a `quote_into_iter` method which should allow calling
+/// whichever impl happens to be applicable. Calling that method repeatedly on
+/// the returned value should be idempotent.
+#[doc(hidden)]
+pub mod ext {
+ use super::RepInterp;
+    use super::{HasIterator as HasIter, ThereIsNoIteratorInRepetition as DoesNotHaveIter};
+ use crate::ToTokens;
+ use alloc::collections::btree_set::{self, BTreeSet};
+ use core::slice;
+
+ /// Extension trait providing the `quote_into_iter` method on iterators.
+ #[doc(hidden)]
+ pub trait RepIteratorExt: Iterator + Sized {
+ fn quote_into_iter(self) -> (Self, HasIter) {
+ (self, HasIter)
+ }
+ }
+
+ impl<T: Iterator> RepIteratorExt for T {}
+
+ /// Extension trait providing the `quote_into_iter` method for
+ /// non-iterable types. These types interpolate the same value in each
+ /// iteration of the repetition.
+ #[doc(hidden)]
+ pub trait RepToTokensExt {
+ /// Pretend to be an iterator for the purposes of `quote_into_iter`.
+ /// This allows repeated calls to `quote_into_iter` to continue
+ /// correctly returning DoesNotHaveIter.
+ fn next(&self) -> Option<&Self> {
+ Some(self)
+ }
+
+ fn quote_into_iter(&self) -> (&Self, DoesNotHaveIter) {
+ (self, DoesNotHaveIter)
+ }
+ }
+
+ impl<T: ToTokens + ?Sized> RepToTokensExt for T {}
+
+ /// Extension trait providing the `quote_into_iter` method for types that
+ /// can be referenced as an iterator.
+ #[doc(hidden)]
+ pub trait RepAsIteratorExt<'q> {
+ type Iter: Iterator;
+
+ fn quote_into_iter(&'q self) -> (Self::Iter, HasIter);
+ }
+
+    impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a T {
+ type Iter = T::Iter;
+
+ fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
+ <T as RepAsIteratorExt>::quote_into_iter(*self)
+ }
+ }
+
+    impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a mut T {
+ type Iter = T::Iter;
+
+ fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
+ <T as RepAsIteratorExt>::quote_into_iter(*self)
+ }
+ }
+
+ impl<'q, T: 'q> RepAsIteratorExt<'q> for [T] {
+ type Iter = slice::Iter<'q, T>;
+
+ fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
+ (self.iter(), HasIter)
+ }
+ }
+
+ impl<'q, T: 'q> RepAsIteratorExt<'q> for Vec<T> {
+ type Iter = slice::Iter<'q, T>;
+
+ fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
+ (self.iter(), HasIter)
+ }
+ }
+
+ impl<'q, T: 'q> RepAsIteratorExt<'q> for BTreeSet<T> {
+ type Iter = btree_set::Iter<'q, T>;
+
+ fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
+ (self.iter(), HasIter)
+ }
+ }
+
+ impl<'q, T: RepAsIteratorExt<'q>> RepAsIteratorExt<'q> for RepInterp<T> {
+ type Iter = T::Iter;
+
+ fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
+ self.0.quote_into_iter()
+ }
+ }
+}
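
The two marker structs above, their four `BitOr` impls, and the `quote_into_iter` extension traits are how `quote!` verifies at compile time that every `#(...)*` repetition interpolates at least one iterable binding. A minimal sketch of the same marker-plus-`BitOr` technique, with simplified stand-in names (`HasIter`, `NoIter` and `require_iter` are illustrative, not the vendored API):

    use core::ops::BitOr;

    struct HasIter; // at least one interpolated binding is an iterator
    struct NoIter;  // no iterator seen so far

    impl BitOr<NoIter> for NoIter   { type Output = NoIter;  fn bitor(self, _: NoIter)  -> NoIter  { NoIter } }
    impl BitOr<HasIter> for NoIter  { type Output = HasIter; fn bitor(self, _: HasIter) -> HasIter { HasIter } }
    impl BitOr<NoIter> for HasIter  { type Output = HasIter; fn bitor(self, _: NoIter)  -> HasIter { HasIter } }
    impl BitOr<HasIter> for HasIter { type Output = HasIter; fn bitor(self, _: HasIter) -> HasIter { HasIter } }

    // The macro ORs together one marker per binding and requires the result
    // to be `HasIter`, so a repetition with nothing to iterate over becomes a
    // type error instead of an infinite loop (see the ui tests further down).
    fn require_iter(_: HasIter) {}

    fn main() {
        require_iter(NoIter | HasIter | NoIter); // ok: one iterator present
        // require_iter(NoIter | NoIter);        // would fail to compile
    }
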
+
+// Helper type used within interpolations to allow for repeated binding names.
+// Implements the relevant traits, and exports a dummy `next()` method.
+#[derive(Copy, Clone)]
+#[doc(hidden)]
+pub struct RepInterp<T>(pub T);
+
+impl<T> RepInterp<T> {
+    // This method is intended to look like `Iterator::next`, and is called when
+ // a name is bound multiple times, as the previous binding will shadow the
+ // original `Iterator` object. This allows us to avoid advancing the
+ // iterator multiple times per iteration.
+ pub fn next(self) -> Option<T> {
+ Some(self.0)
+ }
+}
+
+impl<T: Iterator> Iterator for RepInterp<T> {
+ type Item = T::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl<T: ToTokens> ToTokens for RepInterp<T> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.0.to_tokens(tokens);
+ }
+}
+
+#[doc(hidden)]
+#[inline]
+pub fn get_span<T>(span: T) -> GetSpan<T> {
+ GetSpan(GetSpanInner(GetSpanBase(span)))
+}
+
+mod get_span {
+ use core::ops::Deref;
+ use proc_macro2::extra::DelimSpan;
+ use proc_macro2::Span;
+
+ pub struct GetSpan<T>(pub(crate) GetSpanInner<T>);
+
+ pub struct GetSpanInner<T>(pub(crate) GetSpanBase<T>);
+
+ pub struct GetSpanBase<T>(pub(crate) T);
+
+ impl GetSpan<Span> {
+ #[inline]
+ pub fn __into_span(self) -> Span {
+ ((self.0).0).0
+ }
+ }
+
+ impl GetSpanInner<DelimSpan> {
+ #[inline]
+ pub fn __into_span(&self) -> Span {
+ (self.0).0.join()
+ }
+ }
+
+ impl<T> GetSpanBase<T> {
+ #[allow(clippy::unused_self)]
+ pub fn __into_span(&self) -> T {
+ unreachable!()
+ }
+ }
+
+ impl<T> Deref for GetSpan<T> {
+ type Target = GetSpanInner<T>;
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+
+ impl<T> Deref for GetSpanInner<T> {
+ type Target = GetSpanBase<T>;
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+}
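
The three nested wrappers rely on auto-deref during method resolution: `__into_span` is found on `GetSpan` only for `Span`, on `GetSpanInner` only for `DelimSpan`, and the generic base impl exists only to produce a type error for unsupported span types, so it is never executed. A reduced sketch of this deref-based dispatch, using made-up types rather than the quote internals:

    use std::ops::Deref;

    struct Outer<T>(Inner<T>);
    struct Inner<T>(T);

    // Most specific: only exists when the wrapped value is a u32.
    impl Outer<u32> {
        fn describe(&self) -> &'static str { "specific impl on Outer<u32>" }
    }

    // Fallback for every other T, reached through Deref.
    impl<T> Inner<T> {
        fn describe(&self) -> &'static str { "generic impl on Inner<T>" }
    }

    impl<T> Deref for Outer<T> {
        type Target = Inner<T>;
        fn deref(&self) -> &Inner<T> { &self.0 }
    }

    fn main() {
        assert_eq!(Outer(Inner(1u32)).describe(), "specific impl on Outer<u32>");
        assert_eq!(Outer(Inner("x")).describe(), "generic impl on Inner<T>");
    }
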
+
+#[doc(hidden)]
+pub fn push_group(tokens: &mut TokenStream, delimiter: Delimiter, inner: TokenStream) {
+ tokens.append(Group::new(delimiter, inner));
+}
+
+#[doc(hidden)]
+pub fn push_group_spanned(
+ tokens: &mut TokenStream,
+ span: Span,
+ delimiter: Delimiter,
+ inner: TokenStream,
+) {
+ let mut g = Group::new(delimiter, inner);
+ g.set_span(span);
+ tokens.append(g);
+}
+
+#[doc(hidden)]
+pub fn parse(tokens: &mut TokenStream, s: &str) {
+ let s: TokenStream = s.parse().expect("invalid token stream");
+ tokens.extend(iter::once(s));
+}
+
+#[doc(hidden)]
+pub fn parse_spanned(tokens: &mut TokenStream, span: Span, s: &str) {
+ let s: TokenStream = s.parse().expect("invalid token stream");
+ tokens.extend(s.into_iter().map(|t| respan_token_tree(t, span)));
+}
+
+// Token tree with every span replaced by the given one.
+fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+ let stream = g
+ .stream()
+ .into_iter()
+ .map(|token| respan_token_tree(token, span))
+ .collect();
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+ other => other.set_span(span),
+ }
+ token
+}
+
+#[doc(hidden)]
+pub fn push_ident(tokens: &mut TokenStream, s: &str) {
+ let span = Span::call_site();
+ push_ident_spanned(tokens, span, s);
+}
+
+#[doc(hidden)]
+pub fn push_ident_spanned(tokens: &mut TokenStream, span: Span, s: &str) {
+ tokens.append(ident_maybe_raw(s, span));
+}
+
+#[doc(hidden)]
+pub fn push_lifetime(tokens: &mut TokenStream, lifetime: &str) {
+ struct Lifetime<'a> {
+ name: &'a str,
+ state: u8,
+ }
+
+ impl<'a> Iterator for Lifetime<'a> {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.state {
+ 0 => {
+ self.state = 1;
+ Some(TokenTree::Punct(Punct::new('\'', Spacing::Joint)))
+ }
+ 1 => {
+ self.state = 2;
+                    Some(TokenTree::Ident(Ident::new(self.name, Span::call_site())))
+ }
+ _ => None,
+ }
+ }
+ }
+
+ tokens.extend(Lifetime {
+ name: &lifetime[1..],
+ state: 0,
+ });
+}
+
+#[doc(hidden)]
+pub fn push_lifetime_spanned(tokens: &mut TokenStream, span: Span, lifetime: &str) {
+ struct Lifetime<'a> {
+ name: &'a str,
+ span: Span,
+ state: u8,
+ }
+
+ impl<'a> Iterator for Lifetime<'a> {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.state {
+ 0 => {
+ self.state = 1;
+ let mut apostrophe = Punct::new('\'', Spacing::Joint);
+ apostrophe.set_span(self.span);
+ Some(TokenTree::Punct(apostrophe))
+ }
+ 1 => {
+ self.state = 2;
+ Some(TokenTree::Ident(Ident::new(self.name, self.span)))
+ }
+ _ => None,
+ }
+ }
+ }
+
+ tokens.extend(Lifetime {
+ name: &lifetime[1..],
+ span,
+ state: 0,
+ });
+}
+
+macro_rules! push_punct {
+ ($name:ident $spanned:ident $char1:tt) => {
+ #[doc(hidden)]
+ pub fn $name(tokens: &mut TokenStream) {
+ tokens.append(Punct::new($char1, Spacing::Alone));
+ }
+ #[doc(hidden)]
+ pub fn $spanned(tokens: &mut TokenStream, span: Span) {
+ let mut punct = Punct::new($char1, Spacing::Alone);
+ punct.set_span(span);
+ tokens.append(punct);
+ }
+ };
+ ($name:ident $spanned:ident $char1:tt $char2:tt) => {
+ #[doc(hidden)]
+ pub fn $name(tokens: &mut TokenStream) {
+ tokens.append(Punct::new($char1, Spacing::Joint));
+ tokens.append(Punct::new($char2, Spacing::Alone));
+ }
+ #[doc(hidden)]
+ pub fn $spanned(tokens: &mut TokenStream, span: Span) {
+ let mut punct = Punct::new($char1, Spacing::Joint);
+ punct.set_span(span);
+ tokens.append(punct);
+ let mut punct = Punct::new($char2, Spacing::Alone);
+ punct.set_span(span);
+ tokens.append(punct);
+ }
+ };
+ ($name:ident $spanned:ident $char1:tt $char2:tt $char3:tt) => {
+ #[doc(hidden)]
+ pub fn $name(tokens: &mut TokenStream) {
+ tokens.append(Punct::new($char1, Spacing::Joint));
+ tokens.append(Punct::new($char2, Spacing::Joint));
+ tokens.append(Punct::new($char3, Spacing::Alone));
+ }
+ #[doc(hidden)]
+ pub fn $spanned(tokens: &mut TokenStream, span: Span) {
+ let mut punct = Punct::new($char1, Spacing::Joint);
+ punct.set_span(span);
+ tokens.append(punct);
+ let mut punct = Punct::new($char2, Spacing::Joint);
+ punct.set_span(span);
+ tokens.append(punct);
+ let mut punct = Punct::new($char3, Spacing::Alone);
+ punct.set_span(span);
+ tokens.append(punct);
+ }
+ };
+}
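
The `Spacing::Joint`/`Spacing::Alone` distinction in this macro is what keeps multi-character operators together when the stream is printed or re-parsed: every character except the last is emitted as `Joint`. A quick check of that behaviour using only the vendored `proc-macro2` and `quote` crates (variable names are just for the example):

    use proc_macro2::{Punct, Spacing, TokenStream};
    use quote::TokenStreamExt;

    fn main() {
        // '=' emitted Joint followed by '>' Alone prints as one fat arrow...
        let mut fat_arrow = TokenStream::new();
        fat_arrow.append(Punct::new('=', Spacing::Joint));
        fat_arrow.append(Punct::new('>', Spacing::Alone));
        assert_eq!(fat_arrow.to_string(), "=>");

        // ...while two Alone puncts remain two separate tokens.
        let mut two_tokens = TokenStream::new();
        two_tokens.append(Punct::new('=', Spacing::Alone));
        two_tokens.append(Punct::new('>', Spacing::Alone));
        assert_eq!(two_tokens.to_string(), "= >");
    }
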
+
+push_punct!(push_add push_add_spanned '+');
+push_punct!(push_add_eq push_add_eq_spanned '+' '=');
+push_punct!(push_and push_and_spanned '&');
+push_punct!(push_and_and push_and_and_spanned '&' '&');
+push_punct!(push_and_eq push_and_eq_spanned '&' '=');
+push_punct!(push_at push_at_spanned '@');
+push_punct!(push_bang push_bang_spanned '!');
+push_punct!(push_caret push_caret_spanned '^');
+push_punct!(push_caret_eq push_caret_eq_spanned '^' '=');
+push_punct!(push_colon push_colon_spanned ':');
+push_punct!(push_colon2 push_colon2_spanned ':' ':');
+push_punct!(push_comma push_comma_spanned ',');
+push_punct!(push_div push_div_spanned '/');
+push_punct!(push_div_eq push_div_eq_spanned '/' '=');
+push_punct!(push_dot push_dot_spanned '.');
+push_punct!(push_dot2 push_dot2_spanned '.' '.');
+push_punct!(push_dot3 push_dot3_spanned '.' '.' '.');
+push_punct!(push_dot_dot_eq push_dot_dot_eq_spanned '.' '.' '=');
+push_punct!(push_eq push_eq_spanned '=');
+push_punct!(push_eq_eq push_eq_eq_spanned '=' '=');
+push_punct!(push_ge push_ge_spanned '>' '=');
+push_punct!(push_gt push_gt_spanned '>');
+push_punct!(push_le push_le_spanned '<' '=');
+push_punct!(push_lt push_lt_spanned '<');
+push_punct!(push_mul_eq push_mul_eq_spanned '*' '=');
+push_punct!(push_ne push_ne_spanned '!' '=');
+push_punct!(push_or push_or_spanned '|');
+push_punct!(push_or_eq push_or_eq_spanned '|' '=');
+push_punct!(push_or_or push_or_or_spanned '|' '|');
+push_punct!(push_pound push_pound_spanned '#');
+push_punct!(push_question push_question_spanned '?');
+push_punct!(push_rarrow push_rarrow_spanned '-' '>');
+push_punct!(push_larrow push_larrow_spanned '<' '-');
+push_punct!(push_rem push_rem_spanned '%');
+push_punct!(push_rem_eq push_rem_eq_spanned '%' '=');
+push_punct!(push_fat_arrow push_fat_arrow_spanned '=' '>');
+push_punct!(push_semi push_semi_spanned ';');
+push_punct!(push_shl push_shl_spanned '<' '<');
+push_punct!(push_shl_eq push_shl_eq_spanned '<' '<' '=');
+push_punct!(push_shr push_shr_spanned '>' '>');
+push_punct!(push_shr_eq push_shr_eq_spanned '>' '>' '=');
+push_punct!(push_star push_star_spanned '*');
+push_punct!(push_sub push_sub_spanned '-');
+push_punct!(push_sub_eq push_sub_eq_spanned '-' '=');
+
+#[doc(hidden)]
+pub fn push_underscore(tokens: &mut TokenStream) {
+ push_underscore_spanned(tokens, Span::call_site());
+}
+
+#[doc(hidden)]
+pub fn push_underscore_spanned(tokens: &mut TokenStream, span: Span) {
+ tokens.append(Ident::new("_", span));
+}
+
+// Helper method for constructing identifiers from the `format_ident!` macro,
+// handling `r#` prefixes.
+#[doc(hidden)]
+pub fn mk_ident(id: &str, span: Option<Span>) -> Ident {
+ let span = span.unwrap_or_else(Span::call_site);
+ ident_maybe_raw(id, span)
+}
+
+fn ident_maybe_raw(id: &str, span: Span) -> Ident {
+ if let Some(id) = id.strip_prefix("r#") {
+ Ident::new_raw(id, span)
+ } else {
+ Ident::new(id, span)
+ }
+}
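
`mk_ident` and `ident_maybe_raw` give `format_ident!` its raw-identifier handling: a leading `r#` is stripped and the identifier is rebuilt with `Ident::new_raw`. A small usage sketch, consistent with the `test_format_ident_strip_raw` test further down in this patch:

    use quote::format_ident;

    fn main() {
        // A raw identifier keeps its `r#` prefix.
        let id = format_ident!("r#struct");
        assert_eq!(id.to_string(), "r#struct");

        // Used as a fragment, the `r#` prefix is dropped before formatting,
        // so the result is an ordinary identifier.
        let longer = format_ident!("My{}", id);
        assert_eq!(longer.to_string(), "Mystruct");
    }
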
+
+// Adapts from `IdentFragment` to `fmt::Display` for use by the `format_ident!`
+// macro, and exposes span information from these fragments.
+//
+// This struct also has forwarding implementations of the formatting traits
+// `Octal`, `LowerHex`, `UpperHex`, and `Binary` to allow for their use within
+// `format_ident!`.
+#[derive(Copy, Clone)]
+#[doc(hidden)]
+pub struct IdentFragmentAdapter<T: IdentFragment>(pub T);
+
+impl<T: IdentFragment> IdentFragmentAdapter<T> {
+ pub fn span(&self) -> Option<Span> {
+ self.0.span()
+ }
+}
+
+impl<T: IdentFragment> fmt::Display for IdentFragmentAdapter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ IdentFragment::fmt(&self.0, f)
+ }
+}
+
+impl<T: IdentFragment + fmt::Octal> fmt::Octal for IdentFragmentAdapter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Octal::fmt(&self.0, f)
+ }
+}
+
+impl<T: IdentFragment + fmt::LowerHex> fmt::LowerHex for IdentFragmentAdapter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::LowerHex::fmt(&self.0, f)
+ }
+}
+
+impl<T: IdentFragment + fmt::UpperHex> fmt::UpperHex for IdentFragmentAdapter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::UpperHex::fmt(&self.0, f)
+ }
+}
+
+impl<T: IdentFragment + fmt::Binary> fmt::Binary for IdentFragmentAdapter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Binary::fmt(&self.0, f)
+ }
+}
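
The forwarding `Octal`, `LowerHex`, `UpperHex` and `Binary` impls exist so that `format_ident!` accepts numeric format specifiers, not only `{}`. A brief illustration (identifier names are arbitrary; assumes the vendored `quote` as the only dependency):

    use quote::format_ident;

    fn main() {
        let reg = format_ident!("REG_{:X}", 63usize);
        assert_eq!(reg.to_string(), "REG_3F");

        let mask = format_ident!("MASK_{:b}", 5usize);
        assert_eq!(mask.to_string(), "MASK_101");
    }
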
diff --git a/rust/hw/char/pl011/vendor/quote/src/spanned.rs b/rust/hw/char/pl011/vendor/quote/src/spanned.rs
new file mode 100644
index 0000000000..6eba64445d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/src/spanned.rs
@@ -0,0 +1,50 @@
+use crate::ToTokens;
+use proc_macro2::extra::DelimSpan;
+use proc_macro2::{Span, TokenStream};
+
+// Not public API other than via the syn crate. Use syn::spanned::Spanned.
+pub trait Spanned: private::Sealed {
+ fn __span(&self) -> Span;
+}
+
+impl Spanned for Span {
+ fn __span(&self) -> Span {
+ *self
+ }
+}
+
+impl Spanned for DelimSpan {
+ fn __span(&self) -> Span {
+ self.join()
+ }
+}
+
+impl<T: ?Sized + ToTokens> Spanned for T {
+ fn __span(&self) -> Span {
+ join_spans(self.into_token_stream())
+ }
+}
+
+fn join_spans(tokens: TokenStream) -> Span {
+ let mut iter = tokens.into_iter().map(|tt| tt.span());
+
+ let first = match iter.next() {
+ Some(span) => span,
+ None => return Span::call_site(),
+ };
+
+ iter.fold(None, |_prev, next| Some(next))
+ .and_then(|last| first.join(last))
+ .unwrap_or(first)
+}
+
+mod private {
+ use crate::ToTokens;
+ use proc_macro2::extra::DelimSpan;
+ use proc_macro2::Span;
+
+ pub trait Sealed {}
+ impl Sealed for Span {}
+ impl Sealed for DelimSpan {}
+ impl<T: ?Sized + ToTokens> Sealed for T {}
+}
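
`join_spans` takes the span of the first token, tries `Span::join` with the span of the last token, and falls back to the first span when joining is unavailable (as on stable compilers). The usual consumer is `syn::spanned::Spanned`; the following is only a sketch of how a derive macro would typically use it to point an error at an offending type (the helper name and message are made up for the example):

    use proc_macro2::TokenStream;
    use quote::quote_spanned;
    use syn::spanned::Spanned;

    // Emit a compile_error! whose span covers the whole field type, via the
    // blanket `impl Spanned for T: ToTokens` defined above.
    fn unsupported_field(ty: &syn::Type) -> TokenStream {
        quote_spanned! {ty.span()=>
            compile_error!("this field type is not supported by this derive");
        }
    }
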
diff --git a/rust/hw/char/pl011/vendor/quote/src/to_tokens.rs b/rust/hw/char/pl011/vendor/quote/src/to_tokens.rs
new file mode 100644
index 0000000000..23b6ec2c08
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/src/to_tokens.rs
@@ -0,0 +1,209 @@
+use super::TokenStreamExt;
+use alloc::borrow::Cow;
+use alloc::rc::Rc;
+use core::iter;
+use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
+
+/// Types that can be interpolated inside a `quote!` invocation.
+///
+/// [`quote!`]: macro.quote.html
+pub trait ToTokens {
+ /// Write `self` to the given `TokenStream`.
+ ///
+ /// The token append methods provided by the [`TokenStreamExt`] extension
+ /// trait may be useful for implementing `ToTokens`.
+ ///
+ /// [`TokenStreamExt`]: trait.TokenStreamExt.html
+ ///
+ /// # Example
+ ///
+ /// Example implementation for a struct representing Rust paths like
+ /// `std::cmp::PartialEq`:
+ ///
+ /// ```
+ /// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
+ /// use quote::{TokenStreamExt, ToTokens};
+ ///
+ /// pub struct Path {
+ /// pub global: bool,
+ /// pub segments: Vec<PathSegment>,
+ /// }
+ ///
+ /// impl ToTokens for Path {
+ /// fn to_tokens(&self, tokens: &mut TokenStream) {
+ /// for (i, segment) in self.segments.iter().enumerate() {
+ /// if i > 0 || self.global {
+ /// // Double colon `::`
+ /// tokens.append(Punct::new(':', Spacing::Joint));
+ /// tokens.append(Punct::new(':', Spacing::Alone));
+ /// }
+ /// segment.to_tokens(tokens);
+ /// }
+ /// }
+ /// }
+ /// #
+ /// # pub struct PathSegment;
+ /// #
+ /// # impl ToTokens for PathSegment {
+ /// # fn to_tokens(&self, tokens: &mut TokenStream) {
+ /// # unimplemented!()
+ /// # }
+ /// # }
+ /// ```
+ fn to_tokens(&self, tokens: &mut TokenStream);
+
+ /// Convert `self` directly into a `TokenStream` object.
+ ///
+ /// This method is implicitly implemented using `to_tokens`, and acts as a
+ /// convenience method for consumers of the `ToTokens` trait.
+ fn to_token_stream(&self) -> TokenStream {
+ let mut tokens = TokenStream::new();
+ self.to_tokens(&mut tokens);
+ tokens
+ }
+
+ /// Convert `self` directly into a `TokenStream` object.
+ ///
+ /// This method is implicitly implemented using `to_tokens`, and acts as a
+ /// convenience method for consumers of the `ToTokens` trait.
+ fn into_token_stream(self) -> TokenStream
+ where
+ Self: Sized,
+ {
+ self.to_token_stream()
+ }
+}
+
+impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens);
+ }
+}
+
+impl<'a, T: ?Sized + ToTokens> ToTokens for &'a mut T {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens);
+ }
+}
+
+impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens);
+ }
+}
+
+impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens);
+ }
+}
+
+impl<T: ?Sized + ToTokens> ToTokens for Rc<T> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens);
+ }
+}
+
+impl<T: ToTokens> ToTokens for Option<T> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if let Some(ref t) = *self {
+ t.to_tokens(tokens);
+ }
+ }
+}
+
+impl ToTokens for str {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(Literal::string(self));
+ }
+}
+
+impl ToTokens for String {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.as_str().to_tokens(tokens);
+ }
+}
+
+macro_rules! primitive {
+ ($($t:ident => $name:ident)*) => {
+ $(
+ impl ToTokens for $t {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(Literal::$name(*self));
+ }
+ }
+ )*
+ };
+}
+
+primitive! {
+ i8 => i8_suffixed
+ i16 => i16_suffixed
+ i32 => i32_suffixed
+ i64 => i64_suffixed
+ i128 => i128_suffixed
+ isize => isize_suffixed
+
+ u8 => u8_suffixed
+ u16 => u16_suffixed
+ u32 => u32_suffixed
+ u64 => u64_suffixed
+ u128 => u128_suffixed
+ usize => usize_suffixed
+
+ f32 => f32_suffixed
+ f64 => f64_suffixed
+}
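
Because these impls use the `*_suffixed` constructors, an interpolated integer always carries its type suffix in the output stream, while a literal written directly inside `quote!` passes through untouched; `test_integer` further down relies on exactly this. A one-line check (the variable is arbitrary):

    use quote::quote;

    fn main() {
        let n = 7u16;
        assert_eq!(quote!(#n 7).to_string(), "7u16 7");
    }
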
+
+impl ToTokens for char {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(Literal::character(*self));
+ }
+}
+
+impl ToTokens for bool {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ let word = if *self { "true" } else { "false" };
+ tokens.append(Ident::new(word, Span::call_site()));
+ }
+}
+
+impl ToTokens for Group {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(self.clone());
+ }
+}
+
+impl ToTokens for Ident {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(self.clone());
+ }
+}
+
+impl ToTokens for Punct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(self.clone());
+ }
+}
+
+impl ToTokens for Literal {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(self.clone());
+ }
+}
+
+impl ToTokens for TokenTree {
+ fn to_tokens(&self, dst: &mut TokenStream) {
+ dst.append(self.clone());
+ }
+}
+
+impl ToTokens for TokenStream {
+ fn to_tokens(&self, dst: &mut TokenStream) {
+ dst.extend(iter::once(self.clone()));
+ }
+
+ fn into_token_stream(self) -> TokenStream {
+ self
+ }
+}
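
One easy-to-miss consequence of the impls in this file: interpolating an `Option` emits nothing at all for `None`, which makes optional pieces of syntax (a visibility, a keyword, an attribute) simple to splice in. A small demonstration using only the vendored crates:

    use proc_macro2::TokenStream;
    use quote::quote;

    fn main() {
        let with_const: Option<TokenStream> = Some(quote!(const));
        let without: Option<TokenStream> = None;

        assert_eq!(quote!(#with_const fn f();).to_string(), "const fn f () ;");
        assert_eq!(quote!(#without fn f();).to_string(), "fn f () ;");
    }
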
diff --git a/rust/hw/char/pl011/vendor/quote/tests/compiletest.rs b/rust/hw/char/pl011/vendor/quote/tests/compiletest.rs
new file mode 100644
index 0000000000..7974a6249e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/compiletest.rs
@@ -0,0 +1,7 @@
+#[rustversion::attr(not(nightly), ignore)]
+#[cfg_attr(miri, ignore)]
+#[test]
+fn ui() {
+ let t = trybuild::TestCases::new();
+ t.compile_fail("tests/ui/*.rs");
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/test.rs b/rust/hw/char/pl011/vendor/quote/tests/test.rs
new file mode 100644
index 0000000000..eab4f55aa8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/test.rs
@@ -0,0 +1,549 @@
+#![allow(
+ clippy::disallowed_names,
+ clippy::let_underscore_untyped,
+ clippy::shadow_unrelated,
+ clippy::unseparated_literal_suffix,
+ clippy::used_underscore_binding
+)]
+
+extern crate proc_macro;
+
+use std::borrow::Cow;
+use std::collections::BTreeSet;
+
+use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream};
+use quote::{format_ident, quote, quote_spanned, TokenStreamExt};
+
+struct X;
+
+impl quote::ToTokens for X {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(Ident::new("X", Span::call_site()));
+ }
+}
+
+#[test]
+fn test_quote_impl() {
+ let tokens = quote! {
+ impl<'a, T: ToTokens> ToTokens for &'a T {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens)
+ }
+ }
+ };
+
+ let expected = concat!(
+ "impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
+ "fn to_tokens (& self , tokens : & mut TokenStream) { ",
+ "(* * self) . to_tokens (tokens) ",
+ "} ",
+ "}"
+ );
+
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_quote_spanned_impl() {
+ let span = Span::call_site();
+ let tokens = quote_spanned! {span=>
+ impl<'a, T: ToTokens> ToTokens for &'a T {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens)
+ }
+ }
+ };
+
+ let expected = concat!(
+ "impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
+ "fn to_tokens (& self , tokens : & mut TokenStream) { ",
+ "(* * self) . to_tokens (tokens) ",
+ "} ",
+ "}"
+ );
+
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_substitution() {
+ let x = X;
+ let tokens = quote!(#x <#x> (#x) [#x] {#x});
+
+ let expected = "X < X > (X) [X] { X }";
+
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_iter() {
+ let primes = &[X, X, X, X];
+
+ assert_eq!("X X X X", quote!(#(#primes)*).to_string());
+
+ assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).to_string());
+
+ assert_eq!("X , X , X , X", quote!(#(#primes),*).to_string());
+}
+
+#[test]
+fn test_array() {
+ let array: [u8; 40] = [0; 40];
+ let _ = quote!(#(#array #array)*);
+
+ let ref_array: &[u8; 40] = &[0; 40];
+ let _ = quote!(#(#ref_array #ref_array)*);
+
+ let ref_slice: &[u8] = &[0; 40];
+ let _ = quote!(#(#ref_slice #ref_slice)*);
+
+ let array: [X; 2] = [X, X]; // !Copy
+ let _ = quote!(#(#array #array)*);
+
+ let ref_array: &[X; 2] = &[X, X];
+ let _ = quote!(#(#ref_array #ref_array)*);
+
+ let ref_slice: &[X] = &[X, X];
+ let _ = quote!(#(#ref_slice #ref_slice)*);
+}
+
+#[test]
+fn test_advanced() {
+ let generics = quote!( <'a, T> );
+
+ let where_clause = quote!( where T: Serialize );
+
+ let field_ty = quote!(String);
+
+ let item_ty = quote!(Cow<'a, str>);
+
+ let path = quote!(SomeTrait::serialize_with);
+
+ let value = quote!(self.x);
+
+ let tokens = quote! {
+ struct SerializeWith #generics #where_clause {
+ value: &'a #field_ty,
+ phantom: ::std::marker::PhantomData<#item_ty>,
+ }
+
+        impl #generics ::serde::Serialize for SerializeWith #generics #where_clause {
+ fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
+ where S: ::serde::Serializer
+ {
+ #path(self.value, s)
+ }
+ }
+
+ SerializeWith {
+ value: #value,
+ phantom: ::std::marker::PhantomData::<#item_ty>,
+ }
+ };
+
+ let expected = concat!(
+ "struct SerializeWith < 'a , T > where T : Serialize { ",
+ "value : & 'a String , ",
+ "phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ",
+ "} ",
+        "impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ",
+        "fn serialize < S > (& self , s : & mut S) -> Result < () , S :: Error > ",
+ "where S : :: serde :: Serializer ",
+ "{ ",
+ "SomeTrait :: serialize_with (self . value , s) ",
+ "} ",
+ "} ",
+ "SerializeWith { ",
+ "value : self . x , ",
+ "phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ",
+ "}"
+ );
+
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_integer() {
+ let ii8 = -1i8;
+ let ii16 = -1i16;
+ let ii32 = -1i32;
+ let ii64 = -1i64;
+ let ii128 = -1i128;
+ let iisize = -1isize;
+ let uu8 = 1u8;
+ let uu16 = 1u16;
+ let uu32 = 1u32;
+ let uu64 = 1u64;
+ let uu128 = 1u128;
+ let uusize = 1usize;
+
+ let tokens = quote! {
+ 1 1i32 1u256
+ #ii8 #ii16 #ii32 #ii64 #ii128 #iisize
+ #uu8 #uu16 #uu32 #uu64 #uu128 #uusize
+ };
+ let expected =
+        "1 1i32 1u256 - 1i8 - 1i16 - 1i32 - 1i64 - 1i128 - 1isize 1u8 1u16 1u32 1u64 1u128 1usize";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_floating() {
+ let e32 = 2.345f32;
+
+ let e64 = 2.345f64;
+
+ let tokens = quote! {
+ #e32
+ #e64
+ };
+ let expected = concat!("2.345f32 2.345f64");
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_char() {
+ let zero = '\u{1}';
+ let pound = '#';
+ let quote = '"';
+ let apost = '\'';
+ let newline = '\n';
+ let heart = '\u{2764}';
+
+ let tokens = quote! {
+ #zero #pound #quote #apost #newline #heart
+ };
+ let expected = "'\\u{1}' '#' '\"' '\\'' '\\n' '\u{2764}'";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_str() {
+ let s = "\u{1} a 'b \" c";
+ let tokens = quote!(#s);
+ let expected = "\"\\u{1} a 'b \\\" c\"";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_string() {
+ let s = "\u{1} a 'b \" c".to_string();
+ let tokens = quote!(#s);
+ let expected = "\"\\u{1} a 'b \\\" c\"";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_interpolated_literal() {
+ macro_rules! m {
+ ($literal:literal) => {
+ quote!($literal)
+ };
+ }
+
+ let tokens = m!(1);
+ let expected = "1";
+ assert_eq!(expected, tokens.to_string());
+
+ let tokens = m!(-1);
+ let expected = "- 1";
+ assert_eq!(expected, tokens.to_string());
+
+ let tokens = m!(true);
+ let expected = "true";
+ assert_eq!(expected, tokens.to_string());
+
+ let tokens = m!(-true);
+ let expected = "- true";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_ident() {
+ let foo = Ident::new("Foo", Span::call_site());
+ let bar = Ident::new(&format!("Bar{}", 7), Span::call_site());
+ let tokens = quote!(struct #foo; enum #bar {});
+ let expected = "struct Foo ; enum Bar7 { }";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_underscore() {
+ let tokens = quote!(let _;);
+ let expected = "let _ ;";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_duplicate() {
+ let ch = 'x';
+
+ let tokens = quote!(#ch #ch);
+
+ let expected = "'x' 'x'";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_fancy_repetition() {
+ let foo = vec!["a", "b"];
+ let bar = vec![true, false];
+
+ let tokens = quote! {
+ #(#foo: #bar),*
+ };
+
+ let expected = r#""a" : true , "b" : false"#;
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_nested_fancy_repetition() {
+ let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']];
+
+ let tokens = quote! {
+ #(
+ #(#nested)*
+ ),*
+ };
+
+ let expected = "'a' 'b' 'c' , 'x' 'y' 'z'";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_duplicate_name_repetition() {
+ let foo = &["a", "b"];
+
+ let tokens = quote! {
+ #(#foo: #foo),*
+ #(#foo: #foo),*
+ };
+
+ let expected = r#""a" : "a" , "b" : "b" "a" : "a" , "b" : "b""#;
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_duplicate_name_repetition_no_copy() {
+ let foo = vec!["a".to_owned(), "b".to_owned()];
+
+ let tokens = quote! {
+ #(#foo: #foo),*
+ };
+
+ let expected = r#""a" : "a" , "b" : "b""#;
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_btreeset_repetition() {
+ let mut set = BTreeSet::new();
+ set.insert("a".to_owned());
+ set.insert("b".to_owned());
+
+ let tokens = quote! {
+ #(#set: #set),*
+ };
+
+ let expected = r#""a" : "a" , "b" : "b""#;
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_variable_name_conflict() {
+ // The implementation of `#(...),*` uses the variable `_i` but it should be
+ // fine, if a little confusing when debugging.
+ let _i = vec!['a', 'b'];
+ let tokens = quote! { #(#_i),* };
+ let expected = "'a' , 'b'";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_nonrep_in_repetition() {
+ let rep = vec!["a", "b"];
+ let nonrep = "c";
+
+ let tokens = quote! {
+ #(#rep #rep : #nonrep #nonrep),*
+ };
+
+ let expected = r#""a" "a" : "c" "c" , "b" "b" : "c" "c""#;
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_empty_quote() {
+ let tokens = quote!();
+ assert_eq!("", tokens.to_string());
+}
+
+#[test]
+fn test_box_str() {
+ let b = "str".to_owned().into_boxed_str();
+ let tokens = quote! { #b };
+ assert_eq!("\"str\"", tokens.to_string());
+}
+
+#[test]
+fn test_cow() {
+ let owned: Cow<Ident> = Cow::Owned(Ident::new("owned", Span::call_site()));
+
+ let ident = Ident::new("borrowed", Span::call_site());
+ let borrowed = Cow::Borrowed(&ident);
+
+ let tokens = quote! { #owned #borrowed };
+ assert_eq!("owned borrowed", tokens.to_string());
+}
+
+#[test]
+fn test_closure() {
+ fn field_i(i: usize) -> Ident {
+ format_ident!("__field{}", i)
+ }
+
+ let fields = (0usize..3)
+ .map(field_i as fn(_) -> _)
+ .map(|var| quote! { #var });
+
+ let tokens = quote! { #(#fields)* };
+ assert_eq!("__field0 __field1 __field2", tokens.to_string());
+}
+
+#[test]
+fn test_append_tokens() {
+ let mut a = quote!(a);
+ let b = quote!(b);
+ a.append_all(b);
+ assert_eq!("a b", a.to_string());
+}
+
+#[test]
+fn test_format_ident() {
+ let id0 = format_ident!("Aa");
+ let id1 = format_ident!("Hello{x}", x = id0);
+ let id2 = format_ident!("Hello{x}", x = 5usize);
+ let id3 = format_ident!("Hello{}_{x}", id0, x = 10usize);
+ let id4 = format_ident!("Aa", span = Span::call_site());
+ let id5 = format_ident!("Hello{}", Cow::Borrowed("World"));
+
+ assert_eq!(id0, "Aa");
+ assert_eq!(id1, "HelloAa");
+ assert_eq!(id2, "Hello5");
+ assert_eq!(id3, "HelloAa_10");
+ assert_eq!(id4, "Aa");
+ assert_eq!(id5, "HelloWorld");
+}
+
+#[test]
+fn test_format_ident_strip_raw() {
+ let id = format_ident!("r#struct");
+ let my_id = format_ident!("MyId{}", id);
+ let raw_my_id = format_ident!("r#MyId{}", id);
+
+ assert_eq!(id, "r#struct");
+ assert_eq!(my_id, "MyIdstruct");
+ assert_eq!(raw_my_id, "r#MyIdstruct");
+}
+
+#[test]
+fn test_outer_line_comment() {
+ let tokens = quote! {
+ /// doc
+ };
+ let expected = "# [doc = r\" doc\"]";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_inner_line_comment() {
+ let tokens = quote! {
+ //! doc
+ };
+ let expected = "# ! [doc = r\" doc\"]";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_outer_block_comment() {
+ let tokens = quote! {
+ /** doc */
+ };
+ let expected = "# [doc = r\" doc \"]";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_inner_block_comment() {
+ let tokens = quote! {
+ /*! doc */
+ };
+ let expected = "# ! [doc = r\" doc \"]";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_outer_attr() {
+ let tokens = quote! {
+ #[inline]
+ };
+ let expected = "# [inline]";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_inner_attr() {
+ let tokens = quote! {
+ #![no_std]
+ };
+ let expected = "# ! [no_std]";
+ assert_eq!(expected, tokens.to_string());
+}
+
+// https://github.com/dtolnay/quote/issues/130
+#[test]
+fn test_star_after_repetition() {
+ let c = vec!['0', '1'];
+ let tokens = quote! {
+ #(
+ f(#c);
+ )*
+ *out = None;
+ };
+ let expected = "f ('0') ; f ('1') ; * out = None ;";
+ assert_eq!(expected, tokens.to_string());
+}
+
+#[test]
+fn test_quote_raw_id() {
+ let id = quote!(r#raw_id);
+ assert_eq!(id.to_string(), "r#raw_id");
+}
+
+#[test]
+fn test_type_inference_for_span() {
+ trait CallSite {
+ fn get() -> Self;
+ }
+
+ impl CallSite for Span {
+ fn get() -> Self {
+ Span::call_site()
+ }
+ }
+
+ let span = Span::call_site();
+ let _ = quote_spanned!(span=> ...);
+
+    let delim_span = Group::new(Delimiter::Parenthesis, TokenStream::new()).delim_span();
+ let _ = quote_spanned!(delim_span=> ...);
+
+ let inferred = CallSite::get();
+ let _ = quote_spanned!(inferred=> ...);
+
+ if false {
+ let proc_macro_span = proc_macro::Span::call_site();
+ let _ = quote_spanned!(proc_macro_span.into()=> ...);
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs
new file mode 100644
index 0000000000..0a39f41507
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs
@@ -0,0 +1,9 @@
+use quote::quote;
+
+fn main() {
+ let nonrep = "";
+
+ // Without some protection against repetitions with no iterator somewhere
+ // inside, this would loop infinitely.
+ quote!(#(#nonrep #nonrep)*);
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr
new file mode 100644
index 0000000000..99c20a5676
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr
@@ -0,0 +1,11 @@
+error[E0308]: mismatched types
+ --> tests/ui/does-not-have-iter-interpolated-dup.rs:8:5
+ |
+8 | quote!(#(#nonrep #nonrep)*);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
+ | expected due to this
+ | here the type of `has_iter` is inferred to be `ThereIsNoIteratorInRepetition`
+ |
+ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.rs b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.rs
new file mode 100644
index 0000000000..2c740cc083
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.rs
@@ -0,0 +1,9 @@
+use quote::quote;
+
+fn main() {
+ let nonrep = "";
+
+ // Without some protection against repetitions with no iterator somewhere
+ // inside, this would loop infinitely.
+ quote!(#(#nonrep)*);
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr
new file mode 100644
index 0000000000..ef908131ba
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr
@@ -0,0 +1,11 @@
+error[E0308]: mismatched types
+ --> tests/ui/does-not-have-iter-interpolated.rs:8:5
+ |
+8 | quote!(#(#nonrep)*);
+ | ^^^^^^^^^^^^^^^^^^^
+ | |
+ | expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
+ | expected due to this
+ | here the type of `has_iter` is inferred to be `ThereIsNoIteratorInRepetition`
+ |
+ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.rs b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.rs
new file mode 100644
index 0000000000..c027243dda
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.rs
@@ -0,0 +1,5 @@
+use quote::quote;
+
+fn main() {
+ quote!(#(a b),*);
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.stderr b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.stderr
new file mode 100644
index 0000000000..7c6e30f2b8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter-separated.stderr
@@ -0,0 +1,10 @@
+error[E0308]: mismatched types
+ --> tests/ui/does-not-have-iter-separated.rs:4:5
+ |
+4 | quote!(#(a b),*);
+ | ^^^^^^^^^^^^^^^^
+ | |
+ | expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
+ | expected due to this
+ |
+ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.rs b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.rs
new file mode 100644
index 0000000000..8908353b57
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.rs
@@ -0,0 +1,5 @@
+use quote::quote;
+
+fn main() {
+ quote!(#(a b)*);
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.stderr b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.stderr
new file mode 100644
index 0000000000..0b13e5cb78
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/does-not-have-iter.stderr
@@ -0,0 +1,10 @@
+error[E0308]: mismatched types
+ --> tests/ui/does-not-have-iter.rs:4:5
+ |
+4 | quote!(#(a b)*);
+ | ^^^^^^^^^^^^^^^
+ | |
+ | expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
+ | expected due to this
+ |
+ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.rs b/rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.rs
new file mode 100644
index 0000000000..f991c1883d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.rs
@@ -0,0 +1,7 @@
+use quote::quote;
+use std::net::Ipv4Addr;
+
+fn main() {
+ let ip = Ipv4Addr::LOCALHOST;
+ let _ = quote! { #ip };
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.stderr b/rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.stderr
new file mode 100644
index 0000000000..7bd20707e7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/not-quotable.stderr
@@ -0,0 +1,20 @@
+error[E0277]: the trait bound `Ipv4Addr: ToTokens` is not satisfied
+ --> tests/ui/not-quotable.rs:6:13
+ |
+6 | let _ = quote! { #ip };
+ | ^^^^^^^^^^^^^^
+ | |
+ | the trait `ToTokens` is not implemented for `Ipv4Addr`
+ | required by a bound introduced by this call
+ |
+ = help: the following other types implement trait `ToTokens`:
+ &'a T
+ &'a mut T
+ Box<T>
+ Cow<'a, T>
+ Option<T>
+ Rc<T>
+ RepInterp<T>
+ String
+ and $N others
+ = note: this error originates in the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.rs b/rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.rs
new file mode 100644
index 0000000000..a8f0fe773c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.rs
@@ -0,0 +1,8 @@
+use quote::quote;
+
+struct Ipv4Addr;
+
+fn main() {
+ let ip = Ipv4Addr;
+ let _ = quote! { #(#ip)* };
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.stderr b/rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.stderr
new file mode 100644
index 0000000000..26932bbf67
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/not-repeatable.stderr
@@ -0,0 +1,34 @@
+error[E0599]: the method `quote_into_iter` exists for struct `Ipv4Addr`, but its trait bounds were not satisfied
+ --> tests/ui/not-repeatable.rs:7:13
+ |
+3 | struct Ipv4Addr;
+ | --------------- method `quote_into_iter` not found for this struct because it doesn't satisfy `Ipv4Addr: Iterator`, `Ipv4Addr: ToTokens`, `Ipv4Addr: ext::RepIteratorExt` or `Ipv4Addr: ext::RepToTokensExt`
+...
+7 | let _ = quote! { #(#ip)* };
+ | ^^^^^^^^^^^^^^^^^^ method cannot be called on `Ipv4Addr` due to unsatisfied trait bounds
+ |
+ = note: the following trait bounds were not satisfied:
+ `Ipv4Addr: Iterator`
+ which is required by `Ipv4Addr: ext::RepIteratorExt`
+ `&Ipv4Addr: Iterator`
+ which is required by `&Ipv4Addr: ext::RepIteratorExt`
+ `Ipv4Addr: ToTokens`
+ which is required by `Ipv4Addr: ext::RepToTokensExt`
+ `&mut Ipv4Addr: Iterator`
+ which is required by `&mut Ipv4Addr: ext::RepIteratorExt`
+note: the traits `Iterator` and `ToTokens` must be implemented
+ --> src/to_tokens.rs
+ |
+ | pub trait ToTokens {
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ ::: $RUST/core/src/iter/traits/iterator.rs
+ |
+ | pub trait Iterator {
+ | ^^^^^^^^^^^^^^^^^^
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following traits define an item `quote_into_iter`, perhaps you need to implement one of them:
+ candidate #1: `ext::RepAsIteratorExt`
+ candidate #2: `ext::RepIteratorExt`
+ candidate #3: `ext::RepToTokensExt`
+ = note: this error originates in the macro `$crate::quote_bind_into_iter` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.rs b/rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.rs
new file mode 100644
index 0000000000..d5601c8a06
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.rs
@@ -0,0 +1,7 @@
+use quote::quote_spanned;
+
+fn main() {
+ let span = "";
+ let x = 0i32;
+ quote_spanned!(span=> #x);
+}
diff --git a/rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.stderr b/rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.stderr
new file mode 100644
index 0000000000..12ad307703
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/quote/tests/ui/wrong-type-span.stderr
@@ -0,0 +1,10 @@
+error[E0308]: mismatched types
+ --> tests/ui/wrong-type-span.rs:6:5
+ |
+6 | quote_spanned!(span=> #x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | expected `Span`, found `&str`
+ | expected due to this
+ |
+ = note: this error originates in the macro `quote_spanned` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/rust/hw/char/pl011/vendor/syn/.cargo-checksum.json b/rust/hw/char/pl011/vendor/syn/.cargo-checksum.json
new file mode 100644
index 0000000000..abc9f4cabd
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"9ed91391aa48c48ccc7b52a0f196ee2e5d3b4905cc390d682dc3517957d2ff40","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"684c75ce029ed7c4ad57cb12fcecfee86d521699c3332c6c4c57af98f6e3b276","benches/file.rs":"0a0527c78d849148cbb6118b4d36f72da7d4add865ba1a410e0a1be9e8dbfe0e","benches/rust.rs":"77342ecd278686080c6390e84d70ecb4a31cc696d45dc4b8ddcc5ca65a25bcfb","src/attr.rs":"bd959c93f997d5d77ec08a5f580d1d38a391978b916b5bbe82bad4c03e694563","src/bigint.rs":"0299829b2f7a1a798fe2f7bc1680e4a10f9b6f4a852d09af4da2deab466c4242","src/buffer.rs":"8f05a11b2c6fcfca3c52a70695de982b0afba7d41d4c0d4ca4192589179ca626","src/classify.rs":"192835587f4585d84abdcc76ee00892f2dd39da6c16911aca073e24b44bddc35","src/custom_keyword.rs":"322114e36ae43a2f8605506fb4568efdbc2986853e2fee74bd10a4ca0fb60c69","src/custom_punctuation.rs":"2ae2339c29b1aff3ab16157d51a3a07bfca594aa38586981534fe07a62cdd9d1","src/data.rs":"7ad7f3c55d9f0b2ff223f3f30602ffa82013c0e7366006b0a9a01193de753db0","src/derive.rs":"f54f8cf9386a2d45186ff3c86ade5dae59e0e337b0198532449190ae8520cff8","src/discouraged.rs":"c246e96f7b25cc0af11944766e1ce176be8c396447bb696769b73c4d9bb5cfcf","src/drops.rs":"013385f1dd95663f1afab41abc1e2eea04181998644828935ca564c74d6462ae","src/error.rs":"3b03fd75eee8b0bb646eaf20f7e287345bdc7515ad5286024a2dd1e53c1e7bf2","src/export.rs":"b260cc49da1da3489e7755832bc8015cfad79e84f6c74e237f65ae25a2385e56","src/expr.rs":"35cc7d3ba13bad9355756d3903d8156c8fec8e203ff016e5ff0a8acb252b9dd5","src/ext.rs":"ed143b029af286e62ceb4310286a4ce894792dd588465face042b4199b39d329","src/file.rs":"39d4ed9c56a7dc0d83259843732c434cd187248a4cde3dba4a98c3b92df6d08f","src/fixup.rs":"ec3f84bb2e09a00d460399ee009c582ce468d294c7a7c930f7677571b5bc985e","src/gen/clone.rs":"36491f5f9e9cad6c4eb354b3331ec2b672607bb26429eba6379d1e9a4919170f","src/gen/debug.rs":"c9b2547663ed9025ba614fb1a70810df1b25f471ebb57abb01de5ab8e4fa8bf0","src/gen/eq.rs":"b5fffca0c3b6c31b3fcc80a7bd8fec65baed982a4e2fb4c8862db6059ab7dea1","src/gen/fold.rs":"345e6a6d9a7d2d09e09cd5857fc903af4202df42f0759a3da118556e98829fd2","src/gen/hash.rs":"447e840245178d0777b4e143b54c356b88962456e80282dcaad1763093709c13","src/gen/visit.rs":"178a6841d7d1974bff8c8c2f9a18e9b77384956841861a50828252bcaef67c18","src/gen/visit_mut.rs":"2a8f9a1c0259060f3fa1d6cab8a1924c1b07d713561aa9562cde8e79a39e66d5","src/generics.rs":"53ba17d13f89d316387c7debb302f703f99d5038921a7272a6d25e6f8bec42ec","src/group.rs":"911dd046f5a043d1c984b1b0b893addd152a9065457908fa4515722e96663d79","src/ident.rs":"d6061030fadae9c7dc847e1ee46178d9657d782aad108c7197e8cafe765b3eaa","src/item.rs":"81e2f57843cf2f8df82e314da940a6dda9bfa54e0641dbabe3ca2a7a68c9c5a8","src/lib.rs":"3a3e7d3d191d4411c9cf98e31a5efb562bab63facba27d12f54fc333a1ecd158","src/lifetime.rs":"5787d5a5dc7e5332b03283a25ae0a9e826464242ca2d149b1a19e7cae9cee34d","src/lit.rs":"205bb729cdf82ce0ec727304878132da0290b3042d41f4ccf0dbb7250fe91c51","src/lookahead.rs":"289dbd9048a74d75e5c3fa66045e72805ff8a2d3cf7aab685b6d7e136faba248","src/mac.rs":"cd85132ab4d302333f771be7a9b40f9281781ae9bcaee0607e0a25547352baaa","src/macros.rs":"e0587f60d510fd0079c60521f6898b61da5857664bd8b450154f83c85c4403c5","src/meta.rs":"969d8ccbdbc6ea2e4928a21831b791c57447b231e1373149e4c63b46f3951801","src/op.rs":"a61757370f802e44efa3c4a1057ae2cd26e64e273f7d76c06d5ffb49602319e2","src/parse.rs":"2b5d032212425a220e910ca5ebe3eb84ee9e2892e25d4b5a34ae7e9384e5ff2a","src/parse_macro_input.rs":"e4e22b63d0496d
06a4ca17742a22467ed93f08a739081324773828bad63175ee","src/parse_quote.rs":"50bfd45f176d10562cb5f4e53af9034b8e1506081bf6cb5f507ce42d24d81d7d","src/pat.rs":"e552911a1712508c672eca35abdf9f81bad3a960f21522eddbc411a6a7070445","src/path.rs":"d77045e5dad382056d67fe31a42bc45b6a02ce044c43287bd38a95e32fd6aead","src/precedence.rs":"abd13523c4e72c555d68e734d11b779ba16e33a214cf00bf9a993d3abff34638","src/print.rs":"22910bf0521ab868ebd7c62601c55912d12cfb400c65723e08e5cfa3a2d111c0","src/punctuated.rs":"19f762231c8ea46b1610fc2a293329c85f0ce82f1819f1607e71634634d43212","src/restriction.rs":"eabb012634ef67aa3c3849c905ab549189461df7fefde2a4b90161c8685f31b2","src/sealed.rs":"6ece3b3dcb30f6bb98b93d83759ca7712ee8592bef9c0511141039c38765db0e","src/span.rs":"0a48e375e5c9768f6f64174a91ba6a255f4b021e2fb3548d8494e617f142601b","src/spanned.rs":"4b9bd65f60ab81922adfd0be8f03b6d50e98da3a5f525f242f9639aec4beac79","src/stmt.rs":"bb4cd196ce23c3fc07fefa47e67a0cd815db4f02ce1192625379d60bd657ffd2","src/thread.rs":"1f1deb1272525ab2af9a36aac4bce8f65b0e315adb1656641fd7075662f49222","src/token.rs":"25df9f6a305c1be58eb4f2454b6ab35c6bef703bf4954fcfed2108b27723cb16","src/tt.rs":"a58303a95d08d6bf3f3e09715b9b70a57b91b54774cfc1f00f2848034d2ff5c7","src/ty.rs":"90af4ce1911c91bdfd9ae431def641640daeb0c788c39a2ef024926485e7b2b2","src/verbatim.rs":"87cbe82a90f48efb57ffd09141042698b3e011a21d0d5412154d80324b0a5ef0","src/whitespace.rs":"9cdcbfe9045b259046329a795bc1105ab5a871471a6d3f7318d275ee53f7a825","tests/common/eq.rs":"1a754d31cd6acd15cd17d7cc8e6afe918f2a11334fe6fc46c92ab887a470d838","tests/common/mod.rs":"64fb893bc0e7148395fd9ce1f67432b3d8406be29cbd664e2b73585da5ee5719","tests/common/parse.rs":"fff650bb98a9382beefbd22d2a89c0c8f90501dd6d58abc4d12b29cb4f647dc7","tests/debug/gen.rs":"3ca161a049fe72ff73ead99fbfe78335fdb2ac7c41085fe8cd0c9a0b29995151","tests/debug/mod.rs":"b56136586267ae1812a937b69215dd053ada2c21717771d89dcd3ce52bcb27f5","tests/macros/mod.rs":"64b0da858096e7cf0f772e66bc1787a867e45897d7677de580c0a1f35c0f6852","tests/regression.rs":"e9565ea0efecb4136f099164ffcfa26e1996b0a27fb9c6659e90ad9bdd42e7b6","tests/regression/issue1108.rs":"f32db35244a674e22ff824ca9e5bbec2184e287b59f022db68c418b5878a2edc","tests/regression/issue1235.rs":"a2266b10c3f7c7af5734817ab0a3e8b309b51e7d177b63f26e67e6b744d280b0","tests/repo/mod.rs":"a463bc4786fa211005ef93bf2257d89c8ccd0be621275d9689396a53cb9bf425","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"8982f6bc4e36510f924e288247473403e72697389ce9dda4e4b5ab0a8e49259f","tests/test_attribute.rs":"b35550a43bbd187bb330997ba36f90c65d8fc489135b1d32ef4547f145cb7612","tests/test_derive_input.rs":"99c4e6e45e3322ea9e269b309059c8a00fda1dcc03aed41f6e7d8c7e0a72fa2b","tests/test_expr.rs":"59843a1534d5a84fd302a815523eef9d5177f7323b8be48e75f2d9d970950751","tests/test_generics.rs":"2fcc8575d695b568f3724b3b33d853b8fa6d9864eb816b5e3ca82420682e6155","tests/test_grouping.rs":"1bd63c8ca0b90bd493fb3f927079ab9ddf74d2a78da82db2f638e652d22305d5","tests/test_ident.rs":"d5850e817720e774cd397a46dbc5298c57933823c18e20805e84503fc9387e8f","tests/test_item.rs":"1b8412a5581adf93eaa215785a592f139af8511c954dee283d52dff2718a6cc2","tests/test_iterators.rs":"f4dacb5f3a8e0473dfb0d27f05270d41e79eddb4759b1fad3e88e379b4731e17","tests/test_lit.rs":"01b0acfe03cff16e7c1a45ceb7f4b637e5cbc6145840886ba981b7ed8e83691c","tests/test_meta.rs":"4ae570333f849ed8edec5dd957111a2deb721ede360f1e1ffeeab75380578ad4","tests/test_parse_buffer.rs":"0de6af13ba0345986b18d495063f9b75a1018e8569c34b277f9522c63a
6c0941","tests/test_parse_quote.rs":"928176da6ebb449ef01a798f3352c9b181d3077c1266eb008df73876f4013c47","tests/test_parse_stream.rs":"b6b533432173123d6d01d8d2cb33714bc50b30b16ffbb6116f93937221ad4594","tests/test_pat.rs":"f6954a50e62a97ac2bc1ba0cb7a5a1fc53b7b01fb55ffe0176bee3fe1955d460","tests/test_path.rs":"d54350aa91508f8d301f5be3e3a34e03b0615b1a04e8fbbab9840da20161838b","tests/test_precedence.rs":"62484c9a04778b506c183b06cae5f0c460a581e3c3b6baf4ff2cff0827698c3f","tests/test_receiver.rs":"af64117acd66fbf42edc476f731ecd20c88009d9cb641dbd7a1d6384ae99ae73","tests/test_round_trip.rs":"c9aae3a76ee801b9fb7ce2f2732aa9e1bf1b8f43f317ec1bfd0f8e5765c4e39c","tests/test_shebang.rs":"98e8a6690c04e0aad2893b747593620b51836fe704f50f5c6fe352609837138a","tests/test_size.rs":"57c83ebf1a4d4fb910b4db16566c611b08428271da30a278fab749b2f2177459","tests/test_stmt.rs":"bbc305ea888254798b6faf285187d8bc7a955e4402d9a497d4b9d361e0436691","tests/test_token_trees.rs":"d012da9c3c861073711b006bf6ffdc073821fb9fb0a08733628cdae57124d1f5","tests/test_ty.rs":"49fbb880891d4c2e21350e35b914d92aa9a056fbaad9c4afa5242802848fe9c4","tests/test_visibility.rs":"7bd239aef6f6d8173462dbd869064f3fdb9ba71644ac1f62c5d2fbb2568fb986","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/syn/Cargo.toml b/rust/hw/char/pl011/vendor/syn/Cargo.toml
new file mode 100644
index 0000000000..1d42e2745f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/Cargo.toml
@@ -0,0 +1,260 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.60"
+name = "syn"
+version = "2.0.66"
+authors = ["David Tolnay <dtolnay@gmail.com>"]
+build = false
+include = [
+ "/benches/**",
+ "/Cargo.toml",
+ "/LICENSE-APACHE",
+ "/LICENSE-MIT",
+ "/README.md",
+ "/src/**",
+ "/tests/**",
+]
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "Parser for Rust source code"
+documentation = "https://docs.rs/syn"
+readme = "README.md"
+keywords = [
+ "macros",
+ "syn",
+]
+categories = [
+ "development-tools::procedural-macro-helpers",
+ "parser-implementations",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/syn"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--generate-link-to-definition"]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[package.metadata.playground]
+features = [
+ "full",
+ "visit",
+ "visit-mut",
+ "fold",
+ "extra-traits",
+]
+
+[lib]
+name = "syn"
+path = "src/lib.rs"
+doc-scrape-examples = false
+
+[[test]]
+name = "test_meta"
+path = "tests/test_meta.rs"
+
+[[test]]
+name = "test_stmt"
+path = "tests/test_stmt.rs"
+
+[[test]]
+name = "test_receiver"
+path = "tests/test_receiver.rs"
+
+[[test]]
+name = "regression"
+path = "tests/regression.rs"
+
+[[test]]
+name = "test_generics"
+path = "tests/test_generics.rs"
+
+[[test]]
+name = "test_grouping"
+path = "tests/test_grouping.rs"
+
+[[test]]
+name = "test_parse_stream"
+path = "tests/test_parse_stream.rs"
+
+[[test]]
+name = "test_round_trip"
+path = "tests/test_round_trip.rs"
+
+[[test]]
+name = "test_derive_input"
+path = "tests/test_derive_input.rs"
+
+[[test]]
+name = "test_visibility"
+path = "tests/test_visibility.rs"
+
+[[test]]
+name = "test_pat"
+path = "tests/test_pat.rs"
+
+[[test]]
+name = "zzz_stable"
+path = "tests/zzz_stable.rs"
+
+[[test]]
+name = "test_item"
+path = "tests/test_item.rs"
+
+[[test]]
+name = "test_iterators"
+path = "tests/test_iterators.rs"
+
+[[test]]
+name = "test_path"
+path = "tests/test_path.rs"
+
+[[test]]
+name = "test_lit"
+path = "tests/test_lit.rs"
+
+[[test]]
+name = "test_ident"
+path = "tests/test_ident.rs"
+
+[[test]]
+name = "test_parse_quote"
+path = "tests/test_parse_quote.rs"
+
+[[test]]
+name = "test_size"
+path = "tests/test_size.rs"
+
+[[test]]
+name = "test_ty"
+path = "tests/test_ty.rs"
+
+[[test]]
+name = "test_shebang"
+path = "tests/test_shebang.rs"
+
+[[test]]
+name = "test_attribute"
+path = "tests/test_attribute.rs"
+
+[[test]]
+name = "test_asyncness"
+path = "tests/test_asyncness.rs"
+
+[[test]]
+name = "test_expr"
+path = "tests/test_expr.rs"
+
+[[test]]
+name = "test_token_trees"
+path = "tests/test_token_trees.rs"
+
+[[test]]
+name = "test_parse_buffer"
+path = "tests/test_parse_buffer.rs"
+
+[[test]]
+name = "test_precedence"
+path = "tests/test_precedence.rs"
+
+[[bench]]
+name = "rust"
+path = "benches/rust.rs"
+harness = false
+required-features = [
+ "full",
+ "parsing",
+]
+
+[[bench]]
+name = "file"
+path = "benches/file.rs"
+required-features = [
+ "full",
+ "parsing",
+]
+
+[dependencies.proc-macro2]
+version = "1.0.83"
+default-features = false
+
+[dependencies.quote]
+version = "1.0.35"
+optional = true
+default-features = false
+
+[dependencies.unicode-ident]
+version = "1"
+
+[dev-dependencies.anyhow]
+version = "1"
+
+[dev-dependencies.automod]
+version = "1"
+
+[dev-dependencies.flate2]
+version = "1"
+
+[dev-dependencies.insta]
+version = "1"
+
+[dev-dependencies.rayon]
+version = "1"
+
+[dev-dependencies.ref-cast]
+version = "1"
+
+[dev-dependencies.reqwest]
+version = "0.12"
+features = ["blocking"]
+
+[dev-dependencies.rustversion]
+version = "1"
+
+[dev-dependencies.syn-test-suite]
+version = "0"
+
+[dev-dependencies.tar]
+version = "0.4.16"
+
+[dev-dependencies.termcolor]
+version = "1"
+
+[dev-dependencies.walkdir]
+version = "2.3.2"
+
+[features]
+clone-impls = []
+default = [
+ "derive",
+ "parsing",
+ "printing",
+ "clone-impls",
+ "proc-macro",
+]
+derive = []
+extra-traits = []
+fold = []
+full = []
+parsing = []
+printing = ["dep:quote"]
+proc-macro = [
+ "proc-macro2/proc-macro",
+ "quote?/proc-macro",
+]
+test = ["syn-test-suite/all-features"]
+visit = []
+visit-mut = []
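The feature list above is what the meson.build later in this patch reproduces with `--cfg` flags ("full", "derive", "parsing", "printing", "clone-impls", "proc-macro"). As a minimal, hedged sketch of what that feature combination provides to a consumer (illustrative only, not part of the vendored crate; assumes quote is also available, as in the dependency section above):

```rust
// Sketch only: relies on syn's "full" + "parsing" features for parse_file,
// and "printing" (via quote's ToTokens) to turn the tree back into tokens.
use quote::ToTokens;

fn main() {
    let file: syn::File = syn::parse_file("fn add(a: u32, b: u32) -> u32 { a + b }")
        .expect("valid Rust source");
    // With "printing" enabled, every syntax tree node can be re-emitted as tokens.
    println!("{}", file.to_token_stream());
}
```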
diff --git a/rust/hw/char/pl011/vendor/syn/LICENSE-APACHE b/rust/hw/char/pl011/vendor/syn/LICENSE-APACHE
new file mode 100644
index 0000000000..1b5ec8b78e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/LICENSE-APACHE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
diff --git a/rust/hw/char/pl011/vendor/syn/LICENSE-MIT b/rust/hw/char/pl011/vendor/syn/LICENSE-MIT
new file mode 100644
index 0000000000..31aa79387f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/syn/README.md b/rust/hw/char/pl011/vendor/syn/README.md
new file mode 100644
index 0000000000..04f9bf6cb1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/README.md
@@ -0,0 +1,284 @@
+Parser for Rust source code
+===========================
+
+[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
+[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
+[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/syn)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/syn/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+of Rust source code.
+
+Currently this library is geared toward use in Rust procedural macros, but
+contains some APIs that may be useful more generally.
+
+- **Data structures** — Syn provides a complete syntax tree that can represent
+ any valid Rust source code. The syntax tree is rooted at [`syn::File`] which
+ represents a full source file, but there are other entry points that may be
+ useful to procedural macros including [`syn::Item`], [`syn::Expr`] and
+ [`syn::Type`].
+
+- **Derives** — Of particular interest to derive macros is [`syn::DeriveInput`]
+ which is any of the three legal input items to a derive macro. An example
+ below shows using this type in a library that can derive implementations of a
+ user-defined trait.
+
+- **Parsing** — Parsing in Syn is built around [parser functions] with the
+ signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined by
+ Syn is individually parsable and may be used as a building block for custom
+ syntaxes, or you may dream up your own brand new syntax without involving any
+ of our syntax tree types.
+
+- **Location information** — Every token parsed by Syn is associated with a
+ `Span` that tracks line and column information back to the source of that
+  token. These spans allow a procedural macro to display detailed error messages
+ pointing to all the right places in the user's code. There is an example of
+ this below.
+
+- **Feature flags** — Functionality is aggressively feature gated so your
+ procedural macros enable only what they need, and do not pay in compile time
+ for all the rest.
+
+[`syn::File`]: https://docs.rs/syn/2.0/syn/struct.File.html
+[`syn::Item`]: https://docs.rs/syn/2.0/syn/enum.Item.html
+[`syn::Expr`]: https://docs.rs/syn/2.0/syn/enum.Expr.html
+[`syn::Type`]: https://docs.rs/syn/2.0/syn/enum.Type.html
+[`syn::DeriveInput`]: https://docs.rs/syn/2.0/syn/struct.DeriveInput.html
+[parser functions]: https://docs.rs/syn/2.0/syn/parse/index.html
+
+*Version requirement: Syn supports rustc 1.60 and up.*
+
+[*Release notes*](https://github.com/dtolnay/syn/releases)
+
+<br>
+
+## Resources
+
+The best way to learn about procedural macros is by writing some. Consider
+working through [this procedural macro workshop][workshop] to get familiar with
+the different types of procedural macros. The workshop contains relevant links
+into the Syn documentation as you work through each project.
+
+[workshop]: https://github.com/dtolnay/proc-macro-workshop
+
+<br>
+
+## Example of a derive macro
+
+The canonical derive macro using Syn looks like this. We write an ordinary Rust
+function tagged with a `proc_macro_derive` attribute and the name of the trait
+we are deriving. Any time that derive appears in the user's code, the Rust
+compiler passes their data structure as tokens into our macro. We get to execute
+arbitrary Rust code to figure out what to do with those tokens, then hand some
+tokens back to the compiler to compile into the user's crate.
+
+[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
+
+```toml
+[dependencies]
+syn = "2.0"
+quote = "1.0"
+
+[lib]
+proc-macro = true
+```
+
+```rust
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, DeriveInput};
+
+#[proc_macro_derive(MyMacro)]
+pub fn my_macro(input: TokenStream) -> TokenStream {
+ // Parse the input tokens into a syntax tree
+ let input = parse_macro_input!(input as DeriveInput);
+
+ // Build the output, possibly using quasi-quotation
+ let expanded = quote! {
+ // ...
+ };
+
+ // Hand the output tokens back to the compiler
+ TokenStream::from(expanded)
+}
+```
+
+The [`heapsize`] example directory shows a complete working implementation of a
+derive macro. The example derives a `HeapSize` trait which computes an estimate
+of the amount of heap memory owned by a value.
+
+[`heapsize`]: examples/heapsize
+
+```rust
+pub trait HeapSize {
+ /// Total number of bytes of heap memory owned by `self`.
+ fn heap_size_of_children(&self) -> usize;
+}
+```
+
+The derive macro allows users to write `#[derive(HeapSize)]` on data structures
+in their program.
+
+```rust
+#[derive(HeapSize)]
+struct Demo<'a, T: ?Sized> {
+ a: Box<T>,
+ b: u8,
+ c: &'a str,
+ d: String,
+}
+```
+
+<br>
+
+## Spans and error reporting
+
+The token-based procedural macro API provides great control over where the
+compiler's error messages are displayed in user code. Consider the error the
+user sees if one of their field types does not implement `HeapSize`.
+
+```rust
+#[derive(HeapSize)]
+struct Broken {
+ ok: String,
+ bad: std::thread::Thread,
+}
+```
+
+By tracking span information all the way through the expansion of a procedural
+macro as shown in the `heapsize` example, token-based macros in Syn are able to
+trigger errors that directly pinpoint the source of the problem.
+
+```console
+error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
+ --> src/main.rs:7:5
+ |
+7 | bad: std::thread::Thread,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
+```
+
+<br>
+
+## Parsing a custom syntax
+
+The [`lazy-static`] example directory shows the implementation of a
+`functionlike!(...)` procedural macro in which the input tokens are parsed using
+Syn's parsing API.
+
+[`lazy-static`]: examples/lazy-static
+
+The example reimplements the popular `lazy_static` crate from crates.io as a
+procedural macro.
+
+```rust
+lazy_static! {
+ static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
+}
+```
+
+The implementation shows how to trigger custom warnings and error messages on
+the macro input.
+
+```console
+warning: come on, pick a more creative name
+ --> src/main.rs:10:16
+ |
+10 | static ref FOO: String = "lazy_static".to_owned();
+ | ^^^
+```
+
+<br>
+
+## Testing
+
+When testing macros, we often care not just that the macro can be used
+successfully but also that when the macro is provided with invalid input it
+produces maximally helpful error messages. Consider using the [`trybuild`] crate
+to write tests for errors that are emitted by your macro or errors detected by
+the Rust compiler in the expanded code following misuse of the macro. Such tests
+help avoid regressions from later refactors that mistakenly make an error no
+longer trigger or be less helpful than it used to be.
+
+[`trybuild`]: https://github.com/dtolnay/trybuild
+
+<br>
+
+## Debugging
+
+When developing a procedural macro it can be helpful to look at what the
+generated code looks like. Use `cargo rustc -- -Zunstable-options
+--pretty=expanded` or the [`cargo expand`] subcommand.
+
+[`cargo expand`]: https://github.com/dtolnay/cargo-expand
+
+To show the expanded code for some crate that uses your procedural macro, run
+`cargo expand` from that crate. To show the expanded code for one of your own
+test cases, run `cargo expand --test the_test_case` where the last argument is
+the name of the test file without the `.rs` extension.
+
+This write-up by Brandon W Maister discusses debugging in more detail:
+[Debugging Rust's new Custom Derive system][debugging].
+
+[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
+
+<br>
+
+## Optional features
+
+Syn puts a lot of functionality behind optional features in order to optimize
+compile time for the most common use cases. The following features are
+available.
+
+- **`derive`** *(enabled by default)* — Data structures for representing the
+ possible input to a derive macro, including structs and enums and types.
+- **`full`** — Data structures for representing the syntax tree of all valid
+ Rust source code, including items and expressions.
+- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a
+ syntax tree node of a chosen type.
+- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as
+ tokens of Rust source code.
+- **`visit`** — Trait for traversing a syntax tree.
+- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
+- **`fold`** — Trait for transforming an owned syntax tree.
+- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
+ types.
+- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
+ types.
+- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
+ library libproc_macro from rustc toolchain.
+
+<br>
+
+## Proc macro shim
+
+Syn operates on the token representation provided by the [proc-macro2] crate
+from crates.io rather than using the compiler's built in proc-macro crate
+directly. This enables code using Syn to execute outside of the context of a
+procedural macro, such as in unit tests or build.rs, and we avoid needing
+incompatible ecosystems for proc macros vs non-macro use cases.
+
+In general all of your code should be written against proc-macro2 rather than
+proc-macro. The one exception is in the signatures of procedural macro entry
+points, which are required by the language to use `proc_macro::TokenStream`.
+
+The proc-macro2 crate will automatically detect and use the compiler's data
+structures when a procedural macro is active.
+
+[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+<br>
+
+#### License
+
+<sup>
+Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+</sup>
+
+<br>
+
+<sub>
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
+</sub>
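The examples/heapsize and examples/lazy-static directories referenced by the README above are not vendored (the Cargo.toml include list earlier in this patch omits them). As a rough, hedged sketch of the derive pattern the README walks through, assuming a hypothetical `heapsize` crate that provides the `HeapSize` trait and a crate compiled with `proc-macro = true`:

```rust
// Sketch only (not part of the vendored sources): a cut-down HeapSize derive.
use proc_macro::TokenStream;
use quote::{quote, quote_spanned};
use syn::spanned::Spanned;
use syn::{parse_macro_input, Data, DeriveInput, Fields};

#[proc_macro_derive(HeapSize)]
pub fn derive_heap_size(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    // Sum heap_size_of_children() over the named fields, spanning each term to
    // its field so type errors point at the offending field, as described in
    // the "Spans and error reporting" section above.
    let sum = match &input.data {
        Data::Struct(data) => match &data.fields {
            Fields::Named(fields) => {
                let terms = fields.named.iter().map(|f| {
                    let field = &f.ident;
                    quote_spanned! {f.span()=>
                        heapsize::HeapSize::heap_size_of_children(&self.#field)
                    }
                });
                quote! { 0 #(+ #terms)* }
            }
            _ => quote!(0), // tuple/unit structs: nothing summed in this sketch
        },
        _ => quote!(0), // enums and unions are left out of this sketch
    };

    let expanded = quote! {
        impl heapsize::HeapSize for #name {
            fn heap_size_of_children(&self) -> usize { #sum }
        }
    };
    TokenStream::from(expanded)
}
```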
diff --git a/rust/hw/char/pl011/vendor/syn/benches/file.rs b/rust/hw/char/pl011/vendor/syn/benches/file.rs
new file mode 100644
index 0000000000..b424723966
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/benches/file.rs
@@ -0,0 +1,57 @@
+// $ cargo bench --features full,test --bench file
+
+#![feature(rustc_private, test)]
+#![recursion_limit = "1024"]
+#![allow(
+ clippy::items_after_statements,
+ clippy::manual_let_else,
+ clippy::match_like_matches_macro,
+ clippy::missing_panics_doc,
+ clippy::must_use_candidate,
+ clippy::uninlined_format_args
+)]
+
+extern crate test;
+
+#[macro_use]
+#[path = "../tests/macros/mod.rs"]
+mod macros;
+
+#[allow(dead_code)]
+#[path = "../tests/repo/mod.rs"]
+mod repo;
+
+use proc_macro2::{Span, TokenStream};
+use std::fs;
+use std::str::FromStr;
+use syn::parse::{ParseStream, Parser};
+use test::Bencher;
+
+const FILE: &str = "tests/rust/library/core/src/str/mod.rs";
+
+fn get_tokens() -> TokenStream {
+ repo::clone_rust();
+ let content = fs::read_to_string(FILE).unwrap();
+ TokenStream::from_str(&content).unwrap()
+}
+
+#[bench]
+fn baseline(b: &mut Bencher) {
+ let tokens = get_tokens();
+ b.iter(|| drop(tokens.clone()));
+}
+
+#[bench]
+fn create_token_buffer(b: &mut Bencher) {
+ let tokens = get_tokens();
+ fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
+ Err(syn::Error::new(Span::call_site(), ""))
+ }
+ b.iter(|| immediate_fail.parse2(tokens.clone()));
+}
+
+#[bench]
+fn parse_file(b: &mut Bencher) {
+ let tokens = get_tokens();
+ b.iter(|| syn::parse2::<syn::File>(tokens.clone()));
+}
diff --git a/rust/hw/char/pl011/vendor/syn/benches/rust.rs b/rust/hw/char/pl011/vendor/syn/benches/rust.rs
new file mode 100644
index 0000000000..bfa3a17f4a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/benches/rust.rs
@@ -0,0 +1,182 @@
+// $ cargo bench --features full,test --bench rust
+//
+// Syn only, useful for profiling:
+// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full,test --bench rust
+
+#![cfg_attr(not(syn_only), feature(rustc_private))]
+#![recursion_limit = "1024"]
+#![allow(
+ clippy::arc_with_non_send_sync,
+ clippy::cast_lossless,
+ clippy::let_underscore_untyped,
+ clippy::manual_let_else,
+ clippy::match_like_matches_macro,
+ clippy::uninlined_format_args,
+ clippy::unnecessary_wraps
+)]
+
+#[macro_use]
+#[path = "../tests/macros/mod.rs"]
+mod macros;
+
+#[allow(dead_code)]
+#[path = "../tests/repo/mod.rs"]
+mod repo;
+
+use std::fs;
+use std::time::{Duration, Instant};
+
+#[cfg(not(syn_only))]
+mod tokenstream_parse {
+ use proc_macro2::TokenStream;
+ use std::str::FromStr;
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ TokenStream::from_str(content).map(drop).map_err(drop)
+ }
+}
+
+mod syn_parse {
+ pub fn bench(content: &str) -> Result<(), ()> {
+ syn::parse_file(content).map(drop).map_err(drop)
+ }
+}
+
+#[cfg(not(syn_only))]
+mod librustc_parse {
+ extern crate rustc_data_structures;
+ extern crate rustc_driver;
+ extern crate rustc_error_messages;
+ extern crate rustc_errors;
+ extern crate rustc_parse;
+ extern crate rustc_session;
+ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+ use rustc_error_messages::FluentBundle;
+    use rustc_errors::{emitter::Emitter, translation::Translate, DiagCtxt, DiagInner};
+ use rustc_session::parse::ParseSess;
+ use rustc_span::source_map::{FilePathMapping, SourceMap};
+ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+ fn emit_diagnostic(&mut self, _diag: DiagInner) {}
+ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
+ None
+ }
+ }
+
+ impl Translate for SilentEmitter {
+ fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> {
+ None
+ }
+ fn fallback_fluent_bundle(&self) -> &FluentBundle {
+ panic!("silent emitter attempted to translate a diagnostic");
+ }
+ }
+
+ rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| {
+            let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = DiagCtxt::new(emitter);
+ let sess = ParseSess::with_dcx(handler, source_map);
+ if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+ ) {
+ diagnostic.cancel();
+ return Err(());
+ };
+ Ok(())
+ })
+ }
+}
+
+#[cfg(not(syn_only))]
+mod read_from_disk {
+ pub fn bench(content: &str) -> Result<(), ()> {
+ let _ = content;
+ Ok(())
+ }
+}
+
+fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration {
+ let begin = Instant::now();
+ let mut success = 0;
+ let mut total = 0;
+
+ ["tests/rust/compiler", "tests/rust/library"]
+ .iter()
+ .flat_map(|dir| {
+ walkdir::WalkDir::new(dir)
+ .into_iter()
+ .filter_entry(repo::base_dir_filter)
+ })
+ .for_each(|entry| {
+ let entry = entry.unwrap();
+ let path = entry.path();
+ if path.is_dir() {
+ return;
+ }
+ let content = fs::read_to_string(path).unwrap();
+ let ok = codepath(&content).is_ok();
+ success += ok as usize;
+ total += 1;
+ if !ok {
+ eprintln!("FAIL {}", path.display());
+ }
+ });
+
+ assert_eq!(success, total);
+ begin.elapsed()
+}
+
+fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ [
+ $(
+ $(#[$cfg])*
+                    (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+ }
+
+ #[cfg(not(syn_only))]
+ {
+ let mut lines = 0;
+ let mut files = 0;
+ exec(|content| {
+ lines += content.lines().count();
+ files += 1;
+ Ok(())
+ });
+ eprintln!("\n{} lines in {} files", lines, files);
+ }
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+ read_from_disk,
+ #[cfg(not(syn_only))]
+ tokenstream_parse,
+ syn_parse,
+ #[cfg(not(syn_only))]
+ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
+ eprintln!(
+ "elapsed={}.{:03}s",
+ elapsed.as_secs(),
+ elapsed.subsec_millis(),
+ );
+ }
+ eprintln!();
+}
diff --git a/rust/hw/char/pl011/vendor/syn/meson.build b/rust/hw/char/pl011/vendor/syn/meson.build
new file mode 100644
index 0000000000..87b9628671
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/meson.build
@@ -0,0 +1,24 @@
+_syn_rs = static_library(
+ 'syn',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2021',
+ '--cfg', 'feature="full"',
+ '--cfg', 'feature="derive"',
+ '--cfg', 'feature="parsing"',
+ '--cfg', 'feature="printing"',
+ '--cfg', 'feature="clone-impls"',
+ '--cfg', 'feature="proc-macro"',
+ ],
+ dependencies: [
+ dep_quote,
+ dep_proc_macro2,
+ dep_unicode_ident,
+ ],
+)
+
+dep_syn = declare_dependency(
+ link_with: _syn_rs,
+)
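Because this build does not go through Cargo, the rust_args above pass the crate's Cargo features as raw `--cfg` flags. A minimal sketch of how such flags surface inside the sources (illustrative only, not part of the vendored crate; recent compilers may warn about undeclared cfgs):

```rust
// Items gated like the ones throughout src/attr.rs below compile only when the
// build passes the matching --cfg 'feature="..."' argument.
#[cfg(feature = "parsing")]
fn available_with_parsing() {}

#[cfg(all(feature = "full", feature = "printing"))]
fn available_with_full_and_printing() {}

fn main() {
    #[cfg(feature = "parsing")]
    available_with_parsing();
    #[cfg(all(feature = "full", feature = "printing"))]
    available_with_full_and_printing();
}
```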
diff --git a/rust/hw/char/pl011/vendor/syn/src/attr.rs b/rust/hw/char/pl011/vendor/syn/src/attr.rs
new file mode 100644
index 0000000000..c19715cb3b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/attr.rs
@@ -0,0 +1,793 @@
+#[cfg(feature = "parsing")]
+use crate::error::Error;
+#[cfg(feature = "parsing")]
+use crate::error::Result;
+use crate::expr::Expr;
+use crate::mac::MacroDelimiter;
+#[cfg(feature = "parsing")]
+use crate::meta::{self, ParseNestedMeta};
+#[cfg(feature = "parsing")]
+use crate::parse::{Parse, ParseStream, Parser};
+use crate::path::Path;
+use crate::token;
+use proc_macro2::TokenStream;
+#[cfg(feature = "printing")]
+use std::iter;
+#[cfg(feature = "printing")]
+use std::slice;
+
+ast_struct! {
+ /// An attribute, like `#[repr(transparent)]`.
+ ///
+ /// <br>
+ ///
+ /// # Syntax
+ ///
+ /// Rust has six types of attributes.
+ ///
+ /// - Outer attributes like `#[repr(transparent)]`. These appear outside or
+ /// in front of the item they describe.
+ ///
+ /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
+ /// of the item they describe, usually a module.
+ ///
+ /// - Outer one-line doc comments like `/// Example`.
+ ///
+ /// - Inner one-line doc comments like `//! Please file an issue`.
+ ///
+ /// - Outer documentation blocks `/** Example */`.
+ ///
+ /// - Inner documentation blocks `/*! Please file an issue */`.
+ ///
+    /// The `style` field of type `AttrStyle` distinguishes whether an attribute
+ /// is outer or inner.
+ ///
+ /// Every attribute has a `path` that indicates the intended interpretation
+ /// of the rest of the attribute's contents. The path and the optional
+ /// additional contents are represented together in the `meta` field of the
+ /// attribute in three possible varieties:
+ ///
+    /// - Meta::Path — attributes whose information content conveys just a
+ /// path, for example the `#[test]` attribute.
+ ///
+ /// - Meta::List — attributes that carry arbitrary tokens after the
+ /// path, surrounded by a delimiter (parenthesis, bracket, or brace). For
+ /// example `#[derive(Copy)]` or `#[precondition(x < 5)]`.
+ ///
+ /// - Meta::NameValue — attributes with an `=` sign after the path,
+ /// followed by a Rust expression. For example `#[path =
+ /// "sys/windows.rs"]`.
+ ///
+ /// All doc comments are represented in the NameValue style with a path of
+ /// "doc", as this is how they are processed by the compiler and by
+ /// `macro_rules!` macros.
+ ///
+ /// ```text
+ /// #[derive(Copy, Clone)]
+ /// ~~~~~~Path
+ /// ^^^^^^^^^^^^^^^^^^^Meta::List
+ ///
+ /// #[path = "sys/windows.rs"]
+ /// ~~~~Path
+ /// ^^^^^^^^^^^^^^^^^^^^^^^Meta::NameValue
+ ///
+ /// #[test]
+ /// ^^^^Meta::Path
+ /// ```
+ ///
+ /// <br>
+ ///
+ /// # Parsing from tokens to Attribute
+ ///
+ /// This type does not implement the [`Parse`] trait and thus cannot be
+ /// parsed directly by [`ParseStream::parse`]. Instead use
+ /// [`ParseStream::call`] with one of the two parser functions
+ /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on
+ /// which you intend to parse.
+ ///
+ /// [`Parse`]: crate::parse::Parse
+ /// [`ParseStream::parse`]: crate::parse::ParseBuffer::parse
+ /// [`ParseStream::call`]: crate::parse::ParseBuffer::call
+ ///
+ /// ```
+ /// use syn::{Attribute, Ident, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // Parses a unit struct with attributes.
+ /// //
+ /// // #[path = "s.tmpl"]
+ /// // struct S;
+ /// struct UnitStruct {
+ /// attrs: Vec<Attribute>,
+ /// struct_token: Token![struct],
+ /// name: Ident,
+ /// semi_token: Token![;],
+ /// }
+ ///
+ /// impl Parse for UnitStruct {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// Ok(UnitStruct {
+ /// attrs: input.call(Attribute::parse_outer)?,
+ /// struct_token: input.parse()?,
+ /// name: input.parse()?,
+ /// semi_token: input.parse()?,
+ /// })
+ /// }
+ /// }
+ /// ```
+ ///
+ /// <p><br></p>
+ ///
+ /// # Parsing from Attribute to structured arguments
+ ///
+ /// The grammar of attributes in Rust is very flexible, which makes the
+ /// syntax tree not that useful on its own. In particular, arguments of the
+ /// `Meta::List` variety of attribute are held in an arbitrary `tokens:
+ /// TokenStream`. Macros are expected to check the `path` of the attribute,
+ /// decide whether they recognize it, and then parse the remaining tokens
+ /// according to whatever grammar they wish to require for that kind of
+ /// attribute. Use [`parse_args()`] to parse those tokens into the expected
+ /// data structure.
+ ///
+ /// [`parse_args()`]: Attribute::parse_args
+ ///
+ /// <p><br></p>
+ ///
+ /// # Doc comments
+ ///
+ /// The compiler transforms doc comments, such as `/// comment` and `/*!
+ /// comment */`, into attributes before macros are expanded. Each comment
is
+ /// expanded into an attribute of the form `#[doc = r"comment"]`.
+ ///
+ /// As an example, the following `mod` items are expanded identically:
+ ///
+ /// ```
+ /// # use syn::{ItemMod, parse_quote};
+ /// let doc: ItemMod = parse_quote! {
+ /// /// Single line doc comments
+ /// /// We write so many!
+ /// /**
+ /// * Multi-line comments...
+ /// * May span many lines
+ /// */
+ /// mod example {
+ /// //! Of course, they can be inner too
+ /// /*! And fit in a single line */
+ /// }
+ /// };
+ /// let attr: ItemMod = parse_quote! {
+ /// #[doc = r" Single line doc comments"]
+ /// #[doc = r" We write so many!"]
+ /// #[doc = r"
+ /// * Multi-line comments...
+ /// * May span many lines
+ /// "]
+ /// mod example {
+ /// #![doc = r" Of course, they can be inner too"]
+ /// #![doc = r" And fit in a single line "]
+ /// }
+ /// };
+ /// assert_eq!(doc, attr);
+ /// ```
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+ pub meta: Meta,
+ }
+}
+
+impl Attribute {
+ /// Returns the path that identifies the interpretation of this attribute.
+ ///
+ /// For example this would return the `test` in `#[test]`, the `derive` in
+ /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
+ pub fn path(&self) -> &Path {
+ self.meta.path()
+ }
+
+ /// Parse the arguments to the attribute as a syntax tree.
+ ///
+ /// This is similar to pulling out the `TokenStream` from `Meta::List` and
+ /// doing `syn::parse2::<T>(meta_list.tokens)`, except that using
+ /// `parse_args` the error message has a more useful span when `tokens` is
+ /// empty.
+ ///
+ /// The surrounding delimiters are *not* included in the input to the
+ /// parser.
+ ///
+ /// ```text
+ /// #[my_attr(value < 5)]
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{parse_quote, Attribute, Expr};
+ ///
+ /// let attr: Attribute = parse_quote! {
+ /// #[precondition(value < 5)]
+ /// };
+ ///
+ /// if attr.path().is_ident("precondition") {
+ /// let precondition: Expr = attr.parse_args()?;
+ /// // ...
+ /// }
+ /// # anyhow::Ok(())
+ /// ```
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+ self.parse_args_with(T::parse)
+ }
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{parse_quote, Attribute};
+ ///
+ /// let attr: Attribute = parse_quote! {
+ /// #[inception { #[brrrrrrraaaaawwwwrwrrrmrmrmmrmrmmmmm] }]
+ /// };
+ ///
+ /// let bwom = attr.parse_args_with(Attribute::parse_outer)?;
+ ///
+ /// // Attribute does not have a Parse impl, so we couldn't directly do:
+ /// // let bwom: Attribute = attr.parse_args()?;
+ /// # anyhow::Ok(())
+ /// ```
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+ match &self.meta {
+ Meta::Path(path) => Err(crate::error::new2(
+ path.segments.first().unwrap().ident.span(),
+ path.segments.last().unwrap().ident.span(),
+ format!(
+ "expected attribute arguments in parentheses: {}[{}(...)]",
+ parsing::DisplayAttrStyle(&self.style),
+ parsing::DisplayPath(path),
+ ),
+ )),
+ Meta::NameValue(meta) => Err(Error::new(
+ meta.eq_token.span,
+ format_args!(
+ "expected parentheses: {}[{}(...)]",
+ parsing::DisplayAttrStyle(&self.style),
+ parsing::DisplayPath(&meta.path),
+ ),
+ )),
+ Meta::List(meta) => meta.parse_args_with(parser),
+ }
+ }
+
+ /// Parse the arguments to the attribute, expecting it to follow the
+ /// conventional structure used by most of Rust's built-in attributes.
+ ///
+    /// The [*Meta Item Attribute Syntax*][syntax] section in the Rust reference
+ /// explains the convention in more detail. Not all attributes follow this
+ /// convention, so [`parse_args()`][Self::parse_args] is available if you
+ /// need to parse arbitrarily goofy attribute syntax.
+ ///
+    /// [syntax]: https://doc.rust-lang.org/reference/attributes.html#meta-item-attribute-syntax
+ ///
+ /// # Example
+ ///
+ /// We'll parse a struct, and then parse some of Rust's `#[repr]` attribute
+ /// syntax.
+ ///
+ /// ```
+ /// use syn::{parenthesized, parse_quote, token, ItemStruct, LitInt};
+ ///
+ /// let input: ItemStruct = parse_quote! {
+ /// #[repr(C, align(4))]
+ /// pub struct MyStruct(u16, u32);
+ /// };
+ ///
+ /// let mut repr_c = false;
+ /// let mut repr_transparent = false;
+ /// let mut repr_align = None::<usize>;
+ /// let mut repr_packed = None::<usize>;
+ /// for attr in &input.attrs {
+ /// if attr.path().is_ident("repr") {
+ /// attr.parse_nested_meta(|meta| {
+ /// // #[repr(C)]
+ /// if meta.path.is_ident("C") {
+ /// repr_c = true;
+ /// return Ok(());
+ /// }
+ ///
+ /// // #[repr(transparent)]
+ /// if meta.path.is_ident("transparent") {
+ /// repr_transparent = true;
+ /// return Ok(());
+ /// }
+ ///
+ /// // #[repr(align(N))]
+ /// if meta.path.is_ident("align") {
+ /// let content;
+ /// parenthesized!(content in meta.input);
+ /// let lit: LitInt = content.parse()?;
+ /// let n: usize = lit.base10_parse()?;
+ /// repr_align = Some(n);
+ /// return Ok(());
+ /// }
+ ///
+ /// // #[repr(packed)] or #[repr(packed(N))], omitted N means 1
+ /// if meta.path.is_ident("packed") {
+ /// if meta.input.peek(token::Paren) {
+ /// let content;
+ /// parenthesized!(content in meta.input);
+ /// let lit: LitInt = content.parse()?;
+ /// let n: usize = lit.base10_parse()?;
+ /// repr_packed = Some(n);
+ /// } else {
+ /// repr_packed = Some(1);
+ /// }
+ /// return Ok(());
+ /// }
+ ///
+ /// Err(meta.error("unrecognized repr"))
+ /// })?;
+ /// }
+ /// }
+ /// # anyhow::Ok(())
+ /// ```
+ ///
+ /// # Alternatives
+ ///
+ /// In some cases, for attributes which have nested layers of structured
+ /// content, the following less flexible approach might be more convenient:
+ ///
+ /// ```
+ /// # use syn::{parse_quote, ItemStruct};
+ /// #
+ /// # let input: ItemStruct = parse_quote! {
+ /// # #[repr(C, align(4))]
+ /// # pub struct MyStruct(u16, u32);
+ /// # };
+ /// #
+ /// use syn::punctuated::Punctuated;
+ /// use syn::{parenthesized, token, Error, LitInt, Meta, Token};
+ ///
+ /// let mut repr_c = false;
+ /// let mut repr_transparent = false;
+ /// let mut repr_align = None::<usize>;
+ /// let mut repr_packed = None::<usize>;
+ /// for attr in &input.attrs {
+ /// if attr.path().is_ident("repr") {
+    ///         let nested = attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?;
+ /// for meta in nested {
+ /// match meta {
+ /// // #[repr(C)]
+ /// Meta::Path(path) if path.is_ident("C") => {
+ /// repr_c = true;
+ /// }
+ ///
+ /// // #[repr(align(N))]
+ /// Meta::List(meta) if meta.path.is_ident("align") => {
+ /// let lit: LitInt = meta.parse_args()?;
+ /// let n: usize = lit.base10_parse()?;
+ /// repr_align = Some(n);
+ /// }
+ ///
+ /// /* ... */
+ ///
+ /// _ => {
+    ///                     return Err(Error::new_spanned(meta, "unrecognized repr"));
+ /// }
+ /// }
+ /// }
+ /// }
+ /// }
+ /// # Ok(())
+ /// ```
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_nested_meta(
+ &self,
+ logic: impl FnMut(ParseNestedMeta) -> Result<()>,
+ ) -> Result<()> {
+ self.parse_args_with(meta::parser(logic))
+ }
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+ /// # Example
+ ///
+ /// See
+    /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+ let mut attrs = Vec::new();
+ while input.peek(Token![#]) {
+ attrs.push(input.call(parsing::single_parse_outer)?);
+ }
+ Ok(attrs)
+ }
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+ /// # Example
+ ///
+ /// See
+    /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+ let mut attrs = Vec::new();
+ parsing::parse_inner(input, &mut attrs)?;
+ Ok(attrs)
+ }
+}
+
+ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+ /// # Outer attributes
+ ///
+ /// - `#[repr(transparent)]`
+ /// - `/// # Example`
+ /// - `/** Please file an issue */`
+ ///
+ /// # Inner attributes
+ ///
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+ }
+}
+
+ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+ /// ## Path
+ ///
+ /// A meta path is like the `test` in `#[test]`.
+ ///
+ /// ## List
+ ///
+ /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
+ ///
+ /// ## NameValue
+ ///
+ /// A name-value meta is like the `path = "..."` in `#[path =
+ /// "sys/windows.rs"]`.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum Meta {
+ Path(Path),
+
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ List(MetaList),
+
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ NameValue(MetaNameValue),
+ }
+}
+
+ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct MetaList {
+ pub path: Path,
+ pub delimiter: MacroDelimiter,
+ pub tokens: TokenStream,
+ }
+}
+
+ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct MetaNameValue {
+ pub path: Path,
+ pub eq_token: Token![=],
+ pub value: Expr,
+ }
+}
+
+impl Meta {
+ /// Returns the path that begins this structured meta item.
+ ///
+ /// For example this would return the `test` in `#[test]`, the `derive` in
+ /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
+ pub fn path(&self) -> &Path {
+ match self {
+ Meta::Path(path) => path,
+ Meta::List(meta) => &meta.path,
+ Meta::NameValue(meta) => &meta.path,
+ }
+ }
+
+ /// Error if this is a `Meta::List` or `Meta::NameValue`.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn require_path_only(&self) -> Result<&Path> {
+ let error_span = match self {
+ Meta::Path(path) => return Ok(path),
+ Meta::List(meta) => meta.delimiter.span().open(),
+ Meta::NameValue(meta) => meta.eq_token.span,
+ };
+ Err(Error::new(error_span, "unexpected token in attribute"))
+ }
+
+ /// Error if this is a `Meta::Path` or `Meta::NameValue`.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn require_list(&self) -> Result<&MetaList> {
+ match self {
+ Meta::List(meta) => Ok(meta),
+ Meta::Path(path) => Err(crate::error::new2(
+ path.segments.first().unwrap().ident.span(),
+ path.segments.last().unwrap().ident.span(),
+ format!(
+ "expected attribute arguments in parentheses: `{}(...)`",
+ parsing::DisplayPath(path),
+ ),
+ )),
+            Meta::NameValue(meta) => Err(Error::new(meta.eq_token.span, "expected `(`")),
+ }
+ }
+
+ /// Error if this is a `Meta::Path` or `Meta::List`.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn require_name_value(&self) -> Result<&MetaNameValue> {
+ match self {
+ Meta::NameValue(meta) => Ok(meta),
+ Meta::Path(path) => Err(crate::error::new2(
+ path.segments.first().unwrap().ident.span(),
+ path.segments.last().unwrap().ident.span(),
+ format!(
+ "expected a value for this attribute: `{} = ...`",
+ parsing::DisplayPath(path),
+ ),
+ )),
+            Meta::List(meta) => Err(Error::new(meta.delimiter.span().open(), "expected `=`")),
+ }
+ }
+}
+
+impl MetaList {
+ /// See [`Attribute::parse_args`].
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+ self.parse_args_with(T::parse)
+ }
+
+ /// See [`Attribute::parse_args_with`].
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+ let scope = self.delimiter.span().close();
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+
+ /// See [`Attribute::parse_nested_meta`].
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_nested_meta(
+ &self,
+ logic: impl FnMut(ParseNestedMeta) -> Result<()>,
+ ) -> Result<()> {
+ self.parse_args_with(meta::parser(logic))
+ }
+}
+
+#[cfg(feature = "printing")]
+pub(crate) trait FilterAttrs<'a> {
+ type Ret: Iterator<Item = &'a Attribute>;
+
+ fn outer(self) -> Self::Ret;
+ #[cfg(feature = "full")]
+ fn inner(self) -> Self::Ret;
+}
+
+#[cfg(feature = "printing")]
+impl<'a> FilterAttrs<'a> for &'a [Attribute] {
+    type Ret = iter::Filter<slice::Iter<'a, Attribute>, fn(&&Attribute) -> bool>;
+
+ fn outer(self) -> Self::Ret {
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.iter().filter(is_outer)
+ }
+
+ #[cfg(feature = "full")]
+ fn inner(self) -> Self::Ret {
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+ AttrStyle::Outer => false,
+ }
+ }
+ self.iter().filter(is_inner)
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue};
+ use crate::error::Result;
+ use crate::expr::{Expr, ExprLit};
+ use crate::lit::Lit;
+ use crate::parse::discouraged::Speculative as _;
+ use crate::parse::{Parse, ParseStream};
+ use crate::path::Path;
+ use crate::{mac, token};
+ use std::fmt::{self, Display};
+
+    pub(crate) fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> {
+ while input.peek(Token![#]) && input.peek2(Token![!]) {
+ attrs.push(input.call(single_parse_inner)?);
+ }
+ Ok(())
+ }
+
+ pub(crate) fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
+ let content;
+ Ok(Attribute {
+ pound_token: input.parse()?,
+ style: AttrStyle::Inner(input.parse()?),
+ bracket_token: bracketed!(content in input),
+ meta: content.parse()?,
+ })
+ }
+
+ pub(crate) fn single_parse_outer(input: ParseStream) -> Result<Attribute> {
+ let content;
+ Ok(Attribute {
+ pound_token: input.parse()?,
+ style: AttrStyle::Outer,
+ bracket_token: bracketed!(content in input),
+ meta: content.parse()?,
+ })
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Meta {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let path = input.call(Path::parse_mod_style)?;
+ parse_meta_after_path(path, input)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for MetaList {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let path = input.call(Path::parse_mod_style)?;
+ parse_meta_list_after_path(path, input)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for MetaNameValue {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let path = input.call(Path::parse_mod_style)?;
+ parse_meta_name_value_after_path(path, input)
+ }
+ }
+
+    pub(crate) fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> {
+        if input.peek(token::Paren) || input.peek(token::Bracket) || input.peek(token::Brace) {
+ parse_meta_list_after_path(path, input).map(Meta::List)
+ } else if input.peek(Token![=]) {
+ parse_meta_name_value_after_path(path, input).map(Meta::NameValue)
+ } else {
+ Ok(Meta::Path(path))
+ }
+ }
+
+    fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> {
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+ Ok(MetaList {
+ path,
+ delimiter,
+ tokens,
+ })
+ }
+
+    fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> {
+ let eq_token: Token![=] = input.parse()?;
+ let ahead = input.fork();
+ let lit: Option<Lit> = ahead.parse()?;
+ let value = if let (Some(lit), true) = (lit, ahead.is_empty()) {
+ input.advance_to(&ahead);
+ Expr::Lit(ExprLit {
+ attrs: Vec::new(),
+ lit,
+ })
+ } else if input.peek(Token![#]) && input.peek2(token::Bracket) {
+            return Err(input.error("unexpected attribute inside of attribute"));
+ } else {
+ input.parse()?
+ };
+ Ok(MetaNameValue {
+ path,
+ eq_token,
+ value,
+ })
+ }
+
+ pub(super) struct DisplayAttrStyle<'a>(pub &'a AttrStyle);
+
+ impl<'a> Display for DisplayAttrStyle<'a> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str(match self.0 {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+ })
+ }
+ }
+
+ pub(super) struct DisplayPath<'a>(pub &'a Path);
+
+ impl<'a> Display for DisplayPath<'a> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ for (i, segment) in self.0.segments.iter().enumerate() {
+ if i > 0 || self.0.leading_colon.is_some() {
+ formatter.write_str("::")?;
+ }
+ write!(formatter, "{}", segment.ident)?;
+ }
+ Ok(())
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::attr::{AttrStyle, Attribute, MetaList, MetaNameValue};
+ use proc_macro2::TokenStream;
+ use quote::ToTokens;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Attribute {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.pound_token.to_tokens(tokens);
+ if let AttrStyle::Inner(b) = &self.style {
+ b.to_tokens(tokens);
+ }
+ self.bracket_token.surround(tokens, |tokens| {
+ self.meta.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for MetaList {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.path.to_tokens(tokens);
+ self.delimiter.surround(tokens, self.tokens.clone());
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for MetaNameValue {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.path.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.value.to_tokens(tokens);
+ }
+ }
+}
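A small, hedged usage sketch of the three Meta shapes documented in this file, using only the public API vendored above (requires the "parsing" and "printing" features, both enabled by this build; not part of the vendored sources):

```rust
use syn::{parse_quote, Attribute, Meta};

fn main() {
    // parse_quote! has a dedicated Attribute impl, as noted in the docs above.
    let attrs: Vec<Attribute> = vec![
        parse_quote!(#[test]),                    // Meta::Path
        parse_quote!(#[derive(Copy, Clone)]),     // Meta::List
        parse_quote!(#[path = "sys/windows.rs"]), // Meta::NameValue
    ];
    for attr in &attrs {
        match &attr.meta {
            Meta::Path(p) => println!("path-only attribute: {:?}", p.get_ident()),
            Meta::List(l) => println!("list attribute, tokens: {}", l.tokens),
            Meta::NameValue(_) => println!("name = value attribute"),
        }
    }
}
```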
diff --git a/rust/hw/char/pl011/vendor/syn/src/bigint.rs b/rust/hw/char/pl011/vendor/syn/src/bigint.rs
new file mode 100644
index 0000000000..66aaa93725
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/bigint.rs
@@ -0,0 +1,66 @@
+use std::ops::{AddAssign, MulAssign};
+
+// For implementing base10_digits() accessor on LitInt.
+pub(crate) struct BigInt {
+ digits: Vec<u8>,
+}
+
+impl BigInt {
+ pub(crate) fn new() -> Self {
+ BigInt { digits: Vec::new() }
+ }
+
+ pub(crate) fn to_string(&self) -> String {
+ let mut repr = String::with_capacity(self.digits.len());
+
+ let mut has_nonzero = false;
+ for digit in self.digits.iter().rev() {
+ has_nonzero |= *digit != 0;
+ if has_nonzero {
+ repr.push((*digit + b'0') as char);
+ }
+ }
+
+ if repr.is_empty() {
+ repr.push('0');
+ }
+
+ repr
+ }
+
+ fn reserve_two_digits(&mut self) {
+ let len = self.digits.len();
+ let desired =
+            len + !self.digits.ends_with(&[0, 0]) as usize + !self.digits.ends_with(&[0]) as usize;
+ self.digits.resize(desired, 0);
+ }
+}
+
+impl AddAssign<u8> for BigInt {
+ // Assumes increment <16.
+ fn add_assign(&mut self, mut increment: u8) {
+ self.reserve_two_digits();
+
+ let mut i = 0;
+ while increment > 0 {
+ let sum = self.digits[i] + increment;
+ self.digits[i] = sum % 10;
+ increment = sum / 10;
+ i += 1;
+ }
+ }
+}
+
+impl MulAssign<u8> for BigInt {
+ // Assumes base <=16.
+ fn mul_assign(&mut self, base: u8) {
+ self.reserve_two_digits();
+
+ let mut carry = 0;
+ for digit in &mut self.digits {
+ let prod = *digit * base + carry;
+ *digit = prod % 10;
+ carry = prod / 10;
+ }
+ }
+}
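This BigInt helper is what backs LitInt's decimal accessor mentioned in the comment above; a brief sketch of the behaviour it enables (illustrative only, not part of the vendored file):

```rust
use syn::{parse_quote, LitInt};

fn main() {
    let lit: LitInt = parse_quote!(0xFFFF);
    // base10_digits() returns the value in decimal regardless of the radix it
    // was written in; the BigInt type above performs that conversion.
    assert_eq!(lit.base10_digits(), "65535");
    let value: u32 = lit.base10_parse().expect("fits in u32");
    assert_eq!(value, 65535);
}
```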
diff --git a/rust/hw/char/pl011/vendor/syn/src/buffer.rs b/rust/hw/char/pl011/vendor/syn/src/buffer.rs
new file mode 100644
index 0000000000..1686e28209
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/buffer.rs
@@ -0,0 +1,434 @@
+//! A stably addressed token buffer supporting efficient traversal based on a
+//! cheaply copyable cursor.
+
+// This module is heavily commented as it contains most of the unsafe code in
+// Syn, and caution should be used when editing it. The public-facing interface
+// is 100% safe but the implementation is fragile internally.
+
+use crate::Lifetime;
+use proc_macro2::extra::DelimSpan;
+use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::cmp::Ordering;
+use std::marker::PhantomData;
+
+/// Internal type which is used instead of `TokenTree` to represent a token tree
+/// within a `TokenBuffer`.
+enum Entry {
+ // Mimicking types from proc-macro.
+ // Group entries contain the offset to the matching End entry.
+ Group(Group, usize),
+ Ident(Ident),
+ Punct(Punct),
+ Literal(Literal),
+ // End entries contain the offset (negative) to the start of the buffer.
+ End(isize),
+}
+
+/// A buffer that can be efficiently traversed multiple times, unlike
+/// `TokenStream` which requires a deep copy in order to traverse more than
+/// once.
+pub struct TokenBuffer {
+ // NOTE: Do not implement clone on this - while the current design could be
+ // cloned, other designs which could be desirable may not be cloneable.
+ entries: Box<[Entry]>,
+}
+
+impl TokenBuffer {
+ fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) {
+ for tt in stream {
+ match tt {
+ TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)),
+ TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)),
+ TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
+ TokenTree::Group(group) => {
+ let group_start_index = entries.len();
+ entries.push(Entry::End(0)); // we replace this below
+ Self::recursive_new(entries, group.stream());
+ let group_end_index = entries.len();
+ entries.push(Entry::End(-(group_end_index as isize)));
+ let group_end_offset = group_end_index - group_start_index;
+ entries[group_start_index] = Entry::Group(group, group_end_offset);
+ }
+ }
+ }
+ }
+
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `proc_macro::TokenStream`.
+ #[cfg(feature = "proc-macro")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))]
+ pub fn new(stream: proc_macro::TokenStream) -> Self {
+ Self::new2(stream.into())
+ }
+
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `proc_macro2::TokenStream`.
+ pub fn new2(stream: TokenStream) -> Self {
+ let mut entries = Vec::new();
+ Self::recursive_new(&mut entries, stream);
+ entries.push(Entry::End(-(entries.len() as isize)));
+ Self {
+ entries: entries.into_boxed_slice(),
+ }
+ }
+
+ /// Creates a cursor referencing the first token in the buffer and able to
+ /// traverse until the end of the buffer.
+ pub fn begin(&self) -> Cursor {
+ let ptr = self.entries.as_ptr();
+ unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) }
+ }
+}
+
+/// A cheaply copyable cursor into a `TokenBuffer`.
+///
+/// This cursor holds a shared reference into the immutable data which is used
+/// internally to represent a `TokenStream`, and can be efficiently manipulated
+/// and copied around.
+///
+/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
+/// object and get a cursor to its first token with `begin()`.
+pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+ // This is the only `Entry::End` object which this cursor is allowed to
+ // point at. All other `End` objects are skipped over in `Cursor::create`.
+ scope: *const Entry,
+ // Cursor is covariant in 'a. This field ensures that our pointers are still
+ // valid.
+ marker: PhantomData<&'a Entry>,
+}
+
+impl<'a> Cursor<'a> {
+ /// Creates a cursor referencing a static empty TokenStream.
+ pub fn empty() -> Self {
+ // It's safe in this situation for us to put an `Entry` object in global
+ // storage, despite it not actually being safe to send across threads
+ // (`Ident` is a reference into a thread-local table). This is because
+ // this entry never includes a `Ident` object.
+ //
+ // This wrapper struct allows us to break the rules and put a `Sync`
+ // object in global storage.
+ struct UnsafeSyncEntry(Entry);
+ unsafe impl Sync for UnsafeSyncEntry {}
+ static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0));
+
+ Cursor {
+ ptr: &EMPTY_ENTRY.0,
+ scope: &EMPTY_ENTRY.0,
+ marker: PhantomData,
+ }
+ }
+
+ /// This create method intelligently exits non-explicitly-entered
+ /// `None`-delimited scopes when the cursor reaches the end of them,
+ /// allowing for them to be treated transparently.
+ unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
+ // NOTE: If we're looking at a `End`, we want to advance the cursor
+ // past it, unless `ptr == scope`, which means that we're at the edge of
+ // our cursor's scope. We should only have `ptr != scope` at the exit
+ // from None-delimited groups entered with `ignore_none`.
+ while let Entry::End(_) = unsafe { &*ptr } {
+ if ptr == scope {
+ break;
+ }
+ ptr = unsafe { ptr.add(1) };
+ }
+
+ Cursor {
+ ptr,
+ scope,
+ marker: PhantomData,
+ }
+ }
+
+ /// Get the current entry.
+ fn entry(self) -> &'a Entry {
+ unsafe { &*self.ptr }
+ }
+
+ /// Bump the cursor to point at the next token after the current one. This
+ /// is undefined behavior if the cursor is currently looking at an
+ /// `Entry::End`.
+ ///
+ /// If the cursor is looking at an `Entry::Group`, the bumped cursor will
+ /// point at the first token in the group (with the same scope end).
+ unsafe fn bump_ignore_group(self) -> Cursor<'a> {
+ unsafe { Cursor::create(self.ptr.offset(1), self.scope) }
+ }
+
+ /// While the cursor is looking at a `None`-delimited group, move it to look
+ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+ while let Entry::Group(group, _) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ unsafe { *self = self.bump_ignore_group() };
+ } else {
+ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+ }
+
+ /// If the cursor is pointing at a `Group` with the given delimiter, returns
+ /// a cursor into that group and one pointing to the next `TokenTree`.
+ pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
+ // If we're not trying to enter a none-delimited group, we want to
+ // ignore them. We have to make sure to _not_ ignore them when we want
+ // to enter them, of course. For obvious reasons.
+ if delim != Delimiter::None {
+ self.ignore_none();
+ }
+
+ if let Entry::Group(group, end_offset) = self.entry() {
+ if group.delimiter() == delim {
+ let span = group.delim_span();
+ let end_of_group = unsafe { self.ptr.add(*end_offset) };
+ let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
+ let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+ return Some((inside_of_group, span, after_group));
+ }
+ }
+
+ None
+ }
+
+ pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
+ if let Entry::Group(group, end_offset) = self.entry() {
+ let delimiter = group.delimiter();
+ let span = group.delim_span();
+ let end_of_group = unsafe { self.ptr.add(*end_offset) };
+ let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
+ let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+ return Some((inside_of_group, delimiter, span, after_group));
+ }
+
+ None
+ }
+
+ pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
+ if let Entry::Group(group, end_offset) = self.entry() {
+ let end_of_group = unsafe { self.ptr.add(*end_offset) };
+ let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+ return Some((group.clone(), after_group));
+ }
+
+ None
+ }
+
+ /// If the cursor is pointing at a `Ident`, returns it along with a cursor
+ /// pointing at the next `TokenTree`.
+ pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
+ self.ignore_none();
+ match self.entry() {
+ Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })),
+ _ => None,
+ }
+ }
+
+ /// If the cursor is pointing at a `Punct`, returns it along with a cursor
+ /// pointing at the next `TokenTree`.
+ pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
+ self.ignore_none();
+ match self.entry() {
+ Entry::Punct(punct) if punct.as_char() != '\'' => {
+ Some((punct.clone(), unsafe { self.bump_ignore_group() }))
+ }
+ _ => None,
+ }
+ }
+
+ /// If the cursor is pointing at a `Literal`, return it along with a cursor
+ /// pointing at the next `TokenTree`.
+ pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
+ self.ignore_none();
+ match self.entry() {
+ Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })),
+ _ => None,
+ }
+ }
+
+ /// If the cursor is pointing at a `Lifetime`, returns it along with a
+ /// cursor pointing at the next `TokenTree`.
+ pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> {
+ self.ignore_none();
+ match self.entry() {
+ Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
+ let next = unsafe { self.bump_ignore_group() };
+ let (ident, rest) = next.ident()?;
+ let lifetime = Lifetime {
+ apostrophe: punct.span(),
+ ident,
+ };
+ Some((lifetime, rest))
+ }
+ _ => None,
+ }
+ }
+
+ /// Copies all remaining tokens visible from this cursor into a
+ /// `TokenStream`.
+ pub fn token_stream(self) -> TokenStream {
+ let mut tts = Vec::new();
+ let mut cursor = self;
+ while let Some((tt, rest)) = cursor.token_tree() {
+ tts.push(tt);
+ cursor = rest;
+ }
+ tts.into_iter().collect()
+ }
+
+ /// If the cursor is pointing at a `TokenTree`, returns it along with a
+ /// cursor pointing at the next `TokenTree`.
+ ///
+ /// Returns `None` if the cursor has reached the end of its stream.
+ ///
+ /// This method does not treat `None`-delimited groups as transparent, and
+ /// will return a `Group(None, ..)` if the cursor is looking at one.
+ pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
+ let (tree, len) = match self.entry() {
+ Entry::Group(group, end_offset) => (group.clone().into(), *end_offset),
+ Entry::Literal(literal) => (literal.clone().into(), 1),
+ Entry::Ident(ident) => (ident.clone().into(), 1),
+ Entry::Punct(punct) => (punct.clone().into(), 1),
+ Entry::End(_) => return None,
+ };
+
+ let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
+ Some((tree, rest))
+ }
+
+ /// Returns the `Span` of the current token, or `Span::call_site()` if this
+ /// cursor points to eof.
+ pub fn span(self) -> Span {
+ match self.entry() {
+ Entry::Group(group, _) => group.span(),
+ Entry::Literal(literal) => literal.span(),
+ Entry::Ident(ident) => ident.span(),
+ Entry::Punct(punct) => punct.span(),
+ Entry::End(_) => Span::call_site(),
+ }
+ }
+
+ /// Returns the `Span` of the token immediately prior to the position of
+ /// this cursor, or of the current token if there is no previous one.
+ #[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn prev_span(mut self) -> Span {
+ if start_of_buffer(self) < self.ptr {
+ self.ptr = unsafe { self.ptr.offset(-1) };
+ if let Entry::End(_) = self.entry() {
+ // Locate the matching Group begin token.
+ let mut depth = 1;
+ loop {
+ self.ptr = unsafe { self.ptr.offset(-1) };
+ match self.entry() {
+ Entry::Group(group, _) => {
+ depth -= 1;
+ if depth == 0 {
+ return group.span();
+ }
+ }
+ Entry::End(_) => depth += 1,
+ Entry::Literal(_) | Entry::Ident(_) | Entry::Punct(_) => {}
+ }
+ }
+ }
+ }
+ self.span()
+ }
+
+ /// Skip over the next token that is not a None-delimited group, without
+ /// cloning it. Returns `None` if this cursor points to eof.
+ ///
+ /// This method treats `'lifetimes` as a single token.
+ pub(crate) fn skip(mut self) -> Option<Cursor<'a>> {
+ self.ignore_none();
+
+ let len = match self.entry() {
+ Entry::End(_) => return None,
+
+ // Treat lifetimes as a single tt for the purposes of 'skip'.
+ Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
+ match unsafe { &*self.ptr.add(1) } {
+ Entry::Ident(_) => 2,
+ _ => 1,
+ }
+ }
+
+ Entry::Group(_, end_offset) => *end_offset,
+ _ => 1,
+ };
+
+ Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) })
+ }
+}
+
+impl<'a> Copy for Cursor<'a> {}
+
+impl<'a> Clone for Cursor<'a> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<'a> Eq for Cursor<'a> {}
+
+impl<'a> PartialEq for Cursor<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ self.ptr == other.ptr
+ }
+}
+
+impl<'a> PartialOrd for Cursor<'a> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ if same_buffer(*self, *other) {
+ Some(cmp_assuming_same_buffer(*self, *other))
+ } else {
+ None
+ }
+ }
+}
+
+pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
+ a.scope == b.scope
+}
+
+pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
+ start_of_buffer(a) == start_of_buffer(b)
+}
+
+fn start_of_buffer(cursor: Cursor) -> *const Entry {
+ unsafe {
+ match &*cursor.scope {
+ Entry::End(offset) => cursor.scope.offset(*offset),
+ _ => unreachable!(),
+ }
+ }
+}
+
+pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
+ a.ptr.cmp(&b.ptr)
+}
+
+pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
+ match cursor.entry() {
+ Entry::Group(group, _) => group.span_open(),
+ _ => cursor.span(),
+ }
+}
+
+pub(crate) fn close_span_of_group(cursor: Cursor) -> Span {
+ match cursor.entry() {
+ Entry::Group(group, _) => group.span_close(),
+ _ => cursor.span(),
+ }
+}
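
The TokenBuffer/Cursor pair above is syn's public `syn::buffer` API. A minimal usage sketch (not part of the vendored sources; assumes syn's default features):

    use proc_macro2::TokenStream;
    use syn::buffer::TokenBuffer;

    fn main() {
        let stream: TokenStream = "a + (b * c)".parse().unwrap();
        let buffer = TokenBuffer::new2(stream);

        // Cursor is Copy, so the buffer can be walked any number of times
        // without re-cloning the TokenStream.
        let mut cursor = buffer.begin();
        while let Some((tt, next)) = cursor.token_tree() {
            println!("{}", tt);
            cursor = next;
        }
        assert!(cursor.eof());
    }
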
diff --git a/rust/hw/char/pl011/vendor/syn/src/classify.rs b/rust/hw/char/pl011/vendor/syn/src/classify.rs
new file mode 100644
index 0000000000..1b0ff30040
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/classify.rs
@@ -0,0 +1,377 @@
+use crate::expr::Expr;
+use crate::generics::TypeParamBound;
+use crate::path::{Path, PathArguments};
+use crate::punctuated::Punctuated;
+use crate::ty::{ReturnType, Type};
+#[cfg(feature = "full")]
+use proc_macro2::{Delimiter, TokenStream, TokenTree};
+use std::ops::ControlFlow;
+
+#[cfg(feature = "full")]
+pub(crate) fn requires_semi_to_be_stmt(expr: &Expr) -> bool {
+ match expr {
+ Expr::Macro(expr) => !expr.mac.delimiter.is_brace(),
+ _ => requires_comma_to_be_match_arm(expr),
+ }
+}
+
+#[cfg(feature = "full")]
+pub(crate) fn requires_comma_to_be_match_arm(expr: &Expr) -> bool {
+ match expr {
+ Expr::If(_)
+ | Expr::Match(_)
+ | Expr::Block(_) | Expr::Unsafe(_) // both under ExprKind::Block in rustc
+ | Expr::While(_)
+ | Expr::Loop(_)
+ | Expr::ForLoop(_)
+ | Expr::TryBlock(_)
+ | Expr::Const(_) => false,
+
+ Expr::Array(_)
+ | Expr::Assign(_)
+ | Expr::Async(_)
+ | Expr::Await(_)
+ | Expr::Binary(_)
+ | Expr::Break(_)
+ | Expr::Call(_)
+ | Expr::Cast(_)
+ | Expr::Closure(_)
+ | Expr::Continue(_)
+ | Expr::Field(_)
+ | Expr::Group(_)
+ | Expr::Index(_)
+ | Expr::Infer(_)
+ | Expr::Let(_)
+ | Expr::Lit(_)
+ | Expr::Macro(_)
+ | Expr::MethodCall(_)
+ | Expr::Paren(_)
+ | Expr::Path(_)
+ | Expr::Range(_)
+ | Expr::Reference(_)
+ | Expr::Repeat(_)
+ | Expr::Return(_)
+ | Expr::Struct(_)
+ | Expr::Try(_)
+ | Expr::Tuple(_)
+ | Expr::Unary(_)
+ | Expr::Yield(_)
+ | Expr::Verbatim(_) => true
+ }
+}
+
+#[cfg(all(feature = "printing", feature = "full"))]
+pub(crate) fn confusable_with_adjacent_block(mut expr: &Expr) -> bool {
+ let mut stack = Vec::new();
+
+ while let Some(next) = match expr {
+ Expr::Assign(e) => {
+ stack.push(&e.right);
+ Some(&e.left)
+ }
+ Expr::Await(e) => Some(&e.base),
+ Expr::Binary(e) => {
+ stack.push(&e.right);
+ Some(&e.left)
+ }
+ Expr::Break(e) => {
+ if let Some(Expr::Block(_)) = e.expr.as_deref() {
+ return true;
+ }
+ stack.pop()
+ }
+ Expr::Call(e) => Some(&e.func),
+ Expr::Cast(e) => Some(&e.expr),
+ Expr::Closure(e) => Some(&e.body),
+ Expr::Field(e) => Some(&e.base),
+ Expr::Index(e) => Some(&e.expr),
+ Expr::MethodCall(e) => Some(&e.receiver),
+ Expr::Range(e) => {
+ if let Some(Expr::Block(_)) = e.end.as_deref() {
+ return true;
+ }
+ match (&e.start, &e.end) {
+ (Some(start), end) => {
+ stack.extend(end);
+ Some(start)
+ }
+ (None, Some(end)) => Some(end),
+ (None, None) => stack.pop(),
+ }
+ }
+ Expr::Reference(e) => Some(&e.expr),
+ Expr::Return(e) => {
+ if e.expr.is_none() && stack.is_empty() {
+ return true;
+ }
+ stack.pop()
+ }
+ Expr::Struct(_) => return true,
+ Expr::Try(e) => Some(&e.expr),
+ Expr::Unary(e) => Some(&e.expr),
+ Expr::Yield(e) => {
+ if e.expr.is_none() && stack.is_empty() {
+ return true;
+ }
+ stack.pop()
+ }
+
+ Expr::Array(_)
+ | Expr::Async(_)
+ | Expr::Block(_)
+ | Expr::Const(_)
+ | Expr::Continue(_)
+ | Expr::ForLoop(_)
+ | Expr::Group(_)
+ | Expr::If(_)
+ | Expr::Infer(_)
+ | Expr::Let(_)
+ | Expr::Lit(_)
+ | Expr::Loop(_)
+ | Expr::Macro(_)
+ | Expr::Match(_)
+ | Expr::Paren(_)
+ | Expr::Path(_)
+ | Expr::Repeat(_)
+ | Expr::TryBlock(_)
+ | Expr::Tuple(_)
+ | Expr::Unsafe(_)
+ | Expr::Verbatim(_)
+ | Expr::While(_) => stack.pop(),
+ } {
+ expr = next;
+ }
+
+ false
+}
+
+#[cfg(feature = "printing")]
+pub(crate) fn confusable_with_adjacent_lt(mut expr: &Expr) -> bool {
+ loop {
+ match expr {
+ Expr::Binary(e) => expr = &e.right,
+ Expr::Cast(e) => return trailing_unparameterized_path(&e.ty),
+ Expr::Reference(e) => expr = &e.expr,
+ Expr::Unary(e) => expr = &e.expr,
+
+ Expr::Array(_)
+ | Expr::Assign(_)
+ | Expr::Async(_)
+ | Expr::Await(_)
+ | Expr::Block(_)
+ | Expr::Break(_)
+ | Expr::Call(_)
+ | Expr::Closure(_)
+ | Expr::Const(_)
+ | Expr::Continue(_)
+ | Expr::Field(_)
+ | Expr::ForLoop(_)
+ | Expr::Group(_)
+ | Expr::If(_)
+ | Expr::Index(_)
+ | Expr::Infer(_)
+ | Expr::Let(_)
+ | Expr::Lit(_)
+ | Expr::Loop(_)
+ | Expr::Macro(_)
+ | Expr::Match(_)
+ | Expr::MethodCall(_)
+ | Expr::Paren(_)
+ | Expr::Path(_)
+ | Expr::Range(_)
+ | Expr::Repeat(_)
+ | Expr::Return(_)
+ | Expr::Struct(_)
+ | Expr::Try(_)
+ | Expr::TryBlock(_)
+ | Expr::Tuple(_)
+ | Expr::Unsafe(_)
+ | Expr::Verbatim(_)
+ | Expr::While(_)
+ | Expr::Yield(_) => return false,
+ }
+ }
+
+ fn trailing_unparameterized_path(mut ty: &Type) -> bool {
+ loop {
+ match ty {
+ Type::BareFn(t) => match &t.output {
+ ReturnType::Default => return false,
+ ReturnType::Type(_, ret) => ty = ret,
+ },
+ Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) {
+ ControlFlow::Break(trailing_path) => return trailing_path,
+ ControlFlow::Continue(t) => ty = t,
+ },
+ Type::Path(t) => match last_type_in_path(&t.path) {
+ ControlFlow::Break(trailing_path) => return trailing_path,
+ ControlFlow::Continue(t) => ty = t,
+ },
+ Type::Ptr(t) => ty = &t.elem,
+ Type::Reference(t) => ty = &t.elem,
+ Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) {
+ ControlFlow::Break(trailing_path) => return trailing_path,
+ ControlFlow::Continue(t) => ty = t,
+ },
+
+ Type::Array(_)
+ | Type::Group(_)
+ | Type::Infer(_)
+ | Type::Macro(_)
+ | Type::Never(_)
+ | Type::Paren(_)
+ | Type::Slice(_)
+ | Type::Tuple(_)
+ | Type::Verbatim(_) => return false,
+ }
+ }
+ }
+
+ fn last_type_in_path(path: &Path) -> ControlFlow<bool, &Type> {
+ match &path.segments.last().unwrap().arguments {
+ PathArguments::None => ControlFlow::Break(true),
+ PathArguments::AngleBracketed(_) => ControlFlow::Break(false),
+ PathArguments::Parenthesized(arg) => match &arg.output {
+ ReturnType::Default => ControlFlow::Break(false),
+ ReturnType::Type(_, ret) => ControlFlow::Continue(ret),
+ },
+ }
+ }
+
+ fn last_type_in_bounds(
+ bounds: &Punctuated<TypeParamBound, Token![+]>,
+ ) -> ControlFlow<bool, &Type> {
+ match bounds.last().unwrap() {
+ TypeParamBound::Trait(t) => last_type_in_path(&t.path),
+ TypeParamBound::Lifetime(_) | TypeParamBound::Verbatim(_) => ControlFlow::Break(false),
+ }
+ }
+}
+
+/// Whether the expression's last token is `}`.
+#[cfg(feature = "full")]
+pub(crate) fn expr_trailing_brace(mut expr: &Expr) -> bool {
+ loop {
+ match expr {
+ Expr::Async(_)
+ | Expr::Block(_)
+ | Expr::Const(_)
+ | Expr::ForLoop(_)
+ | Expr::If(_)
+ | Expr::Loop(_)
+ | Expr::Match(_)
+ | Expr::Struct(_)
+ | Expr::TryBlock(_)
+ | Expr::Unsafe(_)
+ | Expr::While(_) => return true,
+
+ Expr::Assign(e) => expr = &e.right,
+ Expr::Binary(e) => expr = &e.right,
+ Expr::Break(e) => match &e.expr {
+ Some(e) => expr = e,
+ None => return false,
+ },
+ Expr::Cast(e) => return type_trailing_brace(&e.ty),
+ Expr::Closure(e) => expr = &e.body,
+ Expr::Let(e) => expr = &e.expr,
+ Expr::Macro(e) => return e.mac.delimiter.is_brace(),
+ Expr::Range(e) => match &e.end {
+ Some(end) => expr = end,
+ None => return false,
+ },
+ Expr::Reference(e) => expr = &e.expr,
+ Expr::Return(e) => match &e.expr {
+ Some(e) => expr = e,
+ None => return false,
+ },
+ Expr::Unary(e) => expr = &e.expr,
+ Expr::Verbatim(e) => return tokens_trailing_brace(e),
+ Expr::Yield(e) => match &e.expr {
+ Some(e) => expr = e,
+ None => return false,
+ },
+
+ Expr::Array(_)
+ | Expr::Await(_)
+ | Expr::Call(_)
+ | Expr::Continue(_)
+ | Expr::Field(_)
+ | Expr::Group(_)
+ | Expr::Index(_)
+ | Expr::Infer(_)
+ | Expr::Lit(_)
+ | Expr::MethodCall(_)
+ | Expr::Paren(_)
+ | Expr::Path(_)
+ | Expr::Repeat(_)
+ | Expr::Try(_)
+ | Expr::Tuple(_) => return false,
+ }
+ }
+
+ fn type_trailing_brace(mut ty: &Type) -> bool {
+ loop {
+ match ty {
+ Type::BareFn(t) => match &t.output {
+ ReturnType::Default => return false,
+ ReturnType::Type(_, ret) => ty = ret,
+ },
+ Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) {
+ ControlFlow::Break(trailing_brace) => return trailing_brace,
+ ControlFlow::Continue(t) => ty = t,
+ },
+ Type::Macro(t) => return t.mac.delimiter.is_brace(),
+ Type::Path(t) => match last_type_in_path(&t.path) {
+ Some(t) => ty = t,
+ None => return false,
+ },
+ Type::Ptr(t) => ty = &t.elem,
+ Type::Reference(t) => ty = &t.elem,
+ Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) {
+ ControlFlow::Break(trailing_brace) => return trailing_brace,
+ ControlFlow::Continue(t) => ty = t,
+ },
+ Type::Verbatim(t) => return tokens_trailing_brace(t),
+
+ Type::Array(_)
+ | Type::Group(_)
+ | Type::Infer(_)
+ | Type::Never(_)
+ | Type::Paren(_)
+ | Type::Slice(_)
+ | Type::Tuple(_) => return false,
+ }
+ }
+ }
+
+ fn last_type_in_path(path: &Path) -> Option<&Type> {
+ match &path.segments.last().unwrap().arguments {
+ PathArguments::None | PathArguments::AngleBracketed(_) => None,
+ PathArguments::Parenthesized(arg) => match &arg.output {
+ ReturnType::Default => None,
+ ReturnType::Type(_, ret) => Some(ret),
+ },
+ }
+ }
+
+ fn last_type_in_bounds(
+ bounds: &Punctuated<TypeParamBound, Token![+]>,
+ ) -> ControlFlow<bool, &Type> {
+ match bounds.last().unwrap() {
+ TypeParamBound::Trait(t) => match last_type_in_path(&t.path) {
+ Some(t) => ControlFlow::Continue(t),
+ None => ControlFlow::Break(false),
+ },
+ TypeParamBound::Lifetime(_) => ControlFlow::Break(false),
+ TypeParamBound::Verbatim(t) => ControlFlow::Break(tokens_trailing_brace(t)),
+ }
+ }
+
+ fn tokens_trailing_brace(tokens: &TokenStream) -> bool {
+ if let Some(TokenTree::Group(last)) = tokens.clone().into_iter().last() {
+ last.delimiter() == Delimiter::Brace
+ } else {
+ false
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/custom_keyword.rs b/rust/hw/char/pl011/vendor/syn/src/custom_keyword.rs
new file mode 100644
index 0000000000..cc4f632c98
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/custom_keyword.rs
@@ -0,0 +1,260 @@
+/// Define a type that supports parsing and printing a given identifier as if it
+/// were a keyword.
+///
+/// # Usage
+///
+/// As a convention, it is recommended that this macro be invoked within a
+/// module called `kw` or `keyword` and that the resulting parser be invoked
+/// with a `kw::` or `keyword::` prefix.
+///
+/// ```
+/// mod kw {
+/// syn::custom_keyword!(whatever);
+/// }
+/// ```
+///
+/// The generated syntax tree node supports the following operations just like
+/// any built-in keyword token.
+///
+/// - [Peeking] — `input.peek(kw::whatever)`
+///
+/// - [Parsing] — `input.parse::<kw::whatever>()?`
+///
+/// - [Printing] — `quote!( ... #whatever_token ... )`
+///
+/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)`
+///
+/// - Field access to its span — `let sp = whatever_token.span`
+///
+/// [Peeking]: crate::parse::ParseBuffer::peek
+/// [Parsing]: crate::parse::ParseBuffer::parse
+/// [Printing]: quote::ToTokens
+/// [`Span`]: proc_macro2::Span
+///
+/// # Example
+///
+/// This example parses input that looks like `bool = true` or `str = "value"`.
+/// The key must be either the identifier `bool` or the identifier `str`. If
+/// `bool`, the value may be either `true` or `false`. If `str`, the value may
+/// be any string literal.
+///
+/// The symbols `bool` and `str` are not reserved keywords in Rust so these are
+/// not considered keywords in the `syn::token` module. Like any other
+/// identifier that is not a keyword, these can be declared as custom keywords
+/// by crates that need to use them as such.
+///
+/// ```
+/// use syn::{LitBool, LitStr, Result, Token};
+/// use syn::parse::{Parse, ParseStream};
+///
+/// mod kw {
+/// syn::custom_keyword!(bool);
+/// syn::custom_keyword!(str);
+/// }
+///
+/// enum Argument {
+/// Bool {
+/// bool_token: kw::bool,
+/// eq_token: Token![=],
+/// value: LitBool,
+/// },
+/// Str {
+/// str_token: kw::str,
+/// eq_token: Token![=],
+/// value: LitStr,
+/// },
+/// }
+///
+/// impl Parse for Argument {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let lookahead = input.lookahead1();
+/// if lookahead.peek(kw::bool) {
+/// Ok(Argument::Bool {
+/// bool_token: input.parse::<kw::bool>()?,
+/// eq_token: input.parse()?,
+/// value: input.parse()?,
+/// })
+/// } else if lookahead.peek(kw::str) {
+/// Ok(Argument::Str {
+/// str_token: input.parse::<kw::str>()?,
+/// eq_token: input.parse()?,
+/// value: input.parse()?,
+/// })
+/// } else {
+/// Err(lookahead.error())
+/// }
+/// }
+/// }
+/// ```
+#[macro_export]
+macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+ pub struct $ident {
+ #[allow(dead_code)]
+ pub span: $crate::__private::Span,
+ }
+
+ #[doc(hidden)]
+ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::__private::IntoSpans<$crate::__private::Span>>(
+ span: __S,
+ ) -> $ident {
+ $ident {
+ span: $crate::__private::IntoSpans::into_spans(span),
+ }
+ }
+
+ const _: () = {
+ impl $crate::__private::Default for $ident {
+ fn default() -> Self {
+ $ident {
+ span: $crate::__private::Span::call_site(),
+ }
+ }
+ }
+
+ $crate::impl_parse_for_custom_keyword!($ident);
+ $crate::impl_to_tokens_for_custom_keyword!($ident);
+ $crate::impl_clone_for_custom_keyword!($ident);
+ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ };
+}
+
+// Not public API.
+#[cfg(feature = "parsing")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_parse_for_custom_keyword {
+ ($ident:ident) => {
+ // For peek.
+ impl $crate::__private::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
+ if let $crate::__private::Some((ident, _rest)) = cursor.ident() {
+ ident == $crate::__private::stringify!($ident)
+ } else {
+ false
+ }
+ }
+
+ fn display() -> &'static $crate::__private::str {
+ $crate::__private::concat!("`", $crate::__private::stringify!($ident), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+ input.step(|cursor| {
+ if let $crate::__private::Some((ident, rest)) = cursor.ident() {
+ if ident == $crate::__private::stringify!($ident) {
+ return $crate::__private::Ok(($ident { span: ident.span() }, rest));
+ }
+ }
+ $crate::__private::Err(cursor.error($crate::__private::concat!(
+ "expected `",
+ $crate::__private::stringify!($ident),
+ "`",
+ )))
+ })
+ }
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "parsing"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_parse_for_custom_keyword {
+ ($ident:ident) => {};
+}
+
+// Not public API.
+#[cfg(feature = "printing")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_to_tokens_for_custom_keyword {
+ ($ident:ident) => {
+ impl $crate::__private::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
+ let ident = $crate::Ident::new($crate::__private::stringify!($ident), self.span);
+ $crate::__private::TokenStreamExt::append(tokens, ident);
+ }
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "printing"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_to_tokens_for_custom_keyword {
+ ($ident:ident) => {};
+}
+
+// Not public API.
+#[cfg(feature = "clone-impls")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_clone_for_custom_keyword {
+ ($ident:ident) => {
+ impl $crate::__private::Copy for $ident {}
+
+ #[allow(clippy::expl_impl_clone_on_copy)]
+ impl $crate::__private::Clone for $ident {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "clone-impls"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_clone_for_custom_keyword {
+ ($ident:ident) => {};
+}
+
+// Not public API.
+#[cfg(feature = "extra-traits")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_extra_traits_for_custom_keyword {
+ ($ident:ident) => {
+ impl $crate::__private::Debug for $ident {
+ fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
+ $crate::__private::Formatter::write_str(
+ f,
+ $crate::__private::concat!(
+ "Keyword [",
+ $crate::__private::stringify!($ident),
+ "]",
+ ),
+ )
+ }
+ }
+
+ impl $crate::__private::Eq for $ident {}
+
+ impl $crate::__private::PartialEq for $ident {
+ fn eq(&self, _other: &Self) -> $crate::__private::bool {
+ true
+ }
+ }
+
+ impl $crate::__private::Hash for $ident {
+ fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "extra-traits"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_extra_traits_for_custom_keyword {
+ ($ident:ident) => {};
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/custom_punctuation.rs b/rust/hw/char/pl011/vendor/syn/src/custom_punctuation.rs
new file mode 100644
index 0000000000..eef5f54584
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/custom_punctuation.rs
@@ -0,0 +1,304 @@
+/// Define a type that supports parsing and printing a multi-character symbol
+/// as if it were a punctuation token.
+///
+/// # Usage
+///
+/// ```
+/// syn::custom_punctuation!(LeftRightArrow, <=>);
+/// ```
+///
+/// The generated syntax tree node supports the following operations just like
+/// any built-in punctuation token.
+///
+/// - [Peeking] — `input.peek(LeftRightArrow)`
+///
+/// - [Parsing] — `input.parse::<LeftRightArrow>()?`
+///
+/// - [Printing] — `quote!( ... #lrarrow ... )`
+///
+/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)`
+///
+/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])`
+///
+/// - Field access to its spans — `let spans = lrarrow.spans`
+///
+/// [Peeking]: crate::parse::ParseBuffer::peek
+/// [Parsing]: crate::parse::ParseBuffer::parse
+/// [Printing]: quote::ToTokens
+/// [`Span`]: proc_macro2::Span
+///
+/// # Example
+///
+/// ```
+/// use proc_macro2::{TokenStream, TokenTree};
+/// use syn::parse::{Parse, ParseStream, Peek, Result};
+/// use syn::punctuated::Punctuated;
+/// use syn::Expr;
+///
+/// syn::custom_punctuation!(PathSeparator, </>);
+///
+/// // expr </> expr </> expr ...
+/// struct PathSegments {
+/// segments: Punctuated<Expr, PathSeparator>,
+/// }
+///
+/// impl Parse for PathSegments {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let mut segments = Punctuated::new();
+///
+/// let first = parse_until(input, PathSeparator)?;
+/// segments.push_value(syn::parse2(first)?);
+///
+/// while input.peek(PathSeparator) {
+/// segments.push_punct(input.parse()?);
+///
+/// let next = parse_until(input, PathSeparator)?;
+/// segments.push_value(syn::parse2(next)?);
+/// }
+///
+/// Ok(PathSegments { segments })
+/// }
+/// }
+///
+/// fn parse_until<E: Peek>(input: ParseStream, end: E) -> Result<TokenStream> {
+/// let mut tokens = TokenStream::new();
+/// while !input.is_empty() && !input.peek(end) {
+/// let next: TokenTree = input.parse()?;
+/// tokens.extend(Some(next));
+/// }
+/// Ok(tokens)
+/// }
+///
+/// fn main() {
+/// let input = r#" a::b </> c::d::e "#;
+/// let _: PathSegments = syn::parse_str(input).unwrap();
+/// }
+/// ```
+#[macro_export]
+macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+ #[allow(dead_code)]
+ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::__private::IntoSpans::into_spans(spans)
+ }
+ }
+
+ const _: () = {
+ impl $crate::__private::Default for $ident {
+ fn default() -> Self {
+ $ident($crate::__private::Span::call_site())
+ }
+ }
+
+ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
+ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
+ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ };
+}
+
+// Not public API.
+#[cfg(feature = "parsing")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::__private::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
+ $crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::__private::str {
+ $crate::__private::concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+ let spans: $crate::custom_punctuation_repr!($($tt)+) =
+ $crate::__private::parse_punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "parsing"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+}
+
+// Not public API.
+#[cfg(feature = "printing")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::__private::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
+ $crate::__private::print_punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "printing"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+}
+
+// Not public API.
+#[cfg(feature = "clone-impls")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_clone_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::__private::Copy for $ident {}
+
+ #[allow(clippy::expl_impl_clone_on_copy)]
+ impl $crate::__private::Clone for $ident {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "clone-impls"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_clone_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+}
+
+// Not public API.
+#[cfg(feature = "extra-traits")]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_extra_traits_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::__private::Debug for $ident {
+ fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
+ $crate::__private::Formatter::write_str(f, $crate::__private::stringify!($ident))
+ }
+ }
+
+ impl $crate::__private::Eq for $ident {}
+
+ impl $crate::__private::PartialEq for $ident {
+ fn eq(&self, _other: &Self) -> $crate::__private::bool {
+ true
+ }
+ }
+
+ impl $crate::__private::Hash for $ident {
+ fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
+ }
+ };
+}
+
+// Not public API.
+#[cfg(not(feature = "extra-traits"))]
+#[doc(hidden)]
+#[macro_export]
+macro_rules! impl_extra_traits_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+}
+
+// Not public API.
+#[doc(hidden)]
+#[macro_export]
+macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+ [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+}
+
+// Not public API.
+#[doc(hidden)]
+#[macro_export]
+#[rustfmt::skip]
+macro_rules! custom_punctuation_len {
+ ($mode:ident, &) => { 1 };
+ ($mode:ident, &&) => { 2 };
+ ($mode:ident, &=) => { 2 };
+ ($mode:ident, @) => { 1 };
+ ($mode:ident, ^) => { 1 };
+ ($mode:ident, ^=) => { 2 };
+ ($mode:ident, :) => { 1 };
+ ($mode:ident, ,) => { 1 };
+ ($mode:ident, $) => { 1 };
+ ($mode:ident, .) => { 1 };
+ ($mode:ident, ..) => { 2 };
+ ($mode:ident, ...) => { 3 };
+ ($mode:ident, ..=) => { 3 };
+ ($mode:ident, =) => { 1 };
+ ($mode:ident, ==) => { 2 };
+ ($mode:ident, =>) => { 2 };
+ ($mode:ident, >=) => { 2 };
+ ($mode:ident, >) => { 1 };
+ ($mode:ident, <-) => { 2 };
+ ($mode:ident, <=) => { 2 };
+ ($mode:ident, <) => { 1 };
+ ($mode:ident, -) => { 1 };
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, !=) => { 2 };
+ ($mode:ident, !) => { 1 };
+ ($mode:ident, |) => { 1 };
+ ($mode:ident, |=) => { 2 };
+ ($mode:ident, ||) => { 2 };
+ ($mode:ident, ::) => { 2 };
+ ($mode:ident, %) => { 1 };
+ ($mode:ident, %=) => { 2 };
+ ($mode:ident, +) => { 1 };
+ ($mode:ident, +=) => { 2 };
+ ($mode:ident, #) => { 1 };
+ ($mode:ident, ?) => { 1 };
+ ($mode:ident, ->) => { 2 };
+ ($mode:ident, ;) => { 1 };
+ ($mode:ident, <<) => { 2 };
+ ($mode:ident, <<=) => { 3 };
+ ($mode:ident, >>) => { 2 };
+ ($mode:ident, >>=) => { 3 };
+ ($mode:ident, /) => { 1 };
+ ($mode:ident, /=) => { 2 };
+ ($mode:ident, *) => { 1 };
+ ($mode:ident, *=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+}
+
+// Not public API.
+#[doc(hidden)]
+#[macro_export]
+macro_rules! custom_punctuation_unexpected {
+ () => {};
+}
+
+// Not public API.
+#[doc(hidden)]
+#[macro_export]
+macro_rules! stringify_punct {
+ ($($tt:tt)+) => {
+ $crate::__private::concat!($($crate::__private::stringify!($tt)),+)
+ };
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/data.rs b/rust/hw/char/pl011/vendor/syn/src/data.rs
new file mode 100644
index 0000000000..a44cdf341d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/data.rs
@@ -0,0 +1,423 @@
+use crate::attr::Attribute;
+use crate::expr::Expr;
+use crate::ident::Ident;
+use crate::punctuated::{self, Punctuated};
+use crate::restriction::{FieldMutability, Visibility};
+use crate::token;
+use crate::ty::Type;
+
+ast_struct! {
+ /// An enum variant.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Variant {
+ pub attrs: Vec<Attribute>,
+
+ /// Name of the variant.
+ pub ident: Ident,
+
+ /// Content stored in the variant.
+ pub fields: Fields,
+
+ /// Explicit discriminant: `Variant = 1`
+ pub discriminant: Option<(Token![=], Expr)>,
+ }
+}
+
+ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum Fields {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ Named(FieldsNamed),
+
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ Unnamed(FieldsUnnamed),
+
+ /// Unit struct or unit variant such as `None`.
+ Unit,
+ }
+}
+
+ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+ pub named: Punctuated<Field, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+ pub unnamed: Punctuated<Field, Token![,]>,
+ }
+}
+
+impl Fields {
+ /// Get an iterator over the borrowed [`Field`] items in this object. This
+ /// iterator can be used to iterate over a named or unnamed struct or
+ /// variant's fields uniformly.
+ pub fn iter(&self) -> punctuated::Iter<Field> {
+ match self {
+ Fields::Unit => crate::punctuated::empty_punctuated_iter(),
+ Fields::Named(f) => f.named.iter(),
+ Fields::Unnamed(f) => f.unnamed.iter(),
+ }
+ }
+
+ /// Get an iterator over the mutably borrowed [`Field`] items in this
+ /// object. This iterator can be used to iterate over a named or unnamed
+ /// struct or variant's fields uniformly.
+ pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
+ match self {
+ Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(),
+ Fields::Named(f) => f.named.iter_mut(),
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
+
+ /// Returns the number of fields.
+ pub fn len(&self) -> usize {
+ match self {
+ Fields::Unit => 0,
+ Fields::Named(f) => f.named.len(),
+ Fields::Unnamed(f) => f.unnamed.len(),
+ }
+ }
+
+ /// Returns `true` if there are zero fields.
+ pub fn is_empty(&self) -> bool {
+ match self {
+ Fields::Unit => true,
+ Fields::Named(f) => f.named.is_empty(),
+ Fields::Unnamed(f) => f.unnamed.is_empty(),
+ }
+ }
+}
+
+impl IntoIterator for Fields {
+ type Item = Field;
+ type IntoIter = punctuated::IntoIter<Field>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ match self {
+ Fields::Unit => Punctuated::<Field, ()>::new().into_iter(),
+ Fields::Named(f) => f.named.into_iter(),
+ Fields::Unnamed(f) => f.unnamed.into_iter(),
+ }
+ }
+}
+
+impl<'a> IntoIterator for &'a Fields {
+ type Item = &'a Field;
+ type IntoIter = punctuated::Iter<'a, Field>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<'a> IntoIterator for &'a mut Fields {
+ type Item = &'a mut Field;
+ type IntoIter = punctuated::IterMut<'a, Field>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter_mut()
+ }
+}
+
+ast_struct! {
+ /// A field of a struct or enum variant.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Field {
+ pub attrs: Vec<Attribute>,
+
+ pub vis: Visibility,
+
+ pub mutability: FieldMutability,
+
+ /// Name of the field, if any.
+ ///
+ /// Fields of tuple structs have no names.
+ pub ident: Option<Ident>,
+
+ pub colon_token: Option<Token![:]>,
+
+ pub ty: Type,
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::Attribute;
+ use crate::data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant};
+ use crate::error::Result;
+ use crate::expr::Expr;
+ use crate::ext::IdentExt as _;
+ use crate::ident::Ident;
+ #[cfg(not(feature = "full"))]
+ use crate::parse::discouraged::Speculative as _;
+ use crate::parse::{Parse, ParseStream};
+ use crate::restriction::{FieldMutability, Visibility};
+ use crate::token;
+ use crate::ty::Type;
+ use crate::verbatim;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let _visibility: Visibility = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let fields = if input.peek(token::Brace) {
+ Fields::Named(input.parse()?)
+ } else if input.peek(token::Paren) {
+ Fields::Unnamed(input.parse()?)
+ } else {
+ Fields::Unit
+ };
+ let discriminant = if input.peek(Token![=]) {
+ let eq_token: Token![=] = input.parse()?;
+ #[cfg(feature = "full")]
+ let discriminant: Expr = input.parse()?;
+ #[cfg(not(feature = "full"))]
+ let discriminant = {
+ let begin = input.fork();
+ let ahead = input.fork();
+ let mut discriminant: Result<Expr> = ahead.parse();
+ if discriminant.is_ok() {
+ input.advance_to(&ahead);
+ } else if scan_lenient_discriminant(input).is_ok() {
+ discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
+ }
+ discriminant?
+ };
+ Some((eq_token, discriminant))
+ } else {
+ None
+ };
+ Ok(Variant {
+ attrs,
+ ident,
+ fields,
+ discriminant,
+ })
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> {
+ use crate::expr::Member;
+ use crate::lifetime::Lifetime;
+ use crate::lit::Lit;
+ use crate::lit::LitFloat;
+ use crate::op::{BinOp, UnOp};
+ use crate::path::{self, AngleBracketedGenericArguments};
+ use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis};
+
+ let consume = |delimiter: Delimiter| {
+ Result::unwrap(input.step(|cursor| match cursor.group(delimiter) {
+ Some((_inside, _span, rest)) => Ok((true, rest)),
+ None => Ok((false, *cursor)),
+ }))
+ };
+
+ macro_rules! consume {
+ [$token:tt] => {
+ input.parse::<Option<Token![$token]>>().unwrap().is_some()
+ };
+ }
+
+ let mut initial = true;
+ let mut depth = 0usize;
+ loop {
+ if initial {
+ if consume![&] {
+ input.parse::<Option<Token![mut]>>()?;
+ } else if consume![if] || consume![match] || consume![while] {
+ depth += 1;
+ } else if input.parse::<Option<Lit>>()?.is_some()
+ || (consume(Brace) || consume(Bracket) || consume(Parenthesis))
+ || (consume![async] || consume![const] || consume![loop] || consume![unsafe])
+ && (consume(Brace) || break)
+ {
+ initial = false;
+ } else if consume![let] {
+ while !consume![=] {
+ if !((consume![|] || consume![ref] || consume![mut] || consume![@])
+ || (consume![!] || input.parse::<Option<Lit>>()?.is_some())
+ || (consume![..=] || consume![..] || consume![&] || consume![_])
+ || (consume(Brace) || consume(Bracket) || consume(Parenthesis)))
+ {
+ path::parsing::qpath(input, true)?;
+ }
+ }
+ } else if input.parse::<Option<Lifetime>>()?.is_some() && !consume![:] {
+ break;
+ } else if input.parse::<UnOp>().is_err() {
+ path::parsing::qpath(input, true)?;
+ initial = consume![!] || depth == 0 && input.peek(token::Brace);
+ }
+ } else if input.is_empty() || input.peek(Token![,]) {
+ return Ok(());
+ } else if depth > 0 && consume(Brace) {
+ if consume![else] && !consume(Brace) {
+ initial = consume![if] || break;
+ } else {
+ depth -= 1;
+ }
+ } else if input.parse::<BinOp>().is_ok() || (consume![..] | consume![=]) {
+ initial = true;
+ } else if consume![.] {
+ if input.parse::<Option<LitFloat>>()?.is_none()
+ && (input.parse::<Member>()?.is_named() && consume![::])
+ {
+ AngleBracketedGenericArguments::do_parse(None, input)?;
+ }
+ } else if consume![as] {
+ input.parse::<Type>()?;
+ } else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) {
+ break;
+ }
+ }
+
+ Err(input.error("unsupported expression"))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for FieldsNamed {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ Ok(FieldsNamed {
+ brace_token: braced!(content in input),
+ named: content.parse_terminated(Field::parse_named, Token![,])?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for FieldsUnnamed {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ Ok(FieldsUnnamed {
+ paren_token: parenthesized!(content in input),
+ unnamed: content.parse_terminated(Field::parse_unnamed, Token![,])?,
+ })
+ }
+ }
+
+ impl Field {
+ /// Parses a named (braced struct) field.
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_named(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+
+ let unnamed_field = cfg!(feature = "full") && input.peek(Token![_]);
+ let ident = if unnamed_field {
+ input.call(Ident::parse_any)
+ } else {
+ input.parse()
+ }?;
+
+ let colon_token: Token![:] = input.parse()?;
+
+ let ty: Type = if unnamed_field
+ && (input.peek(Token![struct])
+ || input.peek(Token![union]) && input.peek2(token::Brace))
+ {
+ let begin = input.fork();
+ input.call(Ident::parse_any)?;
+ input.parse::<FieldsNamed>()?;
+ Type::Verbatim(verbatim::between(&begin, input))
+ } else {
+ input.parse()?
+ };
+
+ Ok(Field {
+ attrs,
+ vis,
+ mutability: FieldMutability::None,
+ ident: Some(ident),
+ colon_token: Some(colon_token),
+ ty,
+ })
+ }
+
+ /// Parses an unnamed (tuple struct) field.
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_unnamed(input: ParseStream) -> Result<Self> {
+ Ok(Field {
+ attrs: input.call(Attribute::parse_outer)?,
+ vis: input.parse()?,
+ mutability: FieldMutability::None,
+ ident: None,
+ colon_token: None,
+ ty: input.parse()?,
+ })
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::data::{Field, FieldsNamed, FieldsUnnamed, Variant};
+ use crate::print::TokensOrDefault;
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Variant {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(&self.attrs);
+ self.ident.to_tokens(tokens);
+ self.fields.to_tokens(tokens);
+ if let Some((eq_token, disc)) = &self.discriminant {
+ eq_token.to_tokens(tokens);
+ disc.to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for FieldsNamed {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.brace_token.surround(tokens, |tokens| {
+ self.named.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for FieldsUnnamed {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.paren_token.surround(tokens, |tokens| {
+ self.unnamed.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Field {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(&self.attrs);
+ self.vis.to_tokens(tokens);
+ if let Some(ident) = &self.ident {
+ ident.to_tokens(tokens);
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ }
+ self.ty.to_tokens(tokens);
+ }
+ }
+}
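
Since Variant, FieldsNamed and FieldsUnnamed all receive Parse impls above, they can be exercised directly with syn::parse_str. An illustrative sketch only, not part of the vendored sources, assuming syn's default features:

    use syn::{Fields, Variant};

    fn main() {
        let v: Variant = syn::parse_str("Point { x: f64, y: f64 }").unwrap();
        assert_eq!(v.ident, "Point");
        match &v.fields {
            Fields::Named(fields) => assert_eq!(fields.named.len(), 2),
            _ => unreachable!(),
        }

        // A unit variant with an explicit discriminant.
        let unit: Variant = syn::parse_str("Ready = 1").unwrap();
        assert!(matches!(unit.fields, Fields::Unit));
        assert!(unit.discriminant.is_some());
    }
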
diff --git a/rust/hw/char/pl011/vendor/syn/src/derive.rs b/rust/hw/char/pl011/vendor/syn/src/derive.rs
new file mode 100644
index 0000000000..3443ecfc05
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/derive.rs
@@ -0,0 +1,259 @@
+use crate::attr::Attribute;
+use crate::data::{Fields, FieldsNamed, Variant};
+use crate::generics::Generics;
+use crate::ident::Ident;
+use crate::punctuated::Punctuated;
+use crate::restriction::Visibility;
+use crate::token;
+
+ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ pub struct DeriveInput {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub ident: Ident,
+ pub generics: Generics,
+ pub data: Data,
+ }
+}
+
+ast_enum! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ pub enum Data {
+ Struct(DataStruct),
+ Enum(DataEnum),
+ Union(DataUnion),
+ }
+}
+
+ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+ pub fields: Fields,
+ pub semi_token: Option<Token![;]>,
+ }
+}
+
+ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+ pub brace_token: token::Brace,
+ pub variants: Punctuated<Variant, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ pub struct DataUnion {
+ pub union_token: Token![union],
+ pub fields: FieldsNamed,
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::Attribute;
+ use crate::data::{Fields, FieldsNamed, Variant};
+ use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+ use crate::error::Result;
+ use crate::generics::{Generics, WhereClause};
+ use crate::ident::Ident;
+ use crate::parse::{Parse, ParseStream};
+ use crate::punctuated::Punctuated;
+ use crate::restriction::Visibility;
+ use crate::token;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for DeriveInput {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis = input.parse::<Visibility>()?;
+
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Token![struct]) {
+ let struct_token = input.parse::<Token![struct]>()?;
+ let ident = input.parse::<Ident>()?;
+ let generics = input.parse::<Generics>()?;
+ let (where_clause, fields, semi) = data_struct(input)?;
+ Ok(DeriveInput {
+ attrs,
+ vis,
+ ident,
+ generics: Generics {
+ where_clause,
+ ..generics
+ },
+ data: Data::Struct(DataStruct {
+ struct_token,
+ fields,
+ semi_token: semi,
+ }),
+ })
+ } else if lookahead.peek(Token![enum]) {
+ let enum_token = input.parse::<Token![enum]>()?;
+ let ident = input.parse::<Ident>()?;
+ let generics = input.parse::<Generics>()?;
+ let (where_clause, brace, variants) = data_enum(input)?;
+ Ok(DeriveInput {
+ attrs,
+ vis,
+ ident,
+ generics: Generics {
+ where_clause,
+ ..generics
+ },
+ data: Data::Enum(DataEnum {
+ enum_token,
+ brace_token: brace,
+ variants,
+ }),
+ })
+ } else if lookahead.peek(Token![union]) {
+ let union_token = input.parse::<Token![union]>()?;
+ let ident = input.parse::<Ident>()?;
+ let generics = input.parse::<Generics>()?;
+ let (where_clause, fields) = data_union(input)?;
+ Ok(DeriveInput {
+ attrs,
+ vis,
+ ident,
+ generics: Generics {
+ where_clause,
+ ..generics
+ },
+ data: Data::Union(DataUnion {
+ union_token,
+ fields,
+ }),
+ })
+ } else {
+ Err(lookahead.error())
+ }
+ }
+ }
+
+ pub(crate) fn data_struct(
+ input: ParseStream,
+ ) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
+ let mut lookahead = input.lookahead1();
+ let mut where_clause = None;
+ if lookahead.peek(Token![where]) {
+ where_clause = Some(input.parse()?);
+ lookahead = input.lookahead1();
+ }
+
+ if where_clause.is_none() && lookahead.peek(token::Paren) {
+ let fields = input.parse()?;
+
+ lookahead = input.lookahead1();
+ if lookahead.peek(Token![where]) {
+ where_clause = Some(input.parse()?);
+ lookahead = input.lookahead1();
+ }
+
+ if lookahead.peek(Token![;]) {
+ let semi = input.parse()?;
+ Ok((where_clause, Fields::Unnamed(fields), Some(semi)))
+ } else {
+ Err(lookahead.error())
+ }
+ } else if lookahead.peek(token::Brace) {
+ let fields = input.parse()?;
+ Ok((where_clause, Fields::Named(fields), None))
+ } else if lookahead.peek(Token![;]) {
+ let semi = input.parse()?;
+ Ok((where_clause, Fields::Unit, Some(semi)))
+ } else {
+ Err(lookahead.error())
+ }
+ }
+
+ pub(crate) fn data_enum(
+ input: ParseStream,
+ ) -> Result<(
+ Option<WhereClause>,
+ token::Brace,
+ Punctuated<Variant, Token![,]>,
+ )> {
+ let where_clause = input.parse()?;
+
+ let content;
+ let brace = braced!(content in input);
+ let variants = content.parse_terminated(Variant::parse, Token![,])?;
+
+ Ok((where_clause, brace, variants))
+ }
+
+ pub(crate) fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
+ let where_clause = input.parse()?;
+ let fields = input.parse()?;
+ Ok((where_clause, fields))
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::attr::FilterAttrs;
+ use crate::data::Fields;
+ use crate::derive::{Data, DeriveInput};
+ use crate::print::TokensOrDefault;
+ use proc_macro2::TokenStream;
+ use quote::ToTokens;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for DeriveInput {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ for attr in self.attrs.outer() {
+ attr.to_tokens(tokens);
+ }
+ self.vis.to_tokens(tokens);
+ match &self.data {
+ Data::Struct(d) => d.struct_token.to_tokens(tokens),
+ Data::Enum(d) => d.enum_token.to_tokens(tokens),
+ Data::Union(d) => d.union_token.to_tokens(tokens),
+ }
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ match &self.data {
+ Data::Struct(data) => match &data.fields {
+ Fields::Named(fields) => {
+ self.generics.where_clause.to_tokens(tokens);
+ fields.to_tokens(tokens);
+ }
+ Fields::Unnamed(fields) => {
+ fields.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ TokensOrDefault(&data.semi_token).to_tokens(tokens);
+ }
+ Fields::Unit => {
+ self.generics.where_clause.to_tokens(tokens);
+ TokensOrDefault(&data.semi_token).to_tokens(tokens);
+ }
+ },
+ Data::Enum(data) => {
+ self.generics.where_clause.to_tokens(tokens);
+ data.brace_token.surround(tokens, |tokens| {
+ data.variants.to_tokens(tokens);
+ });
+ }
+ Data::Union(data) => {
+ self.generics.where_clause.to_tokens(tokens);
+ data.fields.to_tokens(tokens);
+ }
+ }
+ }
+ }
+}
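
A short sketch of how DeriveInput is typically consumed, e.g. from a `proc_macro_derive` entry point; illustrative only, not part of the vendored sources, assuming syn's default features:

    use syn::{Data, DeriveInput, Fields};

    fn main() {
        let input: DeriveInput =
            syn::parse_str("pub struct Device { base: u64, irq: u32 }").unwrap();

        if let Data::Struct(data) = &input.data {
            assert_eq!(data.fields.len(), 2);
            if let Fields::Named(fields) = &data.fields {
                for field in &fields.named {
                    // Named struct fields always carry an identifier.
                    println!("field: {}", field.ident.as_ref().unwrap());
                }
            }
        }
    }
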
diff --git a/rust/hw/char/pl011/vendor/syn/src/discouraged.rs b/rust/hw/char/pl011/vendor/syn/src/discouraged.rs
new file mode 100644
index 0000000000..4109c670e7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/discouraged.rs
@@ -0,0 +1,225 @@
+//! Extensions to the parsing API with niche applicability.
+
+use crate::buffer::Cursor;
+use crate::error::Result;
+use crate::parse::{inner_unexpected, ParseBuffer, Unexpected};
+use proc_macro2::extra::DelimSpan;
+use proc_macro2::Delimiter;
+use std::cell::Cell;
+use std::mem;
+use std::rc::Rc;
+
+/// Extensions to the `ParseStream` API to support speculative parsing.
+pub trait Speculative {
+ /// Advance this parse stream to the position of a forked parse stream.
+ ///
+ /// This is the opposite operation to [`ParseStream::fork`]. You can fork a
+ /// parse stream, perform some speculative parsing, then join the original
+ /// stream to the fork to "commit" the parsing from the fork to the main
+ /// stream.
+ ///
+ /// If you can avoid doing this, you should, as it limits the ability to
+ /// generate useful errors. That said, it is often the only way to parse
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+ /// parsing `B*` and removing the leading members of `A` from the
+ /// repetition, bypassing the need to involve the downsides associated with
+ /// speculative parsing.
+ ///
+ /// [`ParseStream::fork`]: ParseBuffer::fork
+ ///
+ /// # Example
+ ///
+ /// There has been chatter about the possibility of making the colons in the
+ /// turbofish syntax like `path::to::<T>` no longer required by accepting
+ /// `path::to<T>` in expression position. Specifically, according to [RFC
+ /// 2544], [`PathSegment`] parsing should always try to consume a following
+ /// `<` token as the start of generic arguments, and reset to the `<` if
+ /// that fails (e.g. the token is acting as a less-than operator).
+ ///
+ /// This is the exact kind of parsing behavior which requires the "fork,
+ /// try, commit" behavior that [`ParseStream::fork`] discourages. With
+ /// `advance_to`, we can avoid having to parse the speculatively parsed
+ /// content a second time.
+ ///
+ /// This change in behavior can be implemented in syn by replacing just the
+ /// `Parse` implementation for `PathSegment`:
+ ///
+ /// ```
+ /// # use syn::ext::IdentExt;
+ /// use syn::parse::discouraged::Speculative;
+ /// # use syn::parse::{Parse, ParseStream};
+ /// # use syn::{Ident, PathArguments, Result, Token};
+ ///
+ /// pub struct PathSegment {
+ /// pub ident: Ident,
+ /// pub arguments: PathArguments,
+ /// }
+ /// #
+ /// # impl<T> From<T> for PathSegment
+ /// # where
+ /// # T: Into<Ident>,
+ /// # {
+ /// # fn from(ident: T) -> Self {
+ /// # PathSegment {
+ /// # ident: ident.into(),
+ /// # arguments: PathArguments::None,
+ /// # }
+ /// # }
+ /// # }
+ ///
+ /// impl Parse for PathSegment {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// if input.peek(Token![super])
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+ /// }
+ ///
+ /// let ident = input.parse()?;
+ /// if input.peek(Token![::]) && input.peek3(Token![<]) {
+ /// return Ok(PathSegment {
+ /// ident,
+ /// arguments: PathArguments::AngleBracketed(input.parse()?),
+ /// });
+ /// }
+ /// if input.peek(Token![<]) && !input.peek(Token![<=]) {
+ /// let fork = input.fork();
+ /// if let Ok(arguments) = fork.parse() {
+ /// input.advance_to(&fork);
+ /// return Ok(PathSegment {
+ /// ident,
+ /// arguments: PathArguments::AngleBracketed(arguments),
+ /// });
+ /// }
+ /// }
+ /// Ok(PathSegment::from(ident))
+ /// }
+ /// }
+ ///
+ /// # syn::parse_str::<PathSegment>("a<b,c>").unwrap();
+ /// ```
+ ///
+ /// # Drawbacks
+ ///
+ /// The main drawback of this style of speculative parsing is in error
+ /// presentation. Even if the lookahead is the "correct" parse, the error
+ /// that is shown is that of the "fallback" parse. To use the same example
+ /// as the turbofish above, take the following unfinished "turbofish":
+ ///
+ /// ```text
+ /// let _ = f<&'a fn(), for<'a> serde::>();
+ /// ```
+ ///
+ /// If this is parsed as generic arguments, we can provide the error message
+ ///
+ /// ```text
+ /// error: expected identifier
+ /// --> src.rs:L:C
+ /// |
+ /// L | let _ = f<&'a fn(), for<'a> serde::>();
+ /// | ^
+ /// ```
+ ///
+ /// but if parsed using the above speculative parsing, it falls back to
+ /// assuming that the `<` is a less-than when it fails to parse the generic
+ /// arguments, and tries to interpret the `&'a` as the start of a labelled
+ /// loop, resulting in the much less helpful error
+ ///
+ /// ```text
+ /// error: expected `:`
+ /// --> src.rs:L:C
+ /// |
+ /// L | let _ = f<&'a fn(), for<'a> serde::>();
+ /// | ^^
+ /// ```
+ ///
+ /// This can be mitigated with various heuristics (two examples: show both
+ /// forks' parse errors, or show the one that consumed more tokens), but
+ /// when you can control the grammar, sticking to something that can be
+ /// parsed LL(3) and without the LL(*) speculative parsing this makes
+ /// possible, displaying reasonable errors becomes much more simple.
+ ///
+ /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544
+ /// [`PathSegment`]: crate::PathSegment
+ ///
+ /// # Performance
+ ///
+ /// This method performs a cheap fixed amount of work that does not depend
+ /// on how far apart the two streams are positioned.
+ ///
+ /// # Panics
+ ///
+ /// The forked stream in the argument of `advance_to` must have been
+ /// obtained by forking `self`. Attempting to advance to any other stream
+ /// will cause a panic.
+ fn advance_to(&self, fork: &Self);
+}
+
+impl<'a> Speculative for ParseBuffer<'a> {
+ fn advance_to(&self, fork: &Self) {
+ if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
+ panic!("fork was not derived from the advancing parse stream");
+ }
+
+ let (self_unexp, self_sp) = inner_unexpected(self);
+ let (fork_unexp, fork_sp) = inner_unexpected(fork);
+ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
+ match (fork_sp, self_sp) {
+ // Unexpected set on the fork, but not on `self`, copy it over.
+ (Some(span), None) => {
+ self_unexp.set(Unexpected::Some(span));
+ }
+ // Unexpected unset. Use chain to propagate errors from fork.
+ (None, None) => {
+ fork_unexp.set(Unexpected::Chain(self_unexp));
+
+ // Ensure toplevel 'unexpected' tokens from the fork don't
+ // bubble up the chain by replacing the root `unexpected`
+ // pointer, only 'unexpected' tokens from existing group
+ // parsers should bubble.
+ fork.unexpected
+ .set(Some(Rc::new(Cell::new(Unexpected::None))));
+ }
+ // Unexpected has been set on `self`. No changes needed.
+ (_, Some(_)) => {}
+ }
+ }
+
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) });
+ }
+}
+
+/// Extensions to the `ParseStream` API to support manipulating invisible
+/// delimiters the same as if they were visible.
+pub trait AnyDelimiter {
+ /// Returns the delimiter, the span of the delimiter token, and the nested
+ /// contents for further parsing.
+ fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)>;
+}
+
+impl<'a> AnyDelimiter for ParseBuffer<'a> {
+ fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> {
+ self.step(|cursor| {
+ if let Some((content, delimiter, span, rest)) = cursor.any_group() {
+ let scope = crate::buffer::close_span_of_group(*cursor);
+ let nested = crate::parse::advance_step_cursor(cursor, content);
+ let unexpected = crate::parse::get_unexpected(self);
+ let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
+ Ok(((delimiter, span, content), rest))
+ } else {
+ Err(cursor.error("expected any delimiter"))
+ }
+ })
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/drops.rs b/rust/hw/char/pl011/vendor/syn/src/drops.rs
new file mode 100644
index 0000000000..89b42d82ef
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/drops.rs
@@ -0,0 +1,58 @@
+use std::iter;
+use std::mem::ManuallyDrop;
+use std::ops::{Deref, DerefMut};
+use std::option;
+use std::slice;
+
+#[repr(transparent)]
+pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>);
+
+impl<T> NoDrop<T> {
+ pub(crate) fn new(value: T) -> Self
+ where
+ T: TrivialDrop,
+ {
+ NoDrop(ManuallyDrop::new(value))
+ }
+}
+
+impl<T: ?Sized> Deref for NoDrop<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl<T: ?Sized> DerefMut for NoDrop<T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
+
+pub(crate) trait TrivialDrop {}
+
+impl<T> TrivialDrop for iter::Empty<T> {}
+impl<'a, T> TrivialDrop for slice::Iter<'a, T> {}
+impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {}
+
+#[test]
+fn test_needs_drop() {
+ use std::mem::needs_drop;
+
+ struct NeedsDrop;
+
+ impl Drop for NeedsDrop {
+ fn drop(&mut self) {}
+ }
+
+ assert!(needs_drop::<NeedsDrop>());
+
+ // Test each of the types with a handwritten TrivialDrop impl above.
+ assert!(!needs_drop::<iter::Empty<NeedsDrop>>());
+ assert!(!needs_drop::<slice::Iter<NeedsDrop>>());
+ assert!(!needs_drop::<slice::IterMut<NeedsDrop>>());
+ assert!(!needs_drop::<option::IntoIter<&NeedsDrop>>());
+ assert!(!needs_drop::<option::IntoIter<&mut NeedsDrop>>());
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/error.rs b/rust/hw/char/pl011/vendor/syn/src/error.rs
new file mode 100644
index 0000000000..63310543a3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/error.rs
@@ -0,0 +1,467 @@
+#[cfg(feature = "parsing")]
+use crate::buffer::Cursor;
+use crate::thread::ThreadBound;
+use proc_macro2::{
+ Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
+};
+#[cfg(feature = "printing")]
+use quote::ToTokens;
+use std::fmt::{self, Debug, Display};
+use std::slice;
+use std::vec;
+
+/// The result of a Syn parser.
+pub type Result<T> = std::result::Result<T, Error>;
+
+/// Error returned when a Syn parser cannot parse the input tokens.
+///
+/// # Error reporting in proc macros
+///
+/// The correct way to report errors back to the compiler from a procedural
+/// macro is by emitting an appropriately spanned invocation of
+/// [`compile_error!`] in the generated code. This produces a better diagnostic
+/// message than simply panicking the macro.
+///
+/// [`compile_error!`]: std::compile_error!
+///
+/// When parsing macro input, the [`parse_macro_input!`] macro handles the
+/// conversion to `compile_error!` automatically.
+///
+/// [`parse_macro_input!`]: crate::parse_macro_input!
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// use proc_macro::TokenStream;
+/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{parse_macro_input, ItemFn};
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro_attribute]
+/// # };
+/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
+/// let args = parse_macro_input!(args as MyAttrArgs);
+/// let input = parse_macro_input!(input as ItemFn);
+///
+/// /* ... */
+/// # TokenStream::new()
+/// }
+///
+/// struct MyAttrArgs {
+/// # _k: [(); { stringify! {
+/// ...
+/// # }; 0 }]
+/// }
+///
+/// impl Parse for MyAttrArgs {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// # stringify! {
+/// ...
+/// # };
+/// # unimplemented!()
+/// }
+/// }
+/// ```
+///
+/// For errors that arise later than the initial parsing stage, the
+/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to
+/// perform an explicit conversion to `compile_error!`.
+///
+/// [`.to_compile_error()`]: Error::to_compile_error
+/// [`.into_compile_error()`]: Error::into_compile_error
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// # use proc_macro::TokenStream;
+/// # use syn::{parse_macro_input, DeriveInput};
+/// #
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro_derive(MyDerive)]
+/// # };
+/// pub fn my_derive(input: TokenStream) -> TokenStream {
+/// let input = parse_macro_input!(input as DeriveInput);
+///
+/// // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream>
+/// expand::my_derive(input)
+/// .unwrap_or_else(syn::Error::into_compile_error)
+/// .into()
+/// }
+/// #
+/// # mod expand {
+/// # use proc_macro2::TokenStream;
+/// # use syn::{DeriveInput, Result};
+/// #
+/// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
+/// # unimplemented!()
+/// # }
+/// # }
+/// ```
+pub struct Error {
+ messages: Vec<ErrorMessage>,
+}
+
+struct ErrorMessage {
+ // Span is implemented as an index into a thread-local interner to keep the
+ // size small. It is not safe to access from a different thread. We want
+ // errors to be Send and Sync to play nicely with ecosystem crates for error
+ // handling, so pin the span we're given to its original thread and assume
+ // it is Span::call_site if accessed from any other thread.
+ span: ThreadBound<SpanRange>,
+ message: String,
+}
+
+// Cannot use std::ops::Range<Span> because that does not implement Copy,
+// whereas ThreadBound<T> requires a Copy impl as a way to ensure no Drop impls
+// are involved.
+struct SpanRange {
+ start: Span,
+ end: Span,
+}
+
+#[cfg(test)]
+struct _Test
+where
+ Error: Send + Sync;
+
+impl Error {
+ /// Usually the [`ParseStream::error`] method will be used instead, which
+ /// automatically uses the correct span from the current position of the
+ /// parse stream.
+ ///
+ /// Use `Error::new` when the error needs to be triggered on some span other
+ /// than where the parse stream is currently positioned.
+ ///
+ /// [`ParseStream::error`]: crate::parse::ParseBuffer::error
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{Error, Ident, LitStr, Result, Token};
+ /// use syn::parse::ParseStream;
+ ///
+ /// // Parses input that looks like `name = "string"` where the key must be
+ /// // the identifier `name` and the value may be any string literal.
+ /// // Returns the string literal.
+ /// fn parse_name(input: ParseStream) -> Result<LitStr> {
+ /// let name_token: Ident = input.parse()?;
+ /// if name_token != "name" {
+ /// // Trigger an error not on the current position of the stream,
+ /// // but on the position of the unexpected identifier.
+ /// return Err(Error::new(name_token.span(), "expected `name`"));
+ /// }
+ /// input.parse::<Token![=]>()?;
+ /// let s: LitStr = input.parse()?;
+ /// Ok(s)
+ /// }
+ /// ```
+ pub fn new<T: Display>(span: Span, message: T) -> Self {
+ return new(span, message.to_string());
+
+ fn new(span: Span, message: String) -> Error {
+ Error {
+ messages: vec![ErrorMessage {
+ span: ThreadBound::new(SpanRange {
+ start: span,
+ end: span,
+ }),
+ message,
+ }],
+ }
+ }
+ }
+
+ /// Creates an error with the specified message spanning the given syntax
+ /// tree node.
+ ///
+ /// Unlike the `Error::new` constructor, this constructor takes an argument
+ /// `tokens` which is a syntax tree node. This allows the resulting `Error`
+ /// to attempt to span all tokens inside of `tokens`. While you would
+ /// typically be able to use the `Spanned` trait with the above `Error::new`
+ /// constructor, implementation limitations today mean that
+ /// `Error::new_spanned` may provide a higher-quality error message on
+ /// stable Rust.
+ ///
+ /// When in doubt it's recommended to stick to `Error::new` (or
+ /// `ParseStream::error`)!
+ #[cfg(feature = "printing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
+ return new_spanned(tokens.into_token_stream(), message.to_string());
+
+ fn new_spanned(tokens: TokenStream, message: String) -> Error {
+ let mut iter = tokens.into_iter();
+ let start = iter.next().map_or_else(Span::call_site, |t| t.span());
+ let end = iter.last().map_or(start, |t| t.span());
+ Error {
+ messages: vec![ErrorMessage {
+ span: ThreadBound::new(SpanRange { start, end }),
+ message,
+ }],
+ }
+ }
+ }
+
+ /// The source location of the error.
+ ///
+ /// Spans are not thread-safe so this function returns `Span::call_site()`
+ /// if called from a different thread than the one on which the `Error` was
+ /// originally created.
+ pub fn span(&self) -> Span {
+ let SpanRange { start, end } = match self.messages[0].span.get() {
+ Some(span) => *span,
+ None => return Span::call_site(),
+ };
+ start.join(end).unwrap_or(start)
+ }
+
+ /// Render the error as an invocation of [`compile_error!`].
+ ///
+ /// The [`parse_macro_input!`] macro provides a convenient way to invoke
+ /// this method correctly in a procedural macro.
+ ///
+ /// [`compile_error!`]: std::compile_error!
+ /// [`parse_macro_input!`]: crate::parse_macro_input!
+ pub fn to_compile_error(&self) -> TokenStream {
+ self.messages
+ .iter()
+ .map(ErrorMessage::to_compile_error)
+ .collect()
+ }
+
+ /// Render the error as an invocation of [`compile_error!`].
+ ///
+ /// [`compile_error!`]: std::compile_error!
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # extern crate proc_macro;
+ /// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, DeriveInput, Error};
+ ///
+ /// # const _: &str = stringify! {
+ /// #[proc_macro_derive(MyTrait)]
+ /// # };
+ /// pub fn derive_my_trait(input: TokenStream) -> TokenStream {
+ /// let input = parse_macro_input!(input as DeriveInput);
+ /// my_trait::expand(input)
+ /// .unwrap_or_else(Error::into_compile_error)
+ /// .into()
+ /// }
+ ///
+ /// mod my_trait {
+ /// use proc_macro2::TokenStream;
+ /// use syn::{DeriveInput, Result};
+ ///
+ /// pub(crate) fn expand(input: DeriveInput) -> Result<TokenStream> {
+ /// /* ... */
+ /// # unimplemented!()
+ /// }
+ /// }
+ /// ```
+ pub fn into_compile_error(self) -> TokenStream {
+ self.to_compile_error()
+ }
+
+ /// Add another error message to self such that when `to_compile_error()` is
+ /// called, both errors will be emitted together.
+ pub fn combine(&mut self, another: Error) {
+ self.messages.extend(another.messages);
+ }
+}
+
+impl ErrorMessage {
+ fn to_compile_error(&self) -> TokenStream {
+ let (start, end) = match self.span.get() {
+ Some(range) => (range.start, range.end),
+ None => (Span::call_site(), Span::call_site()),
+ };
+
+ // ::core::compile_error!($message)
+ TokenStream::from_iter([
+ TokenTree::Punct({
+ let mut punct = Punct::new(':', Spacing::Joint);
+ punct.set_span(start);
+ punct
+ }),
+ TokenTree::Punct({
+ let mut punct = Punct::new(':', Spacing::Alone);
+ punct.set_span(start);
+ punct
+ }),
+ TokenTree::Ident(Ident::new("core", start)),
+ TokenTree::Punct({
+ let mut punct = Punct::new(':', Spacing::Joint);
+ punct.set_span(start);
+ punct
+ }),
+ TokenTree::Punct({
+ let mut punct = Punct::new(':', Spacing::Alone);
+ punct.set_span(start);
+ punct
+ }),
+ TokenTree::Ident(Ident::new("compile_error", start)),
+ TokenTree::Punct({
+ let mut punct = Punct::new('!', Spacing::Alone);
+ punct.set_span(start);
+ punct
+ }),
+ TokenTree::Group({
+ let mut group = Group::new(Delimiter::Brace, {
+ TokenStream::from_iter([TokenTree::Literal({
+ let mut string = Literal::string(&self.message);
+ string.set_span(end);
+ string
+ })])
+ });
+ group.set_span(end);
+ group
+ }),
+ ])
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
+ if cursor.eof() {
+ Error::new(scope, format!("unexpected end of input, {}", message))
+ } else {
+ let span = crate::buffer::open_span_of_group(cursor);
+ Error::new(span, message)
+ }
+}
+
+#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
+pub(crate) fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
+ return new2(start, end, message.to_string());
+
+ fn new2(start: Span, end: Span, message: String) -> Error {
+ Error {
+ messages: vec![ErrorMessage {
+ span: ThreadBound::new(SpanRange { start, end }),
+ message,
+ }],
+ }
+ }
+}
+
+impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+ formatter
+ .debug_tuple("Error")
+ .field(&self.messages[0])
+ .finish()
+ } else {
+ formatter
+ .debug_tuple("Error")
+ .field(&self.messages)
+ .finish()
+ }
+ }
+}
+
+impl Debug for ErrorMessage {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.message, formatter)
+ }
+}
+
+impl Display for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str(&self.messages[0].message)
+ }
+}
+
+impl Clone for Error {
+ fn clone(&self) -> Self {
+ Error {
+ messages: self.messages.clone(),
+ }
+ }
+}
+
+impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ ErrorMessage {
+ span: self.span,
+ message: self.message.clone(),
+ }
+ }
+}
+
+impl Clone for SpanRange {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl Copy for SpanRange {}
+
+impl std::error::Error for Error {}
+
+impl From<LexError> for Error {
+ fn from(err: LexError) -> Self {
+ Error::new(err.span(), err)
+ }
+}
+
+impl IntoIterator for Error {
+ type Item = Error;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> Self::IntoIter {
+ IntoIter {
+ messages: self.messages.into_iter(),
+ }
+ }
+}
+
+pub struct IntoIter {
+ messages: vec::IntoIter<ErrorMessage>,
+}
+
+impl Iterator for IntoIter {
+ type Item = Error;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ Some(Error {
+ messages: vec![self.messages.next()?],
+ })
+ }
+}
+
+impl<'a> IntoIterator for &'a Error {
+ type Item = Error;
+ type IntoIter = Iter<'a>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ Iter {
+ messages: self.messages.iter(),
+ }
+ }
+}
+
+pub struct Iter<'a> {
+ messages: slice::Iter<'a, ErrorMessage>,
+}
+
+impl<'a> Iterator for Iter<'a> {
+ type Item = Error;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ Some(Error {
+ messages: vec![self.messages.next()?.clone()],
+ })
+ }
+}
+
+impl Extend<Error> for Error {
+ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
+ for err in iter {
+ self.combine(err);
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/export.rs b/rust/hw/char/pl011/vendor/syn/src/export.rs
new file mode 100644
index 0000000000..b9ea5c747b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/export.rs
@@ -0,0 +1,73 @@
+#[doc(hidden)]
+pub use std::clone::Clone;
+#[doc(hidden)]
+pub use std::cmp::{Eq, PartialEq};
+#[doc(hidden)]
+pub use std::concat;
+#[doc(hidden)]
+pub use std::default::Default;
+#[doc(hidden)]
+pub use std::fmt::Debug;
+#[doc(hidden)]
+pub use std::hash::{Hash, Hasher};
+#[doc(hidden)]
+pub use std::marker::Copy;
+#[doc(hidden)]
+pub use std::option::Option::{None, Some};
+#[doc(hidden)]
+pub use std::result::Result::{Err, Ok};
+#[doc(hidden)]
+pub use std::stringify;
+
+#[doc(hidden)]
+pub type Formatter<'a> = std::fmt::Formatter<'a>;
+#[doc(hidden)]
+pub type FmtResult = std::fmt::Result;
+
+#[doc(hidden)]
+pub type bool = std::primitive::bool;
+#[doc(hidden)]
+pub type str = std::primitive::str;
+
+#[cfg(feature = "printing")]
+#[doc(hidden)]
+pub use quote;
+
+#[doc(hidden)]
+pub type Span = proc_macro2::Span;
+#[doc(hidden)]
+pub type TokenStream2 = proc_macro2::TokenStream;
+
+#[cfg(feature = "parsing")]
+#[doc(hidden)]
+pub use crate::group::{parse_braces, parse_brackets, parse_parens};
+
+#[doc(hidden)]
+pub use crate::span::IntoSpans;
+
+#[cfg(all(feature = "parsing", feature = "printing"))]
+#[doc(hidden)]
+pub use crate::parse_quote::parse as parse_quote;
+
+#[cfg(feature = "parsing")]
+#[doc(hidden)]
+pub use crate::token::parsing::{peek_punct, punct as parse_punct};
+
+#[cfg(feature = "printing")]
+#[doc(hidden)]
+pub use crate::token::printing::punct as print_punct;
+
+#[cfg(feature = "parsing")]
+#[doc(hidden)]
+pub use crate::token::private::CustomToken;
+
+#[cfg(feature = "proc-macro")]
+#[doc(hidden)]
+pub type TokenStream = proc_macro::TokenStream;
+
+#[cfg(feature = "printing")]
+#[doc(hidden)]
+pub use quote::{ToTokens, TokenStreamExt};
+
+#[doc(hidden)]
+pub struct private(pub(crate) ());
diff --git a/rust/hw/char/pl011/vendor/syn/src/expr.rs b/rust/hw/char/pl011/vendor/syn/src/expr.rs
new file mode 100644
index 0000000000..c60bcf4771
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/expr.rs
@@ -0,0 +1,3960 @@
+use crate::attr::Attribute;
+#[cfg(all(feature = "parsing", feature = "full"))]
+use crate::error::Result;
+#[cfg(feature = "full")]
+use crate::generics::BoundLifetimes;
+use crate::ident::Ident;
+#[cfg(feature = "full")]
+use crate::lifetime::Lifetime;
+use crate::lit::Lit;
+use crate::mac::Macro;
+use crate::op::{BinOp, UnOp};
+#[cfg(all(feature = "parsing", feature = "full"))]
+use crate::parse::ParseStream;
+#[cfg(feature = "full")]
+use crate::pat::Pat;
+use crate::path::{AngleBracketedGenericArguments, Path, QSelf};
+use crate::punctuated::Punctuated;
+#[cfg(feature = "full")]
+use crate::stmt::Block;
+use crate::token;
+#[cfg(feature = "full")]
+use crate::ty::ReturnType;
+use crate::ty::Type;
+use proc_macro2::{Span, TokenStream};
+#[cfg(feature = "printing")]
+use quote::IdentFragment;
+#[cfg(feature = "printing")]
+use std::fmt::{self, Display};
+use std::hash::{Hash, Hasher};
+#[cfg(all(feature = "parsing", feature = "full"))]
+use std::mem;
+
+ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+ /// This type is a syntax tree enum. In Syn this and other syntax tree enums
+ /// are designed to be traversed using the following rebinding idiom.
+ ///
+ /// ```
+ /// # use syn::Expr;
+ /// #
+ /// # fn example(expr: Expr) {
+ /// # const IGNORE: &str = stringify! {
+ /// let expr: Expr = /* ... */;
+ /// # };
+ /// match expr {
+ /// Expr::MethodCall(expr) => {
+ /// /* ... */
+ /// }
+ /// Expr::Cast(expr) => {
+ /// /* ... */
+ /// }
+ /// Expr::If(expr) => {
+ /// /* ... */
+ /// }
+ ///
+ /// /* ... */
+ /// # _ => {}
+ /// # }
+ /// # }
+ /// ```
+ ///
+ /// We begin with a variable `expr` of type `Expr` that has no fields
+ /// (because it is an enum), and by matching on it and rebinding a variable
+ /// with the same name `expr` we effectively imbue our variable with all of
+ /// the data fields provided by the variant that it turned out to be. So for
+ /// example above if we ended up in the `MethodCall` case then we get to use
+ /// `expr.receiver`, `expr.args` etc; if we ended up in the `If` case we get
+ /// to use `expr.cond`, `expr.then_branch`, `expr.else_branch`.
+ ///
+ /// This approach avoids repeating the variant names twice on every line.
+ ///
+ /// ```
+ /// # use syn::{Expr, ExprMethodCall};
+ /// #
+ /// # fn example(expr: Expr) {
+ /// // Repetitive; recommend not doing this.
+ /// match expr {
+ /// Expr::MethodCall(ExprMethodCall { method, args, .. }) => {
+ /// # }
+ /// # _ => {}
+ /// # }
+ /// # }
+ /// ```
+ ///
+ /// In general, the name to which a syntax tree enum variant is bound should
+ /// be a suitable name for the complete syntax tree enum type.
+ ///
+ /// ```
+ /// # use syn::{Expr, ExprField};
+ /// #
+ /// # fn example(discriminant: ExprField) {
+ /// // Binding is called `base` which is the name I would use if I were
+ /// // assigning `*discriminant.base` without an `if let`.
+ /// if let Expr::Tuple(base) = *discriminant.base {
+ /// # }
+ /// # }
+ /// ```
+ ///
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+ /// An assignment expression: `a = compute()`.
+ Assign(ExprAssign),
+
+ /// An async block: `async { ... }`.
+ Async(ExprAsync),
+
+ /// An await expression: `fut.await`.
+ Await(ExprAwait),
+
+ /// A binary operation: `a + b`, `a += b`.
+ Binary(ExprBinary),
+
+ /// A blocked scope: `{ ... }`.
+ Block(ExprBlock),
+
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ Break(ExprBreak),
+
+ /// A function call expression: `invoke(a, b)`.
+ Call(ExprCall),
+
+ /// A cast expression: `foo as f64`.
+ Cast(ExprCast),
+
+ /// A closure expression: `|a, b| a + b`.
+ Closure(ExprClosure),
+
+ /// A const block: `const { ... }`.
+ Const(ExprConst),
+
+ /// A `continue`, with an optional label.
+ Continue(ExprContinue),
+
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ Field(ExprField),
+
+ /// A for loop: `for pat in expr { ... }`.
+ ForLoop(ExprForLoop),
+
+ /// An expression contained within invisible delimiters.
+ ///
+ /// This variant is important for faithfully representing the precedence
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ Group(ExprGroup),
+
+ /// An `if` expression with an optional `else` block: `if expr { ... }
+ /// else { ... }`.
+ ///
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ If(ExprIf),
+
+ /// A square bracketed indexing expression: `vector[2]`.
+ Index(ExprIndex),
+
+ /// The inferred value of a const generic argument, denoted `_`.
+ Infer(ExprInfer),
+
+ /// A `let` guard: `let Some(x) = opt`.
+ Let(ExprLet),
+
+ /// A literal in place of an expression: `1`, `"foo"`.
+ Lit(ExprLit),
+
+ /// Conditionless loop: `loop { ... }`.
+ Loop(ExprLoop),
+
+ /// A macro invocation expression: `format!("{}", q)`.
+ Macro(ExprMacro),
+
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ Match(ExprMatch),
+
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ MethodCall(ExprMethodCall),
+
+ /// A parenthesized expression: `(a + b)`.
+ Paren(ExprParen),
+
+ /// A path like `std::mem::replace` possibly containing generic
+ /// parameters and a qualified self-type.
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ Path(ExprPath),
+
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ Range(ExprRange),
+
+ /// A referencing operation: `&a` or `&mut a`.
+ Reference(ExprReference),
+
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ Repeat(ExprRepeat),
+
+ /// A `return`, with an optional value to be returned.
+ Return(ExprReturn),
+
+ /// A struct literal expression: `Point { x: 1, y: 1 }`.
+ ///
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ Struct(ExprStruct),
+
+ /// A try-expression: `expr?`.
+ Try(ExprTry),
+
+ /// A try block: `try { ... }`.
+ TryBlock(ExprTryBlock),
+
+ /// A tuple expression: `(a, b, c, d)`.
+ Tuple(ExprTuple),
+
+ /// A unary operation: `!x`, `*x`.
+ Unary(ExprUnary),
+
+ /// An unsafe block: `unsafe { ... }`.
+ Unsafe(ExprUnsafe),
+
+ /// Tokens in expression position not interpreted by Syn.
+ Verbatim(TokenStream),
+
+ /// A while loop: `while expr { ... }`.
+ While(ExprWhile),
+
+ /// A yield expression: `yield expr`.
+ Yield(ExprYield),
+
+ // For testing exhaustiveness in downstream code, use the following idiom:
+ //
+ // match expr {
+ // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+ //
+ // Expr::Array(expr) => {...}
+ // Expr::Assign(expr) => {...}
+ // ...
+ // Expr::Yield(expr) => {...}
+ //
+ // _ => { /* some sane fallback */ }
+ // }
+ //
+ // This way we fail your tests but don't break your library when adding
+ // a variant. You will be notified by a test failure when a variant is
+ // added, so that you can add code to handle it, but your library will
+ // continue to compile and work for downstream users in the interim.
+ }
+}
+
+ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub elems: Punctuated<Expr, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub eq_token: Token![=],
+ pub right: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// An async block: `async { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+ pub capture: Option<Token![move]>,
+ pub block: Block,
+ }
+}
+
+ast_struct! {
+ /// An await expression: `fut.await`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+ pub dot_token: Token![.],
+ pub await_token: Token![await],
+ }
+}
+
+ast_struct! {
+ /// A binary operation: `a + b`, `a += b`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub op: BinOp,
+ pub right: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub block: Block,
+ }
+}
+
+ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+ pub label: Option<Lifetime>,
+ pub expr: Option<Box<Expr>>,
+ }
+}
+
+ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+ pub func: Box<Expr>,
+ pub paren_token: token::Paren,
+ pub args: Punctuated<Expr, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// A cast expression: `foo as f64`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub as_token: Token![as],
+ pub ty: Box<Type>,
+ }
+}
+
+ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub lifetimes: Option<BoundLifetimes>,
+ pub constness: Option<Token![const]>,
+ pub movability: Option<Token![static]>,
+ pub asyncness: Option<Token![async]>,
+ pub capture: Option<Token![move]>,
+ pub or1_token: Token![|],
+ pub inputs: Punctuated<Pat, Token![,]>,
+ pub or2_token: Token![|],
+ pub output: ReturnType,
+ pub body: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// A const block: `const { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprConst #full {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+ pub block: Block,
+ }
+}
+
+ast_struct! {
+ /// A `continue`, with an optional label.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+ pub label: Option<Lifetime>,
+ }
+}
+
+ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+ pub dot_token: Token![.],
+ pub member: Member,
+ }
+}
+
+ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub for_token: Token![for],
+ pub pat: Box<Pat>,
+ pub in_token: Token![in],
+ pub expr: Box<Expr>,
+ pub body: Block,
+ }
+}
+
+ast_struct! {
+ /// An expression contained within invisible delimiters.
+ ///
+ /// This variant is important for faithfully representing the precedence
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprGroup {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+ pub expr: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// An `if` expression with an optional `else` block: `if expr { ... }
+ /// else { ... }`.
+ ///
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+ pub cond: Box<Expr>,
+ pub then_branch: Block,
+ pub else_branch: Option<(Token![else], Box<Expr>)>,
+ }
+}
+
+ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub bracket_token: token::Bracket,
+ pub index: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// The inferred value of a const generic argument, denoted `_`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprInfer #full {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+ }
+}
+
+ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+ pub pat: Box<Pat>,
+ pub eq_token: Token![=],
+ pub expr: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+ pub lit: Lit,
+ }
+}
+
+ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub loop_token: Token![loop],
+ pub body: Block,
+ }
+}
+
+ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ }
+}
+
+ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+ pub expr: Box<Expr>,
+ pub brace_token: token::Brace,
+ pub arms: Vec<Arm>,
+ }
+}
+
+ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprMethodCall {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+ pub dot_token: Token![.],
+ pub method: Ident,
+ pub turbofish: Option<AngleBracketedGenericArguments>,
+ pub paren_token: token::Paren,
+ pub args: Punctuated<Expr, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub expr: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// A path like `std::mem::replace` possibly containing generic
+ /// parameters and a qualified self-type.
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ }
+}
+
+ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub start: Option<Box<Expr>>,
+ pub limits: RangeLimits,
+ pub end: Option<Box<Expr>>,
+ }
+}
+
+ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+ pub mutability: Option<Token![mut]>,
+ pub expr: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub expr: Box<Expr>,
+ pub semi_token: Token![;],
+ pub len: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+ pub expr: Option<Box<Expr>>,
+ }
+}
+
+ast_struct! {
+ /// A struct literal expression: `Point { x: 1, y: 1 }`.
+ ///
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprStruct {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ pub brace_token: token::Brace,
+ pub fields: Punctuated<FieldValue, Token![,]>,
+ pub dot2_token: Option<Token![..]>,
+ pub rest: Option<Box<Expr>>,
+ }
+}
+
+ast_struct! {
+ /// A try-expression: `expr?`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub question_token: Token![?],
+ }
+}
+
+ast_struct! {
+ /// A try block: `try { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+ pub block: Block,
+ }
+}
+
+ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Expr, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+ pub op: UnOp,
+ pub expr: Box<Expr>,
+ }
+}
+
+ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+ pub block: Block,
+ }
+}
+
+ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub while_token: Token![while],
+ pub cond: Box<Expr>,
+ pub body: Block,
+ }
+}
+
+ast_struct! {
+ /// A yield expression: `yield expr`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+ pub expr: Option<Box<Expr>>,
+ }
+}
+
+impl Expr {
+ /// An unspecified invalid expression.
+ ///
+ /// ```
+ /// use quote::ToTokens;
+ /// use std::mem;
+ /// use syn::{parse_quote, Expr};
+ ///
+ /// fn unparenthesize(e: &mut Expr) {
+ /// while let Expr::Paren(paren) = e {
+ /// *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER);
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let mut e: Expr = parse_quote! { ((1 + 1)) };
+ /// unparenthesize(&mut e);
+ /// assert_eq!("1 + 1", e.to_token_stream().to_string());
+ /// }
+ /// ```
+ pub const PLACEHOLDER: Self = Expr::Path(ExprPath {
+ attrs: Vec::new(),
+ qself: None,
+ path: Path {
+ leading_colon: None,
+ segments: Punctuated::new(),
+ },
+ });
+
+ /// An alternative to the primary `Expr::parse` parser (from the [`Parse`]
+ /// trait) for ambiguous syntactic positions in which a trailing brace
+ /// should not be taken as part of the expression.
+ ///
+ /// [`Parse`]: crate::parse::Parse
+ ///
+ /// Rust grammar has an ambiguity where braces sometimes turn a path
+ /// expression into a struct initialization and sometimes do not. In the
+ /// following code, the expression `S {}` is one expression. Presumably
+ /// there is an empty struct `struct S {}` defined somewhere which it is
+ /// instantiating.
+ ///
+ /// ```
+ /// # struct S;
+ /// # impl std::ops::Deref for S {
+ /// # type Target = bool;
+ /// # fn deref(&self) -> &Self::Target {
+ /// # &true
+ /// # }
+ /// # }
+ /// let _ = *S {};
+ ///
+ /// // parsed by rustc as: `*(S {})`
+ /// ```
+ ///
+ /// We would want to parse the above using `Expr::parse` after the `=`
+ /// token.
+ ///
+ /// But in the following, `S {}` is *not* a struct init expression.
+ ///
+ /// ```
+ /// # const S: &bool = &true;
+ /// if *S {} {}
+ ///
+ /// // parsed by rustc as:
+ /// //
+ /// // if (*S) {
+ /// // /* empty block */
+ /// // }
+ /// // {
+ /// // /* another empty block */
+ /// // }
+ /// ```
+ ///
+ /// For that reason we would want to parse if-conditions using
+ /// `Expr::parse_without_eager_brace` after the `if` token. Same for similar
+ /// syntactic positions such as the condition expr after a `while` token or
+ /// the expr at the top of a `match`.
+ ///
+ /// The Rust grammar's choices around which way this ambiguity is resolved
+ /// at various syntactic positions is fairly arbitrary. Really either parse
+ /// behavior could work in most positions, and language designers just
+ /// decide each case based on which is more likely to be what the programmer
+ /// had in mind most of the time.
+ ///
+ /// ```
+ /// # struct S;
+ /// # fn doc() -> S {
+ /// if return S {} {}
+ /// # unreachable!()
+ /// # }
+ ///
+ /// // parsed by rustc as:
+ /// //
+ /// // if (return (S {})) {
+ /// // }
+ /// //
+ /// // but could equally well have been this other arbitrary choice:
+ /// //
+ /// // if (return S) {
+ /// // }
+ /// // {}
+ /// ```
+ ///
+ /// Note the grammar ambiguity on trailing braces is distinct from
+ /// precedence and is not captured by assigning a precedence level to the
+ /// braced struct init expr in relation to other operators. This can be
+ /// illustrated by `return 0..S {}` vs `match 0..S {}`. The former parses as
+ /// `return (0..(S {}))` implying tighter precedence for struct init than
+ /// `..`, while the latter parses as `match (0..S) {}` implying tighter
+ /// precedence for `..` than struct init, a contradiction.
+ #[cfg(all(feature = "full", feature = "parsing"))]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "full", feature = "parsing"))))]
+ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
+ parsing::ambiguous_expr(input, parsing::AllowStruct(false))
+ }
+
+ /// An alternative to the primary `Expr::parse` parser (from the [`Parse`]
+ /// trait) for syntactic positions in which expression boundaries are placed
+ /// more eagerly than done by the typical expression grammar. This includes
+ /// expressions at the head of a statement or in the right-hand side of a
+ /// `match` arm.
+ ///
+ /// [`Parse`]: crate::parse::Parse
+ ///
+ /// Compare the following cases:
+ ///
+ /// 1.
+ /// ```
+ /// # let result = ();
+ /// # let guard = false;
+ /// # let cond = true;
+ /// # let f = true;
+ /// # let g = f;
+ /// #
+ /// let _ = match result {
+ /// () if guard => if cond { f } else { g }
+ /// () => false,
+ /// };
+ /// ```
+ ///
+ /// 2.
+ /// ```
+ /// # let cond = true;
+ /// # let f = ();
+ /// # let g = f;
+ /// #
+ /// let _ = || {
+ /// if cond { f } else { g }
+ /// ()
+ /// };
+ /// ```
+ ///
+ /// 3.
+ /// ```
+ /// # let cond = true;
+ /// # let f = || ();
+ /// # let g = f;
+ /// #
+ /// let _ = [if cond { f } else { g } ()];
+ /// ```
+ ///
+ /// The same sequence of tokens `if cond { f } else { g } ()` appears in
+ /// expression position 3 times. The first two syntactic positions use eager
+ /// placement of expression boundaries, and parse as `Expr::If`, with the
+ /// adjacent `()` becoming `Pat::Tuple` or `Expr::Tuple`. In contrast, the
+ /// third case uses standard expression boundaries and parses as
+ /// `Expr::Call`.
+ ///
+ /// As with [`parse_without_eager_brace`], this ambiguity in the Rust
+ /// grammar is independent of precedence.
+ ///
+ /// [`parse_without_eager_brace`]: Self::parse_without_eager_brace
+ #[cfg(all(feature = "full", feature = "parsing"))]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "full", feature = "parsing"))))]
+ pub fn parse_with_earlier_boundary_rule(input: ParseStream) -> Result<Expr> {
+ parsing::parse_with_earlier_boundary_rule(input)
+ }
+
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+ Expr::Array(ExprArray { attrs, .. })
+ | Expr::Assign(ExprAssign { attrs, .. })
+ | Expr::Async(ExprAsync { attrs, .. })
+ | Expr::Await(ExprAwait { attrs, .. })
+ | Expr::Binary(ExprBinary { attrs, .. })
+ | Expr::Block(ExprBlock { attrs, .. })
+ | Expr::Break(ExprBreak { attrs, .. })
+ | Expr::Call(ExprCall { attrs, .. })
+ | Expr::Cast(ExprCast { attrs, .. })
+ | Expr::Closure(ExprClosure { attrs, .. })
+ | Expr::Const(ExprConst { attrs, .. })
+ | Expr::Continue(ExprContinue { attrs, .. })
+ | Expr::Field(ExprField { attrs, .. })
+ | Expr::ForLoop(ExprForLoop { attrs, .. })
+ | Expr::Group(ExprGroup { attrs, .. })
+ | Expr::If(ExprIf { attrs, .. })
+ | Expr::Index(ExprIndex { attrs, .. })
+ | Expr::Infer(ExprInfer { attrs, .. })
+ | Expr::Let(ExprLet { attrs, .. })
+ | Expr::Lit(ExprLit { attrs, .. })
+ | Expr::Loop(ExprLoop { attrs, .. })
+ | Expr::Macro(ExprMacro { attrs, .. })
+ | Expr::Match(ExprMatch { attrs, .. })
+ | Expr::MethodCall(ExprMethodCall { attrs, .. })
+ | Expr::Paren(ExprParen { attrs, .. })
+ | Expr::Path(ExprPath { attrs, .. })
+ | Expr::Range(ExprRange { attrs, .. })
+ | Expr::Reference(ExprReference { attrs, .. })
+ | Expr::Repeat(ExprRepeat { attrs, .. })
+ | Expr::Return(ExprReturn { attrs, .. })
+ | Expr::Struct(ExprStruct { attrs, .. })
+ | Expr::Try(ExprTry { attrs, .. })
+ | Expr::TryBlock(ExprTryBlock { attrs, .. })
+ | Expr::Tuple(ExprTuple { attrs, .. })
+ | Expr::Unary(ExprUnary { attrs, .. })
+ | Expr::Unsafe(ExprUnsafe { attrs, .. })
+ | Expr::While(ExprWhile { attrs, .. })
+ | Expr::Yield(ExprYield { attrs, .. }) => mem::replace(attrs, new),
+ Expr::Verbatim(_) => Vec::new(),
+ }
+ }
+}
+
+ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum Member {
+ /// A named field like `self.x`.
+ Named(Ident),
+ /// An unnamed field like `self.0`.
+ Unnamed(Index),
+ }
+}
+
+impl From<Ident> for Member {
+ fn from(ident: Ident) -> Member {
+ Member::Named(ident)
+ }
+}
+
+impl From<Index> for Member {
+ fn from(index: Index) -> Member {
+ Member::Unnamed(index)
+ }
+}
+
+impl From<usize> for Member {
+ fn from(index: usize) -> Member {
+ Member::Unnamed(Index::from(index))
+ }
+}
+
+impl Eq for Member {}
+
+impl PartialEq for Member {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Member::Named(this), Member::Named(other)) => this == other,
+ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
+ _ => false,
+ }
+ }
+}
+
+impl Hash for Member {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ match self {
+ Member::Named(m) => m.hash(state),
+ Member::Unnamed(m) => m.hash(state),
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+impl IdentFragment for Member {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Member::Named(m) => Display::fmt(m, formatter),
+ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
+ }
+ }
+
+ fn span(&self) -> Option<Span> {
+ match self {
+ Member::Named(m) => Some(m.span()),
+ Member::Unnamed(m) => Some(m.span),
+ }
+ }
+}
+
+ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
+}
+
+impl From<usize> for Index {
+ fn from(index: usize) -> Index {
+ assert!(index < u32::MAX as usize);
+ Index {
+ index: index as u32,
+ span: Span::call_site(),
+ }
+ }
+}
+
+impl Eq for Index {}
+
+impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+}
+
+impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+}
+
+#[cfg(feature = "printing")]
+impl IdentFragment for Index {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.index, formatter)
+ }
+
+ fn span(&self) -> Option<Span> {
+ Some(self.span)
+ }
+}
+
+ast_struct! {
+ /// A field-value pair in a struct literal.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct FieldValue {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+
+ /// The colon in `Struct { x: x }`. If written in shorthand like
+ /// `Struct { x }`, there is no colon.
+ pub colon_token: Option<Token![:]>,
+
+ pub expr: Expr,
+ }
+}
+
+#[cfg(feature = "full")]
+ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+ }
+}
+
+#[cfg(feature = "full")]
+ast_struct! {
+ /// One arm of a `match` expression: `0..=10 => { return true; }`.
+ ///
+ /// As in:
+ ///
+ /// ```
+ /// # fn f() -> bool {
+ /// # let n = 0;
+ /// match n {
+ /// 0..=10 => {
+ /// return true;
+ /// }
+ /// // ...
+ /// # _ => {}
+ /// }
+ /// # false
+ /// # }
+ /// ```
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+ pub guard: Option<(Token![if], Box<Expr>)>,
+ pub fat_arrow_token: Token![=>],
+ pub body: Box<Expr>,
+ pub comma: Option<Token![,]>,
+ }
+}
+
+#[cfg(feature = "full")]
+ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+ /// Inclusive at the beginning and end.
+ Closed(Token![..=]),
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ use crate::attr;
+ use crate::attr::Attribute;
+ #[cfg(feature = "full")]
+ use crate::classify;
+ use crate::error::{Error, Result};
+ #[cfg(feature = "full")]
+ use crate::expr::{
+ Arm, ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprClosure,
+ ExprConst, ExprContinue, ExprForLoop, ExprIf, ExprInfer, ExprLet, ExprLoop, ExprMatch,
+ ExprRange, ExprRepeat, ExprReturn, ExprTry, ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile,
+ ExprYield, Label, RangeLimits,
+ };
+ use crate::expr::{
+ Expr, ExprBinary, ExprCall, ExprCast, ExprField, ExprGroup, ExprIndex, ExprLit, ExprMacro,
+ ExprMethodCall, ExprParen, ExprPath, ExprReference, ExprStruct, ExprUnary, FieldValue,
+ Index, Member,
+ };
+ #[cfg(feature = "full")]
+ use crate::ext::IdentExt as _;
+ #[cfg(feature = "full")]
+ use crate::generics::BoundLifetimes;
+ use crate::ident::Ident;
+ #[cfg(feature = "full")]
+ use crate::lifetime::Lifetime;
+ use crate::lit::{Lit, LitFloat, LitInt};
+ use crate::mac::{self, Macro};
+ use crate::op::BinOp;
+ use crate::parse::discouraged::Speculative as _;
+ #[cfg(feature = "full")]
+ use crate::parse::ParseBuffer;
+ use crate::parse::{Parse, ParseStream};
+ #[cfg(feature = "full")]
+ use crate::pat::{Pat, PatType};
+ use crate::path::{self, AngleBracketedGenericArguments, Path, QSelf};
+ use crate::precedence::Precedence;
+ use crate::punctuated::Punctuated;
+ #[cfg(feature = "full")]
+ use crate::stmt::Block;
+ use crate::token;
+ use crate::ty;
+ #[cfg(feature = "full")]
+ use crate::ty::{ReturnType, Type};
+ use crate::verbatim;
+ #[cfg(feature = "full")]
+ use proc_macro2::TokenStream;
+ use std::mem;
+
+ mod kw {
+ crate::custom_keyword!(builtin);
+ crate::custom_keyword!(raw);
+ }
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+ #[cfg(feature = "full")]
+ pub(super) struct AllowStruct(pub bool);
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Expr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ ambiguous_expr(
+ input,
+ #[cfg(feature = "full")]
+ AllowStruct(true),
+ )
+ }
+ }
+
+ #[cfg(feature = "full")]
+ pub(super) fn parse_with_earlier_boundary_rule(input: ParseStream) -> Result<Expr> {
+ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(token::Group) {
+ let allow_struct = AllowStruct(true);
+ let atom = expr_group(input, allow_struct)?;
+ if continue_parsing_early(&atom) {
+ trailer_helper(input, atom)?
+ } else {
+ atom
+ }
+ } else if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+ Expr::While(input.parse()?)
+ } else if input.peek(Token![for])
+ && !(input.peek2(Token![<]) && (input.peek3(Lifetime) || input.peek3(Token![>])))
+ {
+ Expr::ForLoop(input.parse()?)
+ } else if input.peek(Token![loop]) {
+ Expr::Loop(input.parse()?)
+ } else if input.peek(Token![match]) {
+ Expr::Match(input.parse()?)
+ } else if input.peek(Token![try]) && input.peek2(token::Brace) {
+ Expr::TryBlock(input.parse()?)
+ } else if input.peek(Token![unsafe]) {
+ Expr::Unsafe(input.parse()?)
+ } else if input.peek(Token![const]) && input.peek2(token::Brace) {
+ Expr::Const(input.parse()?)
+ } else if input.peek(token::Brace) {
+ Expr::Block(input.parse()?)
+ } else if input.peek(Lifetime) {
+ atom_labeled(input)?
+ } else {
+ let allow_struct = AllowStruct(true);
+ unary_expr(input, allow_struct)?
+ };
+
+ if continue_parsing_early(&expr) {
+ attrs.extend(expr.replace_attrs(Vec::new()));
+ expr.replace_attrs(attrs);
+
+ let allow_struct = AllowStruct(true);
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ }
+
+ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+ expr.replace_attrs(attrs);
+
+ let allow_struct = AllowStruct(true);
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ }
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+ expr.replace_attrs(attrs);
+ Ok(expr)
+ }
+
+ #[cfg(feature = "full")]
+ impl Copy for AllowStruct {}
+
+ #[cfg(feature = "full")]
+ impl Clone for AllowStruct {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn can_begin_expr(input: ParseStream) -> bool {
+ input.peek(Ident::peek_any) // value name or keyword
+ || input.peek(token::Paren) // tuple
+ || input.peek(token::Bracket) // array
+ || input.peek(token::Brace) // block
+ || input.peek(Lit) // literal
+ || input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
+ || input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
+ || input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
+ || input.peek(Token![|]) && !input.peek(Token![|=]) // closure
+ || input.peek(Token![&]) && !input.peek(Token![&=]) // reference
+ || input.peek(Token![..]) // range notation
+ || input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
+ || input.peek(Token![::]) // global path
+ || input.peek(Lifetime) // labeled loop
+ || input.peek(Token![#]) // expression attributes
+ }
+
+ #[cfg(feature = "full")]
+ fn parse_expr(
+ input: ParseStream,
+ mut lhs: Expr,
+ allow_struct: AllowStruct,
+ base: Precedence,
+ ) -> Result<Expr> {
+ loop {
+ let ahead = input.fork();
+ if let Expr::Range(ExprRange { end: Some(_), .. }) = lhs {
+ // A range with an upper bound cannot be the left-hand side of
+ // another binary operator.
+ break;
+ } else if let Ok(op) = ahead.parse::<BinOp>() {
+ let precedence = Precedence::of_binop(&op);
+ if precedence < base {
+ break;
+ }
+ if precedence == Precedence::Compare {
+ if let Expr::Binary(lhs) = &lhs {
+ if Precedence::of_binop(&lhs.op) == Precedence::Compare {
+ break;
+ }
+ }
+ }
+ input.advance_to(&ahead);
+ let right = parse_binop_rhs(input, allow_struct, precedence)?;
+ lhs = Expr::Binary(ExprBinary {
+ attrs: Vec::new(),
+ left: Box::new(lhs),
+ op,
+ right,
+ });
+ } else if Precedence::Assign >= base && input.peek(Token![=]) && !input.peek(Token![=>])
+ {
+ let eq_token: Token![=] = input.parse()?;
+ let right = parse_binop_rhs(input, allow_struct, Precedence::Assign)?;
+ lhs = Expr::Assign(ExprAssign {
+ attrs: Vec::new(),
+ left: Box::new(lhs),
+ eq_token,
+ right,
+ });
+ } else if Precedence::Range >= base && input.peek(Token![..]) {
+ let limits: RangeLimits = input.parse()?;
+ let end = parse_range_end(input, &limits, allow_struct)?;
+ lhs = Expr::Range(ExprRange {
+ attrs: Vec::new(),
+ start: Some(Box::new(lhs)),
+ limits,
+ end,
+ });
+ } else if Precedence::Cast >= base && input.peek(Token![as]) {
+ let as_token: Token![as] = input.parse()?;
+ let allow_plus = false;
+ let allow_group_generic = false;
+ let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
+ check_cast(input)?;
+ lhs = Expr::Cast(ExprCast {
+ attrs: Vec::new(),
+ expr: Box::new(lhs),
+ as_token,
+ ty: Box::new(ty),
+ });
+ } else {
+ break;
+ }
+ }
+ Ok(lhs)
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn parse_expr(input: ParseStream, mut lhs: Expr, base: Precedence) -> Result<Expr> {
+ loop {
+ let ahead = input.fork();
+ if let Ok(op) = ahead.parse::<BinOp>() {
+ let precedence = Precedence::of_binop(&op);
+ if precedence < base {
+ break;
+ }
+ if precedence == Precedence::Compare {
+ if let Expr::Binary(lhs) = &lhs {
+ if Precedence::of_binop(&lhs.op) == Precedence::Compare {
+ break;
+ }
+ }
+ }
+ input.advance_to(&ahead);
+ let right = parse_binop_rhs(input, precedence)?;
+ lhs = Expr::Binary(ExprBinary {
+ attrs: Vec::new(),
+ left: Box::new(lhs),
+ op,
+ right,
+ });
+ } else if Precedence::Cast >= base && input.peek(Token![as]) {
+ let as_token: Token![as] = input.parse()?;
+ let allow_plus = false;
+ let allow_group_generic = false;
+ let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
+ check_cast(input)?;
+ lhs = Expr::Cast(ExprCast {
+ attrs: Vec::new(),
+ expr: Box::new(lhs),
+ as_token,
+ ty: Box::new(ty),
+ });
+ } else {
+ break;
+ }
+ }
+ Ok(lhs)
+ }
+
+ fn parse_binop_rhs(
+ input: ParseStream,
+ #[cfg(feature = "full")] allow_struct: AllowStruct,
+ precedence: Precedence,
+ ) -> Result<Box<Expr>> {
+ let mut rhs = unary_expr(
+ input,
+ #[cfg(feature = "full")]
+ allow_struct,
+ )?;
+ loop {
+ let next = peek_precedence(input);
+ if next > precedence || next == precedence && precedence == Precedence::Assign {
+ rhs = parse_expr(
+ input,
+ rhs,
+ #[cfg(feature = "full")]
+ allow_struct,
+ next,
+ )?;
+ } else {
+ return Ok(Box::new(rhs));
+ }
+ }
+ }
+
+ fn peek_precedence(input: ParseStream) -> Precedence {
+ if let Ok(op) = input.fork().parse() {
+ Precedence::of_binop(&op)
+ } else if input.peek(Token![=]) && !input.peek(Token![=>]) {
+ Precedence::Assign
+ } else if input.peek(Token![..]) {
+ Precedence::Range
+ } else if input.peek(Token![as]) {
+ Precedence::Cast
+ } else {
+ Precedence::Any
+ }
+ }
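+ // Sketch of the precedence climbing above: for `a + b * c`, parse_expr
+ // consumes `+`, then parse_binop_rhs parses `b`, sees via peek_precedence
+ // that `*` binds tighter, and recurses so the right-hand side becomes
+ // `b * c`, yielding `a + (b * c)` rather than `(a + b) * c`.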
+
+ // Parse an arbitrary expression.
+ pub(super) fn ambiguous_expr(
+ input: ParseStream,
+ #[cfg(feature = "full")] allow_struct: AllowStruct,
+ ) -> Result<Expr> {
+ let lhs = unary_expr(
+ input,
+ #[cfg(feature = "full")]
+ allow_struct,
+ )?;
+ parse_expr(
+ input,
+ lhs,
+ #[cfg(feature = "full")]
+ allow_struct,
+ Precedence::Any,
+ )
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
+ let mut attrs = Vec::new();
+ while !input.peek(token::Group) && input.peek(Token![#]) {
+ attrs.push(input.call(attr::parsing::single_parse_outer)?);
+ }
+ Ok(attrs)
+ }
+
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+ let begin = input.fork();
+ let attrs = input.call(expr_attrs)?;
+ if input.peek(token::Group) {
+ return trailer_expr(begin, attrs, input, allow_struct);
+ }
+
+ if input.peek(Token![&]) {
+ let and_token: Token![&] = input.parse()?;
+ let raw: Option<kw::raw> = if input.peek(kw::raw)
+ && (input.peek2(Token![mut]) || input.peek2(Token![const]))
+ {
+ Some(input.parse()?)
+ } else {
+ None
+ };
+ let mutability: Option<Token![mut]> = input.parse()?;
+ if raw.is_some() && mutability.is_none() {
+ input.parse::<Token![const]>()?;
+ }
+ let expr = Box::new(unary_expr(input, allow_struct)?);
+ if raw.is_some() {
+ Ok(Expr::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+ and_token,
+ mutability,
+ expr,
+ }))
+ }
+ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ expr_unary(input, attrs, allow_struct).map(Expr::Unary)
+ } else {
+ trailer_expr(begin, attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream) -> Result<Expr> {
+ if input.peek(Token![&]) {
+ Ok(Expr::Reference(ExprReference {
+ attrs: Vec::new(),
+ and_token: input.parse()?,
+ mutability: input.parse()?,
+ expr: Box::new(unary_expr(input)?),
+ }))
+ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input)?),
+ }))
+ } else {
+ trailer_expr(input)
+ }
+ }
+
+ // <atom> (..<args>) ...
+ // <atom> . <ident> (..<args>) ...
+ // <atom> . <ident> ...
+ // <atom> . <lit> ...
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+ fn trailer_expr(
+ begin: ParseBuffer,
+ mut attrs: Vec<Attribute>,
+ input: ParseStream,
+ allow_struct: AllowStruct,
+ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+ if let Expr::Verbatim(tokens) = &mut e {
+ *tokens = verbatim::between(&begin, input);
+ } else {
+ let inner_attrs = e.replace_attrs(Vec::new());
+ attrs.extend(inner_attrs);
+ e.replace_attrs(attrs);
+ }
+
+ Ok(e)
+ }
+
+ #[cfg(feature = "full")]
+ fn trailer_helper(input: ParseStream, mut e: Expr) -> Result<Expr> {
+ loop {
+ if input.peek(token::Paren) {
+ let content;
+ e = Expr::Call(ExprCall {
+ attrs: Vec::new(),
+ func: Box::new(e),
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse, Token![,])?,
+ });
+ } else if input.peek(Token![.])
+ && !input.peek(Token![..])
+ && match e {
+ Expr::Range(_) => false,
+ _ => true,
+ }
+ {
+ let mut dot_token: Token![.] = input.parse()?;
+
+ let float_token: Option<LitFloat> = input.parse()?;
+ if let Some(float_token) = float_token {
+ if multi_index(&mut e, &mut dot_token, float_token)? {
+ continue;
+ }
+ }
+
+ let await_token: Option<Token![await]> = input.parse()?;
+ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+ await_token,
+ });
+ continue;
+ }
+
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(AngleBracketedGenericArguments::parse_turbofish(input)?)
+ } else {
+ None
+ };
+
+ if turbofish.is_some() || input.peek(token::Paren) {
+ if let Member::Named(method) = member {
+ let content;
+ e = Expr::MethodCall(ExprMethodCall {
+ attrs: Vec::new(),
+ receiver: Box::new(e),
+ dot_token,
+ method,
+ turbofish,
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse, Token![,])?,
+ });
+ continue;
+ }
+ }
+
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+ member,
+ });
+ } else if input.peek(token::Bracket) {
+ let content;
+ e = Expr::Index(ExprIndex {
+ attrs: Vec::new(),
+ expr: Box::new(e),
+ bracket_token: bracketed!(content in input),
+ index: content.parse()?,
+ });
+ } else if input.peek(Token![?]) {
+ e = Expr::Try(ExprTry {
+ attrs: Vec::new(),
+ expr: Box::new(e),
+ question_token: input.parse()?,
+ });
+ } else {
+ break;
+ }
+ }
+ Ok(e)
+ }
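+ // Illustrative example of the trailer loop above: `f(x).await?.field[0]` is
+ // folded left to right into Call, Await, Try, Field and Index nodes, one
+ // trailer per iteration.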
+
+ #[cfg(not(feature = "full"))]
+ fn trailer_expr(input: ParseStream) -> Result<Expr> {
+ let mut e = atom_expr(input)?;
+
+ loop {
+ if input.peek(token::Paren) {
+ let content;
+ e = Expr::Call(ExprCall {
+ attrs: Vec::new(),
+ func: Box::new(e),
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse, Token![,])?,
+ });
+ } else if input.peek(Token![.])
+ && !input.peek(Token![..])
+ && !input.peek2(Token![await])
+ {
+ let mut dot_token: Token![.] = input.parse()?;
+
+ let float_token: Option<LitFloat> = input.parse()?;
+ if let Some(float_token) = float_token {
+ if multi_index(&mut e, &mut dot_token, float_token)? {
+ continue;
+ }
+ }
+
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ let colon2_token: Token![::] = input.parse()?;
+ let turbofish =
+ AngleBracketedGenericArguments::do_parse(Some(colon2_token), input)?;
+ Some(turbofish)
+ } else {
+ None
+ };
+
+ if turbofish.is_some() || input.peek(token::Paren) {
+ if let Member::Named(method) = member {
+ let content;
+ e = Expr::MethodCall(ExprMethodCall {
+ attrs: Vec::new(),
+ receiver: Box::new(e),
+ dot_token,
+ method,
+ turbofish,
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse, Token![,])?,
+ });
+ continue;
+ }
+ }
+
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+ member,
+ });
+ } else if input.peek(token::Bracket) {
+ let content;
+ e = Expr::Index(ExprIndex {
+ attrs: Vec::new(),
+ expr: Box::new(e),
+ bracket_token: bracketed!(content in input),
+ index: content.parse()?,
+ });
+ } else {
+ break;
+ }
+ }
+
+ Ok(e)
+ }
+
+ // Parse all atomic expressions which don't have to worry about precedence
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+ if input.peek(token::Group) {
+ expr_group(input, allow_struct)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+ } else if input.peek(Token![async])
+ && (input.peek2(token::Brace) || input.peek2(Token![move]) && input.peek3(token::Brace))
+ {
+ input.parse().map(Expr::Async)
+ } else if input.peek(Token![try]) && input.peek2(token::Brace) {
+ input.parse().map(Expr::TryBlock)
+ } else if input.peek(Token![|])
+ || input.peek(Token![move])
+ || input.peek(Token![for])
+ && input.peek2(Token![<])
+ && (input.peek3(Lifetime) || input.peek3(Token![>]))
+ || input.peek(Token![const]) && !input.peek2(token::Brace)
+ || input.peek(Token![static])
+ || input.peek(Token![async]) && (input.peek2(Token![|]) || input.peek2(Token![move]))
+ {
+ expr_closure(input, allow_struct).map(Expr::Closure)
+ } else if input.peek(kw::builtin) && input.peek2(Token![#]) {
+ expr_builtin(input)
+ } else if input.peek(Ident)
+ || input.peek(Token![::])
+ || input.peek(Token![<])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+ || input.peek(Token![crate])
+ || input.peek(Token![try]) && (input.peek2(Token![!]) || input.peek2(Token![::]))
+ {
+ path_or_macro_or_struct(input, allow_struct)
+ } else if input.peek(token::Paren) {
+ paren_or_tuple(input)
+ } else if input.peek(Token![break]) {
+ expr_break(input, allow_struct).map(Expr::Break)
+ } else if input.peek(Token![continue]) {
+ input.parse().map(Expr::Continue)
+ } else if input.peek(Token![return]) {
+ input.parse().map(Expr::Return)
+ } else if input.peek(token::Bracket) {
+ array_or_repeat(input)
+ } else if input.peek(Token![let]) {
+ expr_let(input, allow_struct).map(Expr::Let)
+ } else if input.peek(Token![if]) {
+ input.parse().map(Expr::If)
+ } else if input.peek(Token![while]) {
+ input.parse().map(Expr::While)
+ } else if input.peek(Token![for]) {
+ input.parse().map(Expr::ForLoop)
+ } else if input.peek(Token![loop]) {
+ input.parse().map(Expr::Loop)
+ } else if input.peek(Token![match]) {
+ input.parse().map(Expr::Match)
+ } else if input.peek(Token![yield]) {
+ input.parse().map(Expr::Yield)
+ } else if input.peek(Token![unsafe]) {
+ input.parse().map(Expr::Unsafe)
+ } else if input.peek(Token![const]) {
+ input.parse().map(Expr::Const)
+ } else if input.peek(token::Brace) {
+ input.parse().map(Expr::Block)
+ } else if input.peek(Token![..]) {
+ expr_range(input, allow_struct).map(Expr::Range)
+ } else if input.peek(Token![_]) {
+ input.parse().map(Expr::Infer)
+ } else if input.peek(Lifetime) {
+ atom_labeled(input)
+ } else {
+ Err(input.error("expected an expression"))
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn atom_labeled(input: ParseStream) -> Result<Expr> {
+ let the_label: Label = input.parse()?;
+ let mut expr = if input.peek(Token![while]) {
+ Expr::While(input.parse()?)
+ } else if input.peek(Token![for]) {
+ Expr::ForLoop(input.parse()?)
+ } else if input.peek(Token![loop]) {
+ Expr::Loop(input.parse()?)
+ } else if input.peek(token::Brace) {
+ Expr::Block(input.parse()?)
+ } else {
+ return Err(input.error("expected loop or block expression"));
+ };
+ match &mut expr {
+ Expr::While(ExprWhile { label, .. })
+ | Expr::ForLoop(ExprForLoop { label, .. })
+ | Expr::Loop(ExprLoop { label, .. })
+ | Expr::Block(ExprBlock { label, .. }) => *label = Some(the_label),
+ _ => unreachable!(),
+ }
+ Ok(expr)
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn atom_expr(input: ParseStream) -> Result<Expr> {
+ if input.peek(token::Group) {
+ expr_group(input)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+ } else if input.peek(token::Paren) {
+ input.call(expr_paren).map(Expr::Paren)
+ } else if input.peek(Ident)
+ || input.peek(Token![::])
+ || input.peek(Token![<])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input)
+ } else if input.is_empty() {
+ Err(input.error("expected an expression"))
+ } else {
+ if input.peek(token::Brace) {
+ let scan = input.fork();
+ let content;
+ braced!(content in scan);
+ if content.parse::<Expr>().is_ok() && content.is_empty() {
+ let expr_block = verbatim::between(input, &scan);
+ input.advance_to(&scan);
+ return Ok(Expr::Verbatim(expr_block));
+ }
+ }
+ Err(input.error("unsupported expression; enable syn's
features=[\"full\"]"))
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_builtin(input: ParseStream) -> Result<Expr> {
+ let begin = input.fork();
+
+ input.parse::<kw::builtin>()?;
+ input.parse::<Token![#]>()?;
+ input.parse::<Ident>()?;
+
+ let args;
+ parenthesized!(args in input);
+ args.parse::<TokenStream>()?;
+
+ Ok(Expr::Verbatim(verbatim::between(&begin, input)))
+ }
+
+ fn path_or_macro_or_struct(
+ input: ParseStream,
+ #[cfg(feature = "full")] allow_struct: AllowStruct,
+ ) -> Result<Expr> {
+ let (qself, path) = path::parsing::qpath(input, true)?;
+ rest_of_path_or_macro_or_struct(
+ qself,
+ path,
+ input,
+ #[cfg(feature = "full")]
+ allow_struct,
+ )
+ }
+
+ fn rest_of_path_or_macro_or_struct(
+ qself: Option<QSelf>,
+ path: Path,
+ input: ParseStream,
+ #[cfg(feature = "full")] allow_struct: AllowStruct,
+ ) -> Result<Expr> {
+ if qself.is_none()
+ && input.peek(Token![!])
+ && !input.peek(Token![!=])
+ && path.is_mod_style()
+ {
+ let bang_token: Token![!] = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+ return Ok(Expr::Macro(ExprMacro {
+ attrs: Vec::new(),
+ mac: Macro {
+ path,
+ bang_token,
+ delimiter,
+ tokens,
+ },
+ }));
+ }
+
+ #[cfg(not(feature = "full"))]
+ let allow_struct = (true,);
+ if allow_struct.0 && input.peek(token::Brace) {
+ return expr_struct_helper(input, qself, path).map(Expr::Struct);
+ }
+
+ Ok(Expr::Path(ExprPath {
+ attrs: Vec::new(),
+ qself,
+ path,
+ }))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprMacro {
+ attrs: Vec::new(),
+ mac: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn paren_or_tuple(input: ParseStream) -> Result<Expr> {
+ let content;
+ let paren_token = parenthesized!(content in input);
+ if content.is_empty() {
+ return Ok(Expr::Tuple(ExprTuple {
+ attrs: Vec::new(),
+ paren_token,
+ elems: Punctuated::new(),
+ }));
+ }
+
+ let first: Expr = content.parse()?;
+ if content.is_empty() {
+ return Ok(Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+ paren_token,
+ expr: Box::new(first),
+ }));
+ }
+
+ let mut elems = Punctuated::new();
+ elems.push_value(first);
+ while !content.is_empty() {
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ if content.is_empty() {
+ break;
+ }
+ let value = content.parse()?;
+ elems.push_value(value);
+ }
+ Ok(Expr::Tuple(ExprTuple {
+ attrs: Vec::new(),
+ paren_token,
+ elems,
+ }))
+ }
+
+ #[cfg(feature = "full")]
+ fn array_or_repeat(input: ParseStream) -> Result<Expr> {
+ let content;
+ let bracket_token = bracketed!(content in input);
+ if content.is_empty() {
+ return Ok(Expr::Array(ExprArray {
+ attrs: Vec::new(),
+ bracket_token,
+ elems: Punctuated::new(),
+ }));
+ }
+
+ let first: Expr = content.parse()?;
+ if content.is_empty() || content.peek(Token![,]) {
+ let mut elems = Punctuated::new();
+ elems.push_value(first);
+ while !content.is_empty() {
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ if content.is_empty() {
+ break;
+ }
+ let value = content.parse()?;
+ elems.push_value(value);
+ }
+ Ok(Expr::Array(ExprArray {
+ attrs: Vec::new(),
+ bracket_token,
+ elems,
+ }))
+ } else if content.peek(Token![;]) {
+ let semi_token: Token![;] = content.parse()?;
+ let len: Expr = content.parse()?;
+ Ok(Expr::Repeat(ExprRepeat {
+ attrs: Vec::new(),
+ bracket_token,
+ expr: Box::new(first),
+ semi_token,
+ len: Box::new(len),
+ }))
+ } else {
+ Err(content.error("expected `,` or `;`"))
+ }
+ }
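+ // For example, `[a, b, c]` takes the comma branch above and becomes an
+ // Expr::Array, while `[0u8; 32]` takes the semicolon branch and becomes an
+ // Expr::Repeat.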
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprArray {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ let bracket_token = bracketed!(content in input);
+ let mut elems = Punctuated::new();
+
+ while !content.is_empty() {
+ let first: Expr = content.parse()?;
+ elems.push_value(first);
+ if content.is_empty() {
+ break;
+ }
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ }
+
+ Ok(ExprArray {
+ attrs: Vec::new(),
+ bracket_token,
+ elems,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprRepeat {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ Ok(ExprRepeat {
+ bracket_token: bracketed!(content in input),
+ attrs: Vec::new(),
+ expr: content.parse()?,
+ semi_token: content.parse()?,
+ len: content.parse()?,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn continue_parsing_early(mut expr: &Expr) -> bool {
+ while let Expr::Group(group) = expr {
+ expr = &group.expr;
+ }
+ match expr {
+ Expr::If(_)
+ | Expr::While(_)
+ | Expr::ForLoop(_)
+ | Expr::Loop(_)
+ | Expr::Match(_)
+ | Expr::TryBlock(_)
+ | Expr::Unsafe(_)
+ | Expr::Const(_)
+ | Expr::Block(_) => false,
+ _ => true,
+ }
+ }
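+ // Illustrative note: block-like expressions return false here so a trailing
+ // operator is not absorbed into them; this mirrors statement position, where
+ // `match x {} - 1` is a match statement followed by a separate `- 1`
+ // expression rather than a subtraction.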
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprLit {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprLit {
+ attrs: Vec::new(),
+ lit: input.parse()?,
+ })
+ }
+ }
+
+ fn expr_group(
+ input: ParseStream,
+ #[cfg(feature = "full")] allow_struct: AllowStruct,
+ ) -> Result<Expr> {
+ let group = crate::group::parse_group(input)?;
+ let mut inner: Expr = group.content.parse()?;
+
+ match inner {
+ Expr::Path(mut expr) if expr.attrs.is_empty() => {
+ let grouped_len = expr.path.segments.len();
+ Path::parse_rest(input, &mut expr.path, true)?;
+ match rest_of_path_or_macro_or_struct(
+ expr.qself,
+ expr.path,
+ input,
+ #[cfg(feature = "full")]
+ allow_struct,
+ )? {
+ Expr::Path(expr) if expr.path.segments.len() == grouped_len => {
+ inner = Expr::Path(expr);
+ }
+ extended => return Ok(extended),
+ }
+ }
+ _ => {}
+ }
+
+ Ok(Expr::Group(ExprGroup {
+ attrs: Vec::new(),
+ group_token: group.token,
+ expr: Box::new(inner),
+ }))
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+ expr_paren(input)
+ }
+ }
+
+ fn expr_paren(input: ParseStream) -> Result<ExprParen> {
+ let content;
+ Ok(ExprParen {
+ attrs: Vec::new(),
+ paren_token: parenthesized!(content in input),
+ expr: content.parse()?,
+ })
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprLet {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_struct = AllowStruct(true);
+ expr_let(input, allow_struct)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_let(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprLet> {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+ pat: Box::new(Pat::parse_multi_with_leading_vert(input)?),
+ eq_token: input.parse()?,
+ expr: Box::new({
+ let lhs = unary_expr(input, allow_struct)?;
+ parse_expr(input, lhs, allow_struct, Precedence::Compare)?
+ }),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+
+ let mut clauses = Vec::new();
+ let mut expr;
+ loop {
+ let if_token: Token![if] = input.parse()?;
+ let cond = input.call(Expr::parse_without_eager_brace)?;
+ let then_branch: Block = input.parse()?;
+
+ expr = ExprIf {
+ attrs: Vec::new(),
+ if_token,
+ cond: Box::new(cond),
+ then_branch,
+ else_branch: None,
+ };
+
+ if !input.peek(Token![else]) {
+ break;
+ }
+
+ let else_token: Token![else] = input.parse()?;
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Token![if]) {
+ expr.else_branch = Some((else_token, Box::new(Expr::PLACEHOLDER)));
+ clauses.push(expr);
+ continue;
+ } else if lookahead.peek(token::Brace) {
+ expr.else_branch = Some((
+ else_token,
+ Box::new(Expr::Block(ExprBlock {
+ attrs: Vec::new(),
+ label: None,
+ block: input.parse()?,
+ })),
+ ));
+ break;
+ } else {
+ return Err(lookahead.error());
+ }
+ }
+
+ while let Some(mut prev) = clauses.pop() {
+ *prev.else_branch.as_mut().unwrap().1 = Expr::If(expr);
+ expr = prev;
+ }
+ expr.attrs = attrs;
+ Ok(expr)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprInfer {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprInfer {
+ attrs: input.call(Attribute::parse_outer)?,
+ underscore_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+ let pat = Pat::parse_multi_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+ attrs,
+ label,
+ for_token,
+ pat: Box::new(pat),
+ in_token,
+ expr: Box::new(expr),
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+ attrs,
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+
+ let mut arms = Vec::new();
+ while !content.is_empty() {
+ arms.push(content.call(Arm::parse)?);
+ }
+
+ Ok(ExprMatch {
+ attrs,
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+ arms,
+ })
+ }
+ }
+
+ macro_rules! impl_by_parsing_expr {
+ (
+ $(
+ $expr_type:ty, $variant:ident, $msg:expr,
+ )*
+ ) => {
+ $(
+ #[cfg(all(feature = "full", feature = "printing"))]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for $expr_type {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut expr: Expr = input.parse()?;
+ loop {
+ match expr {
+ Expr::$variant(inner) => return Ok(inner),
+ Expr::Group(next) => expr = *next.expr,
+ _ => return Err(Error::new_spanned(expr, $msg)),
+ }
+ }
+ }
+ }
+ )*
+ };
+ }
+
+ impl_by_parsing_expr! {
+ ExprAssign, Assign, "expected assignment expression",
+ ExprAwait, Await, "expected await expression",
+ ExprBinary, Binary, "expected binary operation",
+ ExprCall, Call, "expected function call expression",
+ ExprCast, Cast, "expected cast expression",
+ ExprField, Field, "expected struct field access",
+ ExprIndex, Index, "expected indexing expression",
+ ExprMethodCall, MethodCall, "expected method call expression",
+ ExprRange, Range, "expected range expression",
+ ExprTry, Try, "expected try expression",
+ ExprTuple, Tuple, "expected tuple expression",
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprUnary {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = Vec::new();
+ let allow_struct = AllowStruct(true);
+ expr_unary(input, attrs, allow_struct)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_unary(
+ input: ParseStream,
+ attrs: Vec<Attribute>,
+ allow_struct: AllowStruct,
+ ) -> Result<ExprUnary> {
+ Ok(ExprUnary {
+ attrs,
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprClosure {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_struct = AllowStruct(true);
+ expr_closure(input, allow_struct)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprReference {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_struct = AllowStruct(true);
+ Ok(ExprReference {
+ attrs: Vec::new(),
+ and_token: input.parse()?,
+ mutability: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprBreak {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_struct = AllowStruct(true);
+ expr_break(input, allow_struct)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprReturn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprReturn {
+ attrs: Vec::new(),
+ return_token: input.parse()?,
+ expr: {
+ if can_begin_expr(input) {
+ Some(input.parse()?)
+ } else {
+ None
+ }
+ },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprTryBlock {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprTryBlock {
+ attrs: Vec::new(),
+ try_token: input.parse()?,
+ block: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprYield {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprYield {
+ attrs: Vec::new(),
+ yield_token: input.parse()?,
+ expr: {
+ if can_begin_expr(input) {
+ Some(input.parse()?)
+ } else {
+ None
+ }
+ },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_closure(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprClosure> {
+ let lifetimes: Option<BoundLifetimes> = input.parse()?;
+ let constness: Option<Token![const]> = input.parse()?;
+ let movability: Option<Token![static]> = input.parse()?;
+ let asyncness: Option<Token![async]> = input.parse()?;
+ let capture: Option<Token![move]> = input.parse()?;
+ let or1_token: Token![|] = input.parse()?;
+
+ let mut inputs = Punctuated::new();
+ loop {
+ if input.peek(Token![|]) {
+ break;
+ }
+ let value = closure_arg(input)?;
+ inputs.push_value(value);
+ if input.peek(Token![|]) {
+ break;
+ }
+ let punct: Token![,] = input.parse()?;
+ inputs.push_punct(punct);
+ }
+
+ let or2_token: Token![|] = input.parse()?;
+
+ let (output, body) = if input.peek(Token![->]) {
+ let arrow_token: Token![->] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let body: Block = input.parse()?;
+ let output = ReturnType::Type(arrow_token, Box::new(ty));
+ let block = Expr::Block(ExprBlock {
+ attrs: Vec::new(),
+ label: None,
+ block: body,
+ });
+ (output, block)
+ } else {
+ let body = ambiguous_expr(input, allow_struct)?;
+ (ReturnType::Default, body)
+ };
+
+ Ok(ExprClosure {
+ attrs: Vec::new(),
+ lifetimes,
+ constness,
+ movability,
+ asyncness,
+ capture,
+ or1_token,
+ inputs,
+ or2_token,
+ output,
+ body: Box::new(body),
+ })
+ }
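+ // For example, `|x| x + 1` is parsed with ReturnType::Default and an
+ // arbitrary expression body, whereas `|x| -> i32 { x + 1 }` requires a block
+ // body because the `->` branch above parses a Block after the return type.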
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprAsync {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprAsync {
+ attrs: Vec::new(),
+ async_token: input.parse()?,
+ capture: input.parse()?,
+ block: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn closure_arg(input: ParseStream) -> Result<Pat> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let mut pat = Pat::parse_single(input)?;
+
+ if input.peek(Token![:]) {
+ Ok(Pat::Type(PatType {
+ attrs,
+ pat: Box::new(pat),
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ }))
+ } else {
+ match &mut pat {
+ Pat::Const(pat) => pat.attrs = attrs,
+ Pat::Ident(pat) => pat.attrs = attrs,
+ Pat::Lit(pat) => pat.attrs = attrs,
+ Pat::Macro(pat) => pat.attrs = attrs,
+ Pat::Or(pat) => pat.attrs = attrs,
+ Pat::Paren(pat) => pat.attrs = attrs,
+ Pat::Path(pat) => pat.attrs = attrs,
+ Pat::Range(pat) => pat.attrs = attrs,
+ Pat::Reference(pat) => pat.attrs = attrs,
+ Pat::Rest(pat) => pat.attrs = attrs,
+ Pat::Slice(pat) => pat.attrs = attrs,
+ Pat::Struct(pat) => pat.attrs = attrs,
+ Pat::Tuple(pat) => pat.attrs = attrs,
+ Pat::TupleStruct(pat) => pat.attrs = attrs,
+ Pat::Type(_) => unreachable!(),
+ Pat::Verbatim(_) => {}
+ Pat::Wild(pat) => pat.attrs = attrs,
+ }
+ Ok(pat)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+ attrs,
+ label,
+ while_token,
+ cond: Box::new(cond),
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprConst {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let const_token: Token![const] = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprConst {
+ attrs: inner_attrs,
+ const_token,
+ block: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Label {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(Label {
+ name: input.parse()?,
+ colon_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Option<Label> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Lifetime) {
+ input.parse().map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprContinue {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ExprContinue {
+ attrs: Vec::new(),
+ continue_token: input.parse()?,
+ label: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_break(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprBreak> {
+ let break_token: Token![break] = input.parse()?;
+
+ let ahead = input.fork();
+ let label: Option<Lifetime> = ahead.parse()?;
+ if label.is_some() && ahead.peek(Token![:]) {
+ // Not allowed: `break 'label: loop {...}`
+ // Parentheses are required. `break ('label: loop {...})`
+ let _: Expr = input.parse()?;
+ let start_span = label.unwrap().apostrophe;
+ let end_span = input.cursor().prev_span();
+ return Err(crate::error::new2(
+ start_span,
+ end_span,
+ "parentheses required",
+ ));
+ }
+
+ input.advance_to(&ahead);
+ let expr = if can_begin_expr(input) && (allow_struct.0 || !input.peek(token::Brace)) {
+ Some(input.parse()?)
+ } else {
+ None
+ };
+
+ Ok(ExprBreak {
+ attrs: Vec::new(),
+ break_token,
+ label,
+ expr,
+ })
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+ let value: Expr = input.parse()?;
+ (Some(colon_token), value)
+ } else if let Member::Named(ident) = &member {
+ let value = Expr::Path(ExprPath {
+ attrs: Vec::new(),
+ qself: None,
+ path: Path::from(ident.clone()),
+ });
+ (None, value)
+ } else {
+ unreachable!()
+ };
+
+ Ok(FieldValue {
+ attrs,
+ member,
+ colon_token,
+ expr: value,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprStruct {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let (qself, path) = path::parsing::qpath(input, true)?;
+ expr_struct_helper(input, qself, path)
+ }
+ }
+
+ fn expr_struct_helper(
+ input: ParseStream,
+ qself: Option<QSelf>,
+ path: Path,
+ ) -> Result<ExprStruct> {
+ let content;
+ let brace_token = braced!(content in input);
+
+ let mut fields = Punctuated::new();
+ while !content.is_empty() {
+ if content.peek(Token![..]) {
+ return Ok(ExprStruct {
+ attrs: Vec::new(),
+ qself,
+ path,
+ brace_token,
+ fields,
+ dot2_token: Some(content.parse()?),
+ rest: if content.is_empty() {
+ None
+ } else {
+ Some(Box::new(content.parse()?))
+ },
+ });
+ }
+
+ fields.push(content.parse()?);
+ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+ Ok(ExprStruct {
+ attrs: Vec::new(),
+ qself,
+ path,
+ brace_token,
+ fields,
+ dot2_token: None,
+ rest: None,
+ })
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprUnsafe {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let unsafe_token: Token![unsafe] = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprUnsafe {
+ attrs: inner_attrs,
+ unsafe_token,
+ block: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprBlock {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprBlock {
+ attrs,
+ label,
+ block: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_range(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprRange> {
+ let limits: RangeLimits = input.parse()?;
+ let end = parse_range_end(input, &limits, allow_struct)?;
+ Ok(ExprRange {
+ attrs: Vec::new(),
+ start: None,
+ limits,
+ end,
+ })
+ }
+
+ #[cfg(feature = "full")]
+ fn parse_range_end(
+ input: ParseStream,
+ limits: &RangeLimits,
+ allow_struct: AllowStruct,
+ ) -> Result<Option<Box<Expr>>> {
+ if matches!(limits, RangeLimits::HalfOpen(_))
+ && (input.is_empty()
+ || input.peek(Token![,])
+ || input.peek(Token![;])
+ || input.peek(Token![.]) && !input.peek(Token![..])
+ || !allow_struct.0 && input.peek(token::Brace))
+ {
+ Ok(None)
+ } else {
+ let end = parse_binop_rhs(input, allow_struct, Precedence::Range)?;
+ Ok(Some(end))
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for RangeLimits {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let lookahead = input.lookahead1();
+ let dot_dot = lookahead.peek(Token![..]);
+ let dot_dot_eq = dot_dot && lookahead.peek(Token![..=]);
+ let dot_dot_dot = dot_dot && input.peek(Token![...]);
+ if dot_dot_eq {
+ input.parse().map(RangeLimits::Closed)
+ } else if dot_dot && !dot_dot_dot {
+ input.parse().map(RangeLimits::HalfOpen)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+ }
+
+ #[cfg(feature = "full")]
+ impl RangeLimits {
+ pub(crate) fn parse_obsolete(input: ParseStream) -> Result<Self> {
+ let lookahead = input.lookahead1();
+ let dot_dot = lookahead.peek(Token![..]);
+ let dot_dot_eq = dot_dot && lookahead.peek(Token![..=]);
+ let dot_dot_dot = dot_dot && input.peek(Token![...]);
+ if dot_dot_eq {
+ input.parse().map(RangeLimits::Closed)
+ } else if dot_dot_dot {
+ let dot3: Token![...] = input.parse()?;
+ Ok(RangeLimits::Closed(Token![..=](dot3.spans)))
+ } else if dot_dot {
+ input.parse().map(RangeLimits::HalfOpen)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+ }
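+ // Illustrative note: unlike the Parse impl above, which rejects `...`,
+ // parse_obsolete accepts the legacy `0...10` syntax and rewrites its `...`
+ // token into an `..=` (RangeLimits::Closed) with the same spans.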
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ExprPath {
+ fn parse(input: ParseStream) -> Result<Self> {
+ #[cfg(not(feature = "full"))]
+ let attrs = Vec::new();
+ #[cfg(feature = "full")]
+ let attrs = input.call(Attribute::parse_outer)?;
+
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ Ok(ExprPath { attrs, qself, path })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Member {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Ident) {
+ input.parse().map(Member::Named)
+ } else if input.peek(LitInt) {
+ input.parse().map(Member::Unnamed)
+ } else {
+ Err(input.error("expected identifier or integer"))
+ }
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Arm {
+ fn parse(input: ParseStream) -> Result<Arm> {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+ pat: Pat::parse_multi_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+ let guard: Expr = input.parse()?;
+ Some((if_token, Box::new(guard)))
+ } else {
+ None
+ }
+ },
+ fat_arrow_token: input.parse()?,
+ body: {
+ let body = Expr::parse_with_earlier_boundary_rule(input)?;
+ requires_comma = classify::requires_comma_to_be_match_arm(&body);
+ Box::new(body)
+ },
+ comma: {
+ if requires_comma && !input.is_empty() {
+ Some(input.parse()?)
+ } else {
+ input.parse()?
+ }
+ },
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Index {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let lit: LitInt = input.parse()?;
+ if lit.suffix().is_empty() {
+ Ok(Index {
+ index: lit
+ .base10_digits()
+ .parse()
+ .map_err(|err| Error::new(lit.span(), err))?,
+ span: lit.span(),
+ })
+ } else {
+ Err(Error::new(lit.span(), "expected unsuffixed integer"))
+ }
+ }
+ }
+
+ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
+ let float_token = float.token();
+ let float_span = float_token.span();
+ let mut float_repr = float_token.to_string();
+ let trailing_dot = float_repr.ends_with('.');
+ if trailing_dot {
+ float_repr.truncate(float_repr.len() - 1);
+ }
+
+ let mut offset = 0;
+ for part in float_repr.split('.') {
+ let mut index: Index =
+ crate::parse_str(part).map_err(|err| Error::new(float_span, err))?;
+ let part_end = offset + part.len();
+ index.span = float_token.subspan(offset..part_end).unwrap_or(float_span);
+
+ let base = mem::replace(e, Expr::PLACEHOLDER);
+ *e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(base),
+ dot_token: Token![.](dot_token.span),
+ member: Member::Unnamed(index),
+ });
+
+ let dot_span = float_token
+ .subspan(part_end..part_end + 1)
+ .unwrap_or(float_span);
+ *dot_token = Token![.](dot_span);
+ offset = part_end + 1;
+ }
+
+ Ok(!trailing_dot)
+ }
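+ // Illustrative note: this handles inputs like `x.0.1`, which the tokenizer
+ // lexes with `0.1` as a single float literal; the loop above splits that
+ // literal on `.` and rebuilds two unnamed field accesses, i.e. `(x.0).1`.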
+
+ impl Member {
+ pub(crate) fn is_named(&self) -> bool {
+ match self {
+ Member::Named(_) => true,
+ Member::Unnamed(_) => false,
+ }
+ }
+ }
+
+ fn check_cast(input: ParseStream) -> Result<()> {
+ let kind = if input.peek(Token![.]) && !input.peek(Token![..]) {
+ if input.peek2(Token![await]) {
+ "`.await`"
+ } else if input.peek2(Ident) && (input.peek3(token::Paren) || input.peek3(Token![::])) {
+ "a method call"
+ } else {
+ "a field access"
+ }
+ } else if input.peek(Token![?]) {
+ "`?`"
+ } else if input.peek(token::Bracket) {
+ "indexing"
+ } else if input.peek(token::Paren) {
+ "a function call"
+ } else {
+ return Ok(());
+ };
+ let msg = format!("casts cannot be followed by {}", kind);
+ Err(input.error(msg))
+ }
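+ // For example, `x as u32.count_ones()` is rejected here with "casts cannot
+ // be followed by a method call"; it has to be written `(x as u32).count_ones()`.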
+}
+
+#[cfg(feature = "printing")]
+pub(crate) mod printing {
+ use crate::attr::Attribute;
+ #[cfg(feature = "full")]
+ use crate::attr::FilterAttrs;
+ use crate::classify;
+ #[cfg(feature = "full")]
+ use crate::expr::{
+ Arm, ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprClosure,
+ ExprConst, ExprContinue, ExprForLoop, ExprIf, ExprInfer, ExprLet, ExprLoop, ExprMatch,
+ ExprRange, ExprRepeat, ExprReturn, ExprTry, ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile,
+ ExprYield, Label, RangeLimits,
+ };
+ use crate::expr::{
+ Expr, ExprBinary, ExprCall, ExprCast, ExprField, ExprGroup, ExprIndex, ExprLit, ExprMacro,
+ ExprMethodCall, ExprParen, ExprPath, ExprReference, ExprStruct, ExprUnary, FieldValue,
+ Index, Member,
+ };
+ #[cfg(feature = "full")]
+ use crate::fixup::FixupContext;
+ use crate::op::BinOp;
+ use crate::path;
+ use crate::precedence::Precedence;
+ use crate::token;
+ #[cfg(feature = "full")]
+ use crate::ty::ReturnType;
+ use proc_macro2::{Literal, Span, TokenStream};
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg(feature = "full")]
+ pub(crate) fn outer_attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) {
+ tokens.append_all(attrs.outer());
+ }
+
+ #[cfg(feature = "full")]
+ fn inner_attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) {
+ tokens.append_all(attrs.inner());
+ }
+
+ #[cfg(not(feature = "full"))]
+ pub(crate) fn outer_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {}
+
+ #[cfg(feature = "full")]
+ fn print_condition(expr: &Expr, tokens: &mut TokenStream) {
+ print_subexpression(
+ expr,
+ classify::confusable_with_adjacent_block(expr),
+ tokens,
+ FixupContext::new_condition(),
+ );
+ }
+
+ fn print_subexpression(
+ expr: &Expr,
+ needs_group: bool,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] mut fixup: FixupContext,
+ ) {
+ #[cfg(not(feature = "full"))]
+ let do_print_expr = |tokens: &mut TokenStream| expr.to_tokens(tokens);
+
+ #[cfg(feature = "full")]
+ let do_print_expr = {
+ // If we are surrounding the whole cond in parentheses, such as:
+ //
+ // if (return Struct {}) {}
+ //
+ // then there is no need for parenthesizing the individual struct
+ // expressions within. On the other hand if the whole cond is not
+ // parenthesized, then print_expr must parenthesize exterior struct
+ // literals.
+ //
+ // if x == (Struct {}) {}
+ //
+ if needs_group {
+ fixup = FixupContext::NONE;
+ }
+ |tokens: &mut TokenStream| print_expr(expr, tokens, fixup)
+ };
+
+ if needs_group {
+ token::Paren::default().surround(tokens, do_print_expr);
+ } else {
+ do_print_expr(tokens);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ pub(crate) fn print_expr(expr: &Expr, tokens: &mut TokenStream, mut fixup: FixupContext) {
+ let needs_group = fixup.would_cause_statement_boundary(expr);
+ if needs_group {
+ fixup = FixupContext::NONE;
+ }
+
+ let do_print_expr = |tokens: &mut TokenStream| match expr {
+ Expr::Array(e) => e.to_tokens(tokens),
+ Expr::Assign(e) => print_expr_assign(e, tokens, fixup),
+ Expr::Async(e) => e.to_tokens(tokens),
+ Expr::Await(e) => print_expr_await(e, tokens, fixup),
+ Expr::Binary(e) => print_expr_binary(e, tokens, fixup),
+ Expr::Block(e) => e.to_tokens(tokens),
+ Expr::Break(e) => print_expr_break(e, tokens, fixup),
+ Expr::Call(e) => print_expr_call(e, tokens, fixup),
+ Expr::Cast(e) => print_expr_cast(e, tokens, fixup),
+ Expr::Closure(e) => e.to_tokens(tokens),
+ Expr::Const(e) => e.to_tokens(tokens),
+ Expr::Continue(e) => e.to_tokens(tokens),
+ Expr::Field(e) => print_expr_field(e, tokens, fixup),
+ Expr::ForLoop(e) => e.to_tokens(tokens),
+ Expr::Group(e) => e.to_tokens(tokens),
+ Expr::If(e) => e.to_tokens(tokens),
+ Expr::Index(e) => print_expr_index(e, tokens, fixup),
+ Expr::Infer(e) => e.to_tokens(tokens),
+ Expr::Let(e) => print_expr_let(e, tokens, fixup),
+ Expr::Lit(e) => e.to_tokens(tokens),
+ Expr::Loop(e) => e.to_tokens(tokens),
+ Expr::Macro(e) => e.to_tokens(tokens),
+ Expr::Match(e) => e.to_tokens(tokens),
+ Expr::MethodCall(e) => print_expr_method_call(e, tokens, fixup),
+ Expr::Paren(e) => e.to_tokens(tokens),
+ Expr::Path(e) => e.to_tokens(tokens),
+ Expr::Range(e) => print_expr_range(e, tokens, fixup),
+ Expr::Reference(e) => print_expr_reference(e, tokens, fixup),
+ Expr::Repeat(e) => e.to_tokens(tokens),
+ Expr::Return(e) => print_expr_return(e, tokens, fixup),
+ Expr::Struct(e) => e.to_tokens(tokens),
+ Expr::Try(e) => print_expr_try(e, tokens, fixup),
+ Expr::TryBlock(e) => e.to_tokens(tokens),
+ Expr::Tuple(e) => e.to_tokens(tokens),
+ Expr::Unary(e) => print_expr_unary(e, tokens, fixup),
+ Expr::Unsafe(e) => e.to_tokens(tokens),
+ Expr::Verbatim(e) => e.to_tokens(tokens),
+ Expr::While(e) => e.to_tokens(tokens),
+ Expr::Yield(e) => print_expr_yield(e, tokens, fixup),
+ };
+
+ if needs_group {
+ token::Paren::default().surround(tokens, do_print_expr);
+ } else {
+ do_print_expr(tokens);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprArray {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprAssign {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_assign(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_assign(e: &ExprAssign, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ print_subexpression(
+ &e.left,
+ Precedence::of(&e.left) <= Precedence::Assign,
+ tokens,
+ fixup.leftmost_subexpression(),
+ );
+ e.eq_token.to_tokens(tokens);
+ print_subexpression(
+ &e.right,
+ Precedence::of_rhs(&e.right) < Precedence::Assign,
+ tokens,
+ fixup.subsequent_subexpression(),
+ );
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprAsync {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.async_token.to_tokens(tokens);
+ self.capture.to_tokens(tokens);
+ self.block.to_tokens(tokens);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprAwait {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_await(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_await(e: &ExprAwait, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ print_subexpression(
+ &e.base,
+ Precedence::of(&e.base) < Precedence::Postfix,
+ tokens,
+ fixup.leftmost_subexpression_with_dot(),
+ );
+ e.dot_token.to_tokens(tokens);
+ e.await_token.to_tokens(tokens);
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprBinary {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_binary(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_binary(
+ e: &ExprBinary,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+
+ let binop_prec = Precedence::of_binop(&e.op);
+ let left_prec = Precedence::of(&e.left);
+ let right_prec = Precedence::of_rhs(&e.right);
+ let (mut left_needs_group, right_needs_group) = if let Precedence::Assign = binop_prec {
+ (left_prec <= binop_prec, right_prec < binop_prec)
+ } else {
+ (left_prec < binop_prec, right_prec <= binop_prec)
+ };
+
+ // These cases require parenthesization independently of precedence.
+ match (&*e.left, &e.op) {
+ // `x as i32 < y` has the parser thinking that `i32 < y` is the
+ // beginning of a path type. It starts trying to parse `x as (i32 <
+ // y ...` instead of `(x as i32) < ...`. We need to convince it
+ // _not_ to do that.
+ (_, BinOp::Lt(_) | BinOp::Shl(_)) if classify::confusable_with_adjacent_lt(&e.left) => {
+ left_needs_group = true;
+ }
+
+ // We are given `(let _ = a) OP b`.
+ //
+ // - When `OP <= LAnd` we should print `let _ = a OP b` to avoid
+ // redundant parens as the parser will interpret this as `(let _ =
+ // a) OP b`.
+ //
+ // - Otherwise, e.g. when we have `(let a = b) < c` in AST, parens
+ // are required since the parser would interpret `let a = b < c`
+ // as `let a = (b < c)`. To achieve this, we force parens.
+ #[cfg(feature = "full")]
+ (Expr::Let(_), _) if binop_prec > Precedence::And => {
+ left_needs_group = true;
+ }
+
+ _ => {}
+ }
+
+ print_subexpression(
+ &e.left,
+ left_needs_group,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.leftmost_subexpression(),
+ );
+ e.op.to_tokens(tokens);
+ print_subexpression(
+ &e.right,
+ right_needs_group,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.subsequent_subexpression(),
+ );
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprBlock {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.label.to_tokens(tokens);
+ self.block.brace_token.surround(tokens, |tokens| {
+ inner_attrs_to_tokens(&self.attrs, tokens);
+ tokens.append_all(&self.block.stmts);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprBreak {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_break(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_break(e: &ExprBreak, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ e.break_token.to_tokens(tokens);
+ e.label.to_tokens(tokens);
+ if let Some(expr) = &e.expr {
+ print_expr(expr, tokens, fixup.subsequent_subexpression());
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprCall {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_call(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_call(
+ e: &ExprCall,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+
+ let precedence = if let Expr::Field(_) = &*e.func {
+ Precedence::Any
+ } else {
+ Precedence::Postfix
+ };
+ print_subexpression(
+ &e.func,
+ Precedence::of(&e.func) < precedence,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.leftmost_subexpression(),
+ );
+
+ e.paren_token.surround(tokens, |tokens| {
+ e.args.to_tokens(tokens);
+ });
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprCast {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_cast(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_cast(
+ e: &ExprCast,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ print_subexpression(
+ &e.expr,
+ Precedence::of(&e.expr) < Precedence::Cast,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.leftmost_subexpression(),
+ );
+ e.as_token.to_tokens(tokens);
+ e.ty.to_tokens(tokens);
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprClosure {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.lifetimes.to_tokens(tokens);
+ self.constness.to_tokens(tokens);
+ self.movability.to_tokens(tokens);
+ self.asyncness.to_tokens(tokens);
+ self.capture.to_tokens(tokens);
+ self.or1_token.to_tokens(tokens);
+ self.inputs.to_tokens(tokens);
+ self.or2_token.to_tokens(tokens);
+ self.output.to_tokens(tokens);
+ if matches!(self.output, ReturnType::Default) || matches!(*self.body, Expr::Block(_)) {
+ self.body.to_tokens(tokens);
+ } else {
+ token::Brace::default().surround(tokens, |tokens| {
+ print_expr(&self.body, tokens, FixupContext::new_stmt());
+ });
+ }
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprConst {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.const_token.to_tokens(tokens);
+ self.block.brace_token.surround(tokens, |tokens| {
+ inner_attrs_to_tokens(&self.attrs, tokens);
+ tokens.append_all(&self.block.stmts);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprContinue {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.continue_token.to_tokens(tokens);
+ self.label.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprField {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_field(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_field(
+ e: &ExprField,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ print_subexpression(
+ &e.base,
+ Precedence::of(&e.base) < Precedence::Postfix,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.leftmost_subexpression_with_dot(),
+ );
+ e.dot_token.to_tokens(tokens);
+ e.member.to_tokens(tokens);
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprForLoop {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.label.to_tokens(tokens);
+ self.for_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ self.in_token.to_tokens(tokens);
+ print_condition(&self.expr, tokens);
+ self.body.brace_token.surround(tokens, |tokens| {
+ inner_attrs_to_tokens(&self.attrs, tokens);
+ tokens.append_all(&self.body.stmts);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprGroup {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.group_token.surround(tokens, |tokens| {
+ self.expr.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprIf {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+
+ let mut expr = self;
+ loop {
+ expr.if_token.to_tokens(tokens);
+ print_condition(&expr.cond, tokens);
+ expr.then_branch.to_tokens(tokens);
+
+ let (else_token, else_) = match &expr.else_branch {
+ Some(else_branch) => else_branch,
+ None => break,
+ };
+
+ else_token.to_tokens(tokens);
+ match &**else_ {
+ Expr::If(next) => {
+ expr = next;
+ }
+ Expr::Block(last) => {
+ last.to_tokens(tokens);
+ break;
+ }
+ // If this is not one of the valid expressions to exist in
+ // an else clause, wrap it in a block.
+ other => {
+ token::Brace::default().surround(tokens, |tokens| {
+ print_expr(other, tokens, FixupContext::new_stmt());
+ });
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprIndex {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_index(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_index(
+ e: &ExprIndex,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ print_subexpression(
+ &e.expr,
+ Precedence::of(&e.expr) < Precedence::Postfix,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.leftmost_subexpression(),
+ );
+ e.bracket_token.surround(tokens, |tokens| {
+ e.index.to_tokens(tokens);
+ });
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprInfer {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprLet {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_let(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_let(e: &ExprLet, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ e.let_token.to_tokens(tokens);
+ e.pat.to_tokens(tokens);
+ e.eq_token.to_tokens(tokens);
+ print_subexpression(
+ &e.expr,
+ fixup.needs_group_as_let_scrutinee(&e.expr),
+ tokens,
+ FixupContext::NONE,
+ );
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprLit {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.lit.to_tokens(tokens);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprLoop {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.label.to_tokens(tokens);
+ self.loop_token.to_tokens(tokens);
+ self.body.brace_token.surround(tokens, |tokens| {
+ inner_attrs_to_tokens(&self.attrs, tokens);
+ tokens.append_all(&self.body.stmts);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprMatch {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.match_token.to_tokens(tokens);
+ print_condition(&self.expr, tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ inner_attrs_to_tokens(&self.attrs, tokens);
+ for (i, arm) in self.arms.iter().enumerate() {
+ arm.to_tokens(tokens);
+ // Ensure that we have a comma after a non-block arm, except
+ // for the last one.
+ let is_last = i == self.arms.len() - 1;
+ if !is_last
+ && classify::requires_comma_to_be_match_arm(&arm.body)
+ && arm.comma.is_none()
+ {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ }
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprMethodCall {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_method_call(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_method_call(
+ e: &ExprMethodCall,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ print_subexpression(
+ &e.receiver,
+ Precedence::of(&e.receiver) < Precedence::Postfix,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.leftmost_subexpression_with_dot(),
+ );
+ e.dot_token.to_tokens(tokens);
+ e.method.to_tokens(tokens);
+ e.turbofish.to_tokens(tokens);
+ e.paren_token.surround(tokens, |tokens| {
+ e.args.to_tokens(tokens);
+ });
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprParen {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.paren_token.surround(tokens, |tokens| {
+ self.expr.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprPath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ path::printing::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprRange {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_range(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_range(e: &ExprRange, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ if let Some(start) = &e.start {
+ print_subexpression(
+ start,
+ Precedence::of(start) <= Precedence::Range,
+ tokens,
+ fixup.leftmost_subexpression(),
+ );
+ }
+ e.limits.to_tokens(tokens);
+ if let Some(end) = &e.end {
+ print_subexpression(
+ end,
+ Precedence::of_rhs(end) <= Precedence::Range,
+ tokens,
+ fixup.subsequent_subexpression(),
+ );
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_reference(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_reference(
+ e: &ExprReference,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ e.and_token.to_tokens(tokens);
+ e.mutability.to_tokens(tokens);
+ print_subexpression(
+ &e.expr,
+ Precedence::of_rhs(&e.expr) < Precedence::Prefix,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.subsequent_subexpression(),
+ );
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprRepeat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.bracket_token.surround(tokens, |tokens| {
+ self.expr.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ self.len.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprReturn {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_return(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_return(e: &ExprReturn, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ e.return_token.to_tokens(tokens);
+ if let Some(expr) = &e.expr {
+ print_expr(expr, tokens, fixup.subsequent_subexpression());
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ path::printing::print_path(tokens, &self.qself, &self.path);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+ if let Some(dot2_token) = &self.dot2_token {
+ dot2_token.to_tokens(tokens);
+ } else if self.rest.is_some() {
+ Token![..](Span::call_site()).to_tokens(tokens);
+ }
+ self.rest.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprTry {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_try(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_try(e: &ExprTry, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ print_subexpression(
+ &e.expr,
+ Precedence::of(&e.expr) < Precedence::Postfix,
+ tokens,
+ fixup.leftmost_subexpression_with_dot(),
+ );
+ e.question_token.to_tokens(tokens);
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprTryBlock {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.try_token.to_tokens(tokens);
+ self.block.to_tokens(tokens);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ // If we only have one argument, we need a trailing comma to
+ // distinguish ExprTuple from ExprParen.
+ if self.elems.len() == 1 && !self.elems.trailing_punct() {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprUnary {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_unary(
+ self,
+ tokens,
+ #[cfg(feature = "full")]
+ FixupContext::NONE,
+ );
+ }
+ }
+
+ fn print_expr_unary(
+ e: &ExprUnary,
+ tokens: &mut TokenStream,
+ #[cfg(feature = "full")] fixup: FixupContext,
+ ) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ e.op.to_tokens(tokens);
+ print_subexpression(
+ &e.expr,
+ Precedence::of_rhs(&e.expr) < Precedence::Prefix,
+ tokens,
+ #[cfg(feature = "full")]
+ fixup.subsequent_subexpression(),
+ );
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprUnsafe {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.unsafe_token.to_tokens(tokens);
+ self.block.brace_token.surround(tokens, |tokens| {
+ inner_attrs_to_tokens(&self.attrs, tokens);
+ tokens.append_all(&self.block.stmts);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprWhile {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.label.to_tokens(tokens);
+ self.while_token.to_tokens(tokens);
+ print_condition(&self.cond, tokens);
+ self.body.brace_token.surround(tokens, |tokens| {
+ inner_attrs_to_tokens(&self.attrs, tokens);
+ tokens.append_all(&self.body.stmts);
+ });
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ExprYield {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ print_expr_yield(self, tokens, FixupContext::NONE);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn print_expr_yield(e: &ExprYield, tokens: &mut TokenStream, fixup: FixupContext) {
+ outer_attrs_to_tokens(&e.attrs, tokens);
+ e.yield_token.to_tokens(tokens);
+ if let Some(expr) = &e.expr {
+ print_expr(expr, tokens, fixup.subsequent_subexpression());
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Arm {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(&self.attrs);
+ self.pat.to_tokens(tokens);
+ if let Some((if_token, guard)) = &self.guard {
+ if_token.to_tokens(tokens);
+ guard.to_tokens(tokens);
+ }
+ self.fat_arrow_token.to_tokens(tokens);
+ print_expr(&self.body, tokens, FixupContext::new_match_arm());
+ self.comma.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for FieldValue {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ outer_attrs_to_tokens(&self.attrs, tokens);
+ self.member.to_tokens(tokens);
+ if let Some(colon_token) = &self.colon_token {
+ colon_token.to_tokens(tokens);
+ self.expr.to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Index {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ let mut lit = Literal::i64_unsuffixed(i64::from(self.index));
+ lit.set_span(self.span);
+ tokens.append(lit);
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Label {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.name.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Member {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ Member::Named(ident) => ident.to_tokens(tokens),
+ Member::Unnamed(index) => index.to_tokens(tokens),
+ }
+ }
+ }
+
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for RangeLimits {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+ RangeLimits::Closed(t) => t.to_tokens(tokens),
+ }
+ }
+ }
+}
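[Editor's note, not part of the vendored sources: the ToTokens impls above encode a few invariants that keep the printed output round-trippable through the parser; for instance, ExprTuple preserves the trailing comma of a one-element tuple so it is not re-parsed as ExprParen. A minimal sketch, assuming syn 2.x with the "full", "parsing" and "printing" features plus the quote crate:]

```rust
use quote::ToTokens;
use syn::{parse_quote, Expr};

fn main() {
    // `(1,)` parses as a one-element tuple, `(1)` as a parenthesized literal.
    let tuple: Expr = parse_quote!((1,));
    let paren: Expr = parse_quote!((1));
    assert!(matches!(tuple, Expr::Tuple(_)));
    assert!(matches!(paren, Expr::Paren(_)));
    // When printed back, the tuple keeps its trailing comma, so the output
    // still distinguishes the two forms on re-parse.
    assert!(tuple.to_token_stream().to_string().contains(','));
}
```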
diff --git a/rust/hw/char/pl011/vendor/syn/src/ext.rs b/rust/hw/char/pl011/vendor/syn/src/ext.rs
new file mode 100644
index 0000000000..5cd79e863a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/ext.rs
@@ -0,0 +1,136 @@
+//! Extension traits to provide parsing methods on foreign types.
+
+use crate::buffer::Cursor;
+use crate::error::Result;
+use crate::parse::ParseStream;
+use crate::parse::Peek;
+use crate::sealed::lookahead;
+use crate::token::CustomToken;
+use proc_macro2::Ident;
+
+/// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
+///
+/// This trait is sealed and cannot be implemented for types outside of Syn. It
+/// is implemented only for `proc_macro2::Ident`.
+pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+ /// This is useful when parsing macro input which allows Rust keywords as
+ /// identifiers.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{Error, Ident, Result, Token};
+ /// use syn::ext::IdentExt;
+ /// use syn::parse::ParseStream;
+ ///
+ /// mod kw {
+ /// syn::custom_keyword!(name);
+ /// }
+ ///
+ /// // Parses input that looks like `name = NAME` where `NAME` can be
+ /// // any identifier.
+ /// //
+ /// // Examples:
+ /// //
+ /// // name = anything
+ /// // name = impl
+ /// fn parse_dsl(input: ParseStream) -> Result<Ident> {
+ /// input.parse::<kw::name>()?;
+ /// input.parse::<Token![=]>()?;
+ /// let name = input.call(Ident::parse_any)?;
+ /// Ok(name)
+ /// }
+ /// ```
+ fn parse_any(input: ParseStream) -> Result<Self>;
+
+ /// Peeks any identifier including keywords. Usage:
+ /// `input.peek(Ident::peek_any)`
+ ///
+ /// This is different from `input.peek(Ident)` which only returns true in
+ /// the case of an ident which is not a Rust keyword.
+ #[allow(non_upper_case_globals)]
+ const peek_any: private::PeekFn = private::PeekFn;
+
+ /// Strips the raw marker `r#`, if any, from the beginning of an ident.
+ ///
+ /// - unraw(`x`) = `x`
+ /// - unraw(`move`) = `move`
+ /// - unraw(`r#move`) = `move`
+ ///
+ /// # Example
+ ///
+ /// In the case of interop with other languages like Python that have a
+ /// different set of keywords than Rust, we might come across macro input
+ /// that involves raw identifiers to refer to ordinary variables in the
+ /// other language with a name that happens to be a Rust keyword.
+ ///
+ /// The function below appends an identifier from the caller's input onto a
+ /// fixed prefix. Without using `unraw()`, this would tend to produce
+ /// invalid identifiers like `__pyo3_get_r#move`.
+ ///
+ /// ```
+ /// use proc_macro2::Span;
+ /// use syn::Ident;
+ /// use syn::ext::IdentExt;
+ ///
+ /// fn ident_for_getter(variable: &Ident) -> Ident {
+ /// let getter = format!("__pyo3_get_{}", variable.unraw());
+ /// Ident::new(&getter, Span::call_site())
+ /// }
+ /// ```
+ fn unraw(&self) -> Ident;
+}
+
+impl IdentExt for Ident {
+ fn parse_any(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| match cursor.ident() {
+ Some((ident, rest)) => Ok((ident, rest)),
+ None => Err(cursor.error("expected ident")),
+ })
+ }
+
+ fn unraw(&self) -> Ident {
+ let string = self.to_string();
+ if let Some(string) = string.strip_prefix("r#") {
+ Ident::new(string, self.span())
+ } else {
+ self.clone()
+ }
+ }
+}
+
+impl Peek for private::PeekFn {
+ type Token = private::IdentAny;
+}
+
+impl CustomToken for private::IdentAny {
+ fn peek(cursor: Cursor) -> bool {
+ cursor.ident().is_some()
+ }
+
+ fn display() -> &'static str {
+ "identifier"
+ }
+}
+
+impl lookahead::Sealed for private::PeekFn {}
+
+mod private {
+ use proc_macro2::Ident;
+
+ pub trait Sealed {}
+
+ impl Sealed for Ident {}
+
+ pub struct PeekFn;
+ pub struct IdentAny;
+
+ impl Copy for PeekFn {}
+ impl Clone for PeekFn {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+}
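[Editor's note, not part of the vendored file: a small sketch of how `parse_any` and `peek_any` from the `IdentExt` trait above combine when a macro wants to accept Rust keywords as ordinary identifiers. The function name and grammar below are made up for illustration; assumes syn 2.x with the default "parsing" feature.]

```rust
use syn::ext::IdentExt;
use syn::parse::ParseStream;
use syn::{Ident, Result, Token};

// Hypothetical grammar: a comma-separated list of identifiers in which Rust
// keywords such as `impl` or `type` are also accepted.
fn parse_loose_ident_list(input: ParseStream) -> Result<Vec<Ident>> {
    let mut idents = Vec::new();
    // `Ident::peek_any` also matches keywords, unlike `input.peek(Ident)`.
    while input.peek(Ident::peek_any) {
        idents.push(input.call(Ident::parse_any)?);
        if !input.peek(Token![,]) {
            break;
        }
        input.parse::<Token![,]>()?;
    }
    Ok(idents)
}
```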
diff --git a/rust/hw/char/pl011/vendor/syn/src/file.rs b/rust/hw/char/pl011/vendor/syn/src/file.rs
new file mode 100644
index 0000000000..681125e137
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/file.rs
@@ -0,0 +1,130 @@
+use crate::attr::Attribute;
+use crate::item::Item;
+
+ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+ /// Typically `File` objects are created with [`parse_file`].
+ ///
+ /// [`parse_file`]: crate::parse_file
+ ///
+ /// # Example
+ ///
+ /// Parse a Rust source file into a `syn::File` and print out a debug
+ /// representation of the syntax tree.
+ ///
+ /// ```
+ /// use std::env;
+ /// use std::fs::File;
+ /// use std::io::Read;
+ /// use std::process;
+ ///
+ /// fn main() {
+ /// # }
+ /// #
+ /// # fn fake_main() {
+ /// let mut args = env::args();
+ /// let _ = args.next(); // executable name
+ ///
+ /// let filename = match (args.next(), args.next()) {
+ /// (Some(filename), None) => filename,
+ /// _ => {
+ /// eprintln!("Usage: dump-syntax path/to/filename.rs");
+ /// process::exit(1);
+ /// }
+ /// };
+ ///
+ /// let mut file = File::open(&filename).expect("unable to open file");
+ ///
+ /// let mut src = String::new();
+ /// file.read_to_string(&mut src).expect("unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("unable to parse file");
+ ///
+ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
+ ///
+ /// Running with its own source code as input, this program prints output
+ /// that begins with:
+ ///
+ /// ```text
+ /// File {
+ /// shebang: None,
+ /// attrs: [],
+ /// items: [
+ /// Use(
+ /// ItemUse {
+ /// attrs: [],
+ /// vis: Inherited,
+ /// use_token: Use,
+ /// leading_colon: None,
+ /// tree: Path(
+ /// UsePath {
+ /// ident: Ident(
+ /// std,
+ /// ),
+ /// colon2_token: Colon2,
+ /// tree: Name(
+ /// UseName {
+ /// ident: Ident(
+ /// env,
+ /// ),
+ /// },
+ /// ),
+ /// },
+ /// ),
+ /// semi_token: Semi,
+ /// },
+ /// ),
+ /// ...
+ /// ```
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct File {
+ pub shebang: Option<String>,
+ pub attrs: Vec<Attribute>,
+ pub items: Vec<Item>,
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::Attribute;
+ use crate::error::Result;
+ use crate::file::File;
+ use crate::parse::{Parse, ParseStream};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for File {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(File {
+ shebang: None,
+ attrs: input.call(Attribute::parse_inner)?,
+ items: {
+ let mut items = Vec::new();
+ while !input.is_empty() {
+ items.push(input.parse()?);
+ }
+ items
+ },
+ })
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::attr::FilterAttrs;
+ use crate::file::File;
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for File {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.items);
+ }
+ }
+}
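[Editor's note, not part of the vendored file: since the Parse impl above always sets `shebang: None`, a leading `#!` line is only recognized when going through the `parse_file` entry point. A usage sketch, assuming syn 2.x with the "full" and "parsing" features:]

```rust
fn main() {
    // `parse_file` accepts a whole source file, including an optional shebang
    // line, and returns the `syn::File` AST defined above.
    let src = "#!/usr/bin/env rust-script\nfn main() { println!(\"hi\"); }\n";
    let file: syn::File = syn::parse_file(src).expect("unable to parse file");
    assert_eq!(file.items.len(), 1);
    // The shebang is stripped from the items and recorded here instead.
    assert!(file.shebang.is_some());
}
```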
diff --git a/rust/hw/char/pl011/vendor/syn/src/fixup.rs b/rust/hw/char/pl011/vendor/syn/src/fixup.rs
new file mode 100644
index 0000000000..5407c9fdf3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/fixup.rs
@@ -0,0 +1,218 @@
+use crate::classify;
+use crate::expr::Expr;
+use crate::precedence::Precedence;
+
+pub(crate) struct FixupContext {
+ // Print expression such that it can be parsed back as a statement
+ // consisting of the original expression.
+ //
+ // The effect of this is for binary operators in statement position to set
+ // `leftmost_subexpression_in_stmt` when printing their left-hand operand.
+ //
+ // (match x {}) - 1; // match needs parens when LHS of binary operator
+ //
+ // match x {}; // not when its own statement
+ //
+ stmt: bool,
+
+ // This is the difference between:
+ //
+ // (match x {}) - 1; // subexpression needs parens
+ //
+ // let _ = match x {} - 1; // no parens
+ //
+ // There are 3 distinguishable contexts in which `print_expr` might be
+ // called with the expression `$match` as its argument, where `$match`
+ // represents an expression of kind `ExprKind::Match`:
+ //
+ // - stmt=false leftmost_subexpression_in_stmt=false
+ //
+ // Example: `let _ = $match - 1;`
+ //
+ // No parentheses required.
+ //
+ // - stmt=false leftmost_subexpression_in_stmt=true
+ //
+ // Example: `$match - 1;`
+ //
+ // Must parenthesize `($match)`, otherwise parsing back the output as a
+ // statement would terminate the statement after the closing brace of
+ // the match, parsing `-1;` as a separate statement.
+ //
+ // - stmt=true leftmost_subexpression_in_stmt=false
+ //
+ // Example: `$match;`
+ //
+ // No parentheses required.
+ leftmost_subexpression_in_stmt: bool,
+
+ // Print expression such that it can be parsed as a match arm.
+ //
+ // This is almost equivalent to `stmt`, but the grammar diverges a tiny bit
+ // between statements and match arms when it comes to braced macro calls.
+ // Macro calls with brace delimiter terminate a statement without a
+ // semicolon, but do not terminate a match-arm without comma.
+ //
+ // m! {} - 1; // two statements: a macro call followed by -1 literal
+ //
+ // match () {
+ // _ => m! {} - 1, // binary subtraction operator
+ // }
+ //
+ match_arm: bool,
+
+ // This is almost equivalent to `leftmost_subexpression_in_stmt`, other than
+ // for braced macro calls.
+ //
+ // If we have `m! {} - 1` as an expression, the leftmost subexpression
+ // `m! {}` will need to be parenthesized in the statement case but not the
+ // match-arm case.
+ //
+ // (m! {}) - 1; // subexpression needs parens
+ //
+ // match () {
+ // _ => m! {} - 1, // no parens
+ // }
+ //
+ leftmost_subexpression_in_match_arm: bool,
+
+ // This is the difference between:
+ //
+ // if let _ = (Struct {}) {} // needs parens
+ //
+ // match () {
+ // () if let _ = Struct {} => {} // no parens
+ // }
+ //
+ parenthesize_exterior_struct_lit: bool,
+}
+
+impl FixupContext {
+ /// The default amount of fixing is minimal fixing. Fixups should be turned
+ /// on in a targeted fashion where needed.
+ pub const NONE: Self = FixupContext {
+ stmt: false,
+ leftmost_subexpression_in_stmt: false,
+ match_arm: false,
+ leftmost_subexpression_in_match_arm: false,
+ parenthesize_exterior_struct_lit: false,
+ };
+
+ /// Create the initial fixup for printing an expression in statement
+ /// position.
+ pub fn new_stmt() -> Self {
+ FixupContext {
+ stmt: true,
+ ..FixupContext::NONE
+ }
+ }
+
+ /// Create the initial fixup for printing an expression as the right-hand
+ /// side of a match arm.
+ pub fn new_match_arm() -> Self {
+ FixupContext {
+ match_arm: true,
+ ..FixupContext::NONE
+ }
+ }
+
+ /// Create the initial fixup for printing an expression as the "condition"
+ /// of an `if` or `while`. There are a few other positions which are
+ /// grammatically equivalent and also use this, such as the iterator
+ /// expression in `for` and the scrutinee in `match`.
+ pub fn new_condition() -> Self {
+ FixupContext {
+ parenthesize_exterior_struct_lit: true,
+ ..FixupContext::NONE
+ }
+ }
+
+ /// Transform this fixup into the one that should apply when printing the
+ /// leftmost subexpression of the current expression.
+ ///
+ /// The leftmost subexpression is any subexpression that has the same first
+ /// token as the current expression, but has a different last token.
+ ///
+ /// For example in `$a + $b` and `$a.method()`, the subexpression `$a` is a
+ /// leftmost subexpression.
+ ///
+ /// Not every expression has a leftmost subexpression. For example neither
+ /// `-$a` nor `[$a]` have one.
+ pub fn leftmost_subexpression(self) -> Self {
+ FixupContext {
+ stmt: false,
+ leftmost_subexpression_in_stmt: self.stmt || self.leftmost_subexpression_in_stmt,
+ match_arm: false,
+ leftmost_subexpression_in_match_arm: self.match_arm
+ || self.leftmost_subexpression_in_match_arm,
+ ..self
+ }
+ }
+
+ /// Transform this fixup into the one that should apply when printing a
+ /// leftmost subexpression followed by a `.` or `?` token, which confer
+ /// different statement boundary rules compared to other leftmost
+ /// subexpressions.
+ pub fn leftmost_subexpression_with_dot(self) -> Self {
+ FixupContext {
+ stmt: self.stmt || self.leftmost_subexpression_in_stmt,
+ leftmost_subexpression_in_stmt: false,
+ match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm,
+ leftmost_subexpression_in_match_arm: false,
+ ..self
+ }
+ }
+
+ /// Transform this fixup into the one that should apply when printing any
+ /// subexpression that is neither a leftmost subexpression nor surrounded in
+ /// delimiters.
+ ///
+ /// This is for any subexpression that has a different first token than the
+ /// current expression, and is not surrounded by a paren/bracket/brace. For
+ /// example the `$b` in `$a + $b` and `-$b`, but not the one in `[$b]` or
+ /// `$a.f($b)`.
+ pub fn subsequent_subexpression(self) -> Self {
+ FixupContext {
+ stmt: false,
+ leftmost_subexpression_in_stmt: false,
+ match_arm: false,
+ leftmost_subexpression_in_match_arm: false,
+ ..self
+ }
+ }
+
+ /// Determine whether parentheses are needed around the given expression to
+ /// head off an unintended statement boundary.
+ ///
+ /// The documentation on `FixupContext::leftmost_subexpression_in_stmt` has
+ /// examples.
+ pub fn would_cause_statement_boundary(self, expr: &Expr) -> bool {
+ (self.leftmost_subexpression_in_stmt && !classify::requires_semi_to_be_stmt(expr))
+ || (self.leftmost_subexpression_in_match_arm
+ && !classify::requires_comma_to_be_match_arm(expr))
+ }
+
+ /// Determine whether parentheses are needed around the given `let`
+ /// scrutinee.
+ ///
+ /// In `if let _ = $e {}`, some examples of `$e` that would need parentheses
+ /// are:
+ ///
+ /// - `Struct {}.f()`, because otherwise the `{` would be misinterpreted
+ /// as the opening of the if's then-block.
+ ///
+ /// - `true && false`, because otherwise this would be misinterpreted as a
+ /// "let chain".
+ pub fn needs_group_as_let_scrutinee(self, expr: &Expr) -> bool {
+ self.parenthesize_exterior_struct_lit && classify::confusable_with_adjacent_block(expr)
+ || Precedence::of_rhs(expr) <= Precedence::And
+ }
+}
+
+impl Copy for FixupContext {}
+
+impl Clone for FixupContext {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
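[Editor's note, not part of the vendored file: a short sketch of the behaviour the `leftmost_subexpression_in_stmt` flag above exists for. A `match` used as the left operand of a binary operator must gain parentheses when printed in statement position, otherwise the output would re-parse as two statements. Assumes syn 2.x with the "full", "parsing" and "printing" features plus the quote crate:]

```rust
use quote::ToTokens;
use syn::{parse_quote, Expr, Stmt};

fn main() {
    // In expression position this parses as `(match x {}) - 1`.
    let expr: Expr = parse_quote!(match x {} - 1);
    // Printed in statement position, the printer must parenthesize the
    // `match`; otherwise the text would re-parse as `match x {};` followed
    // by the separate statement `-1;`.
    let stmt = Stmt::Expr(expr, Some(Default::default()));
    println!("{}", stmt.to_token_stream()); // prints roughly: (match x {}) - 1 ;
}
```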
diff --git a/rust/hw/char/pl011/vendor/syn/src/gen/clone.rs b/rust/hw/char/pl011/vendor/syn/src/gen/clone.rs
new file mode 100644
index 0000000000..de8d574516
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/gen/clone.rs
@@ -0,0 +1,2209 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Abi {
+ fn clone(&self) -> Self {
+ crate::Abi {
+ extern_token: self.extern_token.clone(),
+ name: self.name.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::AngleBracketedGenericArguments {
+ fn clone(&self) -> Self {
+ crate::AngleBracketedGenericArguments {
+ colon2_token: self.colon2_token.clone(),
+ lt_token: self.lt_token.clone(),
+ args: self.args.clone(),
+ gt_token: self.gt_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Arm {
+ fn clone(&self) -> Self {
+ crate::Arm {
+ attrs: self.attrs.clone(),
+ pat: self.pat.clone(),
+ guard: self.guard.clone(),
+ fat_arrow_token: self.fat_arrow_token.clone(),
+ body: self.body.clone(),
+ comma: self.comma.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::AssocConst {
+ fn clone(&self) -> Self {
+ crate::AssocConst {
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ eq_token: self.eq_token.clone(),
+ value: self.value.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::AssocType {
+ fn clone(&self) -> Self {
+ crate::AssocType {
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ eq_token: self.eq_token.clone(),
+ ty: self.ty.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Copy for crate::AttrStyle {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::AttrStyle {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Attribute {
+ fn clone(&self) -> Self {
+ crate::Attribute {
+ pound_token: self.pound_token.clone(),
+ style: self.style.clone(),
+ bracket_token: self.bracket_token.clone(),
+ meta: self.meta.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::BareFnArg {
+ fn clone(&self) -> Self {
+ crate::BareFnArg {
+ attrs: self.attrs.clone(),
+ name: self.name.clone(),
+ ty: self.ty.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::BareVariadic {
+ fn clone(&self) -> Self {
+ crate::BareVariadic {
+ attrs: self.attrs.clone(),
+ name: self.name.clone(),
+ dots: self.dots.clone(),
+ comma: self.comma.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Copy for crate::BinOp {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::BinOp {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Block {
+ fn clone(&self) -> Self {
+ crate::Block {
+ brace_token: self.brace_token.clone(),
+ stmts: self.stmts.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::BoundLifetimes {
+ fn clone(&self) -> Self {
+ crate::BoundLifetimes {
+ for_token: self.for_token.clone(),
+ lt_token: self.lt_token.clone(),
+ lifetimes: self.lifetimes.clone(),
+ gt_token: self.gt_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ConstParam {
+ fn clone(&self) -> Self {
+ crate::ConstParam {
+ attrs: self.attrs.clone(),
+ const_token: self.const_token.clone(),
+ ident: self.ident.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ eq_token: self.eq_token.clone(),
+ default: self.default.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Constraint {
+ fn clone(&self) -> Self {
+ crate::Constraint {
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ colon_token: self.colon_token.clone(),
+ bounds: self.bounds.clone(),
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Data {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Data::Struct(v0) => crate::Data::Struct(v0.clone()),
+ crate::Data::Enum(v0) => crate::Data::Enum(v0.clone()),
+ crate::Data::Union(v0) => crate::Data::Union(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::DataEnum {
+ fn clone(&self) -> Self {
+ crate::DataEnum {
+ enum_token: self.enum_token.clone(),
+ brace_token: self.brace_token.clone(),
+ variants: self.variants.clone(),
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::DataStruct {
+ fn clone(&self) -> Self {
+ crate::DataStruct {
+ struct_token: self.struct_token.clone(),
+ fields: self.fields.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::DataUnion {
+ fn clone(&self) -> Self {
+ crate::DataUnion {
+ union_token: self.union_token.clone(),
+ fields: self.fields.clone(),
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::DeriveInput {
+ fn clone(&self) -> Self {
+ crate::DeriveInput {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ data: self.data.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Expr {
+ fn clone(&self) -> Self {
+ match self {
+ #[cfg(feature = "full")]
+ crate::Expr::Array(v0) => crate::Expr::Array(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Assign(v0) => crate::Expr::Assign(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Async(v0) => crate::Expr::Async(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Await(v0) => crate::Expr::Await(v0.clone()),
+ crate::Expr::Binary(v0) => crate::Expr::Binary(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Block(v0) => crate::Expr::Block(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Break(v0) => crate::Expr::Break(v0.clone()),
+ crate::Expr::Call(v0) => crate::Expr::Call(v0.clone()),
+ crate::Expr::Cast(v0) => crate::Expr::Cast(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Closure(v0) => crate::Expr::Closure(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Const(v0) => crate::Expr::Const(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Continue(v0) => crate::Expr::Continue(v0.clone()),
+ crate::Expr::Field(v0) => crate::Expr::Field(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::ForLoop(v0) => crate::Expr::ForLoop(v0.clone()),
+ crate::Expr::Group(v0) => crate::Expr::Group(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::If(v0) => crate::Expr::If(v0.clone()),
+ crate::Expr::Index(v0) => crate::Expr::Index(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Infer(v0) => crate::Expr::Infer(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Let(v0) => crate::Expr::Let(v0.clone()),
+ crate::Expr::Lit(v0) => crate::Expr::Lit(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Loop(v0) => crate::Expr::Loop(v0.clone()),
+ crate::Expr::Macro(v0) => crate::Expr::Macro(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Match(v0) => crate::Expr::Match(v0.clone()),
+ crate::Expr::MethodCall(v0) => crate::Expr::MethodCall(v0.clone()),
+ crate::Expr::Paren(v0) => crate::Expr::Paren(v0.clone()),
+ crate::Expr::Path(v0) => crate::Expr::Path(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Range(v0) => crate::Expr::Range(v0.clone()),
+ crate::Expr::Reference(v0) => crate::Expr::Reference(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Repeat(v0) => crate::Expr::Repeat(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Return(v0) => crate::Expr::Return(v0.clone()),
+ crate::Expr::Struct(v0) => crate::Expr::Struct(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Try(v0) => crate::Expr::Try(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::TryBlock(v0) => crate::Expr::TryBlock(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Tuple(v0) => crate::Expr::Tuple(v0.clone()),
+ crate::Expr::Unary(v0) => crate::Expr::Unary(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Unsafe(v0) => crate::Expr::Unsafe(v0.clone()),
+ crate::Expr::Verbatim(v0) => crate::Expr::Verbatim(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::While(v0) => crate::Expr::While(v0.clone()),
+ #[cfg(feature = "full")]
+ crate::Expr::Yield(v0) => crate::Expr::Yield(v0.clone()),
+ #[cfg(not(feature = "full"))]
+ _ => unreachable!(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprArray {
+ fn clone(&self) -> Self {
+ crate::ExprArray {
+ attrs: self.attrs.clone(),
+ bracket_token: self.bracket_token.clone(),
+ elems: self.elems.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprAssign {
+ fn clone(&self) -> Self {
+ crate::ExprAssign {
+ attrs: self.attrs.clone(),
+ left: self.left.clone(),
+ eq_token: self.eq_token.clone(),
+ right: self.right.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprAsync {
+ fn clone(&self) -> Self {
+ crate::ExprAsync {
+ attrs: self.attrs.clone(),
+ async_token: self.async_token.clone(),
+ capture: self.capture.clone(),
+ block: self.block.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprAwait {
+ fn clone(&self) -> Self {
+ crate::ExprAwait {
+ attrs: self.attrs.clone(),
+ base: self.base.clone(),
+ dot_token: self.dot_token.clone(),
+ await_token: self.await_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprBinary {
+ fn clone(&self) -> Self {
+ crate::ExprBinary {
+ attrs: self.attrs.clone(),
+ left: self.left.clone(),
+ op: self.op.clone(),
+ right: self.right.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprBlock {
+ fn clone(&self) -> Self {
+ crate::ExprBlock {
+ attrs: self.attrs.clone(),
+ label: self.label.clone(),
+ block: self.block.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprBreak {
+ fn clone(&self) -> Self {
+ crate::ExprBreak {
+ attrs: self.attrs.clone(),
+ break_token: self.break_token.clone(),
+ label: self.label.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprCall {
+ fn clone(&self) -> Self {
+ crate::ExprCall {
+ attrs: self.attrs.clone(),
+ func: self.func.clone(),
+ paren_token: self.paren_token.clone(),
+ args: self.args.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprCast {
+ fn clone(&self) -> Self {
+ crate::ExprCast {
+ attrs: self.attrs.clone(),
+ expr: self.expr.clone(),
+ as_token: self.as_token.clone(),
+ ty: self.ty.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprClosure {
+ fn clone(&self) -> Self {
+ crate::ExprClosure {
+ attrs: self.attrs.clone(),
+ lifetimes: self.lifetimes.clone(),
+ constness: self.constness.clone(),
+ movability: self.movability.clone(),
+ asyncness: self.asyncness.clone(),
+ capture: self.capture.clone(),
+ or1_token: self.or1_token.clone(),
+ inputs: self.inputs.clone(),
+ or2_token: self.or2_token.clone(),
+ output: self.output.clone(),
+ body: self.body.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprConst {
+ fn clone(&self) -> Self {
+ crate::ExprConst {
+ attrs: self.attrs.clone(),
+ const_token: self.const_token.clone(),
+ block: self.block.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprContinue {
+ fn clone(&self) -> Self {
+ crate::ExprContinue {
+ attrs: self.attrs.clone(),
+ continue_token: self.continue_token.clone(),
+ label: self.label.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprField {
+ fn clone(&self) -> Self {
+ crate::ExprField {
+ attrs: self.attrs.clone(),
+ base: self.base.clone(),
+ dot_token: self.dot_token.clone(),
+ member: self.member.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprForLoop {
+ fn clone(&self) -> Self {
+ crate::ExprForLoop {
+ attrs: self.attrs.clone(),
+ label: self.label.clone(),
+ for_token: self.for_token.clone(),
+ pat: self.pat.clone(),
+ in_token: self.in_token.clone(),
+ expr: self.expr.clone(),
+ body: self.body.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprGroup {
+ fn clone(&self) -> Self {
+ crate::ExprGroup {
+ attrs: self.attrs.clone(),
+ group_token: self.group_token.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprIf {
+ fn clone(&self) -> Self {
+ crate::ExprIf {
+ attrs: self.attrs.clone(),
+ if_token: self.if_token.clone(),
+ cond: self.cond.clone(),
+ then_branch: self.then_branch.clone(),
+ else_branch: self.else_branch.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprIndex {
+ fn clone(&self) -> Self {
+ crate::ExprIndex {
+ attrs: self.attrs.clone(),
+ expr: self.expr.clone(),
+ bracket_token: self.bracket_token.clone(),
+ index: self.index.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprInfer {
+ fn clone(&self) -> Self {
+ crate::ExprInfer {
+ attrs: self.attrs.clone(),
+ underscore_token: self.underscore_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprLet {
+ fn clone(&self) -> Self {
+ crate::ExprLet {
+ attrs: self.attrs.clone(),
+ let_token: self.let_token.clone(),
+ pat: self.pat.clone(),
+ eq_token: self.eq_token.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprLit {
+ fn clone(&self) -> Self {
+ crate::ExprLit {
+ attrs: self.attrs.clone(),
+ lit: self.lit.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprLoop {
+ fn clone(&self) -> Self {
+ crate::ExprLoop {
+ attrs: self.attrs.clone(),
+ label: self.label.clone(),
+ loop_token: self.loop_token.clone(),
+ body: self.body.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprMacro {
+ fn clone(&self) -> Self {
+ crate::ExprMacro {
+ attrs: self.attrs.clone(),
+ mac: self.mac.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprMatch {
+ fn clone(&self) -> Self {
+ crate::ExprMatch {
+ attrs: self.attrs.clone(),
+ match_token: self.match_token.clone(),
+ expr: self.expr.clone(),
+ brace_token: self.brace_token.clone(),
+ arms: self.arms.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprMethodCall {
+ fn clone(&self) -> Self {
+ crate::ExprMethodCall {
+ attrs: self.attrs.clone(),
+ receiver: self.receiver.clone(),
+ dot_token: self.dot_token.clone(),
+ method: self.method.clone(),
+ turbofish: self.turbofish.clone(),
+ paren_token: self.paren_token.clone(),
+ args: self.args.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprParen {
+ fn clone(&self) -> Self {
+ crate::ExprParen {
+ attrs: self.attrs.clone(),
+ paren_token: self.paren_token.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprPath {
+ fn clone(&self) -> Self {
+ crate::ExprPath {
+ attrs: self.attrs.clone(),
+ qself: self.qself.clone(),
+ path: self.path.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprRange {
+ fn clone(&self) -> Self {
+ crate::ExprRange {
+ attrs: self.attrs.clone(),
+ start: self.start.clone(),
+ limits: self.limits.clone(),
+ end: self.end.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprReference {
+ fn clone(&self) -> Self {
+ crate::ExprReference {
+ attrs: self.attrs.clone(),
+ and_token: self.and_token.clone(),
+ mutability: self.mutability.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprRepeat {
+ fn clone(&self) -> Self {
+ crate::ExprRepeat {
+ attrs: self.attrs.clone(),
+ bracket_token: self.bracket_token.clone(),
+ expr: self.expr.clone(),
+ semi_token: self.semi_token.clone(),
+ len: self.len.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprReturn {
+ fn clone(&self) -> Self {
+ crate::ExprReturn {
+ attrs: self.attrs.clone(),
+ return_token: self.return_token.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprStruct {
+ fn clone(&self) -> Self {
+ crate::ExprStruct {
+ attrs: self.attrs.clone(),
+ qself: self.qself.clone(),
+ path: self.path.clone(),
+ brace_token: self.brace_token.clone(),
+ fields: self.fields.clone(),
+ dot2_token: self.dot2_token.clone(),
+ rest: self.rest.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprTry {
+ fn clone(&self) -> Self {
+ crate::ExprTry {
+ attrs: self.attrs.clone(),
+ expr: self.expr.clone(),
+ question_token: self.question_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprTryBlock {
+ fn clone(&self) -> Self {
+ crate::ExprTryBlock {
+ attrs: self.attrs.clone(),
+ try_token: self.try_token.clone(),
+ block: self.block.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprTuple {
+ fn clone(&self) -> Self {
+ crate::ExprTuple {
+ attrs: self.attrs.clone(),
+ paren_token: self.paren_token.clone(),
+ elems: self.elems.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprUnary {
+ fn clone(&self) -> Self {
+ crate::ExprUnary {
+ attrs: self.attrs.clone(),
+ op: self.op.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprUnsafe {
+ fn clone(&self) -> Self {
+ crate::ExprUnsafe {
+ attrs: self.attrs.clone(),
+ unsafe_token: self.unsafe_token.clone(),
+ block: self.block.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprWhile {
+ fn clone(&self) -> Self {
+ crate::ExprWhile {
+ attrs: self.attrs.clone(),
+ label: self.label.clone(),
+ while_token: self.while_token.clone(),
+ cond: self.cond.clone(),
+ body: self.body.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ExprYield {
+ fn clone(&self) -> Self {
+ crate::ExprYield {
+ attrs: self.attrs.clone(),
+ yield_token: self.yield_token.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Field {
+ fn clone(&self) -> Self {
+ crate::Field {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ mutability: self.mutability.clone(),
+ ident: self.ident.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::FieldMutability {
+ fn clone(&self) -> Self {
+ match self {
+ crate::FieldMutability::None => crate::FieldMutability::None,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::FieldPat {
+ fn clone(&self) -> Self {
+ crate::FieldPat {
+ attrs: self.attrs.clone(),
+ member: self.member.clone(),
+ colon_token: self.colon_token.clone(),
+ pat: self.pat.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::FieldValue {
+ fn clone(&self) -> Self {
+ crate::FieldValue {
+ attrs: self.attrs.clone(),
+ member: self.member.clone(),
+ colon_token: self.colon_token.clone(),
+ expr: self.expr.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Fields {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Fields::Named(v0) => crate::Fields::Named(v0.clone()),
+ crate::Fields::Unnamed(v0) => crate::Fields::Unnamed(v0.clone()),
+ crate::Fields::Unit => crate::Fields::Unit,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::FieldsNamed {
+ fn clone(&self) -> Self {
+ crate::FieldsNamed {
+ brace_token: self.brace_token.clone(),
+ named: self.named.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::FieldsUnnamed {
+ fn clone(&self) -> Self {
+ crate::FieldsUnnamed {
+ paren_token: self.paren_token.clone(),
+ unnamed: self.unnamed.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::File {
+ fn clone(&self) -> Self {
+ crate::File {
+ shebang: self.shebang.clone(),
+ attrs: self.attrs.clone(),
+ items: self.items.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::FnArg {
+ fn clone(&self) -> Self {
+ match self {
+ crate::FnArg::Receiver(v0) => crate::FnArg::Receiver(v0.clone()),
+ crate::FnArg::Typed(v0) => crate::FnArg::Typed(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ForeignItem {
+ fn clone(&self) -> Self {
+ match self {
+ crate::ForeignItem::Fn(v0) => crate::ForeignItem::Fn(v0.clone()),
+ crate::ForeignItem::Static(v0) => crate::ForeignItem::Static(v0.clone()),
+ crate::ForeignItem::Type(v0) => crate::ForeignItem::Type(v0.clone()),
+ crate::ForeignItem::Macro(v0) => crate::ForeignItem::Macro(v0.clone()),
+ crate::ForeignItem::Verbatim(v0) => crate::ForeignItem::Verbatim(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ForeignItemFn {
+ fn clone(&self) -> Self {
+ crate::ForeignItemFn {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ sig: self.sig.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ForeignItemMacro {
+ fn clone(&self) -> Self {
+ crate::ForeignItemMacro {
+ attrs: self.attrs.clone(),
+ mac: self.mac.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ForeignItemStatic {
+ fn clone(&self) -> Self {
+ crate::ForeignItemStatic {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ static_token: self.static_token.clone(),
+ mutability: self.mutability.clone(),
+ ident: self.ident.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ForeignItemType {
+ fn clone(&self) -> Self {
+ crate::ForeignItemType {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ type_token: self.type_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::GenericArgument {
+ fn clone(&self) -> Self {
+ match self {
+ crate::GenericArgument::Lifetime(v0) => {
+ crate::GenericArgument::Lifetime(v0.clone())
+ }
+ crate::GenericArgument::Type(v0) => crate::GenericArgument::Type(v0.clone()),
+ crate::GenericArgument::Const(v0) => {
+ crate::GenericArgument::Const(v0.clone())
+ }
+ crate::GenericArgument::AssocType(v0) => {
+ crate::GenericArgument::AssocType(v0.clone())
+ }
+ crate::GenericArgument::AssocConst(v0) => {
+ crate::GenericArgument::AssocConst(v0.clone())
+ }
+ crate::GenericArgument::Constraint(v0) => {
+ crate::GenericArgument::Constraint(v0.clone())
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::GenericParam {
+ fn clone(&self) -> Self {
+ match self {
+ crate::GenericParam::Lifetime(v0) => {
+ crate::GenericParam::Lifetime(v0.clone())
+ }
+ crate::GenericParam::Type(v0) => crate::GenericParam::Type(v0.clone()),
+ crate::GenericParam::Const(v0) => crate::GenericParam::Const(v0.clone()),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Generics {
+ fn clone(&self) -> Self {
+ crate::Generics {
+ lt_token: self.lt_token.clone(),
+ params: self.params.clone(),
+ gt_token: self.gt_token.clone(),
+ where_clause: self.where_clause.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ImplItem {
+ fn clone(&self) -> Self {
+ match self {
+ crate::ImplItem::Const(v0) => crate::ImplItem::Const(v0.clone()),
+ crate::ImplItem::Fn(v0) => crate::ImplItem::Fn(v0.clone()),
+ crate::ImplItem::Type(v0) => crate::ImplItem::Type(v0.clone()),
+ crate::ImplItem::Macro(v0) => crate::ImplItem::Macro(v0.clone()),
+ crate::ImplItem::Verbatim(v0) => crate::ImplItem::Verbatim(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ImplItemConst {
+ fn clone(&self) -> Self {
+ crate::ImplItemConst {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ defaultness: self.defaultness.clone(),
+ const_token: self.const_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ eq_token: self.eq_token.clone(),
+ expr: self.expr.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ImplItemFn {
+ fn clone(&self) -> Self {
+ crate::ImplItemFn {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ defaultness: self.defaultness.clone(),
+ sig: self.sig.clone(),
+ block: self.block.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ImplItemMacro {
+ fn clone(&self) -> Self {
+ crate::ImplItemMacro {
+ attrs: self.attrs.clone(),
+ mac: self.mac.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ImplItemType {
+ fn clone(&self) -> Self {
+ crate::ImplItemType {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ defaultness: self.defaultness.clone(),
+ type_token: self.type_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ eq_token: self.eq_token.clone(),
+ ty: self.ty.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ImplRestriction {
+ fn clone(&self) -> Self {
+ match *self {}
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Index {
+ fn clone(&self) -> Self {
+ crate::Index {
+ index: self.index.clone(),
+ span: self.span.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Item {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Item::Const(v0) => crate::Item::Const(v0.clone()),
+ crate::Item::Enum(v0) => crate::Item::Enum(v0.clone()),
+ crate::Item::ExternCrate(v0) => crate::Item::ExternCrate(v0.clone()),
+ crate::Item::Fn(v0) => crate::Item::Fn(v0.clone()),
+ crate::Item::ForeignMod(v0) => crate::Item::ForeignMod(v0.clone()),
+ crate::Item::Impl(v0) => crate::Item::Impl(v0.clone()),
+ crate::Item::Macro(v0) => crate::Item::Macro(v0.clone()),
+ crate::Item::Mod(v0) => crate::Item::Mod(v0.clone()),
+ crate::Item::Static(v0) => crate::Item::Static(v0.clone()),
+ crate::Item::Struct(v0) => crate::Item::Struct(v0.clone()),
+ crate::Item::Trait(v0) => crate::Item::Trait(v0.clone()),
+ crate::Item::TraitAlias(v0) => crate::Item::TraitAlias(v0.clone()),
+ crate::Item::Type(v0) => crate::Item::Type(v0.clone()),
+ crate::Item::Union(v0) => crate::Item::Union(v0.clone()),
+ crate::Item::Use(v0) => crate::Item::Use(v0.clone()),
+ crate::Item::Verbatim(v0) => crate::Item::Verbatim(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemConst {
+ fn clone(&self) -> Self {
+ crate::ItemConst {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ const_token: self.const_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ eq_token: self.eq_token.clone(),
+ expr: self.expr.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemEnum {
+ fn clone(&self) -> Self {
+ crate::ItemEnum {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ enum_token: self.enum_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ brace_token: self.brace_token.clone(),
+ variants: self.variants.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemExternCrate {
+ fn clone(&self) -> Self {
+ crate::ItemExternCrate {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ extern_token: self.extern_token.clone(),
+ crate_token: self.crate_token.clone(),
+ ident: self.ident.clone(),
+ rename: self.rename.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemFn {
+ fn clone(&self) -> Self {
+ crate::ItemFn {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ sig: self.sig.clone(),
+ block: self.block.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemForeignMod {
+ fn clone(&self) -> Self {
+ crate::ItemForeignMod {
+ attrs: self.attrs.clone(),
+ unsafety: self.unsafety.clone(),
+ abi: self.abi.clone(),
+ brace_token: self.brace_token.clone(),
+ items: self.items.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemImpl {
+ fn clone(&self) -> Self {
+ crate::ItemImpl {
+ attrs: self.attrs.clone(),
+ defaultness: self.defaultness.clone(),
+ unsafety: self.unsafety.clone(),
+ impl_token: self.impl_token.clone(),
+ generics: self.generics.clone(),
+ trait_: self.trait_.clone(),
+ self_ty: self.self_ty.clone(),
+ brace_token: self.brace_token.clone(),
+ items: self.items.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemMacro {
+ fn clone(&self) -> Self {
+ crate::ItemMacro {
+ attrs: self.attrs.clone(),
+ ident: self.ident.clone(),
+ mac: self.mac.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemMod {
+ fn clone(&self) -> Self {
+ crate::ItemMod {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ unsafety: self.unsafety.clone(),
+ mod_token: self.mod_token.clone(),
+ ident: self.ident.clone(),
+ content: self.content.clone(),
+ semi: self.semi.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemStatic {
+ fn clone(&self) -> Self {
+ crate::ItemStatic {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ static_token: self.static_token.clone(),
+ mutability: self.mutability.clone(),
+ ident: self.ident.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ eq_token: self.eq_token.clone(),
+ expr: self.expr.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemStruct {
+ fn clone(&self) -> Self {
+ crate::ItemStruct {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ struct_token: self.struct_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ fields: self.fields.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemTrait {
+ fn clone(&self) -> Self {
+ crate::ItemTrait {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ unsafety: self.unsafety.clone(),
+ auto_token: self.auto_token.clone(),
+ restriction: self.restriction.clone(),
+ trait_token: self.trait_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ colon_token: self.colon_token.clone(),
+ supertraits: self.supertraits.clone(),
+ brace_token: self.brace_token.clone(),
+ items: self.items.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemTraitAlias {
+ fn clone(&self) -> Self {
+ crate::ItemTraitAlias {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ trait_token: self.trait_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ eq_token: self.eq_token.clone(),
+ bounds: self.bounds.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemType {
+ fn clone(&self) -> Self {
+ crate::ItemType {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ type_token: self.type_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ eq_token: self.eq_token.clone(),
+ ty: self.ty.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemUnion {
+ fn clone(&self) -> Self {
+ crate::ItemUnion {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ union_token: self.union_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ fields: self.fields.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ItemUse {
+ fn clone(&self) -> Self {
+ crate::ItemUse {
+ attrs: self.attrs.clone(),
+ vis: self.vis.clone(),
+ use_token: self.use_token.clone(),
+ leading_colon: self.leading_colon.clone(),
+ tree: self.tree.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Label {
+ fn clone(&self) -> Self {
+ crate::Label {
+ name: self.name.clone(),
+ colon_token: self.colon_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::LifetimeParam {
+ fn clone(&self) -> Self {
+ crate::LifetimeParam {
+ attrs: self.attrs.clone(),
+ lifetime: self.lifetime.clone(),
+ colon_token: self.colon_token.clone(),
+ bounds: self.bounds.clone(),
+ }
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Lit {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Lit::Str(v0) => crate::Lit::Str(v0.clone()),
+ crate::Lit::ByteStr(v0) => crate::Lit::ByteStr(v0.clone()),
+ crate::Lit::CStr(v0) => crate::Lit::CStr(v0.clone()),
+ crate::Lit::Byte(v0) => crate::Lit::Byte(v0.clone()),
+ crate::Lit::Char(v0) => crate::Lit::Char(v0.clone()),
+ crate::Lit::Int(v0) => crate::Lit::Int(v0.clone()),
+ crate::Lit::Float(v0) => crate::Lit::Float(v0.clone()),
+ crate::Lit::Bool(v0) => crate::Lit::Bool(v0.clone()),
+ crate::Lit::Verbatim(v0) => crate::Lit::Verbatim(v0.clone()),
+ }
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::LitBool {
+ fn clone(&self) -> Self {
+ crate::LitBool {
+ value: self.value.clone(),
+ span: self.span.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Local {
+ fn clone(&self) -> Self {
+ crate::Local {
+ attrs: self.attrs.clone(),
+ let_token: self.let_token.clone(),
+ pat: self.pat.clone(),
+ init: self.init.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::LocalInit {
+ fn clone(&self) -> Self {
+ crate::LocalInit {
+ eq_token: self.eq_token.clone(),
+ expr: self.expr.clone(),
+ diverge: self.diverge.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Macro {
+ fn clone(&self) -> Self {
+ crate::Macro {
+ path: self.path.clone(),
+ bang_token: self.bang_token.clone(),
+ delimiter: self.delimiter.clone(),
+ tokens: self.tokens.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::MacroDelimiter {
+ fn clone(&self) -> Self {
+ match self {
+ crate::MacroDelimiter::Paren(v0) => crate::MacroDelimiter::Paren(v0.clone()),
+ crate::MacroDelimiter::Brace(v0) => crate::MacroDelimiter::Brace(v0.clone()),
+ crate::MacroDelimiter::Bracket(v0) => {
+ crate::MacroDelimiter::Bracket(v0.clone())
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Member {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Member::Named(v0) => crate::Member::Named(v0.clone()),
+ crate::Member::Unnamed(v0) => crate::Member::Unnamed(v0.clone()),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Meta {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Meta::Path(v0) => crate::Meta::Path(v0.clone()),
+ crate::Meta::List(v0) => crate::Meta::List(v0.clone()),
+ crate::Meta::NameValue(v0) => crate::Meta::NameValue(v0.clone()),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::MetaList {
+ fn clone(&self) -> Self {
+ crate::MetaList {
+ path: self.path.clone(),
+ delimiter: self.delimiter.clone(),
+ tokens: self.tokens.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::MetaNameValue {
+ fn clone(&self) -> Self {
+ crate::MetaNameValue {
+ path: self.path.clone(),
+ eq_token: self.eq_token.clone(),
+ value: self.value.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ParenthesizedGenericArguments {
+ fn clone(&self) -> Self {
+ crate::ParenthesizedGenericArguments {
+ paren_token: self.paren_token.clone(),
+ inputs: self.inputs.clone(),
+ output: self.output.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Pat {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Pat::Const(v0) => crate::Pat::Const(v0.clone()),
+ crate::Pat::Ident(v0) => crate::Pat::Ident(v0.clone()),
+ crate::Pat::Lit(v0) => crate::Pat::Lit(v0.clone()),
+ crate::Pat::Macro(v0) => crate::Pat::Macro(v0.clone()),
+ crate::Pat::Or(v0) => crate::Pat::Or(v0.clone()),
+ crate::Pat::Paren(v0) => crate::Pat::Paren(v0.clone()),
+ crate::Pat::Path(v0) => crate::Pat::Path(v0.clone()),
+ crate::Pat::Range(v0) => crate::Pat::Range(v0.clone()),
+ crate::Pat::Reference(v0) => crate::Pat::Reference(v0.clone()),
+ crate::Pat::Rest(v0) => crate::Pat::Rest(v0.clone()),
+ crate::Pat::Slice(v0) => crate::Pat::Slice(v0.clone()),
+ crate::Pat::Struct(v0) => crate::Pat::Struct(v0.clone()),
+ crate::Pat::Tuple(v0) => crate::Pat::Tuple(v0.clone()),
+ crate::Pat::TupleStruct(v0) => crate::Pat::TupleStruct(v0.clone()),
+ crate::Pat::Type(v0) => crate::Pat::Type(v0.clone()),
+ crate::Pat::Verbatim(v0) => crate::Pat::Verbatim(v0.clone()),
+ crate::Pat::Wild(v0) => crate::Pat::Wild(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatIdent {
+ fn clone(&self) -> Self {
+ crate::PatIdent {
+ attrs: self.attrs.clone(),
+ by_ref: self.by_ref.clone(),
+ mutability: self.mutability.clone(),
+ ident: self.ident.clone(),
+ subpat: self.subpat.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatOr {
+ fn clone(&self) -> Self {
+ crate::PatOr {
+ attrs: self.attrs.clone(),
+ leading_vert: self.leading_vert.clone(),
+ cases: self.cases.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatParen {
+ fn clone(&self) -> Self {
+ crate::PatParen {
+ attrs: self.attrs.clone(),
+ paren_token: self.paren_token.clone(),
+ pat: self.pat.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatReference {
+ fn clone(&self) -> Self {
+ crate::PatReference {
+ attrs: self.attrs.clone(),
+ and_token: self.and_token.clone(),
+ mutability: self.mutability.clone(),
+ pat: self.pat.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatRest {
+ fn clone(&self) -> Self {
+ crate::PatRest {
+ attrs: self.attrs.clone(),
+ dot2_token: self.dot2_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatSlice {
+ fn clone(&self) -> Self {
+ crate::PatSlice {
+ attrs: self.attrs.clone(),
+ bracket_token: self.bracket_token.clone(),
+ elems: self.elems.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatStruct {
+ fn clone(&self) -> Self {
+ crate::PatStruct {
+ attrs: self.attrs.clone(),
+ qself: self.qself.clone(),
+ path: self.path.clone(),
+ brace_token: self.brace_token.clone(),
+ fields: self.fields.clone(),
+ rest: self.rest.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatTuple {
+ fn clone(&self) -> Self {
+ crate::PatTuple {
+ attrs: self.attrs.clone(),
+ paren_token: self.paren_token.clone(),
+ elems: self.elems.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatTupleStruct {
+ fn clone(&self) -> Self {
+ crate::PatTupleStruct {
+ attrs: self.attrs.clone(),
+ qself: self.qself.clone(),
+ path: self.path.clone(),
+ paren_token: self.paren_token.clone(),
+ elems: self.elems.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatType {
+ fn clone(&self) -> Self {
+ crate::PatType {
+ attrs: self.attrs.clone(),
+ pat: self.pat.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PatWild {
+ fn clone(&self) -> Self {
+ crate::PatWild {
+ attrs: self.attrs.clone(),
+ underscore_token: self.underscore_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Path {
+ fn clone(&self) -> Self {
+ crate::Path {
+ leading_colon: self.leading_colon.clone(),
+ segments: self.segments.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PathArguments {
+ fn clone(&self) -> Self {
+ match self {
+ crate::PathArguments::None => crate::PathArguments::None,
+ crate::PathArguments::AngleBracketed(v0) => {
+ crate::PathArguments::AngleBracketed(v0.clone())
+ }
+ crate::PathArguments::Parenthesized(v0) => {
+ crate::PathArguments::Parenthesized(v0.clone())
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PathSegment {
+ fn clone(&self) -> Self {
+ crate::PathSegment {
+ ident: self.ident.clone(),
+ arguments: self.arguments.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PredicateLifetime {
+ fn clone(&self) -> Self {
+ crate::PredicateLifetime {
+ lifetime: self.lifetime.clone(),
+ colon_token: self.colon_token.clone(),
+ bounds: self.bounds.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::PredicateType {
+ fn clone(&self) -> Self {
+ crate::PredicateType {
+ lifetimes: self.lifetimes.clone(),
+ bounded_ty: self.bounded_ty.clone(),
+ colon_token: self.colon_token.clone(),
+ bounds: self.bounds.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::QSelf {
+ fn clone(&self) -> Self {
+ crate::QSelf {
+ lt_token: self.lt_token.clone(),
+ ty: self.ty.clone(),
+ position: self.position.clone(),
+ as_token: self.as_token.clone(),
+ gt_token: self.gt_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Copy for crate::RangeLimits {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::RangeLimits {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Receiver {
+ fn clone(&self) -> Self {
+ crate::Receiver {
+ attrs: self.attrs.clone(),
+ reference: self.reference.clone(),
+ mutability: self.mutability.clone(),
+ self_token: self.self_token.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::ReturnType {
+ fn clone(&self) -> Self {
+ match self {
+ crate::ReturnType::Default => crate::ReturnType::Default,
+ crate::ReturnType::Type(v0, v1) => {
+ crate::ReturnType::Type(v0.clone(), v1.clone())
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Signature {
+ fn clone(&self) -> Self {
+ crate::Signature {
+ constness: self.constness.clone(),
+ asyncness: self.asyncness.clone(),
+ unsafety: self.unsafety.clone(),
+ abi: self.abi.clone(),
+ fn_token: self.fn_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ paren_token: self.paren_token.clone(),
+ inputs: self.inputs.clone(),
+ variadic: self.variadic.clone(),
+ output: self.output.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::StaticMutability {
+ fn clone(&self) -> Self {
+ match self {
+ crate::StaticMutability::Mut(v0) => crate::StaticMutability::Mut(v0.clone()),
+ crate::StaticMutability::None => crate::StaticMutability::None,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Stmt {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Stmt::Local(v0) => crate::Stmt::Local(v0.clone()),
+ crate::Stmt::Item(v0) => crate::Stmt::Item(v0.clone()),
+ crate::Stmt::Expr(v0, v1) => crate::Stmt::Expr(v0.clone(), v1.clone()),
+ crate::Stmt::Macro(v0) => crate::Stmt::Macro(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::StmtMacro {
+ fn clone(&self) -> Self {
+ crate::StmtMacro {
+ attrs: self.attrs.clone(),
+ mac: self.mac.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TraitBound {
+ fn clone(&self) -> Self {
+ crate::TraitBound {
+ paren_token: self.paren_token.clone(),
+ modifier: self.modifier.clone(),
+ lifetimes: self.lifetimes.clone(),
+ path: self.path.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Copy for crate::TraitBoundModifier {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TraitBoundModifier {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TraitItem {
+ fn clone(&self) -> Self {
+ match self {
+ crate::TraitItem::Const(v0) => crate::TraitItem::Const(v0.clone()),
+ crate::TraitItem::Fn(v0) => crate::TraitItem::Fn(v0.clone()),
+ crate::TraitItem::Type(v0) => crate::TraitItem::Type(v0.clone()),
+ crate::TraitItem::Macro(v0) => crate::TraitItem::Macro(v0.clone()),
+ crate::TraitItem::Verbatim(v0) => crate::TraitItem::Verbatim(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TraitItemConst {
+ fn clone(&self) -> Self {
+ crate::TraitItemConst {
+ attrs: self.attrs.clone(),
+ const_token: self.const_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ colon_token: self.colon_token.clone(),
+ ty: self.ty.clone(),
+ default: self.default.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TraitItemFn {
+ fn clone(&self) -> Self {
+ crate::TraitItemFn {
+ attrs: self.attrs.clone(),
+ sig: self.sig.clone(),
+ default: self.default.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TraitItemMacro {
+ fn clone(&self) -> Self {
+ crate::TraitItemMacro {
+ attrs: self.attrs.clone(),
+ mac: self.mac.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TraitItemType {
+ fn clone(&self) -> Self {
+ crate::TraitItemType {
+ attrs: self.attrs.clone(),
+ type_token: self.type_token.clone(),
+ ident: self.ident.clone(),
+ generics: self.generics.clone(),
+ colon_token: self.colon_token.clone(),
+ bounds: self.bounds.clone(),
+ default: self.default.clone(),
+ semi_token: self.semi_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Type {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Type::Array(v0) => crate::Type::Array(v0.clone()),
+ crate::Type::BareFn(v0) => crate::Type::BareFn(v0.clone()),
+ crate::Type::Group(v0) => crate::Type::Group(v0.clone()),
+ crate::Type::ImplTrait(v0) => crate::Type::ImplTrait(v0.clone()),
+ crate::Type::Infer(v0) => crate::Type::Infer(v0.clone()),
+ crate::Type::Macro(v0) => crate::Type::Macro(v0.clone()),
+ crate::Type::Never(v0) => crate::Type::Never(v0.clone()),
+ crate::Type::Paren(v0) => crate::Type::Paren(v0.clone()),
+ crate::Type::Path(v0) => crate::Type::Path(v0.clone()),
+ crate::Type::Ptr(v0) => crate::Type::Ptr(v0.clone()),
+ crate::Type::Reference(v0) => crate::Type::Reference(v0.clone()),
+ crate::Type::Slice(v0) => crate::Type::Slice(v0.clone()),
+ crate::Type::TraitObject(v0) => crate::Type::TraitObject(v0.clone()),
+ crate::Type::Tuple(v0) => crate::Type::Tuple(v0.clone()),
+ crate::Type::Verbatim(v0) => crate::Type::Verbatim(v0.clone()),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeArray {
+ fn clone(&self) -> Self {
+ crate::TypeArray {
+ bracket_token: self.bracket_token.clone(),
+ elem: self.elem.clone(),
+ semi_token: self.semi_token.clone(),
+ len: self.len.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeBareFn {
+ fn clone(&self) -> Self {
+ crate::TypeBareFn {
+ lifetimes: self.lifetimes.clone(),
+ unsafety: self.unsafety.clone(),
+ abi: self.abi.clone(),
+ fn_token: self.fn_token.clone(),
+ paren_token: self.paren_token.clone(),
+ inputs: self.inputs.clone(),
+ variadic: self.variadic.clone(),
+ output: self.output.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeGroup {
+ fn clone(&self) -> Self {
+ crate::TypeGroup {
+ group_token: self.group_token.clone(),
+ elem: self.elem.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeImplTrait {
+ fn clone(&self) -> Self {
+ crate::TypeImplTrait {
+ impl_token: self.impl_token.clone(),
+ bounds: self.bounds.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeInfer {
+ fn clone(&self) -> Self {
+ crate::TypeInfer {
+ underscore_token: self.underscore_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeMacro {
+ fn clone(&self) -> Self {
+ crate::TypeMacro {
+ mac: self.mac.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeNever {
+ fn clone(&self) -> Self {
+ crate::TypeNever {
+ bang_token: self.bang_token.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeParam {
+ fn clone(&self) -> Self {
+ crate::TypeParam {
+ attrs: self.attrs.clone(),
+ ident: self.ident.clone(),
+ colon_token: self.colon_token.clone(),
+ bounds: self.bounds.clone(),
+ eq_token: self.eq_token.clone(),
+ default: self.default.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeParamBound {
+ fn clone(&self) -> Self {
+ match self {
+ crate::TypeParamBound::Trait(v0) => crate::TypeParamBound::Trait(v0.clone()),
+ crate::TypeParamBound::Lifetime(v0) => {
+ crate::TypeParamBound::Lifetime(v0.clone())
+ }
+ crate::TypeParamBound::Verbatim(v0) => {
+ crate::TypeParamBound::Verbatim(v0.clone())
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeParen {
+ fn clone(&self) -> Self {
+ crate::TypeParen {
+ paren_token: self.paren_token.clone(),
+ elem: self.elem.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypePath {
+ fn clone(&self) -> Self {
+ crate::TypePath {
+ qself: self.qself.clone(),
+ path: self.path.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypePtr {
+ fn clone(&self) -> Self {
+ crate::TypePtr {
+ star_token: self.star_token.clone(),
+ const_token: self.const_token.clone(),
+ mutability: self.mutability.clone(),
+ elem: self.elem.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeReference {
+ fn clone(&self) -> Self {
+ crate::TypeReference {
+ and_token: self.and_token.clone(),
+ lifetime: self.lifetime.clone(),
+ mutability: self.mutability.clone(),
+ elem: self.elem.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeSlice {
+ fn clone(&self) -> Self {
+ crate::TypeSlice {
+ bracket_token: self.bracket_token.clone(),
+ elem: self.elem.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeTraitObject {
+ fn clone(&self) -> Self {
+ crate::TypeTraitObject {
+ dyn_token: self.dyn_token.clone(),
+ bounds: self.bounds.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::TypeTuple {
+ fn clone(&self) -> Self {
+ crate::TypeTuple {
+ paren_token: self.paren_token.clone(),
+ elems: self.elems.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Copy for crate::UnOp {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::UnOp {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::UseGlob {
+ fn clone(&self) -> Self {
+ crate::UseGlob {
+ star_token: self.star_token.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::UseGroup {
+ fn clone(&self) -> Self {
+ crate::UseGroup {
+ brace_token: self.brace_token.clone(),
+ items: self.items.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::UseName {
+ fn clone(&self) -> Self {
+ crate::UseName {
+ ident: self.ident.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::UsePath {
+ fn clone(&self) -> Self {
+ crate::UsePath {
+ ident: self.ident.clone(),
+ colon2_token: self.colon2_token.clone(),
+ tree: self.tree.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::UseRename {
+ fn clone(&self) -> Self {
+ crate::UseRename {
+ ident: self.ident.clone(),
+ as_token: self.as_token.clone(),
+ rename: self.rename.clone(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::UseTree {
+ fn clone(&self) -> Self {
+ match self {
+ crate::UseTree::Path(v0) => crate::UseTree::Path(v0.clone()),
+ crate::UseTree::Name(v0) => crate::UseTree::Name(v0.clone()),
+ crate::UseTree::Rename(v0) => crate::UseTree::Rename(v0.clone()),
+ crate::UseTree::Glob(v0) => crate::UseTree::Glob(v0.clone()),
+ crate::UseTree::Group(v0) => crate::UseTree::Group(v0.clone()),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Variadic {
+ fn clone(&self) -> Self {
+ crate::Variadic {
+ attrs: self.attrs.clone(),
+ pat: self.pat.clone(),
+ dots: self.dots.clone(),
+ comma: self.comma.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Variant {
+ fn clone(&self) -> Self {
+ crate::Variant {
+ attrs: self.attrs.clone(),
+ ident: self.ident.clone(),
+ fields: self.fields.clone(),
+ discriminant: self.discriminant.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::VisRestricted {
+ fn clone(&self) -> Self {
+ crate::VisRestricted {
+ pub_token: self.pub_token.clone(),
+ paren_token: self.paren_token.clone(),
+ in_token: self.in_token.clone(),
+ path: self.path.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::Visibility {
+ fn clone(&self) -> Self {
+ match self {
+ crate::Visibility::Public(v0) => crate::Visibility::Public(v0.clone()),
+ crate::Visibility::Restricted(v0) => {
+ crate::Visibility::Restricted(v0.clone())
+ }
+ crate::Visibility::Inherited => crate::Visibility::Inherited,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::WhereClause {
+ fn clone(&self) -> Self {
+ crate::WhereClause {
+ where_token: self.where_token.clone(),
+ predicates: self.predicates.clone(),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for crate::WherePredicate {
+ fn clone(&self) -> Self {
+ match self {
+ crate::WherePredicate::Lifetime(v0) => {
+ crate::WherePredicate::Lifetime(v0.clone())
+ }
+ crate::WherePredicate::Type(v0) => crate::WherePredicate::Type(v0.clone()),
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/gen/debug.rs b/rust/hw/char/pl011/vendor/syn/src/gen/debug.rs
new file mode 100644
index 0000000000..9232ece8ac
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/gen/debug.rs
@@ -0,0 +1,3160 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#![allow(unknown_lints, non_local_definitions)]
+use std::fmt::{self, Debug};
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Abi {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Abi");
+ formatter.field("extern_token", &self.extern_token);
+ formatter.field("name", &self.name);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::AngleBracketedGenericArguments {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "AngleBracketedGenericArguments")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::AngleBracketedGenericArguments {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("colon2_token", &self.colon2_token);
+ formatter.field("lt_token", &self.lt_token);
+ formatter.field("args", &self.args);
+ formatter.field("gt_token", &self.gt_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Arm {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Arm");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("pat", &self.pat);
+ formatter.field("guard", &self.guard);
+ formatter.field("fat_arrow_token", &self.fat_arrow_token);
+ formatter.field("body", &self.body);
+ formatter.field("comma", &self.comma);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::AssocConst {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("AssocConst");
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("value", &self.value);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::AssocType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("AssocType");
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("ty", &self.ty);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::AttrStyle {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("AttrStyle::")?;
+ match self {
+ crate::AttrStyle::Outer => formatter.write_str("Outer"),
+ crate::AttrStyle::Inner(v0) => {
+ let mut formatter = formatter.debug_tuple("Inner");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Attribute {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Attribute");
+ formatter.field("pound_token", &self.pound_token);
+ formatter.field("style", &self.style);
+ formatter.field("bracket_token", &self.bracket_token);
+ formatter.field("meta", &self.meta);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::BareFnArg {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("BareFnArg");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("name", &self.name);
+ formatter.field("ty", &self.ty);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::BareVariadic {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("BareVariadic");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("name", &self.name);
+ formatter.field("dots", &self.dots);
+ formatter.field("comma", &self.comma);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::BinOp {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("BinOp::")?;
+ match self {
+ crate::BinOp::Add(v0) => {
+ let mut formatter = formatter.debug_tuple("Add");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Sub(v0) => {
+ let mut formatter = formatter.debug_tuple("Sub");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Mul(v0) => {
+ let mut formatter = formatter.debug_tuple("Mul");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Div(v0) => {
+ let mut formatter = formatter.debug_tuple("Div");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Rem(v0) => {
+ let mut formatter = formatter.debug_tuple("Rem");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::And(v0) => {
+ let mut formatter = formatter.debug_tuple("And");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Or(v0) => {
+ let mut formatter = formatter.debug_tuple("Or");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::BitXor(v0) => {
+ let mut formatter = formatter.debug_tuple("BitXor");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::BitAnd(v0) => {
+ let mut formatter = formatter.debug_tuple("BitAnd");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::BitOr(v0) => {
+ let mut formatter = formatter.debug_tuple("BitOr");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Shl(v0) => {
+ let mut formatter = formatter.debug_tuple("Shl");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Shr(v0) => {
+ let mut formatter = formatter.debug_tuple("Shr");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Eq(v0) => {
+ let mut formatter = formatter.debug_tuple("Eq");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Lt(v0) => {
+ let mut formatter = formatter.debug_tuple("Lt");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Le(v0) => {
+ let mut formatter = formatter.debug_tuple("Le");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Ne(v0) => {
+ let mut formatter = formatter.debug_tuple("Ne");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Ge(v0) => {
+ let mut formatter = formatter.debug_tuple("Ge");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::Gt(v0) => {
+ let mut formatter = formatter.debug_tuple("Gt");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::AddAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("AddAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::SubAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("SubAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::MulAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("MulAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::DivAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("DivAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::RemAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("RemAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::BitXorAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("BitXorAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::BitAndAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("BitAndAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::BitOrAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("BitOrAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::ShlAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("ShlAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::BinOp::ShrAssign(v0) => {
+ let mut formatter = formatter.debug_tuple("ShrAssign");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Block {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Block");
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("stmts", &self.stmts);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::BoundLifetimes {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("BoundLifetimes");
+ formatter.field("for_token", &self.for_token);
+ formatter.field("lt_token", &self.lt_token);
+ formatter.field("lifetimes", &self.lifetimes);
+ formatter.field("gt_token", &self.gt_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ConstParam {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ConstParam");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("const_token", &self.const_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("default", &self.default);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Constraint {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Constraint");
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Data {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Data::")?;
+ match self {
+ crate::Data::Struct(v0) => v0.debug(formatter, "Struct"),
+ crate::Data::Enum(v0) => v0.debug(formatter, "Enum"),
+ crate::Data::Union(v0) => v0.debug(formatter, "Union"),
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::DataEnum {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "DataEnum")
+ }
+}
+#[cfg(feature = "derive")]
+impl crate::DataEnum {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("enum_token", &self.enum_token);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("variants", &self.variants);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::DataStruct {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "DataStruct")
+ }
+}
+#[cfg(feature = "derive")]
+impl crate::DataStruct {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("struct_token", &self.struct_token);
+ formatter.field("fields", &self.fields);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::DataUnion {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "DataUnion")
+ }
+}
+#[cfg(feature = "derive")]
+impl crate::DataUnion {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("union_token", &self.union_token);
+ formatter.field("fields", &self.fields);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::DeriveInput {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("DeriveInput");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("data", &self.data);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Expr {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Expr::")?;
+ match self {
+ #[cfg(feature = "full")]
+ crate::Expr::Array(v0) => v0.debug(formatter, "Array"),
+ #[cfg(feature = "full")]
+ crate::Expr::Assign(v0) => v0.debug(formatter, "Assign"),
+ #[cfg(feature = "full")]
+ crate::Expr::Async(v0) => v0.debug(formatter, "Async"),
+ #[cfg(feature = "full")]
+ crate::Expr::Await(v0) => v0.debug(formatter, "Await"),
+ crate::Expr::Binary(v0) => v0.debug(formatter, "Binary"),
+ #[cfg(feature = "full")]
+ crate::Expr::Block(v0) => v0.debug(formatter, "Block"),
+ #[cfg(feature = "full")]
+ crate::Expr::Break(v0) => v0.debug(formatter, "Break"),
+ crate::Expr::Call(v0) => v0.debug(formatter, "Call"),
+ crate::Expr::Cast(v0) => v0.debug(formatter, "Cast"),
+ #[cfg(feature = "full")]
+ crate::Expr::Closure(v0) => v0.debug(formatter, "Closure"),
+ #[cfg(feature = "full")]
+ crate::Expr::Const(v0) => v0.debug(formatter, "Const"),
+ #[cfg(feature = "full")]
+ crate::Expr::Continue(v0) => v0.debug(formatter, "Continue"),
+ crate::Expr::Field(v0) => v0.debug(formatter, "Field"),
+ #[cfg(feature = "full")]
+ crate::Expr::ForLoop(v0) => v0.debug(formatter, "ForLoop"),
+ crate::Expr::Group(v0) => v0.debug(formatter, "Group"),
+ #[cfg(feature = "full")]
+ crate::Expr::If(v0) => v0.debug(formatter, "If"),
+ crate::Expr::Index(v0) => v0.debug(formatter, "Index"),
+ #[cfg(feature = "full")]
+ crate::Expr::Infer(v0) => v0.debug(formatter, "Infer"),
+ #[cfg(feature = "full")]
+ crate::Expr::Let(v0) => v0.debug(formatter, "Let"),
+ crate::Expr::Lit(v0) => v0.debug(formatter, "Lit"),
+ #[cfg(feature = "full")]
+ crate::Expr::Loop(v0) => v0.debug(formatter, "Loop"),
+ crate::Expr::Macro(v0) => v0.debug(formatter, "Macro"),
+ #[cfg(feature = "full")]
+ crate::Expr::Match(v0) => v0.debug(formatter, "Match"),
+ crate::Expr::MethodCall(v0) => v0.debug(formatter, "MethodCall"),
+ crate::Expr::Paren(v0) => v0.debug(formatter, "Paren"),
+ crate::Expr::Path(v0) => v0.debug(formatter, "Path"),
+ #[cfg(feature = "full")]
+ crate::Expr::Range(v0) => v0.debug(formatter, "Range"),
+ crate::Expr::Reference(v0) => v0.debug(formatter, "Reference"),
+ #[cfg(feature = "full")]
+ crate::Expr::Repeat(v0) => v0.debug(formatter, "Repeat"),
+ #[cfg(feature = "full")]
+ crate::Expr::Return(v0) => v0.debug(formatter, "Return"),
+ crate::Expr::Struct(v0) => v0.debug(formatter, "Struct"),
+ #[cfg(feature = "full")]
+ crate::Expr::Try(v0) => v0.debug(formatter, "Try"),
+ #[cfg(feature = "full")]
+ crate::Expr::TryBlock(v0) => v0.debug(formatter, "TryBlock"),
+ #[cfg(feature = "full")]
+ crate::Expr::Tuple(v0) => v0.debug(formatter, "Tuple"),
+ crate::Expr::Unary(v0) => v0.debug(formatter, "Unary"),
+ #[cfg(feature = "full")]
+ crate::Expr::Unsafe(v0) => v0.debug(formatter, "Unsafe"),
+ crate::Expr::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::While(v0) => v0.debug(formatter, "While"),
+ #[cfg(feature = "full")]
+ crate::Expr::Yield(v0) => v0.debug(formatter, "Yield"),
+ #[cfg(not(feature = "full"))]
+ _ => unreachable!(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprArray {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprArray")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprArray {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("bracket_token", &self.bracket_token);
+ formatter.field("elems", &self.elems);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprAssign {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprAssign")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprAssign {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("left", &self.left);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("right", &self.right);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprAsync {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprAsync")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprAsync {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("async_token", &self.async_token);
+ formatter.field("capture", &self.capture);
+ formatter.field("block", &self.block);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprAwait {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprAwait")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprAwait {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("base", &self.base);
+ formatter.field("dot_token", &self.dot_token);
+ formatter.field("await_token", &self.await_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprBinary {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprBinary")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprBinary {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("left", &self.left);
+ formatter.field("op", &self.op);
+ formatter.field("right", &self.right);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprBlock {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprBlock")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprBlock {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("label", &self.label);
+ formatter.field("block", &self.block);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprBreak {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprBreak")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprBreak {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("break_token", &self.break_token);
+ formatter.field("label", &self.label);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprCall {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprCall")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprCall {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("func", &self.func);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("args", &self.args);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprCast {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprCast")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprCast {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("expr", &self.expr);
+ formatter.field("as_token", &self.as_token);
+ formatter.field("ty", &self.ty);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprClosure {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprClosure")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprClosure {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("lifetimes", &self.lifetimes);
+ formatter.field("constness", &self.constness);
+ formatter.field("movability", &self.movability);
+ formatter.field("asyncness", &self.asyncness);
+ formatter.field("capture", &self.capture);
+ formatter.field("or1_token", &self.or1_token);
+ formatter.field("inputs", &self.inputs);
+ formatter.field("or2_token", &self.or2_token);
+ formatter.field("output", &self.output);
+ formatter.field("body", &self.body);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprConst {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprConst")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprConst {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("const_token", &self.const_token);
+ formatter.field("block", &self.block);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprContinue {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprContinue")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprContinue {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("continue_token", &self.continue_token);
+ formatter.field("label", &self.label);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprField {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprField")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprField {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("base", &self.base);
+ formatter.field("dot_token", &self.dot_token);
+ formatter.field("member", &self.member);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprForLoop {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprForLoop")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprForLoop {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("label", &self.label);
+ formatter.field("for_token", &self.for_token);
+ formatter.field("pat", &self.pat);
+ formatter.field("in_token", &self.in_token);
+ formatter.field("expr", &self.expr);
+ formatter.field("body", &self.body);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprGroup {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprGroup")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprGroup {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("group_token", &self.group_token);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprIf {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprIf")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprIf {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("if_token", &self.if_token);
+ formatter.field("cond", &self.cond);
+ formatter.field("then_branch", &self.then_branch);
+ formatter.field("else_branch", &self.else_branch);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprIndex {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprIndex")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprIndex {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("expr", &self.expr);
+ formatter.field("bracket_token", &self.bracket_token);
+ formatter.field("index", &self.index);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprInfer {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprInfer")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprInfer {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("underscore_token", &self.underscore_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprLet {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprLet")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprLet {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("let_token", &self.let_token);
+ formatter.field("pat", &self.pat);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprLit {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprLit")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprLit {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("lit", &self.lit);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprLoop {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprLoop")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprLoop {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("label", &self.label);
+ formatter.field("loop_token", &self.loop_token);
+ formatter.field("body", &self.body);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprMacro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprMacro")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprMacro {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("mac", &self.mac);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprMatch {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprMatch")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprMatch {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("match_token", &self.match_token);
+ formatter.field("expr", &self.expr);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("arms", &self.arms);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprMethodCall {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprMethodCall")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprMethodCall {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("receiver", &self.receiver);
+ formatter.field("dot_token", &self.dot_token);
+ formatter.field("method", &self.method);
+ formatter.field("turbofish", &self.turbofish);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("args", &self.args);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprParen {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprParen")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprParen {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprPath {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprPath")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprPath {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("qself", &self.qself);
+ formatter.field("path", &self.path);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprRange {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprRange")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprRange {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("start", &self.start);
+ formatter.field("limits", &self.limits);
+ formatter.field("end", &self.end);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprReference {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprReference")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprReference {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("and_token", &self.and_token);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprRepeat {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprRepeat")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprRepeat {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("bracket_token", &self.bracket_token);
+ formatter.field("expr", &self.expr);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.field("len", &self.len);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprReturn {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprReturn")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprReturn {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("return_token", &self.return_token);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprStruct {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprStruct")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprStruct {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("qself", &self.qself);
+ formatter.field("path", &self.path);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("fields", &self.fields);
+ formatter.field("dot2_token", &self.dot2_token);
+ formatter.field("rest", &self.rest);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprTry {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprTry")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprTry {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("expr", &self.expr);
+ formatter.field("question_token", &self.question_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprTryBlock {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprTryBlock")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprTryBlock {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("try_token", &self.try_token);
+ formatter.field("block", &self.block);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprTuple {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprTuple")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprTuple {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("elems", &self.elems);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprUnary {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprUnary")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ExprUnary {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("op", &self.op);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprUnsafe {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprUnsafe")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprUnsafe {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("unsafe_token", &self.unsafe_token);
+ formatter.field("block", &self.block);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprWhile {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprWhile")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprWhile {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("label", &self.label);
+ formatter.field("while_token", &self.while_token);
+ formatter.field("cond", &self.cond);
+ formatter.field("body", &self.body);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ExprYield {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ExprYield")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ExprYield {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("yield_token", &self.yield_token);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Field {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Field");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("ident", &self.ident);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::FieldMutability {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("FieldMutability::")?;
+ match self {
+ crate::FieldMutability::None => formatter.write_str("None"),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::FieldPat {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("FieldPat");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("member", &self.member);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("pat", &self.pat);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::FieldValue {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("FieldValue");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("member", &self.member);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("expr", &self.expr);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Fields {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Fields::")?;
+ match self {
+ crate::Fields::Named(v0) => v0.debug(formatter, "Named"),
+ crate::Fields::Unnamed(v0) => v0.debug(formatter, "Unnamed"),
+ crate::Fields::Unit => formatter.write_str("Unit"),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::FieldsNamed {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "FieldsNamed")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::FieldsNamed {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("named", &self.named);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::FieldsUnnamed {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "FieldsUnnamed")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::FieldsUnnamed {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("unnamed", &self.unnamed);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::File {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("File");
+ formatter.field("shebang", &self.shebang);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("items", &self.items);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::FnArg {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("FnArg::")?;
+ match self {
+ crate::FnArg::Receiver(v0) => {
+ let mut formatter = formatter.debug_tuple("Receiver");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::FnArg::Typed(v0) => {
+ let mut formatter = formatter.debug_tuple("Typed");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ForeignItem {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("ForeignItem::")?;
+ match self {
+ crate::ForeignItem::Fn(v0) => v0.debug(formatter, "Fn"),
+ crate::ForeignItem::Static(v0) => v0.debug(formatter, "Static"),
+ crate::ForeignItem::Type(v0) => v0.debug(formatter, "Type"),
+ crate::ForeignItem::Macro(v0) => v0.debug(formatter, "Macro"),
+ crate::ForeignItem::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ForeignItemFn {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ForeignItemFn")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ForeignItemFn {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("sig", &self.sig);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ForeignItemMacro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ForeignItemMacro")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ForeignItemMacro {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("mac", &self.mac);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ForeignItemStatic {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ForeignItemStatic")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ForeignItemStatic {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("static_token", &self.static_token);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("ident", &self.ident);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ForeignItemType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ForeignItemType")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ForeignItemType {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("type_token", &self.type_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::GenericArgument {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("GenericArgument::")?;
+ match self {
+ crate::GenericArgument::Lifetime(v0) => {
+ let mut formatter = formatter.debug_tuple("Lifetime");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::GenericArgument::Type(v0) => {
+ let mut formatter = formatter.debug_tuple("Type");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::GenericArgument::Const(v0) => {
+ let mut formatter = formatter.debug_tuple("Const");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::GenericArgument::AssocType(v0) => {
+ let mut formatter = formatter.debug_tuple("AssocType");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::GenericArgument::AssocConst(v0) => {
+ let mut formatter = formatter.debug_tuple("AssocConst");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::GenericArgument::Constraint(v0) => {
+ let mut formatter = formatter.debug_tuple("Constraint");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::GenericParam {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("GenericParam::")?;
+ match self {
+ crate::GenericParam::Lifetime(v0) => {
+ let mut formatter = formatter.debug_tuple("Lifetime");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::GenericParam::Type(v0) => {
+ let mut formatter = formatter.debug_tuple("Type");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::GenericParam::Const(v0) => {
+ let mut formatter = formatter.debug_tuple("Const");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Generics {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Generics");
+ formatter.field("lt_token", &self.lt_token);
+ formatter.field("params", &self.params);
+ formatter.field("gt_token", &self.gt_token);
+ formatter.field("where_clause", &self.where_clause);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ImplItem {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("ImplItem::")?;
+ match self {
+ crate::ImplItem::Const(v0) => v0.debug(formatter, "Const"),
+ crate::ImplItem::Fn(v0) => v0.debug(formatter, "Fn"),
+ crate::ImplItem::Type(v0) => v0.debug(formatter, "Type"),
+ crate::ImplItem::Macro(v0) => v0.debug(formatter, "Macro"),
+ crate::ImplItem::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ImplItemConst {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ImplItemConst")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ImplItemConst {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("defaultness", &self.defaultness);
+ formatter.field("const_token", &self.const_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("expr", &self.expr);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ImplItemFn {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ImplItemFn")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ImplItemFn {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("defaultness", &self.defaultness);
+ formatter.field("sig", &self.sig);
+ formatter.field("block", &self.block);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ImplItemMacro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ImplItemMacro")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ImplItemMacro {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("mac", &self.mac);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ImplItemType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ImplItemType")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ImplItemType {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("defaultness", &self.defaultness);
+ formatter.field("type_token", &self.type_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ImplRestriction {
+ fn fmt(&self, _formatter: &mut fmt::Formatter) -> fmt::Result {
+ match *self {}
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Index {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Index");
+ formatter.field("index", &self.index);
+ formatter.field("span", &self.span);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Item {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Item::")?;
+ match self {
+ crate::Item::Const(v0) => v0.debug(formatter, "Const"),
+ crate::Item::Enum(v0) => v0.debug(formatter, "Enum"),
+ crate::Item::ExternCrate(v0) => v0.debug(formatter, "ExternCrate"),
+ crate::Item::Fn(v0) => v0.debug(formatter, "Fn"),
+ crate::Item::ForeignMod(v0) => v0.debug(formatter, "ForeignMod"),
+ crate::Item::Impl(v0) => v0.debug(formatter, "Impl"),
+ crate::Item::Macro(v0) => v0.debug(formatter, "Macro"),
+ crate::Item::Mod(v0) => v0.debug(formatter, "Mod"),
+ crate::Item::Static(v0) => v0.debug(formatter, "Static"),
+ crate::Item::Struct(v0) => v0.debug(formatter, "Struct"),
+ crate::Item::Trait(v0) => v0.debug(formatter, "Trait"),
+ crate::Item::TraitAlias(v0) => v0.debug(formatter, "TraitAlias"),
+ crate::Item::Type(v0) => v0.debug(formatter, "Type"),
+ crate::Item::Union(v0) => v0.debug(formatter, "Union"),
+ crate::Item::Use(v0) => v0.debug(formatter, "Use"),
+ crate::Item::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemConst {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemConst")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemConst {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("const_token", &self.const_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("expr", &self.expr);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemEnum {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemEnum")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemEnum {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("enum_token", &self.enum_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("variants", &self.variants);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemExternCrate {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemExternCrate")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemExternCrate {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("extern_token", &self.extern_token);
+ formatter.field("crate_token", &self.crate_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("rename", &self.rename);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemFn {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemFn")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemFn {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("sig", &self.sig);
+ formatter.field("block", &self.block);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemForeignMod {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemForeignMod")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemForeignMod {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("unsafety", &self.unsafety);
+ formatter.field("abi", &self.abi);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("items", &self.items);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemImpl {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemImpl")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemImpl {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("defaultness", &self.defaultness);
+ formatter.field("unsafety", &self.unsafety);
+ formatter.field("impl_token", &self.impl_token);
+ formatter.field("generics", &self.generics);
+ formatter.field("trait_", &self.trait_);
+ formatter.field("self_ty", &self.self_ty);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("items", &self.items);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemMacro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemMacro")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemMacro {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("ident", &self.ident);
+ formatter.field("mac", &self.mac);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemMod {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemMod")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemMod {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("unsafety", &self.unsafety);
+ formatter.field("mod_token", &self.mod_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("content", &self.content);
+ formatter.field("semi", &self.semi);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemStatic {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemStatic")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemStatic {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("static_token", &self.static_token);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("ident", &self.ident);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("expr", &self.expr);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemStruct {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemStruct")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemStruct {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("struct_token", &self.struct_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("fields", &self.fields);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemTrait {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemTrait")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemTrait {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("unsafety", &self.unsafety);
+ formatter.field("auto_token", &self.auto_token);
+ formatter.field("restriction", &self.restriction);
+ formatter.field("trait_token", &self.trait_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("supertraits", &self.supertraits);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("items", &self.items);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemTraitAlias {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemTraitAlias")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemTraitAlias {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("trait_token", &self.trait_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemType")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemType {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("type_token", &self.type_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemUnion {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemUnion")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemUnion {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("union_token", &self.union_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("fields", &self.fields);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ItemUse {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ItemUse")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::ItemUse {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("vis", &self.vis);
+ formatter.field("use_token", &self.use_token);
+ formatter.field("leading_colon", &self.leading_colon);
+ formatter.field("tree", &self.tree);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Label {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Label");
+ formatter.field("name", &self.name);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.finish()
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Lifetime {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "Lifetime")
+ }
+}
+impl crate::Lifetime {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("apostrophe", &self.apostrophe);
+ formatter.field("ident", &self.ident);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::LifetimeParam {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("LifetimeParam");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("lifetime", &self.lifetime);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.finish()
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Lit {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Lit::")?;
+ match self {
+ crate::Lit::Str(v0) => v0.debug(formatter, "Str"),
+ crate::Lit::ByteStr(v0) => v0.debug(formatter, "ByteStr"),
+ crate::Lit::CStr(v0) => v0.debug(formatter, "CStr"),
+ crate::Lit::Byte(v0) => v0.debug(formatter, "Byte"),
+ crate::Lit::Char(v0) => v0.debug(formatter, "Char"),
+ crate::Lit::Int(v0) => v0.debug(formatter, "Int"),
+ crate::Lit::Float(v0) => v0.debug(formatter, "Float"),
+ crate::Lit::Bool(v0) => v0.debug(formatter, "Bool"),
+ crate::Lit::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Local {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "Local")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::Local {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("let_token", &self.let_token);
+ formatter.field("pat", &self.pat);
+ formatter.field("init", &self.init);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::LocalInit {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("LocalInit");
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("expr", &self.expr);
+ formatter.field("diverge", &self.diverge);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Macro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Macro");
+ formatter.field("path", &self.path);
+ formatter.field("bang_token", &self.bang_token);
+ formatter.field("delimiter", &self.delimiter);
+ formatter.field("tokens", &self.tokens);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::MacroDelimiter {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("MacroDelimiter::")?;
+ match self {
+ crate::MacroDelimiter::Paren(v0) => {
+ let mut formatter = formatter.debug_tuple("Paren");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::MacroDelimiter::Brace(v0) => {
+ let mut formatter = formatter.debug_tuple("Brace");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::MacroDelimiter::Bracket(v0) => {
+ let mut formatter = formatter.debug_tuple("Bracket");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Member {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Member::")?;
+ match self {
+ crate::Member::Named(v0) => {
+ let mut formatter = formatter.debug_tuple("Named");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::Member::Unnamed(v0) => {
+ let mut formatter = formatter.debug_tuple("Unnamed");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Meta {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Meta::")?;
+ match self {
+ crate::Meta::Path(v0) => v0.debug(formatter, "Path"),
+ crate::Meta::List(v0) => v0.debug(formatter, "List"),
+ crate::Meta::NameValue(v0) => v0.debug(formatter, "NameValue"),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::MetaList {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "MetaList")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::MetaList {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("path", &self.path);
+ formatter.field("delimiter", &self.delimiter);
+ formatter.field("tokens", &self.tokens);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::MetaNameValue {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "MetaNameValue")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::MetaNameValue {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("path", &self.path);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("value", &self.value);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ParenthesizedGenericArguments {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "ParenthesizedGenericArguments")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::ParenthesizedGenericArguments {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("inputs", &self.inputs);
+ formatter.field("output", &self.output);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Pat {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Pat::")?;
+ match self {
+ crate::Pat::Const(v0) => v0.debug(formatter, "Const"),
+ crate::Pat::Ident(v0) => v0.debug(formatter, "Ident"),
+ crate::Pat::Lit(v0) => v0.debug(formatter, "Lit"),
+ crate::Pat::Macro(v0) => v0.debug(formatter, "Macro"),
+ crate::Pat::Or(v0) => v0.debug(formatter, "Or"),
+ crate::Pat::Paren(v0) => v0.debug(formatter, "Paren"),
+ crate::Pat::Path(v0) => v0.debug(formatter, "Path"),
+ crate::Pat::Range(v0) => v0.debug(formatter, "Range"),
+ crate::Pat::Reference(v0) => v0.debug(formatter, "Reference"),
+ crate::Pat::Rest(v0) => v0.debug(formatter, "Rest"),
+ crate::Pat::Slice(v0) => v0.debug(formatter, "Slice"),
+ crate::Pat::Struct(v0) => v0.debug(formatter, "Struct"),
+ crate::Pat::Tuple(v0) => v0.debug(formatter, "Tuple"),
+ crate::Pat::TupleStruct(v0) => v0.debug(formatter, "TupleStruct"),
+ crate::Pat::Type(v0) => v0.debug(formatter, "Type"),
+ crate::Pat::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::Pat::Wild(v0) => v0.debug(formatter, "Wild"),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatIdent {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatIdent")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatIdent {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("by_ref", &self.by_ref);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("ident", &self.ident);
+ formatter.field("subpat", &self.subpat);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatOr {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatOr")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatOr {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("leading_vert", &self.leading_vert);
+ formatter.field("cases", &self.cases);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatParen {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatParen")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatParen {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("pat", &self.pat);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatReference {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatReference")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatReference {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("and_token", &self.and_token);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("pat", &self.pat);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatRest {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatRest")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatRest {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("dot2_token", &self.dot2_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatSlice {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatSlice")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatSlice {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("bracket_token", &self.bracket_token);
+ formatter.field("elems", &self.elems);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatStruct {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatStruct")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatStruct {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("qself", &self.qself);
+ formatter.field("path", &self.path);
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("fields", &self.fields);
+ formatter.field("rest", &self.rest);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatTuple {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatTuple")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatTuple {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("elems", &self.elems);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatTupleStruct {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatTupleStruct")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatTupleStruct {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("qself", &self.qself);
+ formatter.field("path", &self.path);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("elems", &self.elems);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatType")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatType {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("pat", &self.pat);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PatWild {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "PatWild")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::PatWild {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("underscore_token", &self.underscore_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Path {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "Path")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::Path {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("leading_colon", &self.leading_colon);
+ formatter.field("segments", &self.segments);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PathArguments {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("PathArguments::")?;
+ match self {
+ crate::PathArguments::None => formatter.write_str("None"),
+ crate::PathArguments::AngleBracketed(v0) => {
+ v0.debug(formatter, "AngleBracketed")
+ }
+ crate::PathArguments::Parenthesized(v0) => {
+ v0.debug(formatter, "Parenthesized")
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PathSegment {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PathSegment");
+ formatter.field("ident", &self.ident);
+ formatter.field("arguments", &self.arguments);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PredicateLifetime {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PredicateLifetime");
+ formatter.field("lifetime", &self.lifetime);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::PredicateType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PredicateType");
+ formatter.field("lifetimes", &self.lifetimes);
+ formatter.field("bounded_ty", &self.bounded_ty);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::QSelf {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("QSelf");
+ formatter.field("lt_token", &self.lt_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("position", &self.position);
+ formatter.field("as_token", &self.as_token);
+ formatter.field("gt_token", &self.gt_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::RangeLimits {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("RangeLimits::")?;
+ match self {
+ crate::RangeLimits::HalfOpen(v0) => {
+ let mut formatter = formatter.debug_tuple("HalfOpen");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::RangeLimits::Closed(v0) => {
+ let mut formatter = formatter.debug_tuple("Closed");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Receiver {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Receiver");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("reference", &self.reference);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("self_token", &self.self_token);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::ReturnType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("ReturnType::")?;
+ match self {
+ crate::ReturnType::Default => formatter.write_str("Default"),
+ crate::ReturnType::Type(v0, v1) => {
+ let mut formatter = formatter.debug_tuple("Type");
+ formatter.field(v0);
+ formatter.field(v1);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Signature {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Signature");
+ formatter.field("constness", &self.constness);
+ formatter.field("asyncness", &self.asyncness);
+ formatter.field("unsafety", &self.unsafety);
+ formatter.field("abi", &self.abi);
+ formatter.field("fn_token", &self.fn_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("inputs", &self.inputs);
+ formatter.field("variadic", &self.variadic);
+ formatter.field("output", &self.output);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::StaticMutability {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("StaticMutability::")?;
+ match self {
+ crate::StaticMutability::Mut(v0) => {
+ let mut formatter = formatter.debug_tuple("Mut");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::StaticMutability::None => formatter.write_str("None"),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Stmt {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Stmt::")?;
+ match self {
+ crate::Stmt::Local(v0) => v0.debug(formatter, "Local"),
+ crate::Stmt::Item(v0) => {
+ let mut formatter = formatter.debug_tuple("Item");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::Stmt::Expr(v0, v1) => {
+ let mut formatter = formatter.debug_tuple("Expr");
+ formatter.field(v0);
+ formatter.field(v1);
+ formatter.finish()
+ }
+ crate::Stmt::Macro(v0) => v0.debug(formatter, "Macro"),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::StmtMacro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "StmtMacro")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::StmtMacro {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("mac", &self.mac);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TraitBound {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TraitBound");
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("modifier", &self.modifier);
+ formatter.field("lifetimes", &self.lifetimes);
+ formatter.field("path", &self.path);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TraitBoundModifier {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("TraitBoundModifier::")?;
+ match self {
+ crate::TraitBoundModifier::None => formatter.write_str("None"),
+ crate::TraitBoundModifier::Maybe(v0) => {
+ let mut formatter = formatter.debug_tuple("Maybe");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TraitItem {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("TraitItem::")?;
+ match self {
+ crate::TraitItem::Const(v0) => v0.debug(formatter, "Const"),
+ crate::TraitItem::Fn(v0) => v0.debug(formatter, "Fn"),
+ crate::TraitItem::Type(v0) => v0.debug(formatter, "Type"),
+ crate::TraitItem::Macro(v0) => v0.debug(formatter, "Macro"),
+ crate::TraitItem::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TraitItemConst {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TraitItemConst")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::TraitItemConst {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("const_token", &self.const_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("ty", &self.ty);
+ formatter.field("default", &self.default);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TraitItemFn {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TraitItemFn")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::TraitItemFn {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("sig", &self.sig);
+ formatter.field("default", &self.default);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TraitItemMacro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TraitItemMacro")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::TraitItemMacro {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("mac", &self.mac);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TraitItemType {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TraitItemType")
+ }
+}
+#[cfg(feature = "full")]
+impl crate::TraitItemType {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("attrs", &self.attrs);
+ formatter.field("type_token", &self.type_token);
+ formatter.field("ident", &self.ident);
+ formatter.field("generics", &self.generics);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.field("default", &self.default);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Type {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Type::")?;
+ match self {
+ crate::Type::Array(v0) => v0.debug(formatter, "Array"),
+ crate::Type::BareFn(v0) => v0.debug(formatter, "BareFn"),
+ crate::Type::Group(v0) => v0.debug(formatter, "Group"),
+ crate::Type::ImplTrait(v0) => v0.debug(formatter, "ImplTrait"),
+ crate::Type::Infer(v0) => v0.debug(formatter, "Infer"),
+ crate::Type::Macro(v0) => v0.debug(formatter, "Macro"),
+ crate::Type::Never(v0) => v0.debug(formatter, "Never"),
+ crate::Type::Paren(v0) => v0.debug(formatter, "Paren"),
+ crate::Type::Path(v0) => v0.debug(formatter, "Path"),
+ crate::Type::Ptr(v0) => v0.debug(formatter, "Ptr"),
+ crate::Type::Reference(v0) => v0.debug(formatter, "Reference"),
+ crate::Type::Slice(v0) => v0.debug(formatter, "Slice"),
+ crate::Type::TraitObject(v0) => v0.debug(formatter, "TraitObject"),
+ crate::Type::Tuple(v0) => v0.debug(formatter, "Tuple"),
+ crate::Type::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeArray {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeArray")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeArray {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("bracket_token", &self.bracket_token);
+ formatter.field("elem", &self.elem);
+ formatter.field("semi_token", &self.semi_token);
+ formatter.field("len", &self.len);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeBareFn {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeBareFn")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeBareFn {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("lifetimes", &self.lifetimes);
+ formatter.field("unsafety", &self.unsafety);
+ formatter.field("abi", &self.abi);
+ formatter.field("fn_token", &self.fn_token);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("inputs", &self.inputs);
+ formatter.field("variadic", &self.variadic);
+ formatter.field("output", &self.output);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeGroup {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeGroup")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeGroup {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("group_token", &self.group_token);
+ formatter.field("elem", &self.elem);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeImplTrait {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeImplTrait")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeImplTrait {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("impl_token", &self.impl_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeInfer {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeInfer")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeInfer {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("underscore_token", &self.underscore_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeMacro {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeMacro")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeMacro {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("mac", &self.mac);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeNever {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeNever")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeNever {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("bang_token", &self.bang_token);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeParam {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeParam");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("ident", &self.ident);
+ formatter.field("colon_token", &self.colon_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.field("eq_token", &self.eq_token);
+ formatter.field("default", &self.default);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeParamBound {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("TypeParamBound::")?;
+ match self {
+ crate::TypeParamBound::Trait(v0) => {
+ let mut formatter = formatter.debug_tuple("Trait");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::TypeParamBound::Lifetime(v0) => v0.debug(formatter, "Lifetime"),
+ crate::TypeParamBound::Verbatim(v0) => {
+ let mut formatter = formatter.debug_tuple("Verbatim");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeParen {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeParen")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeParen {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("elem", &self.elem);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypePath {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypePath")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypePath {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("qself", &self.qself);
+ formatter.field("path", &self.path);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypePtr {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypePtr")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypePtr {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("star_token", &self.star_token);
+ formatter.field("const_token", &self.const_token);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("elem", &self.elem);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeReference {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeReference")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeReference {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("and_token", &self.and_token);
+ formatter.field("lifetime", &self.lifetime);
+ formatter.field("mutability", &self.mutability);
+ formatter.field("elem", &self.elem);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeSlice {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeSlice")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeSlice {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("bracket_token", &self.bracket_token);
+ formatter.field("elem", &self.elem);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeTraitObject {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeTraitObject")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeTraitObject {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("dyn_token", &self.dyn_token);
+ formatter.field("bounds", &self.bounds);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::TypeTuple {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "TypeTuple")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::TypeTuple {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("elems", &self.elems);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::UnOp {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("UnOp::")?;
+ match self {
+ crate::UnOp::Deref(v0) => {
+ let mut formatter = formatter.debug_tuple("Deref");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::UnOp::Not(v0) => {
+ let mut formatter = formatter.debug_tuple("Not");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::UnOp::Neg(v0) => {
+ let mut formatter = formatter.debug_tuple("Neg");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::UseGlob {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseGlob");
+ formatter.field("star_token", &self.star_token);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::UseGroup {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseGroup");
+ formatter.field("brace_token", &self.brace_token);
+ formatter.field("items", &self.items);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::UseName {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseName");
+ formatter.field("ident", &self.ident);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::UsePath {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UsePath");
+ formatter.field("ident", &self.ident);
+ formatter.field("colon2_token", &self.colon2_token);
+ formatter.field("tree", &self.tree);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::UseRename {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseRename");
+ formatter.field("ident", &self.ident);
+ formatter.field("as_token", &self.as_token);
+ formatter.field("rename", &self.rename);
+ formatter.finish()
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::UseTree {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("UseTree::")?;
+ match self {
+ crate::UseTree::Path(v0) => {
+ let mut formatter = formatter.debug_tuple("Path");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::UseTree::Name(v0) => {
+ let mut formatter = formatter.debug_tuple("Name");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::UseTree::Rename(v0) => {
+ let mut formatter = formatter.debug_tuple("Rename");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::UseTree::Glob(v0) => {
+ let mut formatter = formatter.debug_tuple("Glob");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::UseTree::Group(v0) => {
+ let mut formatter = formatter.debug_tuple("Group");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Variadic {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Variadic");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("pat", &self.pat);
+ formatter.field("dots", &self.dots);
+ formatter.field("comma", &self.comma);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Variant {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Variant");
+ formatter.field("attrs", &self.attrs);
+ formatter.field("ident", &self.ident);
+ formatter.field("fields", &self.fields);
+ formatter.field("discriminant", &self.discriminant);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::VisRestricted {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "VisRestricted")
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+impl crate::VisRestricted {
+ fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ let mut formatter = formatter.debug_struct(name);
+ formatter.field("pub_token", &self.pub_token);
+ formatter.field("paren_token", &self.paren_token);
+ formatter.field("in_token", &self.in_token);
+ formatter.field("path", &self.path);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::Visibility {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Visibility::")?;
+ match self {
+ crate::Visibility::Public(v0) => {
+ let mut formatter = formatter.debug_tuple("Public");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::Visibility::Restricted(v0) => v0.debug(formatter, "Restricted"),
+ crate::Visibility::Inherited => formatter.write_str("Inherited"),
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::WhereClause {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("WhereClause");
+ formatter.field("where_token", &self.where_token);
+ formatter.field("predicates", &self.predicates);
+ formatter.finish()
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for crate::WherePredicate {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("WherePredicate::")?;
+ match self {
+ crate::WherePredicate::Lifetime(v0) => {
+ let mut formatter = formatter.debug_tuple("Lifetime");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ crate::WherePredicate::Type(v0) => {
+ let mut formatter = formatter.debug_tuple("Type");
+ formatter.field(v0);
+ formatter.finish()
+ }
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/gen/eq.rs b/rust/hw/char/pl011/vendor/syn/src/gen/eq.rs
new file mode 100644
index 0000000000..bbcca47281
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/gen/eq.rs
@@ -0,0 +1,2242 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#[cfg(any(feature = "derive", feature = "full"))]
+use crate::tt::TokenStreamHelper;
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Abi {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Abi {
+ fn eq(&self, other: &Self) -> bool {
+ self.name == other.name
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::AngleBracketedGenericArguments {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::AngleBracketedGenericArguments {
+ fn eq(&self, other: &Self) -> bool {
+ self.colon2_token == other.colon2_token && self.args == other.args
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Arm {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Arm {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.pat == other.pat && self.guard == other.guard
+ && self.body == other.body && self.comma == other.comma
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::AssocConst {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::AssocConst {
+ fn eq(&self, other: &Self) -> bool {
+ self.ident == other.ident && self.generics == other.generics
+ && self.value == other.value
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::AssocType {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::AssocType {
+ fn eq(&self, other: &Self) -> bool {
+ self.ident == other.ident && self.generics == other.generics
+ && self.ty == other.ty
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::AttrStyle {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::AttrStyle {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::AttrStyle::Outer, crate::AttrStyle::Outer) => true,
+ (crate::AttrStyle::Inner(_), crate::AttrStyle::Inner(_)) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Attribute {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Attribute {
+ fn eq(&self, other: &Self) -> bool {
+ self.style == other.style && self.meta == other.meta
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::BareFnArg {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::BareFnArg {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::BareVariadic {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::BareVariadic {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.name == other.name && self.comma == other.comma
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::BinOp {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::BinOp {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::BinOp::Add(_), crate::BinOp::Add(_)) => true,
+ (crate::BinOp::Sub(_), crate::BinOp::Sub(_)) => true,
+ (crate::BinOp::Mul(_), crate::BinOp::Mul(_)) => true,
+ (crate::BinOp::Div(_), crate::BinOp::Div(_)) => true,
+ (crate::BinOp::Rem(_), crate::BinOp::Rem(_)) => true,
+ (crate::BinOp::And(_), crate::BinOp::And(_)) => true,
+ (crate::BinOp::Or(_), crate::BinOp::Or(_)) => true,
+ (crate::BinOp::BitXor(_), crate::BinOp::BitXor(_)) => true,
+ (crate::BinOp::BitAnd(_), crate::BinOp::BitAnd(_)) => true,
+ (crate::BinOp::BitOr(_), crate::BinOp::BitOr(_)) => true,
+ (crate::BinOp::Shl(_), crate::BinOp::Shl(_)) => true,
+ (crate::BinOp::Shr(_), crate::BinOp::Shr(_)) => true,
+ (crate::BinOp::Eq(_), crate::BinOp::Eq(_)) => true,
+ (crate::BinOp::Lt(_), crate::BinOp::Lt(_)) => true,
+ (crate::BinOp::Le(_), crate::BinOp::Le(_)) => true,
+ (crate::BinOp::Ne(_), crate::BinOp::Ne(_)) => true,
+ (crate::BinOp::Ge(_), crate::BinOp::Ge(_)) => true,
+ (crate::BinOp::Gt(_), crate::BinOp::Gt(_)) => true,
+ (crate::BinOp::AddAssign(_), crate::BinOp::AddAssign(_)) => true,
+ (crate::BinOp::SubAssign(_), crate::BinOp::SubAssign(_)) => true,
+ (crate::BinOp::MulAssign(_), crate::BinOp::MulAssign(_)) => true,
+ (crate::BinOp::DivAssign(_), crate::BinOp::DivAssign(_)) => true,
+ (crate::BinOp::RemAssign(_), crate::BinOp::RemAssign(_)) => true,
+ (crate::BinOp::BitXorAssign(_), crate::BinOp::BitXorAssign(_)) => true,
+ (crate::BinOp::BitAndAssign(_), crate::BinOp::BitAndAssign(_)) => true,
+ (crate::BinOp::BitOrAssign(_), crate::BinOp::BitOrAssign(_)) => true,
+ (crate::BinOp::ShlAssign(_), crate::BinOp::ShlAssign(_)) => true,
+ (crate::BinOp::ShrAssign(_), crate::BinOp::ShrAssign(_)) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Block {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Block {
+ fn eq(&self, other: &Self) -> bool {
+ self.stmts == other.stmts
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::BoundLifetimes {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::BoundLifetimes {
+ fn eq(&self, other: &Self) -> bool {
+ self.lifetimes == other.lifetimes
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ConstParam {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ConstParam {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.ident == other.ident && self.ty == other.ty
+ && self.eq_token == other.eq_token && self.default == other.default
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Constraint {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Constraint {
+ fn eq(&self, other: &Self) -> bool {
+ self.ident == other.ident && self.generics == other.generics
+ && self.bounds == other.bounds
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Data {}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Data {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Data::Struct(self0), crate::Data::Struct(other0)) => self0 == other0,
+ (crate::Data::Enum(self0), crate::Data::Enum(other0)) => self0 == other0,
+ (crate::Data::Union(self0), crate::Data::Union(other0)) => self0 == other0,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::DataEnum {}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::DataEnum {
+ fn eq(&self, other: &Self) -> bool {
+ self.variants == other.variants
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::DataStruct {}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::DataStruct {
+ fn eq(&self, other: &Self) -> bool {
+ self.fields == other.fields && self.semi_token == other.semi_token
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::DataUnion {}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::DataUnion {
+ fn eq(&self, other: &Self) -> bool {
+ self.fields == other.fields
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::DeriveInput {}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::DeriveInput {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics && self.data == other.data
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Expr {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Expr {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ #[cfg(feature = "full")]
+ (crate::Expr::Array(self0), crate::Expr::Array(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Assign(self0), crate::Expr::Assign(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Async(self0), crate::Expr::Async(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Await(self0), crate::Expr::Await(other0)) => self0 == other0,
+ (crate::Expr::Binary(self0), crate::Expr::Binary(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Block(self0), crate::Expr::Block(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Break(self0), crate::Expr::Break(other0)) => self0 == other0,
+ (crate::Expr::Call(self0), crate::Expr::Call(other0)) => self0 == other0,
+ (crate::Expr::Cast(self0), crate::Expr::Cast(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Closure(self0), crate::Expr::Closure(other0)) => {
+ self0 == other0
+ }
+ #[cfg(feature = "full")]
+ (crate::Expr::Const(self0), crate::Expr::Const(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Continue(self0), crate::Expr::Continue(other0)) => {
+ self0 == other0
+ }
+ (crate::Expr::Field(self0), crate::Expr::Field(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::ForLoop(self0), crate::Expr::ForLoop(other0)) => {
+ self0 == other0
+ }
+ (crate::Expr::Group(self0), crate::Expr::Group(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::If(self0), crate::Expr::If(other0)) => self0 == other0,
+ (crate::Expr::Index(self0), crate::Expr::Index(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Infer(self0), crate::Expr::Infer(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Let(self0), crate::Expr::Let(other0)) => self0 == other0,
+ (crate::Expr::Lit(self0), crate::Expr::Lit(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Loop(self0), crate::Expr::Loop(other0)) => self0 == other0,
+ (crate::Expr::Macro(self0), crate::Expr::Macro(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Match(self0), crate::Expr::Match(other0)) => self0 == other0,
+ (crate::Expr::MethodCall(self0), crate::Expr::MethodCall(other0)) => {
+ self0 == other0
+ }
+ (crate::Expr::Paren(self0), crate::Expr::Paren(other0)) => self0 == other0,
+ (crate::Expr::Path(self0), crate::Expr::Path(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Range(self0), crate::Expr::Range(other0)) => self0 == other0,
+ (crate::Expr::Reference(self0), crate::Expr::Reference(other0)) => {
+ self0 == other0
+ }
+ #[cfg(feature = "full")]
+ (crate::Expr::Repeat(self0), crate::Expr::Repeat(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Return(self0), crate::Expr::Return(other0)) => self0 == other0,
+ (crate::Expr::Struct(self0), crate::Expr::Struct(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Try(self0), crate::Expr::Try(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::TryBlock(self0), crate::Expr::TryBlock(other0)) => {
+ self0 == other0
+ }
+ #[cfg(feature = "full")]
+ (crate::Expr::Tuple(self0), crate::Expr::Tuple(other0)) => self0 == other0,
+ (crate::Expr::Unary(self0), crate::Expr::Unary(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Unsafe(self0), crate::Expr::Unsafe(other0)) => self0 == other0,
+ (crate::Expr::Verbatim(self0), crate::Expr::Verbatim(other0)) => {
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
+ }
+ #[cfg(feature = "full")]
+ (crate::Expr::While(self0), crate::Expr::While(other0)) => self0 == other0,
+ #[cfg(feature = "full")]
+ (crate::Expr::Yield(self0), crate::Expr::Yield(other0)) => self0 == other0,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprArray {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprArray {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.elems == other.elems
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprAssign {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprAssign {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.left == other.left && self.right == other.right
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprAsync {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprAsync {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.capture == other.capture
+ && self.block == other.block
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprAwait {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprAwait {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.base == other.base
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprBinary {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprBinary {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.left == other.left && self.op == other.op
+ && self.right == other.right
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprBlock {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprBlock {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.label == other.label
+ && self.block == other.block
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprBreak {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprBreak {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprCall {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprCall {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.func == other.func && self.args == other.args
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprCast {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprCast {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprClosure {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprClosure {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.lifetimes == other.lifetimes
+ && self.constness == other.constness && self.movability == other.movability
+ && self.asyncness == other.asyncness && self.capture == other.capture
+ && self.inputs == other.inputs && self.output == other.output
+ && self.body == other.body
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprConst {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprConst {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.block == other.block
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprContinue {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprContinue {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.label == other.label
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprField {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprField {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.base == other.base
+ && self.member == other.member
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprForLoop {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprForLoop {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.label == other.label && self.pat == other.pat
+ && self.expr == other.expr && self.body == other.body
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprGroup {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprGroup {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprIf {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprIf {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.cond == other.cond
+ && self.then_branch == other.then_branch
+ && self.else_branch == other.else_branch
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprIndex {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprIndex {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprInfer {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprInfer {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprLet {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprLet {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprLit {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprLit {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.lit == other.lit
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprLoop {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprLoop {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.label == other.label && self.body == other.body
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprMacro {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.mac == other.mac
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprMatch {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprMatch {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprMethodCall {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprMethodCall {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.receiver == other.receiver
+ && self.method == other.method && self.turbofish == other.turbofish
+ && self.args == other.args
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprParen {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprParen {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprPath {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprPath {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprRange {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprRange {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.start == other.start
+ && self.limits == other.limits && self.end == other.end
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprReference {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprReference {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.mutability == other.mutability
+ && self.expr == other.expr
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprRepeat {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprRepeat {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprReturn {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprReturn {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprStruct {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprStruct {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
+ && self.fields == other.fields && self.dot2_token == other.dot2_token
+ && self.rest == other.rest
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprTry {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprTry {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprTryBlock {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprTryBlock {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.block == other.block
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprTuple {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprTuple {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.elems == other.elems
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprUnary {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprUnary {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprUnsafe {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprUnsafe {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.block == other.block
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprWhile {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprWhile {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.label == other.label && self.cond == other.cond
+ && self.body == other.body
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ExprYield {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ExprYield {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.expr == other.expr
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Field {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Field {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.mutability == other.mutability && self.ident == other.ident
+ && self.colon_token == other.colon_token && self.ty == other.ty
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::FieldMutability {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::FieldMutability {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::FieldMutability::None, crate::FieldMutability::None) => true,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::FieldPat {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::FieldPat {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.member == other.member
+ && self.colon_token == other.colon_token && self.pat == other.pat
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::FieldValue {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::FieldValue {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.member == other.member
+ && self.colon_token == other.colon_token && self.expr == other.expr
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Fields {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Fields {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Fields::Named(self0), crate::Fields::Named(other0)) => {
+ self0 == other0
+ }
+ (crate::Fields::Unnamed(self0), crate::Fields::Unnamed(other0)) => {
+ self0 == other0
+ }
+ (crate::Fields::Unit, crate::Fields::Unit) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::FieldsNamed {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::FieldsNamed {
+ fn eq(&self, other: &Self) -> bool {
+ self.named == other.named
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::FieldsUnnamed {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::FieldsUnnamed {
+ fn eq(&self, other: &Self) -> bool {
+ self.unnamed == other.unnamed
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::File {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::File {
+ fn eq(&self, other: &Self) -> bool {
+ self.shebang == other.shebang && self.attrs == other.attrs
+ && self.items == other.items
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::FnArg {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::FnArg {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::FnArg::Receiver(self0), crate::FnArg::Receiver(other0)) => {
+ self0 == other0
+ }
+ (crate::FnArg::Typed(self0), crate::FnArg::Typed(other0)) => self0 == other0,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ForeignItem {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ForeignItem {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::ForeignItem::Fn(self0), crate::ForeignItem::Fn(other0)) => {
+ self0 == other0
+ }
+ (crate::ForeignItem::Static(self0), crate::ForeignItem::Static(other0)) => {
+ self0 == other0
+ }
+ (crate::ForeignItem::Type(self0), crate::ForeignItem::Type(other0)) => {
+ self0 == other0
+ }
+ (crate::ForeignItem::Macro(self0), crate::ForeignItem::Macro(other0)) => {
+ self0 == other0
+ }
+ (
+ crate::ForeignItem::Verbatim(self0),
+ crate::ForeignItem::Verbatim(other0),
+ ) => TokenStreamHelper(self0) == TokenStreamHelper(other0),
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ForeignItemFn {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ForeignItemFn {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ForeignItemMacro {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ForeignItemMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.mac == other.mac
+ && self.semi_token == other.semi_token
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ForeignItemStatic {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ForeignItemStatic {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.mutability == other.mutability && self.ident == other.ident
+ && self.ty == other.ty
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ForeignItemType {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ForeignItemType {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::GenericArgument {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::GenericArgument {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (
+ crate::GenericArgument::Lifetime(self0),
+ crate::GenericArgument::Lifetime(other0),
+ ) => self0 == other0,
+ (
+ crate::GenericArgument::Type(self0),
+ crate::GenericArgument::Type(other0),
+ ) => self0 == other0,
+ (
+ crate::GenericArgument::Const(self0),
+ crate::GenericArgument::Const(other0),
+ ) => self0 == other0,
+ (
+ crate::GenericArgument::AssocType(self0),
+ crate::GenericArgument::AssocType(other0),
+ ) => self0 == other0,
+ (
+ crate::GenericArgument::AssocConst(self0),
+ crate::GenericArgument::AssocConst(other0),
+ ) => self0 == other0,
+ (
+ crate::GenericArgument::Constraint(self0),
+ crate::GenericArgument::Constraint(other0),
+ ) => self0 == other0,
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::GenericParam {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::GenericParam {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (
+ crate::GenericParam::Lifetime(self0),
+ crate::GenericParam::Lifetime(other0),
+ ) => self0 == other0,
+ (crate::GenericParam::Type(self0), crate::GenericParam::Type(other0)) => {
+ self0 == other0
+ }
+ (crate::GenericParam::Const(self0), crate::GenericParam::Const(other0)) => {
+ self0 == other0
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Generics {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Generics {
+ fn eq(&self, other: &Self) -> bool {
+ self.lt_token == other.lt_token && self.params == other.params
+ && self.gt_token == other.gt_token && self.where_clause == other.where_clause
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ImplItem {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ImplItem {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::ImplItem::Const(self0), crate::ImplItem::Const(other0)) => {
+ self0 == other0
+ }
+ (crate::ImplItem::Fn(self0), crate::ImplItem::Fn(other0)) => self0 == other0,
+ (crate::ImplItem::Type(self0), crate::ImplItem::Type(other0)) => {
+ self0 == other0
+ }
+ (crate::ImplItem::Macro(self0), crate::ImplItem::Macro(other0)) => {
+ self0 == other0
+ }
+ (crate::ImplItem::Verbatim(self0), crate::ImplItem::Verbatim(other0)) => {
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ImplItemConst {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ImplItemConst {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.defaultness == other.defaultness && self.ident == other.ident
+ && self.generics == other.generics && self.ty == other.ty
+ && self.expr == other.expr
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ImplItemFn {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ImplItemFn {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.defaultness == other.defaultness && self.sig == other.sig
+ && self.block == other.block
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ImplItemMacro {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ImplItemMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.mac == other.mac
+ && self.semi_token == other.semi_token
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ImplItemType {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ImplItemType {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.defaultness == other.defaultness && self.ident == other.ident
+ && self.generics == other.generics && self.ty == other.ty
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ImplRestriction {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ImplRestriction {
+ fn eq(&self, _other: &Self) -> bool {
+ match *self {}
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Item {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Item {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Item::Const(self0), crate::Item::Const(other0)) => self0 == other0,
+ (crate::Item::Enum(self0), crate::Item::Enum(other0)) => self0 == other0,
+ (crate::Item::ExternCrate(self0), crate::Item::ExternCrate(other0)) => {
+ self0 == other0
+ }
+ (crate::Item::Fn(self0), crate::Item::Fn(other0)) => self0 == other0,
+ (crate::Item::ForeignMod(self0), crate::Item::ForeignMod(other0)) => {
+ self0 == other0
+ }
+ (crate::Item::Impl(self0), crate::Item::Impl(other0)) => self0 == other0,
+ (crate::Item::Macro(self0), crate::Item::Macro(other0)) => self0 == other0,
+ (crate::Item::Mod(self0), crate::Item::Mod(other0)) => self0 == other0,
+ (crate::Item::Static(self0), crate::Item::Static(other0)) => self0 == other0,
+ (crate::Item::Struct(self0), crate::Item::Struct(other0)) => self0 == other0,
+ (crate::Item::Trait(self0), crate::Item::Trait(other0)) => self0 == other0,
+ (crate::Item::TraitAlias(self0), crate::Item::TraitAlias(other0)) => {
+ self0 == other0
+ }
+ (crate::Item::Type(self0), crate::Item::Type(other0)) => self0 == other0,
+ (crate::Item::Union(self0), crate::Item::Union(other0)) => self0 == other0,
+ (crate::Item::Use(self0), crate::Item::Use(other0)) => self0 == other0,
+ (crate::Item::Verbatim(self0), crate::Item::Verbatim(other0)) => {
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemConst {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemConst {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics && self.ty == other.ty
+ && self.expr == other.expr
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemEnum {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemEnum {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics && self.variants == other.variants
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemExternCrate {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemExternCrate {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.rename == other.rename
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemFn {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemFn {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
+ && self.block == other.block
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemForeignMod {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemForeignMod {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.unsafety == other.unsafety
+ && self.abi == other.abi && self.items == other.items
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemImpl {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemImpl {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.defaultness == other.defaultness
+ && self.unsafety == other.unsafety && self.generics == other.generics
+ && self.trait_ == other.trait_ && self.self_ty == other.self_ty
+ && self.items == other.items
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemMacro {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.ident == other.ident && self.mac == other.mac
+ && self.semi_token == other.semi_token
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemMod {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemMod {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.unsafety == other.unsafety && self.ident == other.ident
+ && self.content == other.content && self.semi == other.semi
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemStatic {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemStatic {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.mutability == other.mutability && self.ident == other.ident
+ && self.ty == other.ty && self.expr == other.expr
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemStruct {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemStruct {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics && self.fields == other.fields
+ && self.semi_token == other.semi_token
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemTrait {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemTrait {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.unsafety == other.unsafety && self.auto_token == other.auto_token
+ && self.restriction == other.restriction && self.ident == other.ident
+ && self.generics == other.generics && self.colon_token == other.colon_token
+ && self.supertraits == other.supertraits && self.items == other.items
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemTraitAlias {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemTraitAlias {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics && self.bounds == other.bounds
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemType {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemType {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics && self.ty == other.ty
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemUnion {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemUnion {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
+ && self.generics == other.generics && self.fields == other.fields
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ItemUse {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ItemUse {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.vis == other.vis
+ && self.leading_colon == other.leading_colon && self.tree == other.tree
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Label {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Label {
+ fn eq(&self, other: &Self) -> bool {
+ self.name == other.name
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LifetimeParam {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::LifetimeParam {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.lifetime == other.lifetime
+ && self.colon_token == other.colon_token && self.bounds == other.bounds
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Lit {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Lit {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Lit::Str(self0), crate::Lit::Str(other0)) => self0 == other0,
+ (crate::Lit::ByteStr(self0), crate::Lit::ByteStr(other0)) => self0 == other0,
+ (crate::Lit::CStr(self0), crate::Lit::CStr(other0)) => self0 == other0,
+ (crate::Lit::Byte(self0), crate::Lit::Byte(other0)) => self0 == other0,
+ (crate::Lit::Char(self0), crate::Lit::Char(other0)) => self0 == other0,
+ (crate::Lit::Int(self0), crate::Lit::Int(other0)) => self0 == other0,
+ (crate::Lit::Float(self0), crate::Lit::Float(other0)) => self0 == other0,
+ (crate::Lit::Bool(self0), crate::Lit::Bool(other0)) => self0 == other0,
+ (crate::Lit::Verbatim(self0), crate::Lit::Verbatim(other0)) => {
+ self0.to_string() == other0.to_string()
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitBool {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::LitBool {
+ fn eq(&self, other: &Self) -> bool {
+ self.value == other.value
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitByte {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitByteStr {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitCStr {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitChar {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitFloat {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitInt {}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LitStr {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Local {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Local {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::LocalInit {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::LocalInit {
+ fn eq(&self, other: &Self) -> bool {
+ self.expr == other.expr && self.diverge == other.diverge
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Macro {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Macro {
+ fn eq(&self, other: &Self) -> bool {
+ self.path == other.path && self.delimiter == other.delimiter
+ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::MacroDelimiter {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::MacroDelimiter {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::MacroDelimiter::Paren(_), crate::MacroDelimiter::Paren(_)) => true,
+ (crate::MacroDelimiter::Brace(_), crate::MacroDelimiter::Brace(_)) => true,
+ (crate::MacroDelimiter::Bracket(_), crate::MacroDelimiter::Bracket(_)) => {
+ true
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Meta {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Meta {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Meta::Path(self0), crate::Meta::Path(other0)) => self0 == other0,
+ (crate::Meta::List(self0), crate::Meta::List(other0)) => self0 == other0,
+ (crate::Meta::NameValue(self0), crate::Meta::NameValue(other0)) => {
+ self0 == other0
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::MetaList {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::MetaList {
+ fn eq(&self, other: &Self) -> bool {
+ self.path == other.path && self.delimiter == other.delimiter
+ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::MetaNameValue {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::MetaNameValue {
+ fn eq(&self, other: &Self) -> bool {
+ self.path == other.path && self.value == other.value
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ParenthesizedGenericArguments {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ParenthesizedGenericArguments {
+ fn eq(&self, other: &Self) -> bool {
+ self.inputs == other.inputs && self.output == other.output
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Pat {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Pat {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Pat::Const(self0), crate::Pat::Const(other0)) => self0 == other0,
+ (crate::Pat::Ident(self0), crate::Pat::Ident(other0)) => self0 == other0,
+ (crate::Pat::Lit(self0), crate::Pat::Lit(other0)) => self0 == other0,
+ (crate::Pat::Macro(self0), crate::Pat::Macro(other0)) => self0 == other0,
+ (crate::Pat::Or(self0), crate::Pat::Or(other0)) => self0 == other0,
+ (crate::Pat::Paren(self0), crate::Pat::Paren(other0)) => self0 == other0,
+ (crate::Pat::Path(self0), crate::Pat::Path(other0)) => self0 == other0,
+ (crate::Pat::Range(self0), crate::Pat::Range(other0)) => self0 == other0,
+ (crate::Pat::Reference(self0), crate::Pat::Reference(other0)) => {
+ self0 == other0
+ }
+ (crate::Pat::Rest(self0), crate::Pat::Rest(other0)) => self0 == other0,
+ (crate::Pat::Slice(self0), crate::Pat::Slice(other0)) => self0 == other0,
+ (crate::Pat::Struct(self0), crate::Pat::Struct(other0)) => self0 == other0,
+ (crate::Pat::Tuple(self0), crate::Pat::Tuple(other0)) => self0 == other0,
+ (crate::Pat::TupleStruct(self0), crate::Pat::TupleStruct(other0)) => {
+ self0 == other0
+ }
+ (crate::Pat::Type(self0), crate::Pat::Type(other0)) => self0 == other0,
+ (crate::Pat::Verbatim(self0), crate::Pat::Verbatim(other0)) => {
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
+ }
+ (crate::Pat::Wild(self0), crate::Pat::Wild(other0)) => self0 == other0,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatIdent {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatIdent {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.by_ref == other.by_ref
+ && self.mutability == other.mutability && self.ident == other.ident
+ && self.subpat == other.subpat
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatOr {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatOr {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.leading_vert == other.leading_vert
+ && self.cases == other.cases
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatParen {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatParen {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.pat == other.pat
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatReference {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatReference {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.mutability == other.mutability
+ && self.pat == other.pat
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatRest {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatRest {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatSlice {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatSlice {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.elems == other.elems
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatStruct {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatStruct {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
+ && self.fields == other.fields && self.rest == other.rest
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatTuple {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatTuple {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.elems == other.elems
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatTupleStruct {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatTupleStruct {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
+ && self.elems == other.elems
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatType {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatType {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PatWild {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PatWild {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Path {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Path {
+ fn eq(&self, other: &Self) -> bool {
+ self.leading_colon == other.leading_colon && self.segments == other.segments
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PathArguments {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PathArguments {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::PathArguments::None, crate::PathArguments::None) => true,
+ (
+ crate::PathArguments::AngleBracketed(self0),
+ crate::PathArguments::AngleBracketed(other0),
+ ) => self0 == other0,
+ (
+ crate::PathArguments::Parenthesized(self0),
+ crate::PathArguments::Parenthesized(other0),
+ ) => self0 == other0,
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PathSegment {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PathSegment {
+ fn eq(&self, other: &Self) -> bool {
+ self.ident == other.ident && self.arguments == other.arguments
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PredicateLifetime {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PredicateLifetime {
+ fn eq(&self, other: &Self) -> bool {
+ self.lifetime == other.lifetime && self.bounds == other.bounds
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::PredicateType {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::PredicateType {
+ fn eq(&self, other: &Self) -> bool {
+ self.lifetimes == other.lifetimes && self.bounded_ty == other.bounded_ty
+ && self.bounds == other.bounds
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::QSelf {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::QSelf {
+ fn eq(&self, other: &Self) -> bool {
+ self.ty == other.ty && self.position == other.position
+ && self.as_token == other.as_token
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::RangeLimits {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::RangeLimits {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::RangeLimits::HalfOpen(_), crate::RangeLimits::HalfOpen(_)) => true,
+ (crate::RangeLimits::Closed(_), crate::RangeLimits::Closed(_)) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Receiver {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Receiver {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.reference == other.reference
+ && self.mutability == other.mutability
+ && self.colon_token == other.colon_token && self.ty == other.ty
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::ReturnType {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::ReturnType {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::ReturnType::Default, crate::ReturnType::Default) => true,
+ (crate::ReturnType::Type(_, self1), crate::ReturnType::Type(_, other1)) => {
+ self1 == other1
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Signature {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Signature {
+ fn eq(&self, other: &Self) -> bool {
+ self.constness == other.constness && self.asyncness == other.asyncness
+ && self.unsafety == other.unsafety && self.abi == other.abi
+ && self.ident == other.ident && self.generics == other.generics
+ && self.inputs == other.inputs && self.variadic == other.variadic
+ && self.output == other.output
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::StaticMutability {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::StaticMutability {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::StaticMutability::Mut(_), crate::StaticMutability::Mut(_)) => true,
+ (crate::StaticMutability::None, crate::StaticMutability::None) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Stmt {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Stmt {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Stmt::Local(self0), crate::Stmt::Local(other0)) => self0 == other0,
+ (crate::Stmt::Item(self0), crate::Stmt::Item(other0)) => self0 == other0,
+ (crate::Stmt::Expr(self0, self1), crate::Stmt::Expr(other0, other1)) => {
+ self0 == other0 && self1 == other1
+ }
+ (crate::Stmt::Macro(self0), crate::Stmt::Macro(other0)) => self0 == other0,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::StmtMacro {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::StmtMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.mac == other.mac
+ && self.semi_token == other.semi_token
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TraitBound {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TraitBound {
+ fn eq(&self, other: &Self) -> bool {
+ self.paren_token == other.paren_token && self.modifier == other.modifier
+ && self.lifetimes == other.lifetimes && self.path == other.path
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TraitBoundModifier {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TraitBoundModifier {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::TraitBoundModifier::None, crate::TraitBoundModifier::None) => true,
+ (
+ crate::TraitBoundModifier::Maybe(_),
+ crate::TraitBoundModifier::Maybe(_),
+ ) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TraitItem {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TraitItem {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::TraitItem::Const(self0), crate::TraitItem::Const(other0)) => {
+ self0 == other0
+ }
+ (crate::TraitItem::Fn(self0), crate::TraitItem::Fn(other0)) => {
+ self0 == other0
+ }
+ (crate::TraitItem::Type(self0), crate::TraitItem::Type(other0)) => {
+ self0 == other0
+ }
+ (crate::TraitItem::Macro(self0), crate::TraitItem::Macro(other0)) => {
+ self0 == other0
+ }
+ (crate::TraitItem::Verbatim(self0), crate::TraitItem::Verbatim(other0)) => {
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TraitItemConst {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TraitItemConst {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.ident == other.ident
+ && self.generics == other.generics && self.ty == other.ty
+ && self.default == other.default
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TraitItemFn {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TraitItemFn {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.sig == other.sig
+ && self.default == other.default && self.semi_token == other.semi_token
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TraitItemMacro {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TraitItemMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.mac == other.mac
+ && self.semi_token == other.semi_token
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TraitItemType {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TraitItemType {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.ident == other.ident
+ && self.generics == other.generics && self.colon_token == other.colon_token
+ && self.bounds == other.bounds && self.default == other.default
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Type {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Type {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Type::Array(self0), crate::Type::Array(other0)) => self0 == other0,
+ (crate::Type::BareFn(self0), crate::Type::BareFn(other0)) => self0 == other0,
+ (crate::Type::Group(self0), crate::Type::Group(other0)) => self0 == other0,
+ (crate::Type::ImplTrait(self0), crate::Type::ImplTrait(other0)) => {
+ self0 == other0
+ }
+ (crate::Type::Infer(self0), crate::Type::Infer(other0)) => self0 == other0,
+ (crate::Type::Macro(self0), crate::Type::Macro(other0)) => self0 == other0,
+ (crate::Type::Never(self0), crate::Type::Never(other0)) => self0 == other0,
+ (crate::Type::Paren(self0), crate::Type::Paren(other0)) => self0 == other0,
+ (crate::Type::Path(self0), crate::Type::Path(other0)) => self0 == other0,
+ (crate::Type::Ptr(self0), crate::Type::Ptr(other0)) => self0 == other0,
+ (crate::Type::Reference(self0), crate::Type::Reference(other0)) => {
+ self0 == other0
+ }
+ (crate::Type::Slice(self0), crate::Type::Slice(other0)) => self0 == other0,
+ (crate::Type::TraitObject(self0), crate::Type::TraitObject(other0)) => {
+ self0 == other0
+ }
+ (crate::Type::Tuple(self0), crate::Type::Tuple(other0)) => self0 == other0,
+ (crate::Type::Verbatim(self0), crate::Type::Verbatim(other0)) => {
+ TokenStreamHelper(self0) == TokenStreamHelper(other0)
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeArray {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeArray {
+ fn eq(&self, other: &Self) -> bool {
+ self.elem == other.elem && self.len == other.len
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeBareFn {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeBareFn {
+ fn eq(&self, other: &Self) -> bool {
+ self.lifetimes == other.lifetimes && self.unsafety == other.unsafety
+ && self.abi == other.abi && self.inputs == other.inputs
+ && self.variadic == other.variadic && self.output == other.output
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeGroup {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeGroup {
+ fn eq(&self, other: &Self) -> bool {
+ self.elem == other.elem
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeImplTrait {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeImplTrait {
+ fn eq(&self, other: &Self) -> bool {
+ self.bounds == other.bounds
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeInfer {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeInfer {
+ fn eq(&self, _other: &Self) -> bool {
+ true
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeMacro {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.mac == other.mac
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeNever {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeNever {
+ fn eq(&self, _other: &Self) -> bool {
+ true
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeParam {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeParam {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.ident == other.ident
+ && self.colon_token == other.colon_token && self.bounds == other.bounds
+ && self.eq_token == other.eq_token && self.default == other.default
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeParamBound {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeParamBound {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (
+ crate::TypeParamBound::Trait(self0),
+ crate::TypeParamBound::Trait(other0),
+ ) => self0 == other0,
+ (
+ crate::TypeParamBound::Lifetime(self0),
+ crate::TypeParamBound::Lifetime(other0),
+ ) => self0 == other0,
+ (
+ crate::TypeParamBound::Verbatim(self0),
+ crate::TypeParamBound::Verbatim(other0),
+ ) => TokenStreamHelper(self0) == TokenStreamHelper(other0),
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeParen {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeParen {
+ fn eq(&self, other: &Self) -> bool {
+ self.elem == other.elem
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypePath {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypePath {
+ fn eq(&self, other: &Self) -> bool {
+ self.qself == other.qself && self.path == other.path
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypePtr {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypePtr {
+ fn eq(&self, other: &Self) -> bool {
+ self.const_token == other.const_token && self.mutability == other.mutability
+ && self.elem == other.elem
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeReference {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeReference {
+ fn eq(&self, other: &Self) -> bool {
+ self.lifetime == other.lifetime && self.mutability == other.mutability
+ && self.elem == other.elem
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeSlice {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeSlice {
+ fn eq(&self, other: &Self) -> bool {
+ self.elem == other.elem
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeTraitObject {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeTraitObject {
+ fn eq(&self, other: &Self) -> bool {
+ self.dyn_token == other.dyn_token && self.bounds == other.bounds
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::TypeTuple {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::TypeTuple {
+ fn eq(&self, other: &Self) -> bool {
+ self.elems == other.elems
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::UnOp {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::UnOp {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::UnOp::Deref(_), crate::UnOp::Deref(_)) => true,
+ (crate::UnOp::Not(_), crate::UnOp::Not(_)) => true,
+ (crate::UnOp::Neg(_), crate::UnOp::Neg(_)) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::UseGlob {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::UseGlob {
+ fn eq(&self, _other: &Self) -> bool {
+ true
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::UseGroup {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::UseGroup {
+ fn eq(&self, other: &Self) -> bool {
+ self.items == other.items
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::UseName {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::UseName {
+ fn eq(&self, other: &Self) -> bool {
+ self.ident == other.ident
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::UsePath {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::UsePath {
+ fn eq(&self, other: &Self) -> bool {
+ self.ident == other.ident && self.tree == other.tree
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::UseRename {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::UseRename {
+ fn eq(&self, other: &Self) -> bool {
+ self.ident == other.ident && self.rename == other.rename
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::UseTree {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::UseTree {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::UseTree::Path(self0), crate::UseTree::Path(other0)) => {
+ self0 == other0
+ }
+ (crate::UseTree::Name(self0), crate::UseTree::Name(other0)) => {
+ self0 == other0
+ }
+ (crate::UseTree::Rename(self0), crate::UseTree::Rename(other0)) => {
+ self0 == other0
+ }
+ (crate::UseTree::Glob(self0), crate::UseTree::Glob(other0)) => {
+ self0 == other0
+ }
+ (crate::UseTree::Group(self0), crate::UseTree::Group(other0)) => {
+ self0 == other0
+ }
+ _ => false,
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Variadic {}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Variadic {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.pat == other.pat && self.comma == other.comma
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Variant {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Variant {
+ fn eq(&self, other: &Self) -> bool {
+ self.attrs == other.attrs && self.ident == other.ident
+ && self.fields == other.fields && self.discriminant == other.discriminant
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::VisRestricted {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::VisRestricted {
+ fn eq(&self, other: &Self) -> bool {
+ self.in_token == other.in_token && self.path == other.path
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::Visibility {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::Visibility {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (crate::Visibility::Public(_), crate::Visibility::Public(_)) => true,
+ (
+ crate::Visibility::Restricted(self0),
+ crate::Visibility::Restricted(other0),
+ ) => self0 == other0,
+ (crate::Visibility::Inherited, crate::Visibility::Inherited) => true,
+ _ => false,
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::WhereClause {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::WhereClause {
+ fn eq(&self, other: &Self) -> bool {
+ self.predicates == other.predicates
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for crate::WherePredicate {}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for crate::WherePredicate {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (
+ crate::WherePredicate::Lifetime(self0),
+ crate::WherePredicate::Lifetime(other0),
+ ) => self0 == other0,
+ (crate::WherePredicate::Type(self0), crate::WherePredicate::Type(other0)) => {
+ self0 == other0
+ }
+ _ => false,
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/gen/fold.rs b/rust/hw/char/pl011/vendor/syn/src/gen/fold.rs
new file mode 100644
index 0000000000..e5662f5834
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/gen/fold.rs
@@ -0,0 +1,3779 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#![allow(unreachable_code, unused_variables)]
+#![allow(
+ clippy::match_wildcard_for_single_variants,
+ clippy::needless_match,
+ clippy::needless_pass_by_ref_mut,
+)]
+#[cfg(feature = "full")]
+macro_rules! full {
+ ($e:expr) => {
+ $e
+ };
+}
+#[cfg(all(feature = "derive", not(feature = "full")))]
+macro_rules! full {
+ ($e:expr) => {
+ unreachable!()
+ };
+}
+/// Syntax tree traversal to transform the nodes of an owned syntax tree.
+///
+/// See the [module documentation] for details.
+///
+/// [module documentation]: self
+pub trait Fold {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_abi(&mut self, i: crate::Abi) -> crate::Abi {
+ fold_abi(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_angle_bracketed_generic_arguments(
+ &mut self,
+ i: crate::AngleBracketedGenericArguments,
+ ) -> crate::AngleBracketedGenericArguments {
+ fold_angle_bracketed_generic_arguments(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_arm(&mut self, i: crate::Arm) -> crate::Arm {
+ fold_arm(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_assoc_const(&mut self, i: crate::AssocConst) -> crate::AssocConst {
+ fold_assoc_const(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_assoc_type(&mut self, i: crate::AssocType) -> crate::AssocType {
+ fold_assoc_type(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_attr_style(&mut self, i: crate::AttrStyle) -> crate::AttrStyle {
+ fold_attr_style(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_attribute(&mut self, i: crate::Attribute) -> crate::Attribute {
+ fold_attribute(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_bare_fn_arg(&mut self, i: crate::BareFnArg) -> crate::BareFnArg {
+ fold_bare_fn_arg(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_bare_variadic(&mut self, i: crate::BareVariadic) -> crate::BareVariadic {
+ fold_bare_variadic(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_bin_op(&mut self, i: crate::BinOp) -> crate::BinOp {
+ fold_bin_op(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_block(&mut self, i: crate::Block) -> crate::Block {
+ fold_block(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_bound_lifetimes(
+ &mut self,
+ i: crate::BoundLifetimes,
+ ) -> crate::BoundLifetimes {
+ fold_bound_lifetimes(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_const_param(&mut self, i: crate::ConstParam) -> crate::ConstParam {
+ fold_const_param(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_constraint(&mut self, i: crate::Constraint) -> crate::Constraint {
+ fold_constraint(self, i)
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn fold_data(&mut self, i: crate::Data) -> crate::Data {
+ fold_data(self, i)
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn fold_data_enum(&mut self, i: crate::DataEnum) -> crate::DataEnum {
+ fold_data_enum(self, i)
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn fold_data_struct(&mut self, i: crate::DataStruct) -> crate::DataStruct {
+ fold_data_struct(self, i)
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn fold_data_union(&mut self, i: crate::DataUnion) -> crate::DataUnion {
+ fold_data_union(self, i)
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn fold_derive_input(&mut self, i: crate::DeriveInput) -> crate::DeriveInput {
+ fold_derive_input(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr(&mut self, i: crate::Expr) -> crate::Expr {
+ fold_expr(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_array(&mut self, i: crate::ExprArray) -> crate::ExprArray {
+ fold_expr_array(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_assign(&mut self, i: crate::ExprAssign) -> crate::ExprAssign {
+ fold_expr_assign(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_async(&mut self, i: crate::ExprAsync) -> crate::ExprAsync {
+ fold_expr_async(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_await(&mut self, i: crate::ExprAwait) -> crate::ExprAwait {
+ fold_expr_await(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_binary(&mut self, i: crate::ExprBinary) -> crate::ExprBinary {
+ fold_expr_binary(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_block(&mut self, i: crate::ExprBlock) -> crate::ExprBlock {
+ fold_expr_block(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_break(&mut self, i: crate::ExprBreak) -> crate::ExprBreak {
+ fold_expr_break(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_call(&mut self, i: crate::ExprCall) -> crate::ExprCall {
+ fold_expr_call(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_cast(&mut self, i: crate::ExprCast) -> crate::ExprCast {
+ fold_expr_cast(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_closure(&mut self, i: crate::ExprClosure) -> crate::ExprClosure {
+ fold_expr_closure(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_const(&mut self, i: crate::ExprConst) -> crate::ExprConst {
+ fold_expr_const(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_continue(&mut self, i: crate::ExprContinue) -> crate::ExprContinue {
+ fold_expr_continue(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_field(&mut self, i: crate::ExprField) -> crate::ExprField {
+ fold_expr_field(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_for_loop(&mut self, i: crate::ExprForLoop) -> crate::ExprForLoop {
+ fold_expr_for_loop(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_group(&mut self, i: crate::ExprGroup) -> crate::ExprGroup {
+ fold_expr_group(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_if(&mut self, i: crate::ExprIf) -> crate::ExprIf {
+ fold_expr_if(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_index(&mut self, i: crate::ExprIndex) -> crate::ExprIndex {
+ fold_expr_index(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_infer(&mut self, i: crate::ExprInfer) -> crate::ExprInfer {
+ fold_expr_infer(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_let(&mut self, i: crate::ExprLet) -> crate::ExprLet {
+ fold_expr_let(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_lit(&mut self, i: crate::ExprLit) -> crate::ExprLit {
+ fold_expr_lit(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_loop(&mut self, i: crate::ExprLoop) -> crate::ExprLoop {
+ fold_expr_loop(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_macro(&mut self, i: crate::ExprMacro) -> crate::ExprMacro {
+ fold_expr_macro(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_match(&mut self, i: crate::ExprMatch) -> crate::ExprMatch {
+ fold_expr_match(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_method_call(
+ &mut self,
+ i: crate::ExprMethodCall,
+ ) -> crate::ExprMethodCall {
+ fold_expr_method_call(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_paren(&mut self, i: crate::ExprParen) -> crate::ExprParen {
+ fold_expr_paren(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_path(&mut self, i: crate::ExprPath) -> crate::ExprPath {
+ fold_expr_path(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_range(&mut self, i: crate::ExprRange) -> crate::ExprRange {
+ fold_expr_range(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_reference(&mut self, i: crate::ExprReference) -> crate::ExprReference {
+ fold_expr_reference(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_repeat(&mut self, i: crate::ExprRepeat) -> crate::ExprRepeat {
+ fold_expr_repeat(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_return(&mut self, i: crate::ExprReturn) -> crate::ExprReturn {
+ fold_expr_return(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_struct(&mut self, i: crate::ExprStruct) -> crate::ExprStruct {
+ fold_expr_struct(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_try(&mut self, i: crate::ExprTry) -> crate::ExprTry {
+ fold_expr_try(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_try_block(&mut self, i: crate::ExprTryBlock) -> crate::ExprTryBlock {
+ fold_expr_try_block(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_tuple(&mut self, i: crate::ExprTuple) -> crate::ExprTuple {
+ fold_expr_tuple(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_expr_unary(&mut self, i: crate::ExprUnary) -> crate::ExprUnary {
+ fold_expr_unary(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_unsafe(&mut self, i: crate::ExprUnsafe) -> crate::ExprUnsafe {
+ fold_expr_unsafe(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_while(&mut self, i: crate::ExprWhile) -> crate::ExprWhile {
+ fold_expr_while(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_expr_yield(&mut self, i: crate::ExprYield) -> crate::ExprYield {
+ fold_expr_yield(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_field(&mut self, i: crate::Field) -> crate::Field {
+ fold_field(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_field_mutability(
+ &mut self,
+ i: crate::FieldMutability,
+ ) -> crate::FieldMutability {
+ fold_field_mutability(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_field_pat(&mut self, i: crate::FieldPat) -> crate::FieldPat {
+ fold_field_pat(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_field_value(&mut self, i: crate::FieldValue) -> crate::FieldValue {
+ fold_field_value(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_fields(&mut self, i: crate::Fields) -> crate::Fields {
+ fold_fields(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_fields_named(&mut self, i: crate::FieldsNamed) -> crate::FieldsNamed {
+ fold_fields_named(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_fields_unnamed(&mut self, i: crate::FieldsUnnamed) -> crate::FieldsUnnamed {
+ fold_fields_unnamed(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_file(&mut self, i: crate::File) -> crate::File {
+ fold_file(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_fn_arg(&mut self, i: crate::FnArg) -> crate::FnArg {
+ fold_fn_arg(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_foreign_item(&mut self, i: crate::ForeignItem) -> crate::ForeignItem {
+ fold_foreign_item(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_foreign_item_fn(&mut self, i: crate::ForeignItemFn) -> crate::ForeignItemFn {
+ fold_foreign_item_fn(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_foreign_item_macro(
+ &mut self,
+ i: crate::ForeignItemMacro,
+ ) -> crate::ForeignItemMacro {
+ fold_foreign_item_macro(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_foreign_item_static(
+ &mut self,
+ i: crate::ForeignItemStatic,
+ ) -> crate::ForeignItemStatic {
+ fold_foreign_item_static(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_foreign_item_type(
+ &mut self,
+ i: crate::ForeignItemType,
+ ) -> crate::ForeignItemType {
+ fold_foreign_item_type(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_generic_argument(
+ &mut self,
+ i: crate::GenericArgument,
+ ) -> crate::GenericArgument {
+ fold_generic_argument(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_generic_param(&mut self, i: crate::GenericParam) -> crate::GenericParam {
+ fold_generic_param(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_generics(&mut self, i: crate::Generics) -> crate::Generics {
+ fold_generics(self, i)
+ }
+ fn fold_ident(&mut self, i: proc_macro2::Ident) -> proc_macro2::Ident {
+ fold_ident(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_impl_item(&mut self, i: crate::ImplItem) -> crate::ImplItem {
+ fold_impl_item(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_impl_item_const(&mut self, i: crate::ImplItemConst) -> crate::ImplItemConst {
+ fold_impl_item_const(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_impl_item_fn(&mut self, i: crate::ImplItemFn) -> crate::ImplItemFn {
+ fold_impl_item_fn(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_impl_item_macro(&mut self, i: crate::ImplItemMacro) -> crate::ImplItemMacro {
+ fold_impl_item_macro(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_impl_item_type(&mut self, i: crate::ImplItemType) -> crate::ImplItemType {
+ fold_impl_item_type(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_impl_restriction(
+ &mut self,
+ i: crate::ImplRestriction,
+ ) -> crate::ImplRestriction {
+ fold_impl_restriction(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_index(&mut self, i: crate::Index) -> crate::Index {
+ fold_index(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item(&mut self, i: crate::Item) -> crate::Item {
+ fold_item(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_const(&mut self, i: crate::ItemConst) -> crate::ItemConst {
+ fold_item_const(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_enum(&mut self, i: crate::ItemEnum) -> crate::ItemEnum {
+ fold_item_enum(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_extern_crate(
+ &mut self,
+ i: crate::ItemExternCrate,
+ ) -> crate::ItemExternCrate {
+ fold_item_extern_crate(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_fn(&mut self, i: crate::ItemFn) -> crate::ItemFn {
+ fold_item_fn(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_foreign_mod(
+ &mut self,
+ i: crate::ItemForeignMod,
+ ) -> crate::ItemForeignMod {
+ fold_item_foreign_mod(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_impl(&mut self, i: crate::ItemImpl) -> crate::ItemImpl {
+ fold_item_impl(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_macro(&mut self, i: crate::ItemMacro) -> crate::ItemMacro {
+ fold_item_macro(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_mod(&mut self, i: crate::ItemMod) -> crate::ItemMod {
+ fold_item_mod(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_static(&mut self, i: crate::ItemStatic) -> crate::ItemStatic {
+ fold_item_static(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_struct(&mut self, i: crate::ItemStruct) -> crate::ItemStruct {
+ fold_item_struct(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_trait(&mut self, i: crate::ItemTrait) -> crate::ItemTrait {
+ fold_item_trait(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_trait_alias(
+ &mut self,
+ i: crate::ItemTraitAlias,
+ ) -> crate::ItemTraitAlias {
+ fold_item_trait_alias(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_type(&mut self, i: crate::ItemType) -> crate::ItemType {
+ fold_item_type(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_union(&mut self, i: crate::ItemUnion) -> crate::ItemUnion {
+ fold_item_union(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_item_use(&mut self, i: crate::ItemUse) -> crate::ItemUse {
+ fold_item_use(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_label(&mut self, i: crate::Label) -> crate::Label {
+ fold_label(self, i)
+ }
+ fn fold_lifetime(&mut self, i: crate::Lifetime) -> crate::Lifetime {
+ fold_lifetime(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_lifetime_param(&mut self, i: crate::LifetimeParam) -> crate::LifetimeParam {
+ fold_lifetime_param(self, i)
+ }
+ fn fold_lit(&mut self, i: crate::Lit) -> crate::Lit {
+ fold_lit(self, i)
+ }
+ fn fold_lit_bool(&mut self, i: crate::LitBool) -> crate::LitBool {
+ fold_lit_bool(self, i)
+ }
+ fn fold_lit_byte(&mut self, i: crate::LitByte) -> crate::LitByte {
+ fold_lit_byte(self, i)
+ }
+ fn fold_lit_byte_str(&mut self, i: crate::LitByteStr) -> crate::LitByteStr {
+ fold_lit_byte_str(self, i)
+ }
+ fn fold_lit_cstr(&mut self, i: crate::LitCStr) -> crate::LitCStr {
+ fold_lit_cstr(self, i)
+ }
+ fn fold_lit_char(&mut self, i: crate::LitChar) -> crate::LitChar {
+ fold_lit_char(self, i)
+ }
+ fn fold_lit_float(&mut self, i: crate::LitFloat) -> crate::LitFloat {
+ fold_lit_float(self, i)
+ }
+ fn fold_lit_int(&mut self, i: crate::LitInt) -> crate::LitInt {
+ fold_lit_int(self, i)
+ }
+ fn fold_lit_str(&mut self, i: crate::LitStr) -> crate::LitStr {
+ fold_lit_str(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_local(&mut self, i: crate::Local) -> crate::Local {
+ fold_local(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_local_init(&mut self, i: crate::LocalInit) -> crate::LocalInit {
+ fold_local_init(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_macro(&mut self, i: crate::Macro) -> crate::Macro {
+ fold_macro(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_macro_delimiter(
+ &mut self,
+ i: crate::MacroDelimiter,
+ ) -> crate::MacroDelimiter {
+ fold_macro_delimiter(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_member(&mut self, i: crate::Member) -> crate::Member {
+ fold_member(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_meta(&mut self, i: crate::Meta) -> crate::Meta {
+ fold_meta(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_meta_list(&mut self, i: crate::MetaList) -> crate::MetaList {
+ fold_meta_list(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_meta_name_value(&mut self, i: crate::MetaNameValue) -> crate::MetaNameValue {
+ fold_meta_name_value(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_parenthesized_generic_arguments(
+ &mut self,
+ i: crate::ParenthesizedGenericArguments,
+ ) -> crate::ParenthesizedGenericArguments {
+ fold_parenthesized_generic_arguments(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat(&mut self, i: crate::Pat) -> crate::Pat {
+ fold_pat(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_ident(&mut self, i: crate::PatIdent) -> crate::PatIdent {
+ fold_pat_ident(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_or(&mut self, i: crate::PatOr) -> crate::PatOr {
+ fold_pat_or(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_paren(&mut self, i: crate::PatParen) -> crate::PatParen {
+ fold_pat_paren(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_reference(&mut self, i: crate::PatReference) -> crate::PatReference {
+ fold_pat_reference(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_rest(&mut self, i: crate::PatRest) -> crate::PatRest {
+ fold_pat_rest(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_slice(&mut self, i: crate::PatSlice) -> crate::PatSlice {
+ fold_pat_slice(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_struct(&mut self, i: crate::PatStruct) -> crate::PatStruct {
+ fold_pat_struct(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_tuple(&mut self, i: crate::PatTuple) -> crate::PatTuple {
+ fold_pat_tuple(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_tuple_struct(
+ &mut self,
+ i: crate::PatTupleStruct,
+ ) -> crate::PatTupleStruct {
+ fold_pat_tuple_struct(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_type(&mut self, i: crate::PatType) -> crate::PatType {
+ fold_pat_type(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_pat_wild(&mut self, i: crate::PatWild) -> crate::PatWild {
+ fold_pat_wild(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_path(&mut self, i: crate::Path) -> crate::Path {
+ fold_path(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_path_arguments(&mut self, i: crate::PathArguments) -> crate::PathArguments {
+ fold_path_arguments(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_path_segment(&mut self, i: crate::PathSegment) -> crate::PathSegment {
+ fold_path_segment(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_predicate_lifetime(
+ &mut self,
+ i: crate::PredicateLifetime,
+ ) -> crate::PredicateLifetime {
+ fold_predicate_lifetime(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_predicate_type(&mut self, i: crate::PredicateType) -> crate::PredicateType {
+ fold_predicate_type(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_qself(&mut self, i: crate::QSelf) -> crate::QSelf {
+ fold_qself(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_range_limits(&mut self, i: crate::RangeLimits) -> crate::RangeLimits {
+ fold_range_limits(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_receiver(&mut self, i: crate::Receiver) -> crate::Receiver {
+ fold_receiver(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_return_type(&mut self, i: crate::ReturnType) -> crate::ReturnType {
+ fold_return_type(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_signature(&mut self, i: crate::Signature) -> crate::Signature {
+ fold_signature(self, i)
+ }
+ fn fold_span(&mut self, i: proc_macro2::Span) -> proc_macro2::Span {
+ fold_span(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_static_mutability(
+ &mut self,
+ i: crate::StaticMutability,
+ ) -> crate::StaticMutability {
+ fold_static_mutability(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_stmt(&mut self, i: crate::Stmt) -> crate::Stmt {
+ fold_stmt(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_stmt_macro(&mut self, i: crate::StmtMacro) -> crate::StmtMacro {
+ fold_stmt_macro(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_trait_bound(&mut self, i: crate::TraitBound) -> crate::TraitBound {
+ fold_trait_bound(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_trait_bound_modifier(
+ &mut self,
+ i: crate::TraitBoundModifier,
+ ) -> crate::TraitBoundModifier {
+ fold_trait_bound_modifier(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_trait_item(&mut self, i: crate::TraitItem) -> crate::TraitItem {
+ fold_trait_item(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_trait_item_const(
+ &mut self,
+ i: crate::TraitItemConst,
+ ) -> crate::TraitItemConst {
+ fold_trait_item_const(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_trait_item_fn(&mut self, i: crate::TraitItemFn) -> crate::TraitItemFn {
+ fold_trait_item_fn(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_trait_item_macro(
+ &mut self,
+ i: crate::TraitItemMacro,
+ ) -> crate::TraitItemMacro {
+ fold_trait_item_macro(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_trait_item_type(&mut self, i: crate::TraitItemType) -> crate::TraitItemType {
+ fold_trait_item_type(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type(&mut self, i: crate::Type) -> crate::Type {
+ fold_type(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_array(&mut self, i: crate::TypeArray) -> crate::TypeArray {
+ fold_type_array(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_bare_fn(&mut self, i: crate::TypeBareFn) -> crate::TypeBareFn {
+ fold_type_bare_fn(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_group(&mut self, i: crate::TypeGroup) -> crate::TypeGroup {
+ fold_type_group(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_impl_trait(&mut self, i: crate::TypeImplTrait) -> crate::TypeImplTrait {
+ fold_type_impl_trait(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_infer(&mut self, i: crate::TypeInfer) -> crate::TypeInfer {
+ fold_type_infer(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_macro(&mut self, i: crate::TypeMacro) -> crate::TypeMacro {
+ fold_type_macro(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_never(&mut self, i: crate::TypeNever) -> crate::TypeNever {
+ fold_type_never(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_param(&mut self, i: crate::TypeParam) -> crate::TypeParam {
+ fold_type_param(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_param_bound(
+ &mut self,
+ i: crate::TypeParamBound,
+ ) -> crate::TypeParamBound {
+ fold_type_param_bound(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_paren(&mut self, i: crate::TypeParen) -> crate::TypeParen {
+ fold_type_paren(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_path(&mut self, i: crate::TypePath) -> crate::TypePath {
+ fold_type_path(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_ptr(&mut self, i: crate::TypePtr) -> crate::TypePtr {
+ fold_type_ptr(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_reference(&mut self, i: crate::TypeReference) -> crate::TypeReference {
+ fold_type_reference(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_slice(&mut self, i: crate::TypeSlice) -> crate::TypeSlice {
+ fold_type_slice(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_trait_object(
+ &mut self,
+ i: crate::TypeTraitObject,
+ ) -> crate::TypeTraitObject {
+ fold_type_trait_object(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_type_tuple(&mut self, i: crate::TypeTuple) -> crate::TypeTuple {
+ fold_type_tuple(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_un_op(&mut self, i: crate::UnOp) -> crate::UnOp {
+ fold_un_op(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_use_glob(&mut self, i: crate::UseGlob) -> crate::UseGlob {
+ fold_use_glob(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_use_group(&mut self, i: crate::UseGroup) -> crate::UseGroup {
+ fold_use_group(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_use_name(&mut self, i: crate::UseName) -> crate::UseName {
+ fold_use_name(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_use_path(&mut self, i: crate::UsePath) -> crate::UsePath {
+ fold_use_path(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_use_rename(&mut self, i: crate::UseRename) -> crate::UseRename {
+ fold_use_rename(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_use_tree(&mut self, i: crate::UseTree) -> crate::UseTree {
+ fold_use_tree(self, i)
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn fold_variadic(&mut self, i: crate::Variadic) -> crate::Variadic {
+ fold_variadic(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_variant(&mut self, i: crate::Variant) -> crate::Variant {
+ fold_variant(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_vis_restricted(&mut self, i: crate::VisRestricted) -> crate::VisRestricted {
+ fold_vis_restricted(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_visibility(&mut self, i: crate::Visibility) -> crate::Visibility {
+ fold_visibility(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_where_clause(&mut self, i: crate::WhereClause) -> crate::WhereClause {
+ fold_where_clause(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn fold_where_predicate(
+ &mut self,
+ i: crate::WherePredicate,
+ ) -> crate::WherePredicate {
+ fold_where_predicate(self, i)
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_abi<F>(f: &mut F, node: crate::Abi) -> crate::Abi
+where
+ F: Fold + ?Sized,
+{
+ crate::Abi {
+ extern_token: node.extern_token,
+ name: (node.name).map(|it| f.fold_lit_str(it)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_angle_bracketed_generic_arguments<F>(
+ f: &mut F,
+ node: crate::AngleBracketedGenericArguments,
+) -> crate::AngleBracketedGenericArguments
+where
+ F: Fold + ?Sized,
+{
+ crate::AngleBracketedGenericArguments {
+ colon2_token: node.colon2_token,
+ lt_token: node.lt_token,
+ args: crate::punctuated::fold(node.args, f, F::fold_generic_argument),
+ gt_token: node.gt_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_arm<F>(f: &mut F, node: crate::Arm) -> crate::Arm
+where
+ F: Fold + ?Sized,
+{
+ crate::Arm {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ pat: f.fold_pat(node.pat),
+ guard: (node.guard).map(|it| ((it).0, Box::new(f.fold_expr(*(it).1)))),
+ fat_arrow_token: node.fat_arrow_token,
+ body: Box::new(f.fold_expr(*node.body)),
+ comma: node.comma,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_assoc_const<F>(f: &mut F, node: crate::AssocConst) -> crate::AssocConst
+where
+ F: Fold + ?Sized,
+{
+ crate::AssocConst {
+ ident: f.fold_ident(node.ident),
+ generics: (node.generics).map(|it| f.fold_angle_bracketed_generic_arguments(it)),
+ eq_token: node.eq_token,
+ value: f.fold_expr(node.value),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_assoc_type<F>(f: &mut F, node: crate::AssocType) -> crate::AssocType
+where
+ F: Fold + ?Sized,
+{
+ crate::AssocType {
+ ident: f.fold_ident(node.ident),
+ generics: (node.generics).map(|it| f.fold_angle_bracketed_generic_arguments(it)),
+ eq_token: node.eq_token,
+ ty: f.fold_type(node.ty),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_attr_style<F>(f: &mut F, node: crate::AttrStyle) -> crate::AttrStyle
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::AttrStyle::Outer => crate::AttrStyle::Outer,
+ crate::AttrStyle::Inner(_binding_0) => crate::AttrStyle::Inner(_binding_0),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_attribute<F>(f: &mut F, node: crate::Attribute) -> crate::Attribute
+where
+ F: Fold + ?Sized,
+{
+ crate::Attribute {
+ pound_token: node.pound_token,
+ style: f.fold_attr_style(node.style),
+ bracket_token: node.bracket_token,
+ meta: f.fold_meta(node.meta),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_bare_fn_arg<F>(f: &mut F, node: crate::BareFnArg) -> crate::BareFnArg
+where
+ F: Fold + ?Sized,
+{
+ crate::BareFnArg {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ name: (node.name).map(|it| (f.fold_ident((it).0), (it).1)),
+ ty: f.fold_type(node.ty),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_bare_variadic<F>(f: &mut F, node: crate::BareVariadic) -> crate::BareVariadic
+where
+ F: Fold + ?Sized,
+{
+ crate::BareVariadic {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ name: (node.name).map(|it| (f.fold_ident((it).0), (it).1)),
+ dots: node.dots,
+ comma: node.comma,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_bin_op<F>(f: &mut F, node: crate::BinOp) -> crate::BinOp
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::BinOp::Add(_binding_0) => crate::BinOp::Add(_binding_0),
+ crate::BinOp::Sub(_binding_0) => crate::BinOp::Sub(_binding_0),
+ crate::BinOp::Mul(_binding_0) => crate::BinOp::Mul(_binding_0),
+ crate::BinOp::Div(_binding_0) => crate::BinOp::Div(_binding_0),
+ crate::BinOp::Rem(_binding_0) => crate::BinOp::Rem(_binding_0),
+ crate::BinOp::And(_binding_0) => crate::BinOp::And(_binding_0),
+ crate::BinOp::Or(_binding_0) => crate::BinOp::Or(_binding_0),
+ crate::BinOp::BitXor(_binding_0) => crate::BinOp::BitXor(_binding_0),
+ crate::BinOp::BitAnd(_binding_0) => crate::BinOp::BitAnd(_binding_0),
+ crate::BinOp::BitOr(_binding_0) => crate::BinOp::BitOr(_binding_0),
+ crate::BinOp::Shl(_binding_0) => crate::BinOp::Shl(_binding_0),
+ crate::BinOp::Shr(_binding_0) => crate::BinOp::Shr(_binding_0),
+ crate::BinOp::Eq(_binding_0) => crate::BinOp::Eq(_binding_0),
+ crate::BinOp::Lt(_binding_0) => crate::BinOp::Lt(_binding_0),
+ crate::BinOp::Le(_binding_0) => crate::BinOp::Le(_binding_0),
+ crate::BinOp::Ne(_binding_0) => crate::BinOp::Ne(_binding_0),
+ crate::BinOp::Ge(_binding_0) => crate::BinOp::Ge(_binding_0),
+ crate::BinOp::Gt(_binding_0) => crate::BinOp::Gt(_binding_0),
+ crate::BinOp::AddAssign(_binding_0) => crate::BinOp::AddAssign(_binding_0),
+ crate::BinOp::SubAssign(_binding_0) => crate::BinOp::SubAssign(_binding_0),
+ crate::BinOp::MulAssign(_binding_0) => crate::BinOp::MulAssign(_binding_0),
+ crate::BinOp::DivAssign(_binding_0) => crate::BinOp::DivAssign(_binding_0),
+ crate::BinOp::RemAssign(_binding_0) => crate::BinOp::RemAssign(_binding_0),
+ crate::BinOp::BitXorAssign(_binding_0) => crate::BinOp::BitXorAssign(_binding_0),
+ crate::BinOp::BitAndAssign(_binding_0) => crate::BinOp::BitAndAssign(_binding_0),
+ crate::BinOp::BitOrAssign(_binding_0) => crate::BinOp::BitOrAssign(_binding_0),
+ crate::BinOp::ShlAssign(_binding_0) => crate::BinOp::ShlAssign(_binding_0),
+ crate::BinOp::ShrAssign(_binding_0) => crate::BinOp::ShrAssign(_binding_0),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_block<F>(f: &mut F, node: crate::Block) -> crate::Block
+where
+ F: Fold + ?Sized,
+{
+ crate::Block {
+ brace_token: node.brace_token,
+ stmts: fold_vec(node.stmts, f, F::fold_stmt),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_bound_lifetimes<F>(
+ f: &mut F,
+ node: crate::BoundLifetimes,
+) -> crate::BoundLifetimes
+where
+ F: Fold + ?Sized,
+{
+ crate::BoundLifetimes {
+ for_token: node.for_token,
+ lt_token: node.lt_token,
+ lifetimes: crate::punctuated::fold(node.lifetimes, f, F::fold_generic_param),
+ gt_token: node.gt_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_const_param<F>(f: &mut F, node: crate::ConstParam) -> crate::ConstParam
+where
+ F: Fold + ?Sized,
+{
+ crate::ConstParam {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ const_token: node.const_token,
+ ident: f.fold_ident(node.ident),
+ colon_token: node.colon_token,
+ ty: f.fold_type(node.ty),
+ eq_token: node.eq_token,
+ default: (node.default).map(|it| f.fold_expr(it)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_constraint<F>(f: &mut F, node: crate::Constraint) -> crate::Constraint
+where
+ F: Fold + ?Sized,
+{
+ crate::Constraint {
+ ident: f.fold_ident(node.ident),
+ generics: (node.generics).map(|it| f.fold_angle_bracketed_generic_arguments(it)),
+ colon_token: node.colon_token,
+ bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound),
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn fold_data<F>(f: &mut F, node: crate::Data) -> crate::Data
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Data::Struct(_binding_0) => {
+ crate::Data::Struct(f.fold_data_struct(_binding_0))
+ }
+ crate::Data::Enum(_binding_0) => crate::Data::Enum(f.fold_data_enum(_binding_0)),
+ crate::Data::Union(_binding_0) => {
+ crate::Data::Union(f.fold_data_union(_binding_0))
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn fold_data_enum<F>(f: &mut F, node: crate::DataEnum) -> crate::DataEnum
+where
+ F: Fold + ?Sized,
+{
+ crate::DataEnum {
+ enum_token: node.enum_token,
+ brace_token: node.brace_token,
+ variants: crate::punctuated::fold(node.variants, f, F::fold_variant),
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn fold_data_struct<F>(f: &mut F, node: crate::DataStruct) -> crate::DataStruct
+where
+ F: Fold + ?Sized,
+{
+ crate::DataStruct {
+ struct_token: node.struct_token,
+ fields: f.fold_fields(node.fields),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn fold_data_union<F>(f: &mut F, node: crate::DataUnion) -> crate::DataUnion
+where
+ F: Fold + ?Sized,
+{
+ crate::DataUnion {
+ union_token: node.union_token,
+ fields: f.fold_fields_named(node.fields),
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn fold_derive_input<F>(f: &mut F, node: crate::DeriveInput) -> crate::DeriveInput
+where
+ F: Fold + ?Sized,
+{
+ crate::DeriveInput {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ data: f.fold_data(node.data),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr<F>(f: &mut F, node: crate::Expr) -> crate::Expr
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Expr::Array(_binding_0) => {
+ crate::Expr::Array(full!(f.fold_expr_array(_binding_0)))
+ }
+ crate::Expr::Assign(_binding_0) => {
+ crate::Expr::Assign(full!(f.fold_expr_assign(_binding_0)))
+ }
+ crate::Expr::Async(_binding_0) => {
+ crate::Expr::Async(full!(f.fold_expr_async(_binding_0)))
+ }
+ crate::Expr::Await(_binding_0) => {
+ crate::Expr::Await(full!(f.fold_expr_await(_binding_0)))
+ }
+ crate::Expr::Binary(_binding_0) => {
+ crate::Expr::Binary(f.fold_expr_binary(_binding_0))
+ }
+ crate::Expr::Block(_binding_0) => {
+ crate::Expr::Block(full!(f.fold_expr_block(_binding_0)))
+ }
+ crate::Expr::Break(_binding_0) => {
+ crate::Expr::Break(full!(f.fold_expr_break(_binding_0)))
+ }
+ crate::Expr::Call(_binding_0) => crate::Expr::Call(f.fold_expr_call(_binding_0)),
+ crate::Expr::Cast(_binding_0) => crate::Expr::Cast(f.fold_expr_cast(_binding_0)),
+ crate::Expr::Closure(_binding_0) => {
+ crate::Expr::Closure(full!(f.fold_expr_closure(_binding_0)))
+ }
+ crate::Expr::Const(_binding_0) => {
+ crate::Expr::Const(full!(f.fold_expr_const(_binding_0)))
+ }
+ crate::Expr::Continue(_binding_0) => {
+ crate::Expr::Continue(full!(f.fold_expr_continue(_binding_0)))
+ }
+ crate::Expr::Field(_binding_0) => {
+ crate::Expr::Field(f.fold_expr_field(_binding_0))
+ }
+ crate::Expr::ForLoop(_binding_0) => {
+ crate::Expr::ForLoop(full!(f.fold_expr_for_loop(_binding_0)))
+ }
+ crate::Expr::Group(_binding_0) => {
+ crate::Expr::Group(f.fold_expr_group(_binding_0))
+ }
+ crate::Expr::If(_binding_0) => crate::Expr::If(full!(f.fold_expr_if(_binding_0))),
+ crate::Expr::Index(_binding_0) => {
+ crate::Expr::Index(f.fold_expr_index(_binding_0))
+ }
+ crate::Expr::Infer(_binding_0) => {
+ crate::Expr::Infer(full!(f.fold_expr_infer(_binding_0)))
+ }
+ crate::Expr::Let(_binding_0) => {
+ crate::Expr::Let(full!(f.fold_expr_let(_binding_0)))
+ }
+ crate::Expr::Lit(_binding_0) => crate::Expr::Lit(f.fold_expr_lit(_binding_0)),
+ crate::Expr::Loop(_binding_0) => {
+ crate::Expr::Loop(full!(f.fold_expr_loop(_binding_0)))
+ }
+ crate::Expr::Macro(_binding_0) => {
+ crate::Expr::Macro(f.fold_expr_macro(_binding_0))
+ }
+ crate::Expr::Match(_binding_0) => {
+ crate::Expr::Match(full!(f.fold_expr_match(_binding_0)))
+ }
+ crate::Expr::MethodCall(_binding_0) => {
+ crate::Expr::MethodCall(f.fold_expr_method_call(_binding_0))
+ }
+ crate::Expr::Paren(_binding_0) => {
+ crate::Expr::Paren(f.fold_expr_paren(_binding_0))
+ }
+        crate::Expr::Path(_binding_0) => crate::Expr::Path(f.fold_expr_path(_binding_0)),
+ crate::Expr::Range(_binding_0) => {
+ crate::Expr::Range(full!(f.fold_expr_range(_binding_0)))
+ }
+ crate::Expr::Reference(_binding_0) => {
+ crate::Expr::Reference(f.fold_expr_reference(_binding_0))
+ }
+ crate::Expr::Repeat(_binding_0) => {
+ crate::Expr::Repeat(full!(f.fold_expr_repeat(_binding_0)))
+ }
+ crate::Expr::Return(_binding_0) => {
+ crate::Expr::Return(full!(f.fold_expr_return(_binding_0)))
+ }
+ crate::Expr::Struct(_binding_0) => {
+ crate::Expr::Struct(f.fold_expr_struct(_binding_0))
+ }
+ crate::Expr::Try(_binding_0) => {
+ crate::Expr::Try(full!(f.fold_expr_try(_binding_0)))
+ }
+ crate::Expr::TryBlock(_binding_0) => {
+ crate::Expr::TryBlock(full!(f.fold_expr_try_block(_binding_0)))
+ }
+ crate::Expr::Tuple(_binding_0) => {
+ crate::Expr::Tuple(full!(f.fold_expr_tuple(_binding_0)))
+ }
+ crate::Expr::Unary(_binding_0) => {
+ crate::Expr::Unary(f.fold_expr_unary(_binding_0))
+ }
+ crate::Expr::Unsafe(_binding_0) => {
+ crate::Expr::Unsafe(full!(f.fold_expr_unsafe(_binding_0)))
+ }
+ crate::Expr::Verbatim(_binding_0) => crate::Expr::Verbatim(_binding_0),
+ crate::Expr::While(_binding_0) => {
+ crate::Expr::While(full!(f.fold_expr_while(_binding_0)))
+ }
+ crate::Expr::Yield(_binding_0) => {
+ crate::Expr::Yield(full!(f.fold_expr_yield(_binding_0)))
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_array<F>(f: &mut F, node: crate::ExprArray) -> crate::ExprArray
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprArray {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ bracket_token: node.bracket_token,
+ elems: crate::punctuated::fold(node.elems, f, F::fold_expr),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_assign<F>(f: &mut F, node: crate::ExprAssign) -> crate::ExprAssign
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprAssign {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ left: Box::new(f.fold_expr(*node.left)),
+ eq_token: node.eq_token,
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_async<F>(f: &mut F, node: crate::ExprAsync) -> crate::ExprAsync
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprAsync {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ async_token: node.async_token,
+ capture: node.capture,
+ block: f.fold_block(node.block),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_await<F>(f: &mut F, node: crate::ExprAwait) -> crate::ExprAwait
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprAwait {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ base: Box::new(f.fold_expr(*node.base)),
+ dot_token: node.dot_token,
+ await_token: node.await_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_binary<F>(f: &mut F, node: crate::ExprBinary) -> crate::ExprBinary
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprBinary {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ left: Box::new(f.fold_expr(*node.left)),
+ op: f.fold_bin_op(node.op),
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_block<F>(f: &mut F, node: crate::ExprBlock) -> crate::ExprBlock
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprBlock {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ label: (node.label).map(|it| f.fold_label(it)),
+ block: f.fold_block(node.block),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_break<F>(f: &mut F, node: crate::ExprBreak) -> crate::ExprBreak
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprBreak {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ break_token: node.break_token,
+ label: (node.label).map(|it| f.fold_lifetime(it)),
+ expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_call<F>(f: &mut F, node: crate::ExprCall) -> crate::ExprCall
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprCall {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ func: Box::new(f.fold_expr(*node.func)),
+ paren_token: node.paren_token,
+ args: crate::punctuated::fold(node.args, f, F::fold_expr),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_cast<F>(f: &mut F, node: crate::ExprCast) -> crate::ExprCast
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprCast {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ as_token: node.as_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_closure<F>(f: &mut F, node: crate::ExprClosure) -> crate::ExprClosure
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprClosure {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ constness: node.constness,
+ movability: node.movability,
+ asyncness: node.asyncness,
+ capture: node.capture,
+ or1_token: node.or1_token,
+ inputs: crate::punctuated::fold(node.inputs, f, F::fold_pat),
+ or2_token: node.or2_token,
+ output: f.fold_return_type(node.output),
+ body: Box::new(f.fold_expr(*node.body)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_const<F>(f: &mut F, node: crate::ExprConst) -> crate::ExprConst
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprConst {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ const_token: node.const_token,
+ block: f.fold_block(node.block),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_continue<F>(f: &mut F, node: crate::ExprContinue) -> crate::ExprContinue
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprContinue {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ continue_token: node.continue_token,
+ label: (node.label).map(|it| f.fold_lifetime(it)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_field<F>(f: &mut F, node: crate::ExprField) -> crate::ExprField
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprField {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ base: Box::new(f.fold_expr(*node.base)),
+ dot_token: node.dot_token,
+ member: f.fold_member(node.member),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_for_loop<F>(f: &mut F, node: crate::ExprForLoop) -> crate::ExprForLoop
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprForLoop {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ label: (node.label).map(|it| f.fold_label(it)),
+ for_token: node.for_token,
+ pat: Box::new(f.fold_pat(*node.pat)),
+ in_token: node.in_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ body: f.fold_block(node.body),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_group<F>(f: &mut F, node: crate::ExprGroup) -> crate::ExprGroup
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprGroup {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ group_token: node.group_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_if<F>(f: &mut F, node: crate::ExprIf) -> crate::ExprIf
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprIf {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ if_token: node.if_token,
+ cond: Box::new(f.fold_expr(*node.cond)),
+ then_branch: f.fold_block(node.then_branch),
+ else_branch: (node.else_branch)
+ .map(|it| ((it).0, Box::new(f.fold_expr(*(it).1)))),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_index<F>(f: &mut F, node: crate::ExprIndex) -> crate::ExprIndex
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprIndex {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ bracket_token: node.bracket_token,
+ index: Box::new(f.fold_expr(*node.index)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_infer<F>(f: &mut F, node: crate::ExprInfer) -> crate::ExprInfer
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprInfer {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ underscore_token: node.underscore_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_let<F>(f: &mut F, node: crate::ExprLet) -> crate::ExprLet
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprLet {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ let_token: node.let_token,
+ pat: Box::new(f.fold_pat(*node.pat)),
+ eq_token: node.eq_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_lit<F>(f: &mut F, node: crate::ExprLit) -> crate::ExprLit
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprLit {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ lit: f.fold_lit(node.lit),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_loop<F>(f: &mut F, node: crate::ExprLoop) -> crate::ExprLoop
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprLoop {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ label: (node.label).map(|it| f.fold_label(it)),
+ loop_token: node.loop_token,
+ body: f.fold_block(node.body),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_macro<F>(f: &mut F, node: crate::ExprMacro) -> crate::ExprMacro
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprMacro {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ mac: f.fold_macro(node.mac),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_match<F>(f: &mut F, node: crate::ExprMatch) -> crate::ExprMatch
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprMatch {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ match_token: node.match_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ brace_token: node.brace_token,
+ arms: fold_vec(node.arms, f, F::fold_arm),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_method_call<F>(
+ f: &mut F,
+ node: crate::ExprMethodCall,
+) -> crate::ExprMethodCall
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprMethodCall {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ receiver: Box::new(f.fold_expr(*node.receiver)),
+ dot_token: node.dot_token,
+ method: f.fold_ident(node.method),
+ turbofish: (node.turbofish)
+ .map(|it| f.fold_angle_bracketed_generic_arguments(it)),
+ paren_token: node.paren_token,
+ args: crate::punctuated::fold(node.args, f, F::fold_expr),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_paren<F>(f: &mut F, node: crate::ExprParen) -> crate::ExprParen
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprParen {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ paren_token: node.paren_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_path<F>(f: &mut F, node: crate::ExprPath) -> crate::ExprPath
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprPath {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ qself: (node.qself).map(|it| f.fold_qself(it)),
+ path: f.fold_path(node.path),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_range<F>(f: &mut F, node: crate::ExprRange) -> crate::ExprRange
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprRange {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ start: (node.start).map(|it| Box::new(f.fold_expr(*it))),
+ limits: f.fold_range_limits(node.limits),
+ end: (node.end).map(|it| Box::new(f.fold_expr(*it))),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_reference<F>(
+ f: &mut F,
+ node: crate::ExprReference,
+) -> crate::ExprReference
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprReference {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ and_token: node.and_token,
+ mutability: node.mutability,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_repeat<F>(f: &mut F, node: crate::ExprRepeat) -> crate::ExprRepeat
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprRepeat {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ bracket_token: node.bracket_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ semi_token: node.semi_token,
+ len: Box::new(f.fold_expr(*node.len)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_return<F>(f: &mut F, node: crate::ExprReturn) -> crate::ExprReturn
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprReturn {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ return_token: node.return_token,
+ expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_struct<F>(f: &mut F, node: crate::ExprStruct) -> crate::ExprStruct
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprStruct {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ qself: (node.qself).map(|it| f.fold_qself(it)),
+ path: f.fold_path(node.path),
+ brace_token: node.brace_token,
+ fields: crate::punctuated::fold(node.fields, f, F::fold_field_value),
+ dot2_token: node.dot2_token,
+ rest: (node.rest).map(|it| Box::new(f.fold_expr(*it))),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_try<F>(f: &mut F, node: crate::ExprTry) -> crate::ExprTry
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprTry {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ question_token: node.question_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_try_block<F>(
+ f: &mut F,
+ node: crate::ExprTryBlock,
+) -> crate::ExprTryBlock
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprTryBlock {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ try_token: node.try_token,
+ block: f.fold_block(node.block),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_tuple<F>(f: &mut F, node: crate::ExprTuple) -> crate::ExprTuple
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprTuple {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ paren_token: node.paren_token,
+ elems: crate::punctuated::fold(node.elems, f, F::fold_expr),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_expr_unary<F>(f: &mut F, node: crate::ExprUnary) -> crate::ExprUnary
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprUnary {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ op: f.fold_un_op(node.op),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_unsafe<F>(f: &mut F, node: crate::ExprUnsafe) -> crate::ExprUnsafe
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprUnsafe {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ unsafe_token: node.unsafe_token,
+ block: f.fold_block(node.block),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_while<F>(f: &mut F, node: crate::ExprWhile) -> crate::ExprWhile
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprWhile {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ label: (node.label).map(|it| f.fold_label(it)),
+ while_token: node.while_token,
+ cond: Box::new(f.fold_expr(*node.cond)),
+ body: f.fold_block(node.body),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_expr_yield<F>(f: &mut F, node: crate::ExprYield) -> crate::ExprYield
+where
+ F: Fold + ?Sized,
+{
+ crate::ExprYield {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ yield_token: node.yield_token,
+ expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_field<F>(f: &mut F, node: crate::Field) -> crate::Field
+where
+ F: Fold + ?Sized,
+{
+ crate::Field {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ mutability: f.fold_field_mutability(node.mutability),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ colon_token: node.colon_token,
+ ty: f.fold_type(node.ty),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_field_mutability<F>(
+ f: &mut F,
+ node: crate::FieldMutability,
+) -> crate::FieldMutability
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::FieldMutability::None => crate::FieldMutability::None,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_field_pat<F>(f: &mut F, node: crate::FieldPat) -> crate::FieldPat
+where
+ F: Fold + ?Sized,
+{
+ crate::FieldPat {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ member: f.fold_member(node.member),
+ colon_token: node.colon_token,
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_field_value<F>(f: &mut F, node: crate::FieldValue) -> crate::FieldValue
+where
+ F: Fold + ?Sized,
+{
+ crate::FieldValue {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ member: f.fold_member(node.member),
+ colon_token: node.colon_token,
+ expr: f.fold_expr(node.expr),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_fields<F>(f: &mut F, node: crate::Fields) -> crate::Fields
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Fields::Named(_binding_0) => {
+ crate::Fields::Named(f.fold_fields_named(_binding_0))
+ }
+ crate::Fields::Unnamed(_binding_0) => {
+ crate::Fields::Unnamed(f.fold_fields_unnamed(_binding_0))
+ }
+ crate::Fields::Unit => crate::Fields::Unit,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_fields_named<F>(f: &mut F, node: crate::FieldsNamed) -> crate::FieldsNamed
+where
+ F: Fold + ?Sized,
+{
+ crate::FieldsNamed {
+ brace_token: node.brace_token,
+ named: crate::punctuated::fold(node.named, f, F::fold_field),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_fields_unnamed<F>(
+ f: &mut F,
+ node: crate::FieldsUnnamed,
+) -> crate::FieldsUnnamed
+where
+ F: Fold + ?Sized,
+{
+ crate::FieldsUnnamed {
+ paren_token: node.paren_token,
+ unnamed: crate::punctuated::fold(node.unnamed, f, F::fold_field),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_file<F>(f: &mut F, node: crate::File) -> crate::File
+where
+ F: Fold + ?Sized,
+{
+ crate::File {
+ shebang: node.shebang,
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ items: fold_vec(node.items, f, F::fold_item),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_fn_arg<F>(f: &mut F, node: crate::FnArg) -> crate::FnArg
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::FnArg::Receiver(_binding_0) => {
+ crate::FnArg::Receiver(f.fold_receiver(_binding_0))
+ }
+ crate::FnArg::Typed(_binding_0) => {
+ crate::FnArg::Typed(f.fold_pat_type(_binding_0))
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_foreign_item<F>(f: &mut F, node: crate::ForeignItem) -> crate::ForeignItem
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::ForeignItem::Fn(_binding_0) => {
+ crate::ForeignItem::Fn(f.fold_foreign_item_fn(_binding_0))
+ }
+ crate::ForeignItem::Static(_binding_0) => {
+ crate::ForeignItem::Static(f.fold_foreign_item_static(_binding_0))
+ }
+ crate::ForeignItem::Type(_binding_0) => {
+ crate::ForeignItem::Type(f.fold_foreign_item_type(_binding_0))
+ }
+ crate::ForeignItem::Macro(_binding_0) => {
+ crate::ForeignItem::Macro(f.fold_foreign_item_macro(_binding_0))
+ }
+ crate::ForeignItem::Verbatim(_binding_0) => {
+ crate::ForeignItem::Verbatim(_binding_0)
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_foreign_item_fn<F>(
+ f: &mut F,
+ node: crate::ForeignItemFn,
+) -> crate::ForeignItemFn
+where
+ F: Fold + ?Sized,
+{
+ crate::ForeignItemFn {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_foreign_item_macro<F>(
+ f: &mut F,
+ node: crate::ForeignItemMacro,
+) -> crate::ForeignItemMacro
+where
+ F: Fold + ?Sized,
+{
+ crate::ForeignItemMacro {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ mac: f.fold_macro(node.mac),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_foreign_item_static<F>(
+ f: &mut F,
+ node: crate::ForeignItemStatic,
+) -> crate::ForeignItemStatic
+where
+ F: Fold + ?Sized,
+{
+ crate::ForeignItemStatic {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ static_token: node.static_token,
+ mutability: f.fold_static_mutability(node.mutability),
+ ident: f.fold_ident(node.ident),
+ colon_token: node.colon_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_foreign_item_type<F>(
+ f: &mut F,
+ node: crate::ForeignItemType,
+) -> crate::ForeignItemType
+where
+ F: Fold + ?Sized,
+{
+ crate::ForeignItemType {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ type_token: node.type_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_generic_argument<F>(
+ f: &mut F,
+ node: crate::GenericArgument,
+) -> crate::GenericArgument
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::GenericArgument::Lifetime(_binding_0) => {
+ crate::GenericArgument::Lifetime(f.fold_lifetime(_binding_0))
+ }
+ crate::GenericArgument::Type(_binding_0) => {
+ crate::GenericArgument::Type(f.fold_type(_binding_0))
+ }
+ crate::GenericArgument::Const(_binding_0) => {
+ crate::GenericArgument::Const(f.fold_expr(_binding_0))
+ }
+ crate::GenericArgument::AssocType(_binding_0) => {
+ crate::GenericArgument::AssocType(f.fold_assoc_type(_binding_0))
+ }
+ crate::GenericArgument::AssocConst(_binding_0) => {
+ crate::GenericArgument::AssocConst(f.fold_assoc_const(_binding_0))
+ }
+ crate::GenericArgument::Constraint(_binding_0) => {
+ crate::GenericArgument::Constraint(f.fold_constraint(_binding_0))
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_generic_param<F>(f: &mut F, node: crate::GenericParam) -> crate::GenericParam
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::GenericParam::Lifetime(_binding_0) => {
+ crate::GenericParam::Lifetime(f.fold_lifetime_param(_binding_0))
+ }
+ crate::GenericParam::Type(_binding_0) => {
+ crate::GenericParam::Type(f.fold_type_param(_binding_0))
+ }
+ crate::GenericParam::Const(_binding_0) => {
+ crate::GenericParam::Const(f.fold_const_param(_binding_0))
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_generics<F>(f: &mut F, node: crate::Generics) -> crate::Generics
+where
+ F: Fold + ?Sized,
+{
+ crate::Generics {
+ lt_token: node.lt_token,
+ params: crate::punctuated::fold(node.params, f, F::fold_generic_param),
+ gt_token: node.gt_token,
+ where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
+ }
+}
+pub fn fold_ident<F>(f: &mut F, node: proc_macro2::Ident) -> proc_macro2::Ident
+where
+ F: Fold + ?Sized,
+{
+ let mut node = node;
+ let span = f.fold_span(node.span());
+ node.set_span(span);
+ node
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_impl_item<F>(f: &mut F, node: crate::ImplItem) -> crate::ImplItem
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::ImplItem::Const(_binding_0) => {
+ crate::ImplItem::Const(f.fold_impl_item_const(_binding_0))
+ }
+ crate::ImplItem::Fn(_binding_0) => {
+ crate::ImplItem::Fn(f.fold_impl_item_fn(_binding_0))
+ }
+ crate::ImplItem::Type(_binding_0) => {
+ crate::ImplItem::Type(f.fold_impl_item_type(_binding_0))
+ }
+ crate::ImplItem::Macro(_binding_0) => {
+ crate::ImplItem::Macro(f.fold_impl_item_macro(_binding_0))
+ }
+        crate::ImplItem::Verbatim(_binding_0) => crate::ImplItem::Verbatim(_binding_0),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_impl_item_const<F>(
+ f: &mut F,
+ node: crate::ImplItemConst,
+) -> crate::ImplItemConst
+where
+ F: Fold + ?Sized,
+{
+ crate::ImplItemConst {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ defaultness: node.defaultness,
+ const_token: node.const_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ colon_token: node.colon_token,
+ ty: f.fold_type(node.ty),
+ eq_token: node.eq_token,
+ expr: f.fold_expr(node.expr),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_impl_item_fn<F>(f: &mut F, node: crate::ImplItemFn) -> crate::ImplItemFn
+where
+ F: Fold + ?Sized,
+{
+ crate::ImplItemFn {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ defaultness: node.defaultness,
+ sig: f.fold_signature(node.sig),
+ block: f.fold_block(node.block),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_impl_item_macro<F>(
+ f: &mut F,
+ node: crate::ImplItemMacro,
+) -> crate::ImplItemMacro
+where
+ F: Fold + ?Sized,
+{
+ crate::ImplItemMacro {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ mac: f.fold_macro(node.mac),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_impl_item_type<F>(
+ f: &mut F,
+ node: crate::ImplItemType,
+) -> crate::ImplItemType
+where
+ F: Fold + ?Sized,
+{
+ crate::ImplItemType {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ defaultness: node.defaultness,
+ type_token: node.type_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ eq_token: node.eq_token,
+ ty: f.fold_type(node.ty),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_impl_restriction<F>(
+ f: &mut F,
+ node: crate::ImplRestriction,
+) -> crate::ImplRestriction
+where
+ F: Fold + ?Sized,
+{
+ match node {}
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_index<F>(f: &mut F, node: crate::Index) -> crate::Index
+where
+ F: Fold + ?Sized,
+{
+ crate::Index {
+ index: node.index,
+ span: f.fold_span(node.span),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item<F>(f: &mut F, node: crate::Item) -> crate::Item
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Item::Const(_binding_0) => {
+ crate::Item::Const(f.fold_item_const(_binding_0))
+ }
+        crate::Item::Enum(_binding_0) => crate::Item::Enum(f.fold_item_enum(_binding_0)),
+ crate::Item::ExternCrate(_binding_0) => {
+ crate::Item::ExternCrate(f.fold_item_extern_crate(_binding_0))
+ }
+        crate::Item::Fn(_binding_0) => crate::Item::Fn(f.fold_item_fn(_binding_0)),
+ crate::Item::ForeignMod(_binding_0) => {
+ crate::Item::ForeignMod(f.fold_item_foreign_mod(_binding_0))
+ }
+        crate::Item::Impl(_binding_0) => crate::Item::Impl(f.fold_item_impl(_binding_0)),
+ crate::Item::Macro(_binding_0) => {
+ crate::Item::Macro(f.fold_item_macro(_binding_0))
+ }
+        crate::Item::Mod(_binding_0) => crate::Item::Mod(f.fold_item_mod(_binding_0)),
+ crate::Item::Static(_binding_0) => {
+ crate::Item::Static(f.fold_item_static(_binding_0))
+ }
+ crate::Item::Struct(_binding_0) => {
+ crate::Item::Struct(f.fold_item_struct(_binding_0))
+ }
+ crate::Item::Trait(_binding_0) => {
+ crate::Item::Trait(f.fold_item_trait(_binding_0))
+ }
+ crate::Item::TraitAlias(_binding_0) => {
+ crate::Item::TraitAlias(f.fold_item_trait_alias(_binding_0))
+ }
+        crate::Item::Type(_binding_0) => crate::Item::Type(f.fold_item_type(_binding_0)),
+ crate::Item::Union(_binding_0) => {
+ crate::Item::Union(f.fold_item_union(_binding_0))
+ }
+        crate::Item::Use(_binding_0) => crate::Item::Use(f.fold_item_use(_binding_0)),
+ crate::Item::Verbatim(_binding_0) => crate::Item::Verbatim(_binding_0),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_const<F>(f: &mut F, node: crate::ItemConst) -> crate::ItemConst
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemConst {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ const_token: node.const_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ colon_token: node.colon_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ eq_token: node.eq_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_enum<F>(f: &mut F, node: crate::ItemEnum) -> crate::ItemEnum
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemEnum {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ enum_token: node.enum_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ brace_token: node.brace_token,
+ variants: crate::punctuated::fold(node.variants, f, F::fold_variant),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_extern_crate<F>(
+ f: &mut F,
+ node: crate::ItemExternCrate,
+) -> crate::ItemExternCrate
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemExternCrate {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ extern_token: node.extern_token,
+ crate_token: node.crate_token,
+ ident: f.fold_ident(node.ident),
+ rename: (node.rename).map(|it| ((it).0, f.fold_ident((it).1))),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_fn<F>(f: &mut F, node: crate::ItemFn) -> crate::ItemFn
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemFn {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+ block: Box::new(f.fold_block(*node.block)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_foreign_mod<F>(
+ f: &mut F,
+ node: crate::ItemForeignMod,
+) -> crate::ItemForeignMod
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemForeignMod {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ unsafety: node.unsafety,
+ abi: f.fold_abi(node.abi),
+ brace_token: node.brace_token,
+ items: fold_vec(node.items, f, F::fold_foreign_item),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_impl<F>(f: &mut F, node: crate::ItemImpl) -> crate::ItemImpl
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemImpl {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ defaultness: node.defaultness,
+ unsafety: node.unsafety,
+ impl_token: node.impl_token,
+ generics: f.fold_generics(node.generics),
+ trait_: (node.trait_).map(|it| ((it).0, f.fold_path((it).1), (it).2)),
+ self_ty: Box::new(f.fold_type(*node.self_ty)),
+ brace_token: node.brace_token,
+ items: fold_vec(node.items, f, F::fold_impl_item),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_macro<F>(f: &mut F, node: crate::ItemMacro) -> crate::ItemMacro
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemMacro {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ mac: f.fold_macro(node.mac),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_mod<F>(f: &mut F, node: crate::ItemMod) -> crate::ItemMod
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemMod {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ unsafety: node.unsafety,
+ mod_token: node.mod_token,
+ ident: f.fold_ident(node.ident),
+        content: (node.content).map(|it| ((it).0, fold_vec((it).1, f, F::fold_item))),
+ semi: node.semi,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_static<F>(f: &mut F, node: crate::ItemStatic) -> crate::ItemStatic
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemStatic {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ static_token: node.static_token,
+ mutability: f.fold_static_mutability(node.mutability),
+ ident: f.fold_ident(node.ident),
+ colon_token: node.colon_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ eq_token: node.eq_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_struct<F>(f: &mut F, node: crate::ItemStruct) -> crate::ItemStruct
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemStruct {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ struct_token: node.struct_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields(node.fields),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_trait<F>(f: &mut F, node: crate::ItemTrait) -> crate::ItemTrait
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemTrait {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ unsafety: node.unsafety,
+ auto_token: node.auto_token,
+ restriction: (node.restriction).map(|it| f.fold_impl_restriction(it)),
+ trait_token: node.trait_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ colon_token: node.colon_token,
+ supertraits: crate::punctuated::fold(
+ node.supertraits,
+ f,
+ F::fold_type_param_bound,
+ ),
+ brace_token: node.brace_token,
+ items: fold_vec(node.items, f, F::fold_trait_item),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_trait_alias<F>(
+ f: &mut F,
+ node: crate::ItemTraitAlias,
+) -> crate::ItemTraitAlias
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemTraitAlias {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ trait_token: node.trait_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ eq_token: node.eq_token,
+        bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_type<F>(f: &mut F, node: crate::ItemType) -> crate::ItemType
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemType {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ type_token: node.type_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ eq_token: node.eq_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_union<F>(f: &mut F, node: crate::ItemUnion) -> crate::ItemUnion
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemUnion {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ union_token: node.union_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields_named(node.fields),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_item_use<F>(f: &mut F, node: crate::ItemUse) -> crate::ItemUse
+where
+ F: Fold + ?Sized,
+{
+ crate::ItemUse {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ vis: f.fold_visibility(node.vis),
+ use_token: node.use_token,
+ leading_colon: node.leading_colon,
+ tree: f.fold_use_tree(node.tree),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_label<F>(f: &mut F, node: crate::Label) -> crate::Label
+where
+ F: Fold + ?Sized,
+{
+ crate::Label {
+ name: f.fold_lifetime(node.name),
+ colon_token: node.colon_token,
+ }
+}
+pub fn fold_lifetime<F>(f: &mut F, node: crate::Lifetime) -> crate::Lifetime
+where
+ F: Fold + ?Sized,
+{
+ crate::Lifetime {
+ apostrophe: f.fold_span(node.apostrophe),
+ ident: f.fold_ident(node.ident),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_lifetime_param<F>(
+ f: &mut F,
+ node: crate::LifetimeParam,
+) -> crate::LifetimeParam
+where
+ F: Fold + ?Sized,
+{
+ crate::LifetimeParam {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ lifetime: f.fold_lifetime(node.lifetime),
+ colon_token: node.colon_token,
+ bounds: crate::punctuated::fold(node.bounds, f, F::fold_lifetime),
+ }
+}
+pub fn fold_lit<F>(f: &mut F, node: crate::Lit) -> crate::Lit
+where
+ F: Fold + ?Sized,
+{
+ match node {
+        crate::Lit::Str(_binding_0) => crate::Lit::Str(f.fold_lit_str(_binding_0)),
+        crate::Lit::ByteStr(_binding_0) => {
+            crate::Lit::ByteStr(f.fold_lit_byte_str(_binding_0))
+        }
+        crate::Lit::CStr(_binding_0) => crate::Lit::CStr(f.fold_lit_cstr(_binding_0)),
+        crate::Lit::Byte(_binding_0) => crate::Lit::Byte(f.fold_lit_byte(_binding_0)),
+        crate::Lit::Char(_binding_0) => crate::Lit::Char(f.fold_lit_char(_binding_0)),
+        crate::Lit::Int(_binding_0) => crate::Lit::Int(f.fold_lit_int(_binding_0)),
+        crate::Lit::Float(_binding_0) => crate::Lit::Float(f.fold_lit_float(_binding_0)),
+        crate::Lit::Bool(_binding_0) => crate::Lit::Bool(f.fold_lit_bool(_binding_0)),
+        crate::Lit::Verbatim(_binding_0) => crate::Lit::Verbatim(_binding_0),
+ }
+}
+pub fn fold_lit_bool<F>(f: &mut F, node: crate::LitBool) -> crate::LitBool
+where
+ F: Fold + ?Sized,
+{
+ crate::LitBool {
+ value: node.value,
+ span: f.fold_span(node.span),
+ }
+}
+pub fn fold_lit_byte<F>(f: &mut F, node: crate::LitByte) -> crate::LitByte
+where
+ F: Fold + ?Sized,
+{
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+}
+pub fn fold_lit_byte_str<F>(f: &mut F, node: crate::LitByteStr) -> crate::LitByteStr
+where
+ F: Fold + ?Sized,
+{
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+}
+pub fn fold_lit_cstr<F>(f: &mut F, node: crate::LitCStr) -> crate::LitCStr
+where
+ F: Fold + ?Sized,
+{
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+}
+pub fn fold_lit_char<F>(f: &mut F, node: crate::LitChar) -> crate::LitChar
+where
+ F: Fold + ?Sized,
+{
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+}
+pub fn fold_lit_float<F>(f: &mut F, node: crate::LitFloat) -> crate::LitFloat
+where
+ F: Fold + ?Sized,
+{
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+}
+pub fn fold_lit_int<F>(f: &mut F, node: crate::LitInt) -> crate::LitInt
+where
+ F: Fold + ?Sized,
+{
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+}
+pub fn fold_lit_str<F>(f: &mut F, node: crate::LitStr) -> crate::LitStr
+where
+ F: Fold + ?Sized,
+{
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_local<F>(f: &mut F, node: crate::Local) -> crate::Local
+where
+ F: Fold + ?Sized,
+{
+ crate::Local {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ let_token: node.let_token,
+ pat: f.fold_pat(node.pat),
+ init: (node.init).map(|it| f.fold_local_init(it)),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_local_init<F>(f: &mut F, node: crate::LocalInit) -> crate::LocalInit
+where
+ F: Fold + ?Sized,
+{
+ crate::LocalInit {
+ eq_token: node.eq_token,
+ expr: Box::new(f.fold_expr(*node.expr)),
+        diverge: (node.diverge).map(|it| ((it).0, Box::new(f.fold_expr(*(it).1)))),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_macro<F>(f: &mut F, node: crate::Macro) -> crate::Macro
+where
+ F: Fold + ?Sized,
+{
+ crate::Macro {
+ path: f.fold_path(node.path),
+ bang_token: node.bang_token,
+ delimiter: f.fold_macro_delimiter(node.delimiter),
+ tokens: node.tokens,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_macro_delimiter<F>(
+ f: &mut F,
+ node: crate::MacroDelimiter,
+) -> crate::MacroDelimiter
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::MacroDelimiter::Paren(_binding_0) => {
+ crate::MacroDelimiter::Paren(_binding_0)
+ }
+ crate::MacroDelimiter::Brace(_binding_0) => {
+ crate::MacroDelimiter::Brace(_binding_0)
+ }
+ crate::MacroDelimiter::Bracket(_binding_0) => {
+ crate::MacroDelimiter::Bracket(_binding_0)
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_member<F>(f: &mut F, node: crate::Member) -> crate::Member
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Member::Named(_binding_0) => {
+ crate::Member::Named(f.fold_ident(_binding_0))
+ }
+ crate::Member::Unnamed(_binding_0) => {
+ crate::Member::Unnamed(f.fold_index(_binding_0))
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_meta<F>(f: &mut F, node: crate::Meta) -> crate::Meta
+where
+ F: Fold + ?Sized,
+{
+ match node {
+        crate::Meta::Path(_binding_0) => crate::Meta::Path(f.fold_path(_binding_0)),
+        crate::Meta::List(_binding_0) => crate::Meta::List(f.fold_meta_list(_binding_0)),
+ crate::Meta::NameValue(_binding_0) => {
+ crate::Meta::NameValue(f.fold_meta_name_value(_binding_0))
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_meta_list<F>(f: &mut F, node: crate::MetaList) -> crate::MetaList
+where
+ F: Fold + ?Sized,
+{
+ crate::MetaList {
+ path: f.fold_path(node.path),
+ delimiter: f.fold_macro_delimiter(node.delimiter),
+ tokens: node.tokens,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_meta_name_value<F>(
+ f: &mut F,
+ node: crate::MetaNameValue,
+) -> crate::MetaNameValue
+where
+ F: Fold + ?Sized,
+{
+ crate::MetaNameValue {
+ path: f.fold_path(node.path),
+ eq_token: node.eq_token,
+ value: f.fold_expr(node.value),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_parenthesized_generic_arguments<F>(
+ f: &mut F,
+ node: crate::ParenthesizedGenericArguments,
+) -> crate::ParenthesizedGenericArguments
+where
+ F: Fold + ?Sized,
+{
+ crate::ParenthesizedGenericArguments {
+ paren_token: node.paren_token,
+ inputs: crate::punctuated::fold(node.inputs, f, F::fold_type),
+ output: f.fold_return_type(node.output),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat<F>(f: &mut F, node: crate::Pat) -> crate::Pat
+where
+ F: Fold + ?Sized,
+{
+ match node {
+        crate::Pat::Const(_binding_0) => crate::Pat::Const(f.fold_expr_const(_binding_0)),
+        crate::Pat::Ident(_binding_0) => crate::Pat::Ident(f.fold_pat_ident(_binding_0)),
+        crate::Pat::Lit(_binding_0) => crate::Pat::Lit(f.fold_expr_lit(_binding_0)),
+        crate::Pat::Macro(_binding_0) => crate::Pat::Macro(f.fold_expr_macro(_binding_0)),
+        crate::Pat::Or(_binding_0) => crate::Pat::Or(f.fold_pat_or(_binding_0)),
+        crate::Pat::Paren(_binding_0) => crate::Pat::Paren(f.fold_pat_paren(_binding_0)),
+        crate::Pat::Path(_binding_0) => crate::Pat::Path(f.fold_expr_path(_binding_0)),
+        crate::Pat::Range(_binding_0) => crate::Pat::Range(f.fold_expr_range(_binding_0)),
+        crate::Pat::Reference(_binding_0) => {
+            crate::Pat::Reference(f.fold_pat_reference(_binding_0))
+        }
+        crate::Pat::Rest(_binding_0) => crate::Pat::Rest(f.fold_pat_rest(_binding_0)),
+        crate::Pat::Slice(_binding_0) => crate::Pat::Slice(f.fold_pat_slice(_binding_0)),
+        crate::Pat::Struct(_binding_0) => {
+            crate::Pat::Struct(f.fold_pat_struct(_binding_0))
+        }
+        crate::Pat::Tuple(_binding_0) => crate::Pat::Tuple(f.fold_pat_tuple(_binding_0)),
+        crate::Pat::TupleStruct(_binding_0) => {
+            crate::Pat::TupleStruct(f.fold_pat_tuple_struct(_binding_0))
+        }
+        crate::Pat::Type(_binding_0) => crate::Pat::Type(f.fold_pat_type(_binding_0)),
+        crate::Pat::Verbatim(_binding_0) => crate::Pat::Verbatim(_binding_0),
+        crate::Pat::Wild(_binding_0) => crate::Pat::Wild(f.fold_pat_wild(_binding_0)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_ident<F>(f: &mut F, node: crate::PatIdent) -> crate::PatIdent
+where
+ F: Fold + ?Sized,
+{
+ crate::PatIdent {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ by_ref: node.by_ref,
+ mutability: node.mutability,
+ ident: f.fold_ident(node.ident),
+        subpat: (node.subpat).map(|it| ((it).0, Box::new(f.fold_pat(*(it).1)))),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_or<F>(f: &mut F, node: crate::PatOr) -> crate::PatOr
+where
+ F: Fold + ?Sized,
+{
+ crate::PatOr {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ leading_vert: node.leading_vert,
+ cases: crate::punctuated::fold(node.cases, f, F::fold_pat),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_paren<F>(f: &mut F, node: crate::PatParen) -> crate::PatParen
+where
+ F: Fold + ?Sized,
+{
+ crate::PatParen {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ paren_token: node.paren_token,
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_reference<F>(f: &mut F, node: crate::PatReference) -> crate::PatReference
+where
+ F: Fold + ?Sized,
+{
+ crate::PatReference {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ and_token: node.and_token,
+ mutability: node.mutability,
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_rest<F>(f: &mut F, node: crate::PatRest) -> crate::PatRest
+where
+ F: Fold + ?Sized,
+{
+ crate::PatRest {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ dot2_token: node.dot2_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_slice<F>(f: &mut F, node: crate::PatSlice) -> crate::PatSlice
+where
+ F: Fold + ?Sized,
+{
+ crate::PatSlice {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ bracket_token: node.bracket_token,
+ elems: crate::punctuated::fold(node.elems, f, F::fold_pat),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_struct<F>(f: &mut F, node: crate::PatStruct) -> crate::PatStruct
+where
+ F: Fold + ?Sized,
+{
+ crate::PatStruct {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ qself: (node.qself).map(|it| f.fold_qself(it)),
+ path: f.fold_path(node.path),
+ brace_token: node.brace_token,
+ fields: crate::punctuated::fold(node.fields, f, F::fold_field_pat),
+ rest: (node.rest).map(|it| f.fold_pat_rest(it)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_tuple<F>(f: &mut F, node: crate::PatTuple) -> crate::PatTuple
+where
+ F: Fold + ?Sized,
+{
+ crate::PatTuple {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ paren_token: node.paren_token,
+ elems: crate::punctuated::fold(node.elems, f, F::fold_pat),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_tuple_struct<F>(
+ f: &mut F,
+ node: crate::PatTupleStruct,
+) -> crate::PatTupleStruct
+where
+ F: Fold + ?Sized,
+{
+ crate::PatTupleStruct {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ qself: (node.qself).map(|it| f.fold_qself(it)),
+ path: f.fold_path(node.path),
+ paren_token: node.paren_token,
+ elems: crate::punctuated::fold(node.elems, f, F::fold_pat),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_type<F>(f: &mut F, node: crate::PatType) -> crate::PatType
+where
+ F: Fold + ?Sized,
+{
+ crate::PatType {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ colon_token: node.colon_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_pat_wild<F>(f: &mut F, node: crate::PatWild) -> crate::PatWild
+where
+ F: Fold + ?Sized,
+{
+ crate::PatWild {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ underscore_token: node.underscore_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_path<F>(f: &mut F, node: crate::Path) -> crate::Path
+where
+ F: Fold + ?Sized,
+{
+ crate::Path {
+ leading_colon: node.leading_colon,
+        segments: crate::punctuated::fold(node.segments, f, F::fold_path_segment),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_path_arguments<F>(
+ f: &mut F,
+ node: crate::PathArguments,
+) -> crate::PathArguments
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::PathArguments::None => crate::PathArguments::None,
+ crate::PathArguments::AngleBracketed(_binding_0) => {
+ crate::PathArguments::AngleBracketed(
+ f.fold_angle_bracketed_generic_arguments(_binding_0),
+ )
+ }
+ crate::PathArguments::Parenthesized(_binding_0) => {
+ crate::PathArguments::Parenthesized(
+ f.fold_parenthesized_generic_arguments(_binding_0),
+ )
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_path_segment<F>(f: &mut F, node: crate::PathSegment) -> crate::PathSegment
+where
+ F: Fold + ?Sized,
+{
+ crate::PathSegment {
+ ident: f.fold_ident(node.ident),
+ arguments: f.fold_path_arguments(node.arguments),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_predicate_lifetime<F>(
+ f: &mut F,
+ node: crate::PredicateLifetime,
+) -> crate::PredicateLifetime
+where
+ F: Fold + ?Sized,
+{
+ crate::PredicateLifetime {
+ lifetime: f.fold_lifetime(node.lifetime),
+ colon_token: node.colon_token,
+ bounds: crate::punctuated::fold(node.bounds, f, F::fold_lifetime),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_predicate_type<F>(
+ f: &mut F,
+ node: crate::PredicateType,
+) -> crate::PredicateType
+where
+ F: Fold + ?Sized,
+{
+ crate::PredicateType {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ bounded_ty: f.fold_type(node.bounded_ty),
+ colon_token: node.colon_token,
+        bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_qself<F>(f: &mut F, node: crate::QSelf) -> crate::QSelf
+where
+ F: Fold + ?Sized,
+{
+ crate::QSelf {
+ lt_token: node.lt_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ position: node.position,
+ as_token: node.as_token,
+ gt_token: node.gt_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_range_limits<F>(f: &mut F, node: crate::RangeLimits) -> crate::RangeLimits
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::RangeLimits::HalfOpen(_binding_0) => {
+ crate::RangeLimits::HalfOpen(_binding_0)
+ }
+        crate::RangeLimits::Closed(_binding_0) => crate::RangeLimits::Closed(_binding_0),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_receiver<F>(f: &mut F, node: crate::Receiver) -> crate::Receiver
+where
+ F: Fold + ?Sized,
+{
+ crate::Receiver {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ reference: (node.reference)
+ .map(|it| ((it).0, ((it).1).map(|it| f.fold_lifetime(it)))),
+ mutability: node.mutability,
+ self_token: node.self_token,
+ colon_token: node.colon_token,
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_return_type<F>(f: &mut F, node: crate::ReturnType) -> crate::ReturnType
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::ReturnType::Default => crate::ReturnType::Default,
+ crate::ReturnType::Type(_binding_0, _binding_1) => {
+            crate::ReturnType::Type(_binding_0, Box::new(f.fold_type(*_binding_1)))
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_signature<F>(f: &mut F, node: crate::Signature) -> crate::Signature
+where
+ F: Fold + ?Sized,
+{
+ crate::Signature {
+ constness: node.constness,
+ asyncness: node.asyncness,
+ unsafety: node.unsafety,
+ abi: (node.abi).map(|it| f.fold_abi(it)),
+ fn_token: node.fn_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ paren_token: node.paren_token,
+ inputs: crate::punctuated::fold(node.inputs, f, F::fold_fn_arg),
+ variadic: (node.variadic).map(|it| f.fold_variadic(it)),
+ output: f.fold_return_type(node.output),
+ }
+}
+pub fn fold_span<F>(f: &mut F, node: proc_macro2::Span) -> proc_macro2::Span
+where
+ F: Fold + ?Sized,
+{
+ node
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_static_mutability<F>(
+ f: &mut F,
+ node: crate::StaticMutability,
+) -> crate::StaticMutability
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::StaticMutability::Mut(_binding_0) => {
+ crate::StaticMutability::Mut(_binding_0)
+ }
+ crate::StaticMutability::None => crate::StaticMutability::None,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_stmt<F>(f: &mut F, node: crate::Stmt) -> crate::Stmt
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Stmt::Local(_binding_0) => crate::Stmt::Local(f.fold_local(_binding_0)),
+ crate::Stmt::Item(_binding_0) => crate::Stmt::Item(f.fold_item(_binding_0)),
+ crate::Stmt::Expr(_binding_0, _binding_1) => {
+ crate::Stmt::Expr(f.fold_expr(_binding_0), _binding_1)
+ }
+ crate::Stmt::Macro(_binding_0) => {
+ crate::Stmt::Macro(f.fold_stmt_macro(_binding_0))
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_stmt_macro<F>(f: &mut F, node: crate::StmtMacro) -> crate::StmtMacro
+where
+ F: Fold + ?Sized,
+{
+ crate::StmtMacro {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ mac: f.fold_macro(node.mac),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_trait_bound<F>(f: &mut F, node: crate::TraitBound) -> crate::TraitBound
+where
+ F: Fold + ?Sized,
+{
+ crate::TraitBound {
+ paren_token: node.paren_token,
+ modifier: f.fold_trait_bound_modifier(node.modifier),
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ path: f.fold_path(node.path),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_trait_bound_modifier<F>(
+ f: &mut F,
+ node: crate::TraitBoundModifier,
+) -> crate::TraitBoundModifier
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::TraitBoundModifier::None => crate::TraitBoundModifier::None,
+ crate::TraitBoundModifier::Maybe(_binding_0) => {
+ crate::TraitBoundModifier::Maybe(_binding_0)
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_trait_item<F>(f: &mut F, node: crate::TraitItem) -> crate::TraitItem
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::TraitItem::Const(_binding_0) => {
+ crate::TraitItem::Const(f.fold_trait_item_const(_binding_0))
+ }
+ crate::TraitItem::Fn(_binding_0) => {
+ crate::TraitItem::Fn(f.fold_trait_item_fn(_binding_0))
+ }
+ crate::TraitItem::Type(_binding_0) => {
+ crate::TraitItem::Type(f.fold_trait_item_type(_binding_0))
+ }
+ crate::TraitItem::Macro(_binding_0) => {
+ crate::TraitItem::Macro(f.fold_trait_item_macro(_binding_0))
+ }
+ crate::TraitItem::Verbatim(_binding_0) => crate::TraitItem::Verbatim(_binding_0),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_trait_item_const<F>(
+ f: &mut F,
+ node: crate::TraitItemConst,
+) -> crate::TraitItemConst
+where
+ F: Fold + ?Sized,
+{
+ crate::TraitItemConst {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ const_token: node.const_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ colon_token: node.colon_token,
+ ty: f.fold_type(node.ty),
+ default: (node.default).map(|it| ((it).0, f.fold_expr((it).1))),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_trait_item_fn<F>(f: &mut F, node: crate::TraitItemFn) -> crate::TraitItemFn
+where
+ F: Fold + ?Sized,
+{
+ crate::TraitItemFn {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ sig: f.fold_signature(node.sig),
+ default: (node.default).map(|it| f.fold_block(it)),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_trait_item_macro<F>(
+ f: &mut F,
+ node: crate::TraitItemMacro,
+) -> crate::TraitItemMacro
+where
+ F: Fold + ?Sized,
+{
+ crate::TraitItemMacro {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ mac: f.fold_macro(node.mac),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_trait_item_type<F>(
+ f: &mut F,
+ node: crate::TraitItemType,
+) -> crate::TraitItemType
+where
+ F: Fold + ?Sized,
+{
+ crate::TraitItemType {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ type_token: node.type_token,
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ colon_token: node.colon_token,
+ bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound),
+ default: (node.default).map(|it| ((it).0, f.fold_type((it).1))),
+ semi_token: node.semi_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type<F>(f: &mut F, node: crate::Type) -> crate::Type
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Type::Array(_binding_0) => {
+ crate::Type::Array(f.fold_type_array(_binding_0))
+ }
+ crate::Type::BareFn(_binding_0) => {
+ crate::Type::BareFn(f.fold_type_bare_fn(_binding_0))
+ }
+ crate::Type::Group(_binding_0) => {
+ crate::Type::Group(f.fold_type_group(_binding_0))
+ }
+ crate::Type::ImplTrait(_binding_0) => {
+ crate::Type::ImplTrait(f.fold_type_impl_trait(_binding_0))
+ }
+ crate::Type::Infer(_binding_0) => {
+ crate::Type::Infer(f.fold_type_infer(_binding_0))
+ }
+ crate::Type::Macro(_binding_0) => {
+ crate::Type::Macro(f.fold_type_macro(_binding_0))
+ }
+ crate::Type::Never(_binding_0) => {
+ crate::Type::Never(f.fold_type_never(_binding_0))
+ }
+ crate::Type::Paren(_binding_0) => {
+ crate::Type::Paren(f.fold_type_paren(_binding_0))
+ }
+ crate::Type::Path(_binding_0) => crate::Type::Path(f.fold_type_path(_binding_0)),
+ crate::Type::Ptr(_binding_0) => crate::Type::Ptr(f.fold_type_ptr(_binding_0)),
+ crate::Type::Reference(_binding_0) => {
+ crate::Type::Reference(f.fold_type_reference(_binding_0))
+ }
+ crate::Type::Slice(_binding_0) => {
+ crate::Type::Slice(f.fold_type_slice(_binding_0))
+ }
+ crate::Type::TraitObject(_binding_0) => {
+ crate::Type::TraitObject(f.fold_type_trait_object(_binding_0))
+ }
+ crate::Type::Tuple(_binding_0) => {
+ crate::Type::Tuple(f.fold_type_tuple(_binding_0))
+ }
+ crate::Type::Verbatim(_binding_0) => crate::Type::Verbatim(_binding_0),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_array<F>(f: &mut F, node: crate::TypeArray) -> crate::TypeArray
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeArray {
+ bracket_token: node.bracket_token,
+ elem: Box::new(f.fold_type(*node.elem)),
+ semi_token: node.semi_token,
+ len: f.fold_expr(node.len),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_bare_fn<F>(f: &mut F, node: crate::TypeBareFn) -> crate::TypeBareFn
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeBareFn {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ unsafety: node.unsafety,
+ abi: (node.abi).map(|it| f.fold_abi(it)),
+ fn_token: node.fn_token,
+ paren_token: node.paren_token,
+ inputs: crate::punctuated::fold(node.inputs, f, F::fold_bare_fn_arg),
+ variadic: (node.variadic).map(|it| f.fold_bare_variadic(it)),
+ output: f.fold_return_type(node.output),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_group<F>(f: &mut F, node: crate::TypeGroup) -> crate::TypeGroup
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeGroup {
+ group_token: node.group_token,
+ elem: Box::new(f.fold_type(*node.elem)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_impl_trait<F>(
+ f: &mut F,
+ node: crate::TypeImplTrait,
+) -> crate::TypeImplTrait
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeImplTrait {
+ impl_token: node.impl_token,
+ bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_infer<F>(f: &mut F, node: crate::TypeInfer) -> crate::TypeInfer
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeInfer {
+ underscore_token: node.underscore_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_macro<F>(f: &mut F, node: crate::TypeMacro) -> crate::TypeMacro
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeMacro {
+ mac: f.fold_macro(node.mac),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_never<F>(f: &mut F, node: crate::TypeNever) -> crate::TypeNever
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeNever {
+ bang_token: node.bang_token,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_param<F>(f: &mut F, node: crate::TypeParam) -> crate::TypeParam
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeParam {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ ident: f.fold_ident(node.ident),
+ colon_token: node.colon_token,
+ bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound),
+ eq_token: node.eq_token,
+ default: (node.default).map(|it| f.fold_type(it)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_param_bound<F>(
+ f: &mut F,
+ node: crate::TypeParamBound,
+) -> crate::TypeParamBound
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::TypeParamBound::Trait(_binding_0) => {
+ crate::TypeParamBound::Trait(f.fold_trait_bound(_binding_0))
+ }
+ crate::TypeParamBound::Lifetime(_binding_0) => {
+ crate::TypeParamBound::Lifetime(f.fold_lifetime(_binding_0))
+ }
+ crate::TypeParamBound::Verbatim(_binding_0) => {
+ crate::TypeParamBound::Verbatim(_binding_0)
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_paren<F>(f: &mut F, node: crate::TypeParen) -> crate::TypeParen
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeParen {
+ paren_token: node.paren_token,
+ elem: Box::new(f.fold_type(*node.elem)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_path<F>(f: &mut F, node: crate::TypePath) -> crate::TypePath
+where
+ F: Fold + ?Sized,
+{
+ crate::TypePath {
+ qself: (node.qself).map(|it| f.fold_qself(it)),
+ path: f.fold_path(node.path),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_ptr<F>(f: &mut F, node: crate::TypePtr) -> crate::TypePtr
+where
+ F: Fold + ?Sized,
+{
+ crate::TypePtr {
+ star_token: node.star_token,
+ const_token: node.const_token,
+ mutability: node.mutability,
+ elem: Box::new(f.fold_type(*node.elem)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_reference<F>(
+ f: &mut F,
+ node: crate::TypeReference,
+) -> crate::TypeReference
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeReference {
+ and_token: node.and_token,
+ lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
+ mutability: node.mutability,
+ elem: Box::new(f.fold_type(*node.elem)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_slice<F>(f: &mut F, node: crate::TypeSlice) -> crate::TypeSlice
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeSlice {
+ bracket_token: node.bracket_token,
+ elem: Box::new(f.fold_type(*node.elem)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_trait_object<F>(
+ f: &mut F,
+ node: crate::TypeTraitObject,
+) -> crate::TypeTraitObject
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeTraitObject {
+ dyn_token: node.dyn_token,
+ bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_type_tuple<F>(f: &mut F, node: crate::TypeTuple) -> crate::TypeTuple
+where
+ F: Fold + ?Sized,
+{
+ crate::TypeTuple {
+ paren_token: node.paren_token,
+ elems: crate::punctuated::fold(node.elems, f, F::fold_type),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_un_op<F>(f: &mut F, node: crate::UnOp) -> crate::UnOp
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::UnOp::Deref(_binding_0) => crate::UnOp::Deref(_binding_0),
+ crate::UnOp::Not(_binding_0) => crate::UnOp::Not(_binding_0),
+ crate::UnOp::Neg(_binding_0) => crate::UnOp::Neg(_binding_0),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_use_glob<F>(f: &mut F, node: crate::UseGlob) -> crate::UseGlob
+where
+ F: Fold + ?Sized,
+{
+ crate::UseGlob {
+ star_token: node.star_token,
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_use_group<F>(f: &mut F, node: crate::UseGroup) -> crate::UseGroup
+where
+ F: Fold + ?Sized,
+{
+ crate::UseGroup {
+ brace_token: node.brace_token,
+ items: crate::punctuated::fold(node.items, f, F::fold_use_tree),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_use_name<F>(f: &mut F, node: crate::UseName) -> crate::UseName
+where
+ F: Fold + ?Sized,
+{
+ crate::UseName {
+ ident: f.fold_ident(node.ident),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_use_path<F>(f: &mut F, node: crate::UsePath) -> crate::UsePath
+where
+ F: Fold + ?Sized,
+{
+ crate::UsePath {
+ ident: f.fold_ident(node.ident),
+ colon2_token: node.colon2_token,
+ tree: Box::new(f.fold_use_tree(*node.tree)),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_use_rename<F>(f: &mut F, node: crate::UseRename) -> crate::UseRename
+where
+ F: Fold + ?Sized,
+{
+ crate::UseRename {
+ ident: f.fold_ident(node.ident),
+ as_token: node.as_token,
+ rename: f.fold_ident(node.rename),
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_use_tree<F>(f: &mut F, node: crate::UseTree) -> crate::UseTree
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::UseTree::Path(_binding_0) => {
+ crate::UseTree::Path(f.fold_use_path(_binding_0))
+ }
+ crate::UseTree::Name(_binding_0) => {
+ crate::UseTree::Name(f.fold_use_name(_binding_0))
+ }
+ crate::UseTree::Rename(_binding_0) => {
+ crate::UseTree::Rename(f.fold_use_rename(_binding_0))
+ }
+ crate::UseTree::Glob(_binding_0) => {
+ crate::UseTree::Glob(f.fold_use_glob(_binding_0))
+ }
+ crate::UseTree::Group(_binding_0) => {
+ crate::UseTree::Group(f.fold_use_group(_binding_0))
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn fold_variadic<F>(f: &mut F, node: crate::Variadic) -> crate::Variadic
+where
+ F: Fold + ?Sized,
+{
+ crate::Variadic {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ pat: (node.pat).map(|it| (Box::new(f.fold_pat(*(it).0)), (it).1)),
+ dots: node.dots,
+ comma: node.comma,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_variant<F>(f: &mut F, node: crate::Variant) -> crate::Variant
+where
+ F: Fold + ?Sized,
+{
+ crate::Variant {
+ attrs: fold_vec(node.attrs, f, F::fold_attribute),
+ ident: f.fold_ident(node.ident),
+ fields: f.fold_fields(node.fields),
+ discriminant: (node.discriminant).map(|it| ((it).0, f.fold_expr((it).1))),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_vis_restricted<F>(
+ f: &mut F,
+ node: crate::VisRestricted,
+) -> crate::VisRestricted
+where
+ F: Fold + ?Sized,
+{
+ crate::VisRestricted {
+ pub_token: node.pub_token,
+ paren_token: node.paren_token,
+ in_token: node.in_token,
+ path: Box::new(f.fold_path(*node.path)),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_visibility<F>(f: &mut F, node: crate::Visibility) -> crate::Visibility
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::Visibility::Public(_binding_0) => crate::Visibility::Public(_binding_0),
+ crate::Visibility::Restricted(_binding_0) => {
+ crate::Visibility::Restricted(f.fold_vis_restricted(_binding_0))
+ }
+ crate::Visibility::Inherited => crate::Visibility::Inherited,
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_where_clause<F>(f: &mut F, node: crate::WhereClause) -> crate::WhereClause
+where
+ F: Fold + ?Sized,
+{
+ crate::WhereClause {
+ where_token: node.where_token,
+ predicates: crate::punctuated::fold(node.predicates, f, F::fold_where_predicate),
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn fold_where_predicate<F>(
+ f: &mut F,
+ node: crate::WherePredicate,
+) -> crate::WherePredicate
+where
+ F: Fold + ?Sized,
+{
+ match node {
+ crate::WherePredicate::Lifetime(_binding_0) => {
+ crate::WherePredicate::Lifetime(f.fold_predicate_lifetime(_binding_0))
+ }
+ crate::WherePredicate::Type(_binding_0) => {
+ crate::WherePredicate::Type(f.fold_predicate_type(_binding_0))
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+fn fold_vec<T, V, F>(vec: Vec<T>, fold: &mut V, mut f: F) -> Vec<T>
+where
+ V: ?Sized,
+ F: FnMut(&mut V, T) -> T,
+{
+ vec.into_iter().map(|it| f(fold, it)).collect()
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/gen/hash.rs b/rust/hw/char/pl011/vendor/syn/src/gen/hash.rs
new file mode 100644
index 0000000000..02097b9d6e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/gen/hash.rs
@@ -0,0 +1,2807 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#[cfg(any(feature = "derive", feature = "full"))]
+use crate::tt::TokenStreamHelper;
+use std::hash::{Hash, Hasher};
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Abi {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.name.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::AngleBracketedGenericArguments {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.colon2_token.hash(state);
+ self.args.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Arm {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.pat.hash(state);
+ self.guard.hash(state);
+ self.body.hash(state);
+ self.comma.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::AssocConst {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.value.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::AssocType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::AttrStyle {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::AttrStyle::Outer => {
+ state.write_u8(0u8);
+ }
+ crate::AttrStyle::Inner(_) => {
+ state.write_u8(1u8);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Attribute {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.style.hash(state);
+ self.meta.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::BareFnArg {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.name.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::BareVariadic {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.name.hash(state);
+ self.comma.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::BinOp {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::BinOp::Add(_) => {
+ state.write_u8(0u8);
+ }
+ crate::BinOp::Sub(_) => {
+ state.write_u8(1u8);
+ }
+ crate::BinOp::Mul(_) => {
+ state.write_u8(2u8);
+ }
+ crate::BinOp::Div(_) => {
+ state.write_u8(3u8);
+ }
+ crate::BinOp::Rem(_) => {
+ state.write_u8(4u8);
+ }
+ crate::BinOp::And(_) => {
+ state.write_u8(5u8);
+ }
+ crate::BinOp::Or(_) => {
+ state.write_u8(6u8);
+ }
+ crate::BinOp::BitXor(_) => {
+ state.write_u8(7u8);
+ }
+ crate::BinOp::BitAnd(_) => {
+ state.write_u8(8u8);
+ }
+ crate::BinOp::BitOr(_) => {
+ state.write_u8(9u8);
+ }
+ crate::BinOp::Shl(_) => {
+ state.write_u8(10u8);
+ }
+ crate::BinOp::Shr(_) => {
+ state.write_u8(11u8);
+ }
+ crate::BinOp::Eq(_) => {
+ state.write_u8(12u8);
+ }
+ crate::BinOp::Lt(_) => {
+ state.write_u8(13u8);
+ }
+ crate::BinOp::Le(_) => {
+ state.write_u8(14u8);
+ }
+ crate::BinOp::Ne(_) => {
+ state.write_u8(15u8);
+ }
+ crate::BinOp::Ge(_) => {
+ state.write_u8(16u8);
+ }
+ crate::BinOp::Gt(_) => {
+ state.write_u8(17u8);
+ }
+ crate::BinOp::AddAssign(_) => {
+ state.write_u8(18u8);
+ }
+ crate::BinOp::SubAssign(_) => {
+ state.write_u8(19u8);
+ }
+ crate::BinOp::MulAssign(_) => {
+ state.write_u8(20u8);
+ }
+ crate::BinOp::DivAssign(_) => {
+ state.write_u8(21u8);
+ }
+ crate::BinOp::RemAssign(_) => {
+ state.write_u8(22u8);
+ }
+ crate::BinOp::BitXorAssign(_) => {
+ state.write_u8(23u8);
+ }
+ crate::BinOp::BitAndAssign(_) => {
+ state.write_u8(24u8);
+ }
+ crate::BinOp::BitOrAssign(_) => {
+ state.write_u8(25u8);
+ }
+ crate::BinOp::ShlAssign(_) => {
+ state.write_u8(26u8);
+ }
+ crate::BinOp::ShrAssign(_) => {
+ state.write_u8(27u8);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Block {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.stmts.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::BoundLifetimes {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.lifetimes.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ConstParam {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.ident.hash(state);
+ self.ty.hash(state);
+ self.eq_token.hash(state);
+ self.default.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Constraint {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.bounds.hash(state);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Data {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Data::Struct(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Data::Enum(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Data::Union(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::DataEnum {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.variants.hash(state);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::DataStruct {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.fields.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::DataUnion {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.fields.hash(state);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::DeriveInput {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.data.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Expr {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ #[cfg(feature = "full")]
+ crate::Expr::Array(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Assign(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Async(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Await(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::Expr::Binary(v0) => {
+ state.write_u8(4u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Block(v0) => {
+ state.write_u8(5u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Break(v0) => {
+ state.write_u8(6u8);
+ v0.hash(state);
+ }
+ crate::Expr::Call(v0) => {
+ state.write_u8(7u8);
+ v0.hash(state);
+ }
+ crate::Expr::Cast(v0) => {
+ state.write_u8(8u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Closure(v0) => {
+ state.write_u8(9u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Const(v0) => {
+ state.write_u8(10u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Continue(v0) => {
+ state.write_u8(11u8);
+ v0.hash(state);
+ }
+ crate::Expr::Field(v0) => {
+ state.write_u8(12u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::ForLoop(v0) => {
+ state.write_u8(13u8);
+ v0.hash(state);
+ }
+ crate::Expr::Group(v0) => {
+ state.write_u8(14u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::If(v0) => {
+ state.write_u8(15u8);
+ v0.hash(state);
+ }
+ crate::Expr::Index(v0) => {
+ state.write_u8(16u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Infer(v0) => {
+ state.write_u8(17u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Let(v0) => {
+ state.write_u8(18u8);
+ v0.hash(state);
+ }
+ crate::Expr::Lit(v0) => {
+ state.write_u8(19u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Loop(v0) => {
+ state.write_u8(20u8);
+ v0.hash(state);
+ }
+ crate::Expr::Macro(v0) => {
+ state.write_u8(21u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Match(v0) => {
+ state.write_u8(22u8);
+ v0.hash(state);
+ }
+ crate::Expr::MethodCall(v0) => {
+ state.write_u8(23u8);
+ v0.hash(state);
+ }
+ crate::Expr::Paren(v0) => {
+ state.write_u8(24u8);
+ v0.hash(state);
+ }
+ crate::Expr::Path(v0) => {
+ state.write_u8(25u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Range(v0) => {
+ state.write_u8(26u8);
+ v0.hash(state);
+ }
+ crate::Expr::Reference(v0) => {
+ state.write_u8(27u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Repeat(v0) => {
+ state.write_u8(28u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Return(v0) => {
+ state.write_u8(29u8);
+ v0.hash(state);
+ }
+ crate::Expr::Struct(v0) => {
+ state.write_u8(30u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Try(v0) => {
+ state.write_u8(31u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::TryBlock(v0) => {
+ state.write_u8(32u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Tuple(v0) => {
+ state.write_u8(33u8);
+ v0.hash(state);
+ }
+ crate::Expr::Unary(v0) => {
+ state.write_u8(34u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Unsafe(v0) => {
+ state.write_u8(35u8);
+ v0.hash(state);
+ }
+ crate::Expr::Verbatim(v0) => {
+ state.write_u8(36u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::While(v0) => {
+ state.write_u8(37u8);
+ v0.hash(state);
+ }
+ #[cfg(feature = "full")]
+ crate::Expr::Yield(v0) => {
+ state.write_u8(38u8);
+ v0.hash(state);
+ }
+ #[cfg(not(feature = "full"))]
+ _ => unreachable!(),
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprArray {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.elems.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprAssign {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.left.hash(state);
+ self.right.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprAsync {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.capture.hash(state);
+ self.block.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprAwait {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.base.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprBinary {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.left.hash(state);
+ self.op.hash(state);
+ self.right.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprBlock {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.label.hash(state);
+ self.block.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprBreak {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.label.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprCall {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.func.hash(state);
+ self.args.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprCast {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprClosure {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.lifetimes.hash(state);
+ self.constness.hash(state);
+ self.movability.hash(state);
+ self.asyncness.hash(state);
+ self.capture.hash(state);
+ self.inputs.hash(state);
+ self.output.hash(state);
+ self.body.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprConst {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.block.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprContinue {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.label.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprField {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.base.hash(state);
+ self.member.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprForLoop {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.label.hash(state);
+ self.pat.hash(state);
+ self.expr.hash(state);
+ self.body.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprGroup {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprIf {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.cond.hash(state);
+ self.then_branch.hash(state);
+ self.else_branch.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprIndex {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ self.index.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprInfer {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprLet {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.pat.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprLit {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.lit.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprLoop {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.label.hash(state);
+ self.body.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprMacro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.mac.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprMatch {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ self.arms.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprMethodCall {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.receiver.hash(state);
+ self.method.hash(state);
+ self.turbofish.hash(state);
+ self.args.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprParen {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprPath {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.qself.hash(state);
+ self.path.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprRange {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.start.hash(state);
+ self.limits.hash(state);
+ self.end.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprReference {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.mutability.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprRepeat {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ self.len.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprReturn {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprStruct {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.qself.hash(state);
+ self.path.hash(state);
+ self.fields.hash(state);
+ self.dot2_token.hash(state);
+ self.rest.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprTry {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprTryBlock {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.block.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprTuple {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.elems.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprUnary {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.op.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprUnsafe {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.block.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprWhile {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.label.hash(state);
+ self.cond.hash(state);
+ self.body.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ExprYield {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Field {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.mutability.hash(state);
+ self.ident.hash(state);
+ self.colon_token.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::FieldMutability {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::FieldMutability::None => {
+ state.write_u8(0u8);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::FieldPat {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.member.hash(state);
+ self.colon_token.hash(state);
+ self.pat.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::FieldValue {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.member.hash(state);
+ self.colon_token.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Fields {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Fields::Named(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Fields::Unnamed(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Fields::Unit => {
+ state.write_u8(2u8);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::FieldsNamed {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.named.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::FieldsUnnamed {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.unnamed.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::File {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.shebang.hash(state);
+ self.attrs.hash(state);
+ self.items.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::FnArg {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::FnArg::Receiver(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::FnArg::Typed(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ForeignItem {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::ForeignItem::Fn(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::ForeignItem::Static(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::ForeignItem::Type(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::ForeignItem::Macro(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::ForeignItem::Verbatim(v0) => {
+ state.write_u8(4u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ForeignItemFn {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.sig.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ForeignItemMacro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.mac.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ForeignItemStatic {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.mutability.hash(state);
+ self.ident.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ForeignItemType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::GenericArgument {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::GenericArgument::Lifetime(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::GenericArgument::Type(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::GenericArgument::Const(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::GenericArgument::AssocType(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::GenericArgument::AssocConst(v0) => {
+ state.write_u8(4u8);
+ v0.hash(state);
+ }
+ crate::GenericArgument::Constraint(v0) => {
+ state.write_u8(5u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::GenericParam {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::GenericParam::Lifetime(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::GenericParam::Type(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::GenericParam::Const(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Generics {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.lt_token.hash(state);
+ self.params.hash(state);
+ self.gt_token.hash(state);
+ self.where_clause.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ImplItem {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::ImplItem::Const(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::ImplItem::Fn(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::ImplItem::Type(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::ImplItem::Macro(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::ImplItem::Verbatim(v0) => {
+ state.write_u8(4u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ImplItemConst {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.defaultness.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.ty.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ImplItemFn {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.defaultness.hash(state);
+ self.sig.hash(state);
+ self.block.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ImplItemMacro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.mac.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ImplItemType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.defaultness.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ImplRestriction {
+ fn hash<H>(&self, _state: &mut H)
+ where
+ H: Hasher,
+ {
+ match *self {}
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Item {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Item::Const(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Item::Enum(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Item::ExternCrate(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::Item::Fn(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::Item::ForeignMod(v0) => {
+ state.write_u8(4u8);
+ v0.hash(state);
+ }
+ crate::Item::Impl(v0) => {
+ state.write_u8(5u8);
+ v0.hash(state);
+ }
+ crate::Item::Macro(v0) => {
+ state.write_u8(6u8);
+ v0.hash(state);
+ }
+ crate::Item::Mod(v0) => {
+ state.write_u8(7u8);
+ v0.hash(state);
+ }
+ crate::Item::Static(v0) => {
+ state.write_u8(8u8);
+ v0.hash(state);
+ }
+ crate::Item::Struct(v0) => {
+ state.write_u8(9u8);
+ v0.hash(state);
+ }
+ crate::Item::Trait(v0) => {
+ state.write_u8(10u8);
+ v0.hash(state);
+ }
+ crate::Item::TraitAlias(v0) => {
+ state.write_u8(11u8);
+ v0.hash(state);
+ }
+ crate::Item::Type(v0) => {
+ state.write_u8(12u8);
+ v0.hash(state);
+ }
+ crate::Item::Union(v0) => {
+ state.write_u8(13u8);
+ v0.hash(state);
+ }
+ crate::Item::Use(v0) => {
+ state.write_u8(14u8);
+ v0.hash(state);
+ }
+ crate::Item::Verbatim(v0) => {
+ state.write_u8(15u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemConst {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.ty.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemEnum {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.variants.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemExternCrate {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.rename.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemFn {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.sig.hash(state);
+ self.block.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemForeignMod {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.unsafety.hash(state);
+ self.abi.hash(state);
+ self.items.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemImpl {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.defaultness.hash(state);
+ self.unsafety.hash(state);
+ self.generics.hash(state);
+ self.trait_.hash(state);
+ self.self_ty.hash(state);
+ self.items.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemMacro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.ident.hash(state);
+ self.mac.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemMod {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.unsafety.hash(state);
+ self.ident.hash(state);
+ self.content.hash(state);
+ self.semi.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemStatic {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.mutability.hash(state);
+ self.ident.hash(state);
+ self.ty.hash(state);
+ self.expr.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemStruct {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.fields.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemTrait {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.unsafety.hash(state);
+ self.auto_token.hash(state);
+ self.restriction.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.colon_token.hash(state);
+ self.supertraits.hash(state);
+ self.items.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemTraitAlias {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.bounds.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemUnion {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.fields.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ItemUse {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.vis.hash(state);
+ self.leading_colon.hash(state);
+ self.tree.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Label {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.name.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::LifetimeParam {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.lifetime.hash(state);
+ self.colon_token.hash(state);
+ self.bounds.hash(state);
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Lit {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Lit::Str(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Lit::ByteStr(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Lit::CStr(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::Lit::Byte(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::Lit::Char(v0) => {
+ state.write_u8(4u8);
+ v0.hash(state);
+ }
+ crate::Lit::Int(v0) => {
+ state.write_u8(5u8);
+ v0.hash(state);
+ }
+ crate::Lit::Float(v0) => {
+ state.write_u8(6u8);
+ v0.hash(state);
+ }
+ crate::Lit::Bool(v0) => {
+ state.write_u8(7u8);
+ v0.hash(state);
+ }
+ crate::Lit::Verbatim(v0) => {
+ state.write_u8(8u8);
+ v0.to_string().hash(state);
+ }
+ }
+ }
+}
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::LitBool {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.value.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Local {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.pat.hash(state);
+ self.init.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::LocalInit {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.expr.hash(state);
+ self.diverge.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Macro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.path.hash(state);
+ self.delimiter.hash(state);
+ TokenStreamHelper(&self.tokens).hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::MacroDelimiter {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::MacroDelimiter::Paren(_) => {
+ state.write_u8(0u8);
+ }
+ crate::MacroDelimiter::Brace(_) => {
+ state.write_u8(1u8);
+ }
+ crate::MacroDelimiter::Bracket(_) => {
+ state.write_u8(2u8);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Meta {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Meta::Path(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Meta::List(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Meta::NameValue(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::MetaList {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.path.hash(state);
+ self.delimiter.hash(state);
+ TokenStreamHelper(&self.tokens).hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::MetaNameValue {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.path.hash(state);
+ self.value.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ParenthesizedGenericArguments {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.inputs.hash(state);
+ self.output.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Pat {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Pat::Const(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Pat::Ident(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Pat::Lit(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::Pat::Macro(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::Pat::Or(v0) => {
+ state.write_u8(4u8);
+ v0.hash(state);
+ }
+ crate::Pat::Paren(v0) => {
+ state.write_u8(5u8);
+ v0.hash(state);
+ }
+ crate::Pat::Path(v0) => {
+ state.write_u8(6u8);
+ v0.hash(state);
+ }
+ crate::Pat::Range(v0) => {
+ state.write_u8(7u8);
+ v0.hash(state);
+ }
+ crate::Pat::Reference(v0) => {
+ state.write_u8(8u8);
+ v0.hash(state);
+ }
+ crate::Pat::Rest(v0) => {
+ state.write_u8(9u8);
+ v0.hash(state);
+ }
+ crate::Pat::Slice(v0) => {
+ state.write_u8(10u8);
+ v0.hash(state);
+ }
+ crate::Pat::Struct(v0) => {
+ state.write_u8(11u8);
+ v0.hash(state);
+ }
+ crate::Pat::Tuple(v0) => {
+ state.write_u8(12u8);
+ v0.hash(state);
+ }
+ crate::Pat::TupleStruct(v0) => {
+ state.write_u8(13u8);
+ v0.hash(state);
+ }
+ crate::Pat::Type(v0) => {
+ state.write_u8(14u8);
+ v0.hash(state);
+ }
+ crate::Pat::Verbatim(v0) => {
+ state.write_u8(15u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ crate::Pat::Wild(v0) => {
+ state.write_u8(16u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatIdent {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.by_ref.hash(state);
+ self.mutability.hash(state);
+ self.ident.hash(state);
+ self.subpat.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatOr {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.leading_vert.hash(state);
+ self.cases.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatParen {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.pat.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatReference {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.mutability.hash(state);
+ self.pat.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatRest {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatSlice {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.elems.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatStruct {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.qself.hash(state);
+ self.path.hash(state);
+ self.fields.hash(state);
+ self.rest.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatTuple {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.elems.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatTupleStruct {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.qself.hash(state);
+ self.path.hash(state);
+ self.elems.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.pat.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PatWild {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Path {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.leading_colon.hash(state);
+ self.segments.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PathArguments {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::PathArguments::None => {
+ state.write_u8(0u8);
+ }
+ crate::PathArguments::AngleBracketed(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::PathArguments::Parenthesized(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PathSegment {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ident.hash(state);
+ self.arguments.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PredicateLifetime {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.lifetime.hash(state);
+ self.bounds.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::PredicateType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.lifetimes.hash(state);
+ self.bounded_ty.hash(state);
+ self.bounds.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::QSelf {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ty.hash(state);
+ self.position.hash(state);
+ self.as_token.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::RangeLimits {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::RangeLimits::HalfOpen(_) => {
+ state.write_u8(0u8);
+ }
+ crate::RangeLimits::Closed(_) => {
+ state.write_u8(1u8);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Receiver {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.reference.hash(state);
+ self.mutability.hash(state);
+ self.colon_token.hash(state);
+ self.ty.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::ReturnType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::ReturnType::Default => {
+ state.write_u8(0u8);
+ }
+ crate::ReturnType::Type(_, v1) => {
+ state.write_u8(1u8);
+ v1.hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Signature {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.constness.hash(state);
+ self.asyncness.hash(state);
+ self.unsafety.hash(state);
+ self.abi.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.inputs.hash(state);
+ self.variadic.hash(state);
+ self.output.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::StaticMutability {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::StaticMutability::Mut(_) => {
+ state.write_u8(0u8);
+ }
+ crate::StaticMutability::None => {
+ state.write_u8(1u8);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Stmt {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Stmt::Local(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Stmt::Item(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Stmt::Expr(v0, v1) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ v1.hash(state);
+ }
+ crate::Stmt::Macro(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::StmtMacro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.mac.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TraitBound {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.paren_token.hash(state);
+ self.modifier.hash(state);
+ self.lifetimes.hash(state);
+ self.path.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TraitBoundModifier {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::TraitBoundModifier::None => {
+ state.write_u8(0u8);
+ }
+ crate::TraitBoundModifier::Maybe(_) => {
+ state.write_u8(1u8);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TraitItem {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::TraitItem::Const(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::TraitItem::Fn(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::TraitItem::Type(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::TraitItem::Macro(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::TraitItem::Verbatim(v0) => {
+ state.write_u8(4u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TraitItemConst {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.ty.hash(state);
+ self.default.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TraitItemFn {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.sig.hash(state);
+ self.default.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TraitItemMacro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.mac.hash(state);
+ self.semi_token.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TraitItemType {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.ident.hash(state);
+ self.generics.hash(state);
+ self.colon_token.hash(state);
+ self.bounds.hash(state);
+ self.default.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Type {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Type::Array(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::Type::BareFn(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Type::Group(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::Type::ImplTrait(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::Type::Infer(v0) => {
+ state.write_u8(4u8);
+ v0.hash(state);
+ }
+ crate::Type::Macro(v0) => {
+ state.write_u8(5u8);
+ v0.hash(state);
+ }
+ crate::Type::Never(v0) => {
+ state.write_u8(6u8);
+ v0.hash(state);
+ }
+ crate::Type::Paren(v0) => {
+ state.write_u8(7u8);
+ v0.hash(state);
+ }
+ crate::Type::Path(v0) => {
+ state.write_u8(8u8);
+ v0.hash(state);
+ }
+ crate::Type::Ptr(v0) => {
+ state.write_u8(9u8);
+ v0.hash(state);
+ }
+ crate::Type::Reference(v0) => {
+ state.write_u8(10u8);
+ v0.hash(state);
+ }
+ crate::Type::Slice(v0) => {
+ state.write_u8(11u8);
+ v0.hash(state);
+ }
+ crate::Type::TraitObject(v0) => {
+ state.write_u8(12u8);
+ v0.hash(state);
+ }
+ crate::Type::Tuple(v0) => {
+ state.write_u8(13u8);
+ v0.hash(state);
+ }
+ crate::Type::Verbatim(v0) => {
+ state.write_u8(14u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeArray {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.elem.hash(state);
+ self.len.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeBareFn {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.lifetimes.hash(state);
+ self.unsafety.hash(state);
+ self.abi.hash(state);
+ self.inputs.hash(state);
+ self.variadic.hash(state);
+ self.output.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeGroup {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.elem.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeImplTrait {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.bounds.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeInfer {
+ fn hash<H>(&self, _state: &mut H)
+ where
+ H: Hasher,
+ {}
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeMacro {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.mac.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeNever {
+ fn hash<H>(&self, _state: &mut H)
+ where
+ H: Hasher,
+ {}
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeParam {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.ident.hash(state);
+ self.colon_token.hash(state);
+ self.bounds.hash(state);
+ self.eq_token.hash(state);
+ self.default.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeParamBound {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::TypeParamBound::Trait(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::TypeParamBound::Lifetime(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::TypeParamBound::Verbatim(v0) => {
+ state.write_u8(2u8);
+ TokenStreamHelper(v0).hash(state);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeParen {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.elem.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypePath {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.qself.hash(state);
+ self.path.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypePtr {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.const_token.hash(state);
+ self.mutability.hash(state);
+ self.elem.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeReference {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.lifetime.hash(state);
+ self.mutability.hash(state);
+ self.elem.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeSlice {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.elem.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeTraitObject {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.dyn_token.hash(state);
+ self.bounds.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::TypeTuple {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.elems.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::UnOp {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::UnOp::Deref(_) => {
+ state.write_u8(0u8);
+ }
+ crate::UnOp::Not(_) => {
+ state.write_u8(1u8);
+ }
+ crate::UnOp::Neg(_) => {
+ state.write_u8(2u8);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::UseGlob {
+ fn hash<H>(&self, _state: &mut H)
+ where
+ H: Hasher,
+ {}
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::UseGroup {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.items.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::UseName {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ident.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::UsePath {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ident.hash(state);
+ self.tree.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::UseRename {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.ident.hash(state);
+ self.rename.hash(state);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::UseTree {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::UseTree::Path(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::UseTree::Name(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::UseTree::Rename(v0) => {
+ state.write_u8(2u8);
+ v0.hash(state);
+ }
+ crate::UseTree::Glob(v0) => {
+ state.write_u8(3u8);
+ v0.hash(state);
+ }
+ crate::UseTree::Group(v0) => {
+ state.write_u8(4u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Variadic {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.pat.hash(state);
+ self.comma.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Variant {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.attrs.hash(state);
+ self.ident.hash(state);
+ self.fields.hash(state);
+ self.discriminant.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::VisRestricted {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.in_token.hash(state);
+ self.path.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::Visibility {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::Visibility::Public(_) => {
+ state.write_u8(0u8);
+ }
+ crate::Visibility::Restricted(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ crate::Visibility::Inherited => {
+ state.write_u8(2u8);
+ }
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::WhereClause {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.predicates.hash(state);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for crate::WherePredicate {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ match self {
+ crate::WherePredicate::Lifetime(v0) => {
+ state.write_u8(0u8);
+ v0.hash(state);
+ }
+ crate::WherePredicate::Type(v0) => {
+ state.write_u8(1u8);
+ v0.hash(state);
+ }
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/gen/visit.rs b/rust/hw/char/pl011/vendor/syn/src/gen/visit.rs
new file mode 100644
index 0000000000..b61997f177
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/gen/visit.rs
@@ -0,0 +1,3858 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#![allow(unused_variables)]
+#![allow(clippy::needless_pass_by_ref_mut)]
+#[cfg(any(feature = "full", feature = "derive"))]
+use crate::punctuated::Punctuated;
+#[cfg(feature = "full")]
+macro_rules! full {
+ ($e:expr) => {
+ $e
+ };
+}
+#[cfg(all(feature = "derive", not(feature = "full")))]
+macro_rules! full {
+ ($e:expr) => {
+ unreachable!()
+ };
+}
+macro_rules! skip {
+ ($($tt:tt)*) => {};
+}
+/// Syntax tree traversal to walk a shared borrow of a syntax tree.
+///
+/// See the [module documentation] for details.
+///
+/// [module documentation]: self
+pub trait Visit<'ast> {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_abi(&mut self, i: &'ast crate::Abi) {
+ visit_abi(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_angle_bracketed_generic_arguments(
+ &mut self,
+ i: &'ast crate::AngleBracketedGenericArguments,
+ ) {
+ visit_angle_bracketed_generic_arguments(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_arm(&mut self, i: &'ast crate::Arm) {
+ visit_arm(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_assoc_const(&mut self, i: &'ast crate::AssocConst) {
+ visit_assoc_const(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_assoc_type(&mut self, i: &'ast crate::AssocType) {
+ visit_assoc_type(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_attr_style(&mut self, i: &'ast crate::AttrStyle) {
+ visit_attr_style(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_attribute(&mut self, i: &'ast crate::Attribute) {
+ visit_attribute(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bare_fn_arg(&mut self, i: &'ast crate::BareFnArg) {
+ visit_bare_fn_arg(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bare_variadic(&mut self, i: &'ast crate::BareVariadic) {
+ visit_bare_variadic(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bin_op(&mut self, i: &'ast crate::BinOp) {
+ visit_bin_op(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_block(&mut self, i: &'ast crate::Block) {
+ visit_block(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bound_lifetimes(&mut self, i: &'ast crate::BoundLifetimes) {
+ visit_bound_lifetimes(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_const_param(&mut self, i: &'ast crate::ConstParam) {
+ visit_const_param(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_constraint(&mut self, i: &'ast crate::Constraint) {
+ visit_constraint(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data(&mut self, i: &'ast crate::Data) {
+ visit_data(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data_enum(&mut self, i: &'ast crate::DataEnum) {
+ visit_data_enum(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data_struct(&mut self, i: &'ast crate::DataStruct) {
+ visit_data_struct(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data_union(&mut self, i: &'ast crate::DataUnion) {
+ visit_data_union(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_derive_input(&mut self, i: &'ast crate::DeriveInput) {
+ visit_derive_input(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr(&mut self, i: &'ast crate::Expr) {
+ visit_expr(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_array(&mut self, i: &'ast crate::ExprArray) {
+ visit_expr_array(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_assign(&mut self, i: &'ast crate::ExprAssign) {
+ visit_expr_assign(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_async(&mut self, i: &'ast crate::ExprAsync) {
+ visit_expr_async(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_await(&mut self, i: &'ast crate::ExprAwait) {
+ visit_expr_await(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_binary(&mut self, i: &'ast crate::ExprBinary) {
+ visit_expr_binary(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_block(&mut self, i: &'ast crate::ExprBlock) {
+ visit_expr_block(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_break(&mut self, i: &'ast crate::ExprBreak) {
+ visit_expr_break(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_call(&mut self, i: &'ast crate::ExprCall) {
+ visit_expr_call(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_cast(&mut self, i: &'ast crate::ExprCast) {
+ visit_expr_cast(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_closure(&mut self, i: &'ast crate::ExprClosure) {
+ visit_expr_closure(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_const(&mut self, i: &'ast crate::ExprConst) {
+ visit_expr_const(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_continue(&mut self, i: &'ast crate::ExprContinue) {
+ visit_expr_continue(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_field(&mut self, i: &'ast crate::ExprField) {
+ visit_expr_field(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_for_loop(&mut self, i: &'ast crate::ExprForLoop) {
+ visit_expr_for_loop(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_group(&mut self, i: &'ast crate::ExprGroup) {
+ visit_expr_group(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_if(&mut self, i: &'ast crate::ExprIf) {
+ visit_expr_if(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_index(&mut self, i: &'ast crate::ExprIndex) {
+ visit_expr_index(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_infer(&mut self, i: &'ast crate::ExprInfer) {
+ visit_expr_infer(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_let(&mut self, i: &'ast crate::ExprLet) {
+ visit_expr_let(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_lit(&mut self, i: &'ast crate::ExprLit) {
+ visit_expr_lit(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_loop(&mut self, i: &'ast crate::ExprLoop) {
+ visit_expr_loop(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_macro(&mut self, i: &'ast crate::ExprMacro) {
+ visit_expr_macro(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_match(&mut self, i: &'ast crate::ExprMatch) {
+ visit_expr_match(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_method_call(&mut self, i: &'ast crate::ExprMethodCall) {
+ visit_expr_method_call(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_paren(&mut self, i: &'ast crate::ExprParen) {
+ visit_expr_paren(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_path(&mut self, i: &'ast crate::ExprPath) {
+ visit_expr_path(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_range(&mut self, i: &'ast crate::ExprRange) {
+ visit_expr_range(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_reference(&mut self, i: &'ast crate::ExprReference) {
+ visit_expr_reference(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_repeat(&mut self, i: &'ast crate::ExprRepeat) {
+ visit_expr_repeat(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_return(&mut self, i: &'ast crate::ExprReturn) {
+ visit_expr_return(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_struct(&mut self, i: &'ast crate::ExprStruct) {
+ visit_expr_struct(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_try(&mut self, i: &'ast crate::ExprTry) {
+ visit_expr_try(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_try_block(&mut self, i: &'ast crate::ExprTryBlock) {
+ visit_expr_try_block(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_tuple(&mut self, i: &'ast crate::ExprTuple) {
+ visit_expr_tuple(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_unary(&mut self, i: &'ast crate::ExprUnary) {
+ visit_expr_unary(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_unsafe(&mut self, i: &'ast crate::ExprUnsafe) {
+ visit_expr_unsafe(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_while(&mut self, i: &'ast crate::ExprWhile) {
+ visit_expr_while(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_yield(&mut self, i: &'ast crate::ExprYield) {
+ visit_expr_yield(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_field(&mut self, i: &'ast crate::Field) {
+ visit_field(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_field_mutability(&mut self, i: &'ast crate::FieldMutability) {
+ visit_field_mutability(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_field_pat(&mut self, i: &'ast crate::FieldPat) {
+ visit_field_pat(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_field_value(&mut self, i: &'ast crate::FieldValue) {
+ visit_field_value(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_fields(&mut self, i: &'ast crate::Fields) {
+ visit_fields(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_fields_named(&mut self, i: &'ast crate::FieldsNamed) {
+ visit_fields_named(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_fields_unnamed(&mut self, i: &'ast crate::FieldsUnnamed) {
+ visit_fields_unnamed(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_file(&mut self, i: &'ast crate::File) {
+ visit_file(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_fn_arg(&mut self, i: &'ast crate::FnArg) {
+ visit_fn_arg(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item(&mut self, i: &'ast crate::ForeignItem) {
+ visit_foreign_item(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_fn(&mut self, i: &'ast crate::ForeignItemFn) {
+ visit_foreign_item_fn(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_macro(&mut self, i: &'ast crate::ForeignItemMacro) {
+ visit_foreign_item_macro(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_static(&mut self, i: &'ast crate::ForeignItemStatic) {
+ visit_foreign_item_static(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_type(&mut self, i: &'ast crate::ForeignItemType) {
+ visit_foreign_item_type(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_generic_argument(&mut self, i: &'ast crate::GenericArgument) {
+ visit_generic_argument(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_generic_param(&mut self, i: &'ast crate::GenericParam) {
+ visit_generic_param(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_generics(&mut self, i: &'ast crate::Generics) {
+ visit_generics(self, i);
+ }
+ fn visit_ident(&mut self, i: &'ast proc_macro2::Ident) {
+ visit_ident(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item(&mut self, i: &'ast crate::ImplItem) {
+ visit_impl_item(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_const(&mut self, i: &'ast crate::ImplItemConst) {
+ visit_impl_item_const(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_fn(&mut self, i: &'ast crate::ImplItemFn) {
+ visit_impl_item_fn(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_macro(&mut self, i: &'ast crate::ImplItemMacro) {
+ visit_impl_item_macro(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_type(&mut self, i: &'ast crate::ImplItemType) {
+ visit_impl_item_type(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_restriction(&mut self, i: &'ast crate::ImplRestriction) {
+ visit_impl_restriction(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_index(&mut self, i: &'ast crate::Index) {
+ visit_index(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item(&mut self, i: &'ast crate::Item) {
+ visit_item(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_const(&mut self, i: &'ast crate::ItemConst) {
+ visit_item_const(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_enum(&mut self, i: &'ast crate::ItemEnum) {
+ visit_item_enum(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_extern_crate(&mut self, i: &'ast crate::ItemExternCrate) {
+ visit_item_extern_crate(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_fn(&mut self, i: &'ast crate::ItemFn) {
+ visit_item_fn(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_foreign_mod(&mut self, i: &'ast crate::ItemForeignMod) {
+ visit_item_foreign_mod(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_impl(&mut self, i: &'ast crate::ItemImpl) {
+ visit_item_impl(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_macro(&mut self, i: &'ast crate::ItemMacro) {
+ visit_item_macro(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_mod(&mut self, i: &'ast crate::ItemMod) {
+ visit_item_mod(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_static(&mut self, i: &'ast crate::ItemStatic) {
+ visit_item_static(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_struct(&mut self, i: &'ast crate::ItemStruct) {
+ visit_item_struct(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_trait(&mut self, i: &'ast crate::ItemTrait) {
+ visit_item_trait(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_trait_alias(&mut self, i: &'ast crate::ItemTraitAlias) {
+ visit_item_trait_alias(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_type(&mut self, i: &'ast crate::ItemType) {
+ visit_item_type(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_union(&mut self, i: &'ast crate::ItemUnion) {
+ visit_item_union(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_use(&mut self, i: &'ast crate::ItemUse) {
+ visit_item_use(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_label(&mut self, i: &'ast crate::Label) {
+ visit_label(self, i);
+ }
+ fn visit_lifetime(&mut self, i: &'ast crate::Lifetime) {
+ visit_lifetime(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_lifetime_param(&mut self, i: &'ast crate::LifetimeParam) {
+ visit_lifetime_param(self, i);
+ }
+ fn visit_lit(&mut self, i: &'ast crate::Lit) {
+ visit_lit(self, i);
+ }
+ fn visit_lit_bool(&mut self, i: &'ast crate::LitBool) {
+ visit_lit_bool(self, i);
+ }
+ fn visit_lit_byte(&mut self, i: &'ast crate::LitByte) {
+ visit_lit_byte(self, i);
+ }
+ fn visit_lit_byte_str(&mut self, i: &'ast crate::LitByteStr) {
+ visit_lit_byte_str(self, i);
+ }
+ fn visit_lit_cstr(&mut self, i: &'ast crate::LitCStr) {
+ visit_lit_cstr(self, i);
+ }
+ fn visit_lit_char(&mut self, i: &'ast crate::LitChar) {
+ visit_lit_char(self, i);
+ }
+ fn visit_lit_float(&mut self, i: &'ast crate::LitFloat) {
+ visit_lit_float(self, i);
+ }
+ fn visit_lit_int(&mut self, i: &'ast crate::LitInt) {
+ visit_lit_int(self, i);
+ }
+ fn visit_lit_str(&mut self, i: &'ast crate::LitStr) {
+ visit_lit_str(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_local(&mut self, i: &'ast crate::Local) {
+ visit_local(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_local_init(&mut self, i: &'ast crate::LocalInit) {
+ visit_local_init(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_macro(&mut self, i: &'ast crate::Macro) {
+ visit_macro(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_macro_delimiter(&mut self, i: &'ast crate::MacroDelimiter) {
+ visit_macro_delimiter(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_member(&mut self, i: &'ast crate::Member) {
+ visit_member(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_meta(&mut self, i: &'ast crate::Meta) {
+ visit_meta(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_meta_list(&mut self, i: &'ast crate::MetaList) {
+ visit_meta_list(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_meta_name_value(&mut self, i: &'ast crate::MetaNameValue) {
+ visit_meta_name_value(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_parenthesized_generic_arguments(
+ &mut self,
+ i: &'ast crate::ParenthesizedGenericArguments,
+ ) {
+ visit_parenthesized_generic_arguments(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat(&mut self, i: &'ast crate::Pat) {
+ visit_pat(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_ident(&mut self, i: &'ast crate::PatIdent) {
+ visit_pat_ident(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_or(&mut self, i: &'ast crate::PatOr) {
+ visit_pat_or(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_paren(&mut self, i: &'ast crate::PatParen) {
+ visit_pat_paren(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_reference(&mut self, i: &'ast crate::PatReference) {
+ visit_pat_reference(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_rest(&mut self, i: &'ast crate::PatRest) {
+ visit_pat_rest(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_slice(&mut self, i: &'ast crate::PatSlice) {
+ visit_pat_slice(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_struct(&mut self, i: &'ast crate::PatStruct) {
+ visit_pat_struct(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_tuple(&mut self, i: &'ast crate::PatTuple) {
+ visit_pat_tuple(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_tuple_struct(&mut self, i: &'ast crate::PatTupleStruct) {
+ visit_pat_tuple_struct(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_type(&mut self, i: &'ast crate::PatType) {
+ visit_pat_type(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_wild(&mut self, i: &'ast crate::PatWild) {
+ visit_pat_wild(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_path(&mut self, i: &'ast crate::Path) {
+ visit_path(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_path_arguments(&mut self, i: &'ast crate::PathArguments) {
+ visit_path_arguments(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_path_segment(&mut self, i: &'ast crate::PathSegment) {
+ visit_path_segment(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_predicate_lifetime(&mut self, i: &'ast crate::PredicateLifetime) {
+ visit_predicate_lifetime(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_predicate_type(&mut self, i: &'ast crate::PredicateType) {
+ visit_predicate_type(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_qself(&mut self, i: &'ast crate::QSelf) {
+ visit_qself(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_range_limits(&mut self, i: &'ast crate::RangeLimits) {
+ visit_range_limits(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_receiver(&mut self, i: &'ast crate::Receiver) {
+ visit_receiver(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_return_type(&mut self, i: &'ast crate::ReturnType) {
+ visit_return_type(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_signature(&mut self, i: &'ast crate::Signature) {
+ visit_signature(self, i);
+ }
+ fn visit_span(&mut self, i: &proc_macro2::Span) {
+ visit_span(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_static_mutability(&mut self, i: &'ast crate::StaticMutability) {
+ visit_static_mutability(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_stmt(&mut self, i: &'ast crate::Stmt) {
+ visit_stmt(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_stmt_macro(&mut self, i: &'ast crate::StmtMacro) {
+ visit_stmt_macro(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_trait_bound(&mut self, i: &'ast crate::TraitBound) {
+ visit_trait_bound(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_trait_bound_modifier(&mut self, i: &'ast crate::TraitBoundModifier) {
+ visit_trait_bound_modifier(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item(&mut self, i: &'ast crate::TraitItem) {
+ visit_trait_item(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_const(&mut self, i: &'ast crate::TraitItemConst) {
+ visit_trait_item_const(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_fn(&mut self, i: &'ast crate::TraitItemFn) {
+ visit_trait_item_fn(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_macro(&mut self, i: &'ast crate::TraitItemMacro) {
+ visit_trait_item_macro(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_type(&mut self, i: &'ast crate::TraitItemType) {
+ visit_trait_item_type(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type(&mut self, i: &'ast crate::Type) {
+ visit_type(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_array(&mut self, i: &'ast crate::TypeArray) {
+ visit_type_array(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_bare_fn(&mut self, i: &'ast crate::TypeBareFn) {
+ visit_type_bare_fn(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_group(&mut self, i: &'ast crate::TypeGroup) {
+ visit_type_group(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_impl_trait(&mut self, i: &'ast crate::TypeImplTrait) {
+ visit_type_impl_trait(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_infer(&mut self, i: &'ast crate::TypeInfer) {
+ visit_type_infer(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_macro(&mut self, i: &'ast crate::TypeMacro) {
+ visit_type_macro(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_never(&mut self, i: &'ast crate::TypeNever) {
+ visit_type_never(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_param(&mut self, i: &'ast crate::TypeParam) {
+ visit_type_param(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_param_bound(&mut self, i: &'ast crate::TypeParamBound) {
+ visit_type_param_bound(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_paren(&mut self, i: &'ast crate::TypeParen) {
+ visit_type_paren(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_path(&mut self, i: &'ast crate::TypePath) {
+ visit_type_path(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_ptr(&mut self, i: &'ast crate::TypePtr) {
+ visit_type_ptr(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_reference(&mut self, i: &'ast crate::TypeReference) {
+ visit_type_reference(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_slice(&mut self, i: &'ast crate::TypeSlice) {
+ visit_type_slice(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_trait_object(&mut self, i: &'ast crate::TypeTraitObject) {
+ visit_type_trait_object(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_tuple(&mut self, i: &'ast crate::TypeTuple) {
+ visit_type_tuple(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_un_op(&mut self, i: &'ast crate::UnOp) {
+ visit_un_op(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_glob(&mut self, i: &'ast crate::UseGlob) {
+ visit_use_glob(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_group(&mut self, i: &'ast crate::UseGroup) {
+ visit_use_group(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_name(&mut self, i: &'ast crate::UseName) {
+ visit_use_name(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_path(&mut self, i: &'ast crate::UsePath) {
+ visit_use_path(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_rename(&mut self, i: &'ast crate::UseRename) {
+ visit_use_rename(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_tree(&mut self, i: &'ast crate::UseTree) {
+ visit_use_tree(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_variadic(&mut self, i: &'ast crate::Variadic) {
+ visit_variadic(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_variant(&mut self, i: &'ast crate::Variant) {
+ visit_variant(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_vis_restricted(&mut self, i: &'ast crate::VisRestricted) {
+ visit_vis_restricted(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_visibility(&mut self, i: &'ast crate::Visibility) {
+ visit_visibility(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_where_clause(&mut self, i: &'ast crate::WhereClause) {
+ visit_where_clause(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_where_predicate(&mut self, i: &'ast crate::WherePredicate) {
+ visit_where_predicate(self, i);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_abi<'ast, V>(v: &mut V, node: &'ast crate::Abi)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.extern_token);
+ if let Some(it) = &node.name {
+ v.visit_lit_str(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_angle_bracketed_generic_arguments<'ast, V>(
+ v: &mut V,
+ node: &'ast crate::AngleBracketedGenericArguments,
+)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.colon2_token);
+ skip!(node.lt_token);
+ for el in Punctuated::pairs(&node.args) {
+ let it = el.value();
+ v.visit_generic_argument(it);
+ }
+ skip!(node.gt_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_arm<'ast, V>(v: &mut V, node: &'ast crate::Arm)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_pat(&node.pat);
+ if let Some(it) = &node.guard {
+ skip!((it).0);
+ v.visit_expr(&*(it).1);
+ }
+ skip!(node.fat_arrow_token);
+ v.visit_expr(&*node.body);
+ skip!(node.comma);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_assoc_const<'ast, V>(v: &mut V, node: &'ast crate::AssocConst)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_ident(&node.ident);
+ if let Some(it) = &node.generics {
+ v.visit_angle_bracketed_generic_arguments(it);
+ }
+ skip!(node.eq_token);
+ v.visit_expr(&node.value);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_assoc_type<'ast, V>(v: &mut V, node: &'ast crate::AssocType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_ident(&node.ident);
+ if let Some(it) = &node.generics {
+ v.visit_angle_bracketed_generic_arguments(it);
+ }
+ skip!(node.eq_token);
+ v.visit_type(&node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_attr_style<'ast, V>(v: &mut V, node: &'ast crate::AttrStyle)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::AttrStyle::Outer => {}
+ crate::AttrStyle::Inner(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_attribute<'ast, V>(v: &mut V, node: &'ast crate::Attribute)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.pound_token);
+ v.visit_attr_style(&node.style);
+ skip!(node.bracket_token);
+ v.visit_meta(&node.meta);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bare_fn_arg<'ast, V>(v: &mut V, node: &'ast crate::BareFnArg)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.name {
+ v.visit_ident(&(it).0);
+ skip!((it).1);
+ }
+ v.visit_type(&node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bare_variadic<'ast, V>(v: &mut V, node: &'ast crate::BareVariadic)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.name {
+ v.visit_ident(&(it).0);
+ skip!((it).1);
+ }
+ skip!(node.dots);
+ skip!(node.comma);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bin_op<'ast, V>(v: &mut V, node: &'ast crate::BinOp)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::BinOp::Add(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Sub(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Mul(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Div(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Rem(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::And(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Or(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitXor(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitAnd(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitOr(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Shl(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Shr(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Eq(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Lt(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Le(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Ne(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Ge(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Gt(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::AddAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::SubAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::MulAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::DivAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::RemAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitXorAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitAndAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitOrAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::ShlAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::ShrAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_block<'ast, V>(v: &mut V, node: &'ast crate::Block)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.brace_token);
+ for it in &node.stmts {
+ v.visit_stmt(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bound_lifetimes<'ast, V>(v: &mut V, node: &'ast crate::BoundLifetimes)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.for_token);
+ skip!(node.lt_token);
+ for el in Punctuated::pairs(&node.lifetimes) {
+ let it = el.value();
+ v.visit_generic_param(it);
+ }
+ skip!(node.gt_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_const_param<'ast, V>(v: &mut V, node: &'ast crate::ConstParam)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.const_token);
+ v.visit_ident(&node.ident);
+ skip!(node.colon_token);
+ v.visit_type(&node.ty);
+ skip!(node.eq_token);
+ if let Some(it) = &node.default {
+ v.visit_expr(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_constraint<'ast, V>(v: &mut V, node: &'ast crate::Constraint)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_ident(&node.ident);
+ if let Some(it) = &node.generics {
+ v.visit_angle_bracketed_generic_arguments(it);
+ }
+ skip!(node.colon_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data<'ast, V>(v: &mut V, node: &'ast crate::Data)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Data::Struct(_binding_0) => {
+ v.visit_data_struct(_binding_0);
+ }
+ crate::Data::Enum(_binding_0) => {
+ v.visit_data_enum(_binding_0);
+ }
+ crate::Data::Union(_binding_0) => {
+ v.visit_data_union(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data_enum<'ast, V>(v: &mut V, node: &'ast crate::DataEnum)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.enum_token);
+ skip!(node.brace_token);
+ for el in Punctuated::pairs(&node.variants) {
+ let it = el.value();
+ v.visit_variant(it);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data_struct<'ast, V>(v: &mut V, node: &'ast crate::DataStruct)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.struct_token);
+ v.visit_fields(&node.fields);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data_union<'ast, V>(v: &mut V, node: &'ast crate::DataUnion)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.union_token);
+ v.visit_fields_named(&node.fields);
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_derive_input<'ast, V>(v: &mut V, node: &'ast crate::DeriveInput)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ v.visit_data(&node.data);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr<'ast, V>(v: &mut V, node: &'ast crate::Expr)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Expr::Array(_binding_0) => {
+ full!(v.visit_expr_array(_binding_0));
+ }
+ crate::Expr::Assign(_binding_0) => {
+ full!(v.visit_expr_assign(_binding_0));
+ }
+ crate::Expr::Async(_binding_0) => {
+ full!(v.visit_expr_async(_binding_0));
+ }
+ crate::Expr::Await(_binding_0) => {
+ full!(v.visit_expr_await(_binding_0));
+ }
+ crate::Expr::Binary(_binding_0) => {
+ v.visit_expr_binary(_binding_0);
+ }
+ crate::Expr::Block(_binding_0) => {
+ full!(v.visit_expr_block(_binding_0));
+ }
+ crate::Expr::Break(_binding_0) => {
+ full!(v.visit_expr_break(_binding_0));
+ }
+ crate::Expr::Call(_binding_0) => {
+ v.visit_expr_call(_binding_0);
+ }
+ crate::Expr::Cast(_binding_0) => {
+ v.visit_expr_cast(_binding_0);
+ }
+ crate::Expr::Closure(_binding_0) => {
+ full!(v.visit_expr_closure(_binding_0));
+ }
+ crate::Expr::Const(_binding_0) => {
+ full!(v.visit_expr_const(_binding_0));
+ }
+ crate::Expr::Continue(_binding_0) => {
+ full!(v.visit_expr_continue(_binding_0));
+ }
+ crate::Expr::Field(_binding_0) => {
+ v.visit_expr_field(_binding_0);
+ }
+ crate::Expr::ForLoop(_binding_0) => {
+ full!(v.visit_expr_for_loop(_binding_0));
+ }
+ crate::Expr::Group(_binding_0) => {
+ v.visit_expr_group(_binding_0);
+ }
+ crate::Expr::If(_binding_0) => {
+ full!(v.visit_expr_if(_binding_0));
+ }
+ crate::Expr::Index(_binding_0) => {
+ v.visit_expr_index(_binding_0);
+ }
+ crate::Expr::Infer(_binding_0) => {
+ full!(v.visit_expr_infer(_binding_0));
+ }
+ crate::Expr::Let(_binding_0) => {
+ full!(v.visit_expr_let(_binding_0));
+ }
+ crate::Expr::Lit(_binding_0) => {
+ v.visit_expr_lit(_binding_0);
+ }
+ crate::Expr::Loop(_binding_0) => {
+ full!(v.visit_expr_loop(_binding_0));
+ }
+ crate::Expr::Macro(_binding_0) => {
+ v.visit_expr_macro(_binding_0);
+ }
+ crate::Expr::Match(_binding_0) => {
+ full!(v.visit_expr_match(_binding_0));
+ }
+ crate::Expr::MethodCall(_binding_0) => {
+ v.visit_expr_method_call(_binding_0);
+ }
+ crate::Expr::Paren(_binding_0) => {
+ v.visit_expr_paren(_binding_0);
+ }
+ crate::Expr::Path(_binding_0) => {
+ v.visit_expr_path(_binding_0);
+ }
+ crate::Expr::Range(_binding_0) => {
+ full!(v.visit_expr_range(_binding_0));
+ }
+ crate::Expr::Reference(_binding_0) => {
+ v.visit_expr_reference(_binding_0);
+ }
+ crate::Expr::Repeat(_binding_0) => {
+ full!(v.visit_expr_repeat(_binding_0));
+ }
+ crate::Expr::Return(_binding_0) => {
+ full!(v.visit_expr_return(_binding_0));
+ }
+ crate::Expr::Struct(_binding_0) => {
+ v.visit_expr_struct(_binding_0);
+ }
+ crate::Expr::Try(_binding_0) => {
+ full!(v.visit_expr_try(_binding_0));
+ }
+ crate::Expr::TryBlock(_binding_0) => {
+ full!(v.visit_expr_try_block(_binding_0));
+ }
+ crate::Expr::Tuple(_binding_0) => {
+ full!(v.visit_expr_tuple(_binding_0));
+ }
+ crate::Expr::Unary(_binding_0) => {
+ v.visit_expr_unary(_binding_0);
+ }
+ crate::Expr::Unsafe(_binding_0) => {
+ full!(v.visit_expr_unsafe(_binding_0));
+ }
+ crate::Expr::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::Expr::While(_binding_0) => {
+ full!(v.visit_expr_while(_binding_0));
+ }
+ crate::Expr::Yield(_binding_0) => {
+ full!(v.visit_expr_yield(_binding_0));
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_array<'ast, V>(v: &mut V, node: &'ast crate::ExprArray)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.bracket_token);
+ for el in Punctuated::pairs(&node.elems) {
+ let it = el.value();
+ v.visit_expr(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_assign<'ast, V>(v: &mut V, node: &'ast crate::ExprAssign)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.left);
+ skip!(node.eq_token);
+ v.visit_expr(&*node.right);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_async<'ast, V>(v: &mut V, node: &'ast crate::ExprAsync)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.async_token);
+ skip!(node.capture);
+ v.visit_block(&node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_await<'ast, V>(v: &mut V, node: &'ast crate::ExprAwait)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.base);
+ skip!(node.dot_token);
+ skip!(node.await_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast crate::ExprBinary)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.left);
+ v.visit_bin_op(&node.op);
+ v.visit_expr(&*node.right);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_block<'ast, V>(v: &mut V, node: &'ast crate::ExprBlock)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.label {
+ v.visit_label(it);
+ }
+ v.visit_block(&node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_break<'ast, V>(v: &mut V, node: &'ast crate::ExprBreak)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.break_token);
+ if let Some(it) = &node.label {
+ v.visit_lifetime(it);
+ }
+ if let Some(it) = &node.expr {
+ v.visit_expr(&**it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_call<'ast, V>(v: &mut V, node: &'ast crate::ExprCall)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.func);
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.args) {
+ let it = el.value();
+ v.visit_expr(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_cast<'ast, V>(v: &mut V, node: &'ast crate::ExprCast)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.expr);
+ skip!(node.as_token);
+ v.visit_type(&*node.ty);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_closure<'ast, V>(v: &mut V, node: &'ast crate::ExprClosure)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.lifetimes {
+ v.visit_bound_lifetimes(it);
+ }
+ skip!(node.constness);
+ skip!(node.movability);
+ skip!(node.asyncness);
+ skip!(node.capture);
+ skip!(node.or1_token);
+ for el in Punctuated::pairs(&node.inputs) {
+ let it = el.value();
+ v.visit_pat(it);
+ }
+ skip!(node.or2_token);
+ v.visit_return_type(&node.output);
+ v.visit_expr(&*node.body);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_const<'ast, V>(v: &mut V, node: &'ast crate::ExprConst)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.const_token);
+ v.visit_block(&node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_continue<'ast, V>(v: &mut V, node: &'ast crate::ExprContinue)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.continue_token);
+ if let Some(it) = &node.label {
+ v.visit_lifetime(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_field<'ast, V>(v: &mut V, node: &'ast crate::ExprField)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.base);
+ skip!(node.dot_token);
+ v.visit_member(&node.member);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_for_loop<'ast, V>(v: &mut V, node: &'ast crate::ExprForLoop)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.label {
+ v.visit_label(it);
+ }
+ skip!(node.for_token);
+ v.visit_pat(&*node.pat);
+ skip!(node.in_token);
+ v.visit_expr(&*node.expr);
+ v.visit_block(&node.body);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_group<'ast, V>(v: &mut V, node: &'ast crate::ExprGroup)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.group_token);
+ v.visit_expr(&*node.expr);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_if<'ast, V>(v: &mut V, node: &'ast crate::ExprIf)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.if_token);
+ v.visit_expr(&*node.cond);
+ v.visit_block(&node.then_branch);
+ if let Some(it) = &node.else_branch {
+ skip!((it).0);
+ v.visit_expr(&*(it).1);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_index<'ast, V>(v: &mut V, node: &'ast crate::ExprIndex)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.expr);
+ skip!(node.bracket_token);
+ v.visit_expr(&*node.index);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_infer<'ast, V>(v: &mut V, node: &'ast crate::ExprInfer)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.underscore_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_let<'ast, V>(v: &mut V, node: &'ast crate::ExprLet)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.let_token);
+ v.visit_pat(&*node.pat);
+ skip!(node.eq_token);
+ v.visit_expr(&*node.expr);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_lit<'ast, V>(v: &mut V, node: &'ast crate::ExprLit)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_lit(&node.lit);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_loop<'ast, V>(v: &mut V, node: &'ast crate::ExprLoop)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.label {
+ v.visit_label(it);
+ }
+ skip!(node.loop_token);
+ v.visit_block(&node.body);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_macro<'ast, V>(v: &mut V, node: &'ast crate::ExprMacro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_macro(&node.mac);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_match<'ast, V>(v: &mut V, node: &'ast crate::ExprMatch)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.match_token);
+ v.visit_expr(&*node.expr);
+ skip!(node.brace_token);
+ for it in &node.arms {
+ v.visit_arm(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_method_call<'ast, V>(v: &mut V, node: &'ast crate::ExprMethodCall)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.receiver);
+ skip!(node.dot_token);
+ v.visit_ident(&node.method);
+ if let Some(it) = &node.turbofish {
+ v.visit_angle_bracketed_generic_arguments(it);
+ }
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.args) {
+ let it = el.value();
+ v.visit_expr(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_paren<'ast, V>(v: &mut V, node: &'ast crate::ExprParen)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.paren_token);
+ v.visit_expr(&*node.expr);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_path<'ast, V>(v: &mut V, node: &'ast crate::ExprPath)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.qself {
+ v.visit_qself(it);
+ }
+ v.visit_path(&node.path);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_range<'ast, V>(v: &mut V, node: &'ast crate::ExprRange)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.start {
+ v.visit_expr(&**it);
+ }
+ v.visit_range_limits(&node.limits);
+ if let Some(it) = &node.end {
+ v.visit_expr(&**it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_reference<'ast, V>(v: &mut V, node: &'ast crate::ExprReference)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.and_token);
+ skip!(node.mutability);
+ v.visit_expr(&*node.expr);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_repeat<'ast, V>(v: &mut V, node: &'ast crate::ExprRepeat)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.bracket_token);
+ v.visit_expr(&*node.expr);
+ skip!(node.semi_token);
+ v.visit_expr(&*node.len);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_return<'ast, V>(v: &mut V, node: &'ast crate::ExprReturn)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.return_token);
+ if let Some(it) = &node.expr {
+ v.visit_expr(&**it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_struct<'ast, V>(v: &mut V, node: &'ast crate::ExprStruct)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.qself {
+ v.visit_qself(it);
+ }
+ v.visit_path(&node.path);
+ skip!(node.brace_token);
+ for el in Punctuated::pairs(&node.fields) {
+ let it = el.value();
+ v.visit_field_value(it);
+ }
+ skip!(node.dot2_token);
+ if let Some(it) = &node.rest {
+ v.visit_expr(&**it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_try<'ast, V>(v: &mut V, node: &'ast crate::ExprTry)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_expr(&*node.expr);
+ skip!(node.question_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_try_block<'ast, V>(v: &mut V, node: &'ast crate::ExprTryBlock)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.try_token);
+ v.visit_block(&node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_tuple<'ast, V>(v: &mut V, node: &'ast crate::ExprTuple)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.elems) {
+ let it = el.value();
+ v.visit_expr(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_unary<'ast, V>(v: &mut V, node: &'ast crate::ExprUnary)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_un_op(&node.op);
+ v.visit_expr(&*node.expr);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_unsafe<'ast, V>(v: &mut V, node: &'ast crate::ExprUnsafe)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.unsafe_token);
+ v.visit_block(&node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_while<'ast, V>(v: &mut V, node: &'ast crate::ExprWhile)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.label {
+ v.visit_label(it);
+ }
+ skip!(node.while_token);
+ v.visit_expr(&*node.cond);
+ v.visit_block(&node.body);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_yield<'ast, V>(v: &mut V, node: &'ast crate::ExprYield)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.yield_token);
+ if let Some(it) = &node.expr {
+ v.visit_expr(&**it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_field<'ast, V>(v: &mut V, node: &'ast crate::Field)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ v.visit_field_mutability(&node.mutability);
+ if let Some(it) = &node.ident {
+ v.visit_ident(it);
+ }
+ skip!(node.colon_token);
+ v.visit_type(&node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_field_mutability<'ast, V>(v: &mut V, node: &'ast crate::FieldMutability)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::FieldMutability::None => {}
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_field_pat<'ast, V>(v: &mut V, node: &'ast crate::FieldPat)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_member(&node.member);
+ skip!(node.colon_token);
+ v.visit_pat(&*node.pat);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_field_value<'ast, V>(v: &mut V, node: &'ast crate::FieldValue)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_member(&node.member);
+ skip!(node.colon_token);
+ v.visit_expr(&node.expr);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_fields<'ast, V>(v: &mut V, node: &'ast crate::Fields)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Fields::Named(_binding_0) => {
+ v.visit_fields_named(_binding_0);
+ }
+ crate::Fields::Unnamed(_binding_0) => {
+ v.visit_fields_unnamed(_binding_0);
+ }
+ crate::Fields::Unit => {}
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_fields_named<'ast, V>(v: &mut V, node: &'ast crate::FieldsNamed)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.brace_token);
+ for el in Punctuated::pairs(&node.named) {
+ let it = el.value();
+ v.visit_field(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_fields_unnamed<'ast, V>(v: &mut V, node: &'ast crate::FieldsUnnamed)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.unnamed) {
+ let it = el.value();
+ v.visit_field(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_file<'ast, V>(v: &mut V, node: &'ast crate::File)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.shebang);
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ for it in &node.items {
+ v.visit_item(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_fn_arg<'ast, V>(v: &mut V, node: &'ast crate::FnArg)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::FnArg::Receiver(_binding_0) => {
+ v.visit_receiver(_binding_0);
+ }
+ crate::FnArg::Typed(_binding_0) => {
+ v.visit_pat_type(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item<'ast, V>(v: &mut V, node: &'ast crate::ForeignItem)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::ForeignItem::Fn(_binding_0) => {
+ v.visit_foreign_item_fn(_binding_0);
+ }
+ crate::ForeignItem::Static(_binding_0) => {
+ v.visit_foreign_item_static(_binding_0);
+ }
+ crate::ForeignItem::Type(_binding_0) => {
+ v.visit_foreign_item_type(_binding_0);
+ }
+ crate::ForeignItem::Macro(_binding_0) => {
+ v.visit_foreign_item_macro(_binding_0);
+ }
+ crate::ForeignItem::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_fn<'ast, V>(v: &mut V, node: &'ast crate::ForeignItemFn)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ v.visit_signature(&node.sig);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_macro<'ast, V>(v: &mut V, node: &'ast crate::ForeignItemMacro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_macro(&node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_static<'ast, V>(
+ v: &mut V,
+ node: &'ast crate::ForeignItemStatic,
+)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.static_token);
+ v.visit_static_mutability(&node.mutability);
+ v.visit_ident(&node.ident);
+ skip!(node.colon_token);
+ v.visit_type(&*node.ty);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_type<'ast, V>(v: &mut V, node: &'ast crate::ForeignItemType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.type_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.semi_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_generic_argument<'ast, V>(v: &mut V, node: &'ast crate::GenericArgument)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::GenericArgument::Lifetime(_binding_0) => {
+ v.visit_lifetime(_binding_0);
+ }
+ crate::GenericArgument::Type(_binding_0) => {
+ v.visit_type(_binding_0);
+ }
+ crate::GenericArgument::Const(_binding_0) => {
+ v.visit_expr(_binding_0);
+ }
+ crate::GenericArgument::AssocType(_binding_0) => {
+ v.visit_assoc_type(_binding_0);
+ }
+ crate::GenericArgument::AssocConst(_binding_0) => {
+ v.visit_assoc_const(_binding_0);
+ }
+ crate::GenericArgument::Constraint(_binding_0) => {
+ v.visit_constraint(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_generic_param<'ast, V>(v: &mut V, node: &'ast crate::GenericParam)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::GenericParam::Lifetime(_binding_0) => {
+ v.visit_lifetime_param(_binding_0);
+ }
+ crate::GenericParam::Type(_binding_0) => {
+ v.visit_type_param(_binding_0);
+ }
+ crate::GenericParam::Const(_binding_0) => {
+ v.visit_const_param(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_generics<'ast, V>(v: &mut V, node: &'ast crate::Generics)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.lt_token);
+ for el in Punctuated::pairs(&node.params) {
+ let it = el.value();
+ v.visit_generic_param(it);
+ }
+ skip!(node.gt_token);
+ if let Some(it) = &node.where_clause {
+ v.visit_where_clause(it);
+ }
+}
+pub fn visit_ident<'ast, V>(v: &mut V, node: &'ast proc_macro2::Ident)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_span(&node.span());
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item<'ast, V>(v: &mut V, node: &'ast crate::ImplItem)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::ImplItem::Const(_binding_0) => {
+ v.visit_impl_item_const(_binding_0);
+ }
+ crate::ImplItem::Fn(_binding_0) => {
+ v.visit_impl_item_fn(_binding_0);
+ }
+ crate::ImplItem::Type(_binding_0) => {
+ v.visit_impl_item_type(_binding_0);
+ }
+ crate::ImplItem::Macro(_binding_0) => {
+ v.visit_impl_item_macro(_binding_0);
+ }
+ crate::ImplItem::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_const<'ast, V>(v: &mut V, node: &'ast crate::ImplItemConst)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.defaultness);
+ skip!(node.const_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.colon_token);
+ v.visit_type(&node.ty);
+ skip!(node.eq_token);
+ v.visit_expr(&node.expr);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_fn<'ast, V>(v: &mut V, node: &'ast crate::ImplItemFn)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.defaultness);
+ v.visit_signature(&node.sig);
+ v.visit_block(&node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_macro<'ast, V>(v: &mut V, node: &'ast crate::ImplItemMacro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_macro(&node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_type<'ast, V>(v: &mut V, node: &'ast crate::ImplItemType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.defaultness);
+ skip!(node.type_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.eq_token);
+ v.visit_type(&node.ty);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_restriction<'ast, V>(v: &mut V, node: &'ast crate::ImplRestriction)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match *node {}
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_index<'ast, V>(v: &mut V, node: &'ast crate::Index)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.index);
+ v.visit_span(&node.span);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item<'ast, V>(v: &mut V, node: &'ast crate::Item)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Item::Const(_binding_0) => {
+ v.visit_item_const(_binding_0);
+ }
+ crate::Item::Enum(_binding_0) => {
+ v.visit_item_enum(_binding_0);
+ }
+ crate::Item::ExternCrate(_binding_0) => {
+ v.visit_item_extern_crate(_binding_0);
+ }
+ crate::Item::Fn(_binding_0) => {
+ v.visit_item_fn(_binding_0);
+ }
+ crate::Item::ForeignMod(_binding_0) => {
+ v.visit_item_foreign_mod(_binding_0);
+ }
+ crate::Item::Impl(_binding_0) => {
+ v.visit_item_impl(_binding_0);
+ }
+ crate::Item::Macro(_binding_0) => {
+ v.visit_item_macro(_binding_0);
+ }
+ crate::Item::Mod(_binding_0) => {
+ v.visit_item_mod(_binding_0);
+ }
+ crate::Item::Static(_binding_0) => {
+ v.visit_item_static(_binding_0);
+ }
+ crate::Item::Struct(_binding_0) => {
+ v.visit_item_struct(_binding_0);
+ }
+ crate::Item::Trait(_binding_0) => {
+ v.visit_item_trait(_binding_0);
+ }
+ crate::Item::TraitAlias(_binding_0) => {
+ v.visit_item_trait_alias(_binding_0);
+ }
+ crate::Item::Type(_binding_0) => {
+ v.visit_item_type(_binding_0);
+ }
+ crate::Item::Union(_binding_0) => {
+ v.visit_item_union(_binding_0);
+ }
+ crate::Item::Use(_binding_0) => {
+ v.visit_item_use(_binding_0);
+ }
+ crate::Item::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_const<'ast, V>(v: &mut V, node: &'ast crate::ItemConst)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.const_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.colon_token);
+ v.visit_type(&*node.ty);
+ skip!(node.eq_token);
+ v.visit_expr(&*node.expr);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_enum<'ast, V>(v: &mut V, node: &'ast crate::ItemEnum)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.enum_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.brace_token);
+ for el in Punctuated::pairs(&node.variants) {
+ let it = el.value();
+ v.visit_variant(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_extern_crate<'ast, V>(v: &mut V, node: &'ast crate::ItemExternCrate)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.extern_token);
+ skip!(node.crate_token);
+ v.visit_ident(&node.ident);
+ if let Some(it) = &node.rename {
+ skip!((it).0);
+ v.visit_ident(&(it).1);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_fn<'ast, V>(v: &mut V, node: &'ast crate::ItemFn)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ v.visit_signature(&node.sig);
+ v.visit_block(&*node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_foreign_mod<'ast, V>(v: &mut V, node: &'ast crate::ItemForeignMod)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.unsafety);
+ v.visit_abi(&node.abi);
+ skip!(node.brace_token);
+ for it in &node.items {
+ v.visit_foreign_item(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_impl<'ast, V>(v: &mut V, node: &'ast crate::ItemImpl)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.defaultness);
+ skip!(node.unsafety);
+ skip!(node.impl_token);
+ v.visit_generics(&node.generics);
+ if let Some(it) = &node.trait_ {
+ skip!((it).0);
+ v.visit_path(&(it).1);
+ skip!((it).2);
+ }
+ v.visit_type(&*node.self_ty);
+ skip!(node.brace_token);
+ for it in &node.items {
+ v.visit_impl_item(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_macro<'ast, V>(v: &mut V, node: &'ast crate::ItemMacro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.ident {
+ v.visit_ident(it);
+ }
+ v.visit_macro(&node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_mod<'ast, V>(v: &mut V, node: &'ast crate::ItemMod)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.unsafety);
+ skip!(node.mod_token);
+ v.visit_ident(&node.ident);
+ if let Some(it) = &node.content {
+ skip!((it).0);
+ for it in &(it).1 {
+ v.visit_item(it);
+ }
+ }
+ skip!(node.semi);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_static<'ast, V>(v: &mut V, node: &'ast crate::ItemStatic)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.static_token);
+ v.visit_static_mutability(&node.mutability);
+ v.visit_ident(&node.ident);
+ skip!(node.colon_token);
+ v.visit_type(&*node.ty);
+ skip!(node.eq_token);
+ v.visit_expr(&*node.expr);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_struct<'ast, V>(v: &mut V, node: &'ast crate::ItemStruct)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.struct_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ v.visit_fields(&node.fields);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_trait<'ast, V>(v: &mut V, node: &'ast crate::ItemTrait)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.unsafety);
+ skip!(node.auto_token);
+ if let Some(it) = &node.restriction {
+ v.visit_impl_restriction(it);
+ }
+ skip!(node.trait_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.colon_token);
+ for el in Punctuated::pairs(&node.supertraits) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+ skip!(node.brace_token);
+ for it in &node.items {
+ v.visit_trait_item(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_trait_alias<'ast, V>(v: &mut V, node: &'ast crate::ItemTraitAlias)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.trait_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.eq_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_type<'ast, V>(v: &mut V, node: &'ast crate::ItemType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.type_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.eq_token);
+ v.visit_type(&*node.ty);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_union<'ast, V>(v: &mut V, node: &'ast crate::ItemUnion)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.union_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ v.visit_fields_named(&node.fields);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_use<'ast, V>(v: &mut V, node: &'ast crate::ItemUse)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_visibility(&node.vis);
+ skip!(node.use_token);
+ skip!(node.leading_colon);
+ v.visit_use_tree(&node.tree);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_label<'ast, V>(v: &mut V, node: &'ast crate::Label)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_lifetime(&node.name);
+ skip!(node.colon_token);
+}
+pub fn visit_lifetime<'ast, V>(v: &mut V, node: &'ast crate::Lifetime)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_span(&node.apostrophe);
+ v.visit_ident(&node.ident);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_lifetime_param<'ast, V>(v: &mut V, node: &'ast crate::LifetimeParam)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_lifetime(&node.lifetime);
+ skip!(node.colon_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_lifetime(it);
+ }
+}
+pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast crate::Lit)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Lit::Str(_binding_0) => {
+ v.visit_lit_str(_binding_0);
+ }
+ crate::Lit::ByteStr(_binding_0) => {
+ v.visit_lit_byte_str(_binding_0);
+ }
+ crate::Lit::CStr(_binding_0) => {
+ v.visit_lit_cstr(_binding_0);
+ }
+ crate::Lit::Byte(_binding_0) => {
+ v.visit_lit_byte(_binding_0);
+ }
+ crate::Lit::Char(_binding_0) => {
+ v.visit_lit_char(_binding_0);
+ }
+ crate::Lit::Int(_binding_0) => {
+ v.visit_lit_int(_binding_0);
+ }
+ crate::Lit::Float(_binding_0) => {
+ v.visit_lit_float(_binding_0);
+ }
+ crate::Lit::Bool(_binding_0) => {
+ v.visit_lit_bool(_binding_0);
+ }
+ crate::Lit::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast crate::LitBool)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.value);
+ v.visit_span(&node.span);
+}
+pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast crate::LitByte)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast crate::LitByteStr)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+pub fn visit_lit_cstr<'ast, V>(v: &mut V, node: &'ast crate::LitCStr)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast crate::LitChar)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast crate::LitFloat)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast crate::LitInt)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast crate::LitStr)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_local<'ast, V>(v: &mut V, node: &'ast crate::Local)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.let_token);
+ v.visit_pat(&node.pat);
+ if let Some(it) = &node.init {
+ v.visit_local_init(it);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_local_init<'ast, V>(v: &mut V, node: &'ast crate::LocalInit)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.eq_token);
+ v.visit_expr(&*node.expr);
+ if let Some(it) = &node.diverge {
+ skip!((it).0);
+ v.visit_expr(&*(it).1);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_macro<'ast, V>(v: &mut V, node: &'ast crate::Macro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_path(&node.path);
+ skip!(node.bang_token);
+ v.visit_macro_delimiter(&node.delimiter);
+ skip!(node.tokens);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_macro_delimiter<'ast, V>(v: &mut V, node: &'ast crate::MacroDelimiter)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::MacroDelimiter::Paren(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::MacroDelimiter::Brace(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::MacroDelimiter::Bracket(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_member<'ast, V>(v: &mut V, node: &'ast crate::Member)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Member::Named(_binding_0) => {
+ v.visit_ident(_binding_0);
+ }
+ crate::Member::Unnamed(_binding_0) => {
+ v.visit_index(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_meta<'ast, V>(v: &mut V, node: &'ast crate::Meta)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Meta::Path(_binding_0) => {
+ v.visit_path(_binding_0);
+ }
+ crate::Meta::List(_binding_0) => {
+ v.visit_meta_list(_binding_0);
+ }
+ crate::Meta::NameValue(_binding_0) => {
+ v.visit_meta_name_value(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_meta_list<'ast, V>(v: &mut V, node: &'ast crate::MetaList)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_path(&node.path);
+ v.visit_macro_delimiter(&node.delimiter);
+ skip!(node.tokens);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_meta_name_value<'ast, V>(v: &mut V, node: &'ast crate::MetaNameValue)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_path(&node.path);
+ skip!(node.eq_token);
+ v.visit_expr(&node.value);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_parenthesized_generic_arguments<'ast, V>(
+ v: &mut V,
+ node: &'ast crate::ParenthesizedGenericArguments,
+)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.inputs) {
+ let it = el.value();
+ v.visit_type(it);
+ }
+ v.visit_return_type(&node.output);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat<'ast, V>(v: &mut V, node: &'ast crate::Pat)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Pat::Const(_binding_0) => {
+ v.visit_expr_const(_binding_0);
+ }
+ crate::Pat::Ident(_binding_0) => {
+ v.visit_pat_ident(_binding_0);
+ }
+ crate::Pat::Lit(_binding_0) => {
+ v.visit_expr_lit(_binding_0);
+ }
+ crate::Pat::Macro(_binding_0) => {
+ v.visit_expr_macro(_binding_0);
+ }
+ crate::Pat::Or(_binding_0) => {
+ v.visit_pat_or(_binding_0);
+ }
+ crate::Pat::Paren(_binding_0) => {
+ v.visit_pat_paren(_binding_0);
+ }
+ crate::Pat::Path(_binding_0) => {
+ v.visit_expr_path(_binding_0);
+ }
+ crate::Pat::Range(_binding_0) => {
+ v.visit_expr_range(_binding_0);
+ }
+ crate::Pat::Reference(_binding_0) => {
+ v.visit_pat_reference(_binding_0);
+ }
+ crate::Pat::Rest(_binding_0) => {
+ v.visit_pat_rest(_binding_0);
+ }
+ crate::Pat::Slice(_binding_0) => {
+ v.visit_pat_slice(_binding_0);
+ }
+ crate::Pat::Struct(_binding_0) => {
+ v.visit_pat_struct(_binding_0);
+ }
+ crate::Pat::Tuple(_binding_0) => {
+ v.visit_pat_tuple(_binding_0);
+ }
+ crate::Pat::TupleStruct(_binding_0) => {
+ v.visit_pat_tuple_struct(_binding_0);
+ }
+ crate::Pat::Type(_binding_0) => {
+ v.visit_pat_type(_binding_0);
+ }
+ crate::Pat::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::Pat::Wild(_binding_0) => {
+ v.visit_pat_wild(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_ident<'ast, V>(v: &mut V, node: &'ast crate::PatIdent)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.by_ref);
+ skip!(node.mutability);
+ v.visit_ident(&node.ident);
+ if let Some(it) = &node.subpat {
+ skip!((it).0);
+ v.visit_pat(&*(it).1);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_or<'ast, V>(v: &mut V, node: &'ast crate::PatOr)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.leading_vert);
+ for el in Punctuated::pairs(&node.cases) {
+ let it = el.value();
+ v.visit_pat(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_paren<'ast, V>(v: &mut V, node: &'ast crate::PatParen)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.paren_token);
+ v.visit_pat(&*node.pat);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_reference<'ast, V>(v: &mut V, node: &'ast crate::PatReference)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.and_token);
+ skip!(node.mutability);
+ v.visit_pat(&*node.pat);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_rest<'ast, V>(v: &mut V, node: &'ast crate::PatRest)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.dot2_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_slice<'ast, V>(v: &mut V, node: &'ast crate::PatSlice)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.bracket_token);
+ for el in Punctuated::pairs(&node.elems) {
+ let it = el.value();
+ v.visit_pat(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_struct<'ast, V>(v: &mut V, node: &'ast crate::PatStruct)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.qself {
+ v.visit_qself(it);
+ }
+ v.visit_path(&node.path);
+ skip!(node.brace_token);
+ for el in Punctuated::pairs(&node.fields) {
+ let it = el.value();
+ v.visit_field_pat(it);
+ }
+ if let Some(it) = &node.rest {
+ v.visit_pat_rest(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_tuple<'ast, V>(v: &mut V, node: &'ast crate::PatTuple)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.elems) {
+ let it = el.value();
+ v.visit_pat(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_tuple_struct<'ast, V>(v: &mut V, node: &'ast crate::PatTupleStruct)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.qself {
+ v.visit_qself(it);
+ }
+ v.visit_path(&node.path);
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.elems) {
+ let it = el.value();
+ v.visit_pat(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_type<'ast, V>(v: &mut V, node: &'ast crate::PatType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_pat(&*node.pat);
+ skip!(node.colon_token);
+ v.visit_type(&*node.ty);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_wild<'ast, V>(v: &mut V, node: &'ast crate::PatWild)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.underscore_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_path<'ast, V>(v: &mut V, node: &'ast crate::Path)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.leading_colon);
+ for el in Punctuated::pairs(&node.segments) {
+ let it = el.value();
+ v.visit_path_segment(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_path_arguments<'ast, V>(v: &mut V, node: &'ast crate::PathArguments)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::PathArguments::None => {}
+ crate::PathArguments::AngleBracketed(_binding_0) => {
+ v.visit_angle_bracketed_generic_arguments(_binding_0);
+ }
+ crate::PathArguments::Parenthesized(_binding_0) => {
+ v.visit_parenthesized_generic_arguments(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_path_segment<'ast, V>(v: &mut V, node: &'ast crate::PathSegment)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_ident(&node.ident);
+ v.visit_path_arguments(&node.arguments);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_predicate_lifetime<'ast, V>(v: &mut V, node: &'ast crate::PredicateLifetime)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_lifetime(&node.lifetime);
+ skip!(node.colon_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_lifetime(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_predicate_type<'ast, V>(v: &mut V, node: &'ast crate::PredicateType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ if let Some(it) = &node.lifetimes {
+ v.visit_bound_lifetimes(it);
+ }
+ v.visit_type(&node.bounded_ty);
+ skip!(node.colon_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_qself<'ast, V>(v: &mut V, node: &'ast crate::QSelf)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.lt_token);
+ v.visit_type(&*node.ty);
+ skip!(node.position);
+ skip!(node.as_token);
+ skip!(node.gt_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_range_limits<'ast, V>(v: &mut V, node: &'ast crate::RangeLimits)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::RangeLimits::HalfOpen(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::RangeLimits::Closed(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_receiver<'ast, V>(v: &mut V, node: &'ast crate::Receiver)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.reference {
+ skip!((it).0);
+ if let Some(it) = &(it).1 {
+ v.visit_lifetime(it);
+ }
+ }
+ skip!(node.mutability);
+ skip!(node.self_token);
+ skip!(node.colon_token);
+ v.visit_type(&*node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_return_type<'ast, V>(v: &mut V, node: &'ast crate::ReturnType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::ReturnType::Default => {}
+ crate::ReturnType::Type(_binding_0, _binding_1) => {
+ skip!(_binding_0);
+ v.visit_type(&**_binding_1);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_signature<'ast, V>(v: &mut V, node: &'ast crate::Signature)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.constness);
+ skip!(node.asyncness);
+ skip!(node.unsafety);
+ if let Some(it) = &node.abi {
+ v.visit_abi(it);
+ }
+ skip!(node.fn_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.inputs) {
+ let it = el.value();
+ v.visit_fn_arg(it);
+ }
+ if let Some(it) = &node.variadic {
+ v.visit_variadic(it);
+ }
+ v.visit_return_type(&node.output);
+}
+pub fn visit_span<'ast, V>(v: &mut V, node: &proc_macro2::Span)
+where
+ V: Visit<'ast> + ?Sized,
+{}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_static_mutability<'ast, V>(v: &mut V, node: &'ast crate::StaticMutability)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::StaticMutability::Mut(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::StaticMutability::None => {}
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_stmt<'ast, V>(v: &mut V, node: &'ast crate::Stmt)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Stmt::Local(_binding_0) => {
+ v.visit_local(_binding_0);
+ }
+ crate::Stmt::Item(_binding_0) => {
+ v.visit_item(_binding_0);
+ }
+ crate::Stmt::Expr(_binding_0, _binding_1) => {
+ v.visit_expr(_binding_0);
+ skip!(_binding_1);
+ }
+ crate::Stmt::Macro(_binding_0) => {
+ v.visit_stmt_macro(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_stmt_macro<'ast, V>(v: &mut V, node: &'ast crate::StmtMacro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_macro(&node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_trait_bound<'ast, V>(v: &mut V, node: &'ast crate::TraitBound)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.paren_token);
+ v.visit_trait_bound_modifier(&node.modifier);
+ if let Some(it) = &node.lifetimes {
+ v.visit_bound_lifetimes(it);
+ }
+ v.visit_path(&node.path);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_trait_bound_modifier<'ast, V>(
+ v: &mut V,
+ node: &'ast crate::TraitBoundModifier,
+)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::TraitBoundModifier::None => {}
+ crate::TraitBoundModifier::Maybe(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item<'ast, V>(v: &mut V, node: &'ast crate::TraitItem)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::TraitItem::Const(_binding_0) => {
+ v.visit_trait_item_const(_binding_0);
+ }
+ crate::TraitItem::Fn(_binding_0) => {
+ v.visit_trait_item_fn(_binding_0);
+ }
+ crate::TraitItem::Type(_binding_0) => {
+ v.visit_trait_item_type(_binding_0);
+ }
+ crate::TraitItem::Macro(_binding_0) => {
+ v.visit_trait_item_macro(_binding_0);
+ }
+ crate::TraitItem::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_const<'ast, V>(v: &mut V, node: &'ast crate::TraitItemConst)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.const_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.colon_token);
+ v.visit_type(&node.ty);
+ if let Some(it) = &node.default {
+ skip!((it).0);
+ v.visit_expr(&(it).1);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_fn<'ast, V>(v: &mut V, node: &'ast crate::TraitItemFn)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_signature(&node.sig);
+ if let Some(it) = &node.default {
+ v.visit_block(it);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_macro<'ast, V>(v: &mut V, node: &'ast crate::TraitItemMacro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_macro(&node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_type<'ast, V>(v: &mut V, node: &'ast crate::TraitItemType)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ skip!(node.type_token);
+ v.visit_ident(&node.ident);
+ v.visit_generics(&node.generics);
+ skip!(node.colon_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+ if let Some(it) = &node.default {
+ skip!((it).0);
+ v.visit_type(&(it).1);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type<'ast, V>(v: &mut V, node: &'ast crate::Type)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Type::Array(_binding_0) => {
+ v.visit_type_array(_binding_0);
+ }
+ crate::Type::BareFn(_binding_0) => {
+ v.visit_type_bare_fn(_binding_0);
+ }
+ crate::Type::Group(_binding_0) => {
+ v.visit_type_group(_binding_0);
+ }
+ crate::Type::ImplTrait(_binding_0) => {
+ v.visit_type_impl_trait(_binding_0);
+ }
+ crate::Type::Infer(_binding_0) => {
+ v.visit_type_infer(_binding_0);
+ }
+ crate::Type::Macro(_binding_0) => {
+ v.visit_type_macro(_binding_0);
+ }
+ crate::Type::Never(_binding_0) => {
+ v.visit_type_never(_binding_0);
+ }
+ crate::Type::Paren(_binding_0) => {
+ v.visit_type_paren(_binding_0);
+ }
+ crate::Type::Path(_binding_0) => {
+ v.visit_type_path(_binding_0);
+ }
+ crate::Type::Ptr(_binding_0) => {
+ v.visit_type_ptr(_binding_0);
+ }
+ crate::Type::Reference(_binding_0) => {
+ v.visit_type_reference(_binding_0);
+ }
+ crate::Type::Slice(_binding_0) => {
+ v.visit_type_slice(_binding_0);
+ }
+ crate::Type::TraitObject(_binding_0) => {
+ v.visit_type_trait_object(_binding_0);
+ }
+ crate::Type::Tuple(_binding_0) => {
+ v.visit_type_tuple(_binding_0);
+ }
+ crate::Type::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_array<'ast, V>(v: &mut V, node: &'ast crate::TypeArray)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.bracket_token);
+ v.visit_type(&*node.elem);
+ skip!(node.semi_token);
+ v.visit_expr(&node.len);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_bare_fn<'ast, V>(v: &mut V, node: &'ast crate::TypeBareFn)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ if let Some(it) = &node.lifetimes {
+ v.visit_bound_lifetimes(it);
+ }
+ skip!(node.unsafety);
+ if let Some(it) = &node.abi {
+ v.visit_abi(it);
+ }
+ skip!(node.fn_token);
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.inputs) {
+ let it = el.value();
+ v.visit_bare_fn_arg(it);
+ }
+ if let Some(it) = &node.variadic {
+ v.visit_bare_variadic(it);
+ }
+ v.visit_return_type(&node.output);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_group<'ast, V>(v: &mut V, node: &'ast crate::TypeGroup)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.group_token);
+ v.visit_type(&*node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_impl_trait<'ast, V>(v: &mut V, node: &'ast crate::TypeImplTrait)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.impl_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_infer<'ast, V>(v: &mut V, node: &'ast crate::TypeInfer)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.underscore_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_macro<'ast, V>(v: &mut V, node: &'ast crate::TypeMacro)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_macro(&node.mac);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_never<'ast, V>(v: &mut V, node: &'ast crate::TypeNever)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.bang_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_param<'ast, V>(v: &mut V, node: &'ast crate::TypeParam)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_ident(&node.ident);
+ skip!(node.colon_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+ skip!(node.eq_token);
+ if let Some(it) = &node.default {
+ v.visit_type(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_param_bound<'ast, V>(v: &mut V, node: &'ast crate::TypeParamBound)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::TypeParamBound::Trait(_binding_0) => {
+ v.visit_trait_bound(_binding_0);
+ }
+ crate::TypeParamBound::Lifetime(_binding_0) => {
+ v.visit_lifetime(_binding_0);
+ }
+ crate::TypeParamBound::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_paren<'ast, V>(v: &mut V, node: &'ast crate::TypeParen)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.paren_token);
+ v.visit_type(&*node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_path<'ast, V>(v: &mut V, node: &'ast crate::TypePath)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ if let Some(it) = &node.qself {
+ v.visit_qself(it);
+ }
+ v.visit_path(&node.path);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_ptr<'ast, V>(v: &mut V, node: &'ast crate::TypePtr)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.star_token);
+ skip!(node.const_token);
+ skip!(node.mutability);
+ v.visit_type(&*node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_reference<'ast, V>(v: &mut V, node: &'ast crate::TypeReference)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.and_token);
+ if let Some(it) = &node.lifetime {
+ v.visit_lifetime(it);
+ }
+ skip!(node.mutability);
+ v.visit_type(&*node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_slice<'ast, V>(v: &mut V, node: &'ast crate::TypeSlice)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.bracket_token);
+ v.visit_type(&*node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_trait_object<'ast, V>(v: &mut V, node: &'ast crate::TypeTraitObject)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.dyn_token);
+ for el in Punctuated::pairs(&node.bounds) {
+ let it = el.value();
+ v.visit_type_param_bound(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_tuple<'ast, V>(v: &mut V, node: &'ast crate::TypeTuple)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.paren_token);
+ for el in Punctuated::pairs(&node.elems) {
+ let it = el.value();
+ v.visit_type(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_un_op<'ast, V>(v: &mut V, node: &'ast crate::UnOp)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::UnOp::Deref(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::UnOp::Not(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::UnOp::Neg(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_glob<'ast, V>(v: &mut V, node: &'ast crate::UseGlob)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.star_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_group<'ast, V>(v: &mut V, node: &'ast crate::UseGroup)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.brace_token);
+ for el in Punctuated::pairs(&node.items) {
+ let it = el.value();
+ v.visit_use_tree(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_name<'ast, V>(v: &mut V, node: &'ast crate::UseName)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_ident(&node.ident);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_path<'ast, V>(v: &mut V, node: &'ast crate::UsePath)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_ident(&node.ident);
+ skip!(node.colon2_token);
+ v.visit_use_tree(&*node.tree);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_rename<'ast, V>(v: &mut V, node: &'ast crate::UseRename)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ v.visit_ident(&node.ident);
+ skip!(node.as_token);
+ v.visit_ident(&node.rename);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_tree<'ast, V>(v: &mut V, node: &'ast crate::UseTree)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::UseTree::Path(_binding_0) => {
+ v.visit_use_path(_binding_0);
+ }
+ crate::UseTree::Name(_binding_0) => {
+ v.visit_use_name(_binding_0);
+ }
+ crate::UseTree::Rename(_binding_0) => {
+ v.visit_use_rename(_binding_0);
+ }
+ crate::UseTree::Glob(_binding_0) => {
+ v.visit_use_glob(_binding_0);
+ }
+ crate::UseTree::Group(_binding_0) => {
+ v.visit_use_group(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_variadic<'ast, V>(v: &mut V, node: &'ast crate::Variadic)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ if let Some(it) = &node.pat {
+ v.visit_pat(&*(it).0);
+ skip!((it).1);
+ }
+ skip!(node.dots);
+ skip!(node.comma);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_variant<'ast, V>(v: &mut V, node: &'ast crate::Variant)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ for it in &node.attrs {
+ v.visit_attribute(it);
+ }
+ v.visit_ident(&node.ident);
+ v.visit_fields(&node.fields);
+ if let Some(it) = &node.discriminant {
+ skip!((it).0);
+ v.visit_expr(&(it).1);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_vis_restricted<'ast, V>(v: &mut V, node: &'ast crate::VisRestricted)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.pub_token);
+ skip!(node.paren_token);
+ skip!(node.in_token);
+ v.visit_path(&*node.path);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_visibility<'ast, V>(v: &mut V, node: &'ast crate::Visibility)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::Visibility::Public(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::Visibility::Restricted(_binding_0) => {
+ v.visit_vis_restricted(_binding_0);
+ }
+ crate::Visibility::Inherited => {}
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_where_clause<'ast, V>(v: &mut V, node: &'ast crate::WhereClause)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ skip!(node.where_token);
+ for el in Punctuated::pairs(&node.predicates) {
+ let it = el.value();
+ v.visit_where_predicate(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_where_predicate<'ast, V>(v: &mut V, node: &'ast crate::WherePredicate)
+where
+ V: Visit<'ast> + ?Sized,
+{
+ match node {
+ crate::WherePredicate::Lifetime(_binding_0) => {
+ v.visit_predicate_lifetime(_binding_0);
+ }
+ crate::WherePredicate::Type(_binding_0) => {
+ v.visit_predicate_type(_binding_0);
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/gen/visit_mut.rs b/rust/hw/char/pl011/vendor/syn/src/gen/visit_mut.rs
new file mode 100644
index 0000000000..9d457f1640
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/gen/visit_mut.rs
@@ -0,0 +1,3855 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#![allow(unused_variables)]
+#![allow(clippy::needless_pass_by_ref_mut)]
+#[cfg(any(feature = "full", feature = "derive"))]
+use crate::punctuated::Punctuated;
+#[cfg(feature = "full")]
+macro_rules! full {
+ ($e:expr) => {
+ $e
+ };
+}
+#[cfg(all(feature = "derive", not(feature = "full")))]
+macro_rules! full {
+ ($e:expr) => {
+ unreachable!()
+ };
+}
+macro_rules! skip {
+ ($($tt:tt)*) => {};
+}
+/// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
+/// place.
+///
+/// See the [module documentation] for details.
+///
+/// [module documentation]: self
+pub trait VisitMut {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_abi_mut(&mut self, i: &mut crate::Abi) {
+ visit_abi_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_angle_bracketed_generic_arguments_mut(
+ &mut self,
+ i: &mut crate::AngleBracketedGenericArguments,
+ ) {
+ visit_angle_bracketed_generic_arguments_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_arm_mut(&mut self, i: &mut crate::Arm) {
+ visit_arm_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_assoc_const_mut(&mut self, i: &mut crate::AssocConst) {
+ visit_assoc_const_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_assoc_type_mut(&mut self, i: &mut crate::AssocType) {
+ visit_assoc_type_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_attr_style_mut(&mut self, i: &mut crate::AttrStyle) {
+ visit_attr_style_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_attribute_mut(&mut self, i: &mut crate::Attribute) {
+ visit_attribute_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bare_fn_arg_mut(&mut self, i: &mut crate::BareFnArg) {
+ visit_bare_fn_arg_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bare_variadic_mut(&mut self, i: &mut crate::BareVariadic) {
+ visit_bare_variadic_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bin_op_mut(&mut self, i: &mut crate::BinOp) {
+ visit_bin_op_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_block_mut(&mut self, i: &mut crate::Block) {
+ visit_block_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_bound_lifetimes_mut(&mut self, i: &mut crate::BoundLifetimes) {
+ visit_bound_lifetimes_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_const_param_mut(&mut self, i: &mut crate::ConstParam) {
+ visit_const_param_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_constraint_mut(&mut self, i: &mut crate::Constraint) {
+ visit_constraint_mut(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data_mut(&mut self, i: &mut crate::Data) {
+ visit_data_mut(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data_enum_mut(&mut self, i: &mut crate::DataEnum) {
+ visit_data_enum_mut(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data_struct_mut(&mut self, i: &mut crate::DataStruct) {
+ visit_data_struct_mut(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_data_union_mut(&mut self, i: &mut crate::DataUnion) {
+ visit_data_union_mut(self, i);
+ }
+ #[cfg(feature = "derive")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+ fn visit_derive_input_mut(&mut self, i: &mut crate::DeriveInput) {
+ visit_derive_input_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_mut(&mut self, i: &mut crate::Expr) {
+ visit_expr_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_array_mut(&mut self, i: &mut crate::ExprArray) {
+ visit_expr_array_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_assign_mut(&mut self, i: &mut crate::ExprAssign) {
+ visit_expr_assign_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_async_mut(&mut self, i: &mut crate::ExprAsync) {
+ visit_expr_async_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_await_mut(&mut self, i: &mut crate::ExprAwait) {
+ visit_expr_await_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_binary_mut(&mut self, i: &mut crate::ExprBinary) {
+ visit_expr_binary_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_block_mut(&mut self, i: &mut crate::ExprBlock) {
+ visit_expr_block_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_break_mut(&mut self, i: &mut crate::ExprBreak) {
+ visit_expr_break_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_call_mut(&mut self, i: &mut crate::ExprCall) {
+ visit_expr_call_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_cast_mut(&mut self, i: &mut crate::ExprCast) {
+ visit_expr_cast_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_closure_mut(&mut self, i: &mut crate::ExprClosure) {
+ visit_expr_closure_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_const_mut(&mut self, i: &mut crate::ExprConst) {
+ visit_expr_const_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_continue_mut(&mut self, i: &mut crate::ExprContinue) {
+ visit_expr_continue_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_field_mut(&mut self, i: &mut crate::ExprField) {
+ visit_expr_field_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_for_loop_mut(&mut self, i: &mut crate::ExprForLoop) {
+ visit_expr_for_loop_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_group_mut(&mut self, i: &mut crate::ExprGroup) {
+ visit_expr_group_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_if_mut(&mut self, i: &mut crate::ExprIf) {
+ visit_expr_if_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_index_mut(&mut self, i: &mut crate::ExprIndex) {
+ visit_expr_index_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_infer_mut(&mut self, i: &mut crate::ExprInfer) {
+ visit_expr_infer_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_let_mut(&mut self, i: &mut crate::ExprLet) {
+ visit_expr_let_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_lit_mut(&mut self, i: &mut crate::ExprLit) {
+ visit_expr_lit_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_loop_mut(&mut self, i: &mut crate::ExprLoop) {
+ visit_expr_loop_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_macro_mut(&mut self, i: &mut crate::ExprMacro) {
+ visit_expr_macro_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_match_mut(&mut self, i: &mut crate::ExprMatch) {
+ visit_expr_match_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_method_call_mut(&mut self, i: &mut crate::ExprMethodCall) {
+ visit_expr_method_call_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_paren_mut(&mut self, i: &mut crate::ExprParen) {
+ visit_expr_paren_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_path_mut(&mut self, i: &mut crate::ExprPath) {
+ visit_expr_path_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_range_mut(&mut self, i: &mut crate::ExprRange) {
+ visit_expr_range_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_reference_mut(&mut self, i: &mut crate::ExprReference) {
+ visit_expr_reference_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_repeat_mut(&mut self, i: &mut crate::ExprRepeat) {
+ visit_expr_repeat_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_return_mut(&mut self, i: &mut crate::ExprReturn) {
+ visit_expr_return_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_struct_mut(&mut self, i: &mut crate::ExprStruct) {
+ visit_expr_struct_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_try_mut(&mut self, i: &mut crate::ExprTry) {
+ visit_expr_try_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_try_block_mut(&mut self, i: &mut crate::ExprTryBlock) {
+ visit_expr_try_block_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_tuple_mut(&mut self, i: &mut crate::ExprTuple) {
+ visit_expr_tuple_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_expr_unary_mut(&mut self, i: &mut crate::ExprUnary) {
+ visit_expr_unary_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_unsafe_mut(&mut self, i: &mut crate::ExprUnsafe) {
+ visit_expr_unsafe_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_while_mut(&mut self, i: &mut crate::ExprWhile) {
+ visit_expr_while_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_expr_yield_mut(&mut self, i: &mut crate::ExprYield) {
+ visit_expr_yield_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_field_mut(&mut self, i: &mut crate::Field) {
+ visit_field_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_field_mutability_mut(&mut self, i: &mut crate::FieldMutability) {
+ visit_field_mutability_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_field_pat_mut(&mut self, i: &mut crate::FieldPat) {
+ visit_field_pat_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_field_value_mut(&mut self, i: &mut crate::FieldValue) {
+ visit_field_value_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_fields_mut(&mut self, i: &mut crate::Fields) {
+ visit_fields_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_fields_named_mut(&mut self, i: &mut crate::FieldsNamed) {
+ visit_fields_named_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_fields_unnamed_mut(&mut self, i: &mut crate::FieldsUnnamed) {
+ visit_fields_unnamed_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_file_mut(&mut self, i: &mut crate::File) {
+ visit_file_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_fn_arg_mut(&mut self, i: &mut crate::FnArg) {
+ visit_fn_arg_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_mut(&mut self, i: &mut crate::ForeignItem) {
+ visit_foreign_item_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_fn_mut(&mut self, i: &mut crate::ForeignItemFn) {
+ visit_foreign_item_fn_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_macro_mut(&mut self, i: &mut crate::ForeignItemMacro) {
+ visit_foreign_item_macro_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_static_mut(&mut self, i: &mut crate::ForeignItemStatic) {
+ visit_foreign_item_static_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_foreign_item_type_mut(&mut self, i: &mut crate::ForeignItemType) {
+ visit_foreign_item_type_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_generic_argument_mut(&mut self, i: &mut crate::GenericArgument) {
+ visit_generic_argument_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_generic_param_mut(&mut self, i: &mut crate::GenericParam) {
+ visit_generic_param_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_generics_mut(&mut self, i: &mut crate::Generics) {
+ visit_generics_mut(self, i);
+ }
+ fn visit_ident_mut(&mut self, i: &mut proc_macro2::Ident) {
+ visit_ident_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_mut(&mut self, i: &mut crate::ImplItem) {
+ visit_impl_item_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_const_mut(&mut self, i: &mut crate::ImplItemConst) {
+ visit_impl_item_const_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_fn_mut(&mut self, i: &mut crate::ImplItemFn) {
+ visit_impl_item_fn_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_macro_mut(&mut self, i: &mut crate::ImplItemMacro) {
+ visit_impl_item_macro_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_item_type_mut(&mut self, i: &mut crate::ImplItemType) {
+ visit_impl_item_type_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_impl_restriction_mut(&mut self, i: &mut crate::ImplRestriction) {
+ visit_impl_restriction_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_index_mut(&mut self, i: &mut crate::Index) {
+ visit_index_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_mut(&mut self, i: &mut crate::Item) {
+ visit_item_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_const_mut(&mut self, i: &mut crate::ItemConst) {
+ visit_item_const_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_enum_mut(&mut self, i: &mut crate::ItemEnum) {
+ visit_item_enum_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_extern_crate_mut(&mut self, i: &mut crate::ItemExternCrate) {
+ visit_item_extern_crate_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_fn_mut(&mut self, i: &mut crate::ItemFn) {
+ visit_item_fn_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_foreign_mod_mut(&mut self, i: &mut crate::ItemForeignMod) {
+ visit_item_foreign_mod_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_impl_mut(&mut self, i: &mut crate::ItemImpl) {
+ visit_item_impl_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_macro_mut(&mut self, i: &mut crate::ItemMacro) {
+ visit_item_macro_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_mod_mut(&mut self, i: &mut crate::ItemMod) {
+ visit_item_mod_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_static_mut(&mut self, i: &mut crate::ItemStatic) {
+ visit_item_static_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_struct_mut(&mut self, i: &mut crate::ItemStruct) {
+ visit_item_struct_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_trait_mut(&mut self, i: &mut crate::ItemTrait) {
+ visit_item_trait_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_trait_alias_mut(&mut self, i: &mut crate::ItemTraitAlias) {
+ visit_item_trait_alias_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_type_mut(&mut self, i: &mut crate::ItemType) {
+ visit_item_type_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_union_mut(&mut self, i: &mut crate::ItemUnion) {
+ visit_item_union_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_item_use_mut(&mut self, i: &mut crate::ItemUse) {
+ visit_item_use_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_label_mut(&mut self, i: &mut crate::Label) {
+ visit_label_mut(self, i);
+ }
+ fn visit_lifetime_mut(&mut self, i: &mut crate::Lifetime) {
+ visit_lifetime_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_lifetime_param_mut(&mut self, i: &mut crate::LifetimeParam) {
+ visit_lifetime_param_mut(self, i);
+ }
+ fn visit_lit_mut(&mut self, i: &mut crate::Lit) {
+ visit_lit_mut(self, i);
+ }
+ fn visit_lit_bool_mut(&mut self, i: &mut crate::LitBool) {
+ visit_lit_bool_mut(self, i);
+ }
+ fn visit_lit_byte_mut(&mut self, i: &mut crate::LitByte) {
+ visit_lit_byte_mut(self, i);
+ }
+ fn visit_lit_byte_str_mut(&mut self, i: &mut crate::LitByteStr) {
+ visit_lit_byte_str_mut(self, i);
+ }
+ fn visit_lit_cstr_mut(&mut self, i: &mut crate::LitCStr) {
+ visit_lit_cstr_mut(self, i);
+ }
+ fn visit_lit_char_mut(&mut self, i: &mut crate::LitChar) {
+ visit_lit_char_mut(self, i);
+ }
+ fn visit_lit_float_mut(&mut self, i: &mut crate::LitFloat) {
+ visit_lit_float_mut(self, i);
+ }
+ fn visit_lit_int_mut(&mut self, i: &mut crate::LitInt) {
+ visit_lit_int_mut(self, i);
+ }
+ fn visit_lit_str_mut(&mut self, i: &mut crate::LitStr) {
+ visit_lit_str_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_local_mut(&mut self, i: &mut crate::Local) {
+ visit_local_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_local_init_mut(&mut self, i: &mut crate::LocalInit) {
+ visit_local_init_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_macro_mut(&mut self, i: &mut crate::Macro) {
+ visit_macro_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_macro_delimiter_mut(&mut self, i: &mut crate::MacroDelimiter) {
+ visit_macro_delimiter_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_member_mut(&mut self, i: &mut crate::Member) {
+ visit_member_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_meta_mut(&mut self, i: &mut crate::Meta) {
+ visit_meta_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_meta_list_mut(&mut self, i: &mut crate::MetaList) {
+ visit_meta_list_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_meta_name_value_mut(&mut self, i: &mut crate::MetaNameValue) {
+ visit_meta_name_value_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_parenthesized_generic_arguments_mut(
+ &mut self,
+ i: &mut crate::ParenthesizedGenericArguments,
+ ) {
+ visit_parenthesized_generic_arguments_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_mut(&mut self, i: &mut crate::Pat) {
+ visit_pat_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_ident_mut(&mut self, i: &mut crate::PatIdent) {
+ visit_pat_ident_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_or_mut(&mut self, i: &mut crate::PatOr) {
+ visit_pat_or_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_paren_mut(&mut self, i: &mut crate::PatParen) {
+ visit_pat_paren_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_reference_mut(&mut self, i: &mut crate::PatReference) {
+ visit_pat_reference_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_rest_mut(&mut self, i: &mut crate::PatRest) {
+ visit_pat_rest_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_slice_mut(&mut self, i: &mut crate::PatSlice) {
+ visit_pat_slice_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_struct_mut(&mut self, i: &mut crate::PatStruct) {
+ visit_pat_struct_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_tuple_mut(&mut self, i: &mut crate::PatTuple) {
+ visit_pat_tuple_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_tuple_struct_mut(&mut self, i: &mut crate::PatTupleStruct) {
+ visit_pat_tuple_struct_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_type_mut(&mut self, i: &mut crate::PatType) {
+ visit_pat_type_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_pat_wild_mut(&mut self, i: &mut crate::PatWild) {
+ visit_pat_wild_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_path_mut(&mut self, i: &mut crate::Path) {
+ visit_path_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_path_arguments_mut(&mut self, i: &mut crate::PathArguments) {
+ visit_path_arguments_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_path_segment_mut(&mut self, i: &mut crate::PathSegment) {
+ visit_path_segment_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_predicate_lifetime_mut(&mut self, i: &mut crate::PredicateLifetime) {
+ visit_predicate_lifetime_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_predicate_type_mut(&mut self, i: &mut crate::PredicateType) {
+ visit_predicate_type_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_qself_mut(&mut self, i: &mut crate::QSelf) {
+ visit_qself_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_range_limits_mut(&mut self, i: &mut crate::RangeLimits) {
+ visit_range_limits_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_receiver_mut(&mut self, i: &mut crate::Receiver) {
+ visit_receiver_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_return_type_mut(&mut self, i: &mut crate::ReturnType) {
+ visit_return_type_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_signature_mut(&mut self, i: &mut crate::Signature) {
+ visit_signature_mut(self, i);
+ }
+ fn visit_span_mut(&mut self, i: &mut proc_macro2::Span) {
+ visit_span_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_static_mutability_mut(&mut self, i: &mut crate::StaticMutability) {
+ visit_static_mutability_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_stmt_mut(&mut self, i: &mut crate::Stmt) {
+ visit_stmt_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_stmt_macro_mut(&mut self, i: &mut crate::StmtMacro) {
+ visit_stmt_macro_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_trait_bound_mut(&mut self, i: &mut crate::TraitBound) {
+ visit_trait_bound_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_trait_bound_modifier_mut(&mut self, i: &mut crate::TraitBoundModifier) {
+ visit_trait_bound_modifier_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_mut(&mut self, i: &mut crate::TraitItem) {
+ visit_trait_item_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_const_mut(&mut self, i: &mut crate::TraitItemConst) {
+ visit_trait_item_const_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_fn_mut(&mut self, i: &mut crate::TraitItemFn) {
+ visit_trait_item_fn_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_macro_mut(&mut self, i: &mut crate::TraitItemMacro) {
+ visit_trait_item_macro_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_trait_item_type_mut(&mut self, i: &mut crate::TraitItemType) {
+ visit_trait_item_type_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_mut(&mut self, i: &mut crate::Type) {
+ visit_type_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_array_mut(&mut self, i: &mut crate::TypeArray) {
+ visit_type_array_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_bare_fn_mut(&mut self, i: &mut crate::TypeBareFn) {
+ visit_type_bare_fn_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_group_mut(&mut self, i: &mut crate::TypeGroup) {
+ visit_type_group_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_impl_trait_mut(&mut self, i: &mut crate::TypeImplTrait) {
+ visit_type_impl_trait_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_infer_mut(&mut self, i: &mut crate::TypeInfer) {
+ visit_type_infer_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_macro_mut(&mut self, i: &mut crate::TypeMacro) {
+ visit_type_macro_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_never_mut(&mut self, i: &mut crate::TypeNever) {
+ visit_type_never_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_param_mut(&mut self, i: &mut crate::TypeParam) {
+ visit_type_param_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_param_bound_mut(&mut self, i: &mut crate::TypeParamBound) {
+ visit_type_param_bound_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_paren_mut(&mut self, i: &mut crate::TypeParen) {
+ visit_type_paren_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_path_mut(&mut self, i: &mut crate::TypePath) {
+ visit_type_path_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_ptr_mut(&mut self, i: &mut crate::TypePtr) {
+ visit_type_ptr_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_reference_mut(&mut self, i: &mut crate::TypeReference) {
+ visit_type_reference_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_slice_mut(&mut self, i: &mut crate::TypeSlice) {
+ visit_type_slice_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_trait_object_mut(&mut self, i: &mut crate::TypeTraitObject) {
+ visit_type_trait_object_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_type_tuple_mut(&mut self, i: &mut crate::TypeTuple) {
+ visit_type_tuple_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_un_op_mut(&mut self, i: &mut crate::UnOp) {
+ visit_un_op_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_glob_mut(&mut self, i: &mut crate::UseGlob) {
+ visit_use_glob_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_group_mut(&mut self, i: &mut crate::UseGroup) {
+ visit_use_group_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_name_mut(&mut self, i: &mut crate::UseName) {
+ visit_use_name_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_path_mut(&mut self, i: &mut crate::UsePath) {
+ visit_use_path_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_rename_mut(&mut self, i: &mut crate::UseRename) {
+ visit_use_rename_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_use_tree_mut(&mut self, i: &mut crate::UseTree) {
+ visit_use_tree_mut(self, i);
+ }
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ fn visit_variadic_mut(&mut self, i: &mut crate::Variadic) {
+ visit_variadic_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_variant_mut(&mut self, i: &mut crate::Variant) {
+ visit_variant_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_vis_restricted_mut(&mut self, i: &mut crate::VisRestricted) {
+ visit_vis_restricted_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_visibility_mut(&mut self, i: &mut crate::Visibility) {
+ visit_visibility_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_where_clause_mut(&mut self, i: &mut crate::WhereClause) {
+ visit_where_clause_mut(self, i);
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+ fn visit_where_predicate_mut(&mut self, i: &mut crate::WherePredicate) {
+ visit_where_predicate_mut(self, i);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_abi_mut<V>(v: &mut V, node: &mut crate::Abi)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.extern_token);
+ if let Some(it) = &mut node.name {
+ v.visit_lit_str_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_angle_bracketed_generic_arguments_mut<V>(
+ v: &mut V,
+ node: &mut crate::AngleBracketedGenericArguments,
+)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.colon2_token);
+ skip!(node.lt_token);
+ for mut el in Punctuated::pairs_mut(&mut node.args) {
+ let it = el.value_mut();
+ v.visit_generic_argument_mut(it);
+ }
+ skip!(node.gt_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_arm_mut<V>(v: &mut V, node: &mut crate::Arm)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_pat_mut(&mut node.pat);
+ if let Some(it) = &mut node.guard {
+ skip!((it).0);
+ v.visit_expr_mut(&mut *(it).1);
+ }
+ skip!(node.fat_arrow_token);
+ v.visit_expr_mut(&mut *node.body);
+ skip!(node.comma);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_assoc_const_mut<V>(v: &mut V, node: &mut crate::AssocConst)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_ident_mut(&mut node.ident);
+ if let Some(it) = &mut node.generics {
+ v.visit_angle_bracketed_generic_arguments_mut(it);
+ }
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut node.value);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_assoc_type_mut<V>(v: &mut V, node: &mut crate::AssocType)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_ident_mut(&mut node.ident);
+ if let Some(it) = &mut node.generics {
+ v.visit_angle_bracketed_generic_arguments_mut(it);
+ }
+ skip!(node.eq_token);
+ v.visit_type_mut(&mut node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_attr_style_mut<V>(v: &mut V, node: &mut crate::AttrStyle)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::AttrStyle::Outer => {}
+ crate::AttrStyle::Inner(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_attribute_mut<V>(v: &mut V, node: &mut crate::Attribute)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.pound_token);
+ v.visit_attr_style_mut(&mut node.style);
+ skip!(node.bracket_token);
+ v.visit_meta_mut(&mut node.meta);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bare_fn_arg_mut<V>(v: &mut V, node: &mut crate::BareFnArg)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.name {
+ v.visit_ident_mut(&mut (it).0);
+ skip!((it).1);
+ }
+ v.visit_type_mut(&mut node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bare_variadic_mut<V>(v: &mut V, node: &mut crate::BareVariadic)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.name {
+ v.visit_ident_mut(&mut (it).0);
+ skip!((it).1);
+ }
+ skip!(node.dots);
+ skip!(node.comma);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bin_op_mut<V>(v: &mut V, node: &mut crate::BinOp)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::BinOp::Add(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Sub(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Mul(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Div(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Rem(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::And(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Or(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitXor(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitAnd(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitOr(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Shl(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Shr(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Eq(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Lt(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Le(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Ne(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Ge(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::Gt(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::AddAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::SubAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::MulAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::DivAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::RemAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitXorAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitAndAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::BitOrAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::ShlAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::BinOp::ShrAssign(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_block_mut<V>(v: &mut V, node: &mut crate::Block)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.brace_token);
+ for it in &mut node.stmts {
+ v.visit_stmt_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_bound_lifetimes_mut<V>(v: &mut V, node: &mut crate::BoundLifetimes)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.for_token);
+ skip!(node.lt_token);
+ for mut el in Punctuated::pairs_mut(&mut node.lifetimes) {
+ let it = el.value_mut();
+ v.visit_generic_param_mut(it);
+ }
+ skip!(node.gt_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_const_param_mut<V>(v: &mut V, node: &mut crate::ConstParam)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.const_token);
+ v.visit_ident_mut(&mut node.ident);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut node.ty);
+ skip!(node.eq_token);
+ if let Some(it) = &mut node.default {
+ v.visit_expr_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_constraint_mut<V>(v: &mut V, node: &mut crate::Constraint)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_ident_mut(&mut node.ident);
+ if let Some(it) = &mut node.generics {
+ v.visit_angle_bracketed_generic_arguments_mut(it);
+ }
+ skip!(node.colon_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data_mut<V>(v: &mut V, node: &mut crate::Data)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Data::Struct(_binding_0) => {
+ v.visit_data_struct_mut(_binding_0);
+ }
+ crate::Data::Enum(_binding_0) => {
+ v.visit_data_enum_mut(_binding_0);
+ }
+ crate::Data::Union(_binding_0) => {
+ v.visit_data_union_mut(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data_enum_mut<V>(v: &mut V, node: &mut crate::DataEnum)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.enum_token);
+ skip!(node.brace_token);
+ for mut el in Punctuated::pairs_mut(&mut node.variants) {
+ let it = el.value_mut();
+ v.visit_variant_mut(it);
+ }
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data_struct_mut<V>(v: &mut V, node: &mut crate::DataStruct)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.struct_token);
+ v.visit_fields_mut(&mut node.fields);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_data_union_mut<V>(v: &mut V, node: &mut crate::DataUnion)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.union_token);
+ v.visit_fields_named_mut(&mut node.fields);
+}
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub fn visit_derive_input_mut<V>(v: &mut V, node: &mut crate::DeriveInput)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ v.visit_data_mut(&mut node.data);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_mut<V>(v: &mut V, node: &mut crate::Expr)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Expr::Array(_binding_0) => {
+ full!(v.visit_expr_array_mut(_binding_0));
+ }
+ crate::Expr::Assign(_binding_0) => {
+ full!(v.visit_expr_assign_mut(_binding_0));
+ }
+ crate::Expr::Async(_binding_0) => {
+ full!(v.visit_expr_async_mut(_binding_0));
+ }
+ crate::Expr::Await(_binding_0) => {
+ full!(v.visit_expr_await_mut(_binding_0));
+ }
+ crate::Expr::Binary(_binding_0) => {
+ v.visit_expr_binary_mut(_binding_0);
+ }
+ crate::Expr::Block(_binding_0) => {
+ full!(v.visit_expr_block_mut(_binding_0));
+ }
+ crate::Expr::Break(_binding_0) => {
+ full!(v.visit_expr_break_mut(_binding_0));
+ }
+ crate::Expr::Call(_binding_0) => {
+ v.visit_expr_call_mut(_binding_0);
+ }
+ crate::Expr::Cast(_binding_0) => {
+ v.visit_expr_cast_mut(_binding_0);
+ }
+ crate::Expr::Closure(_binding_0) => {
+ full!(v.visit_expr_closure_mut(_binding_0));
+ }
+ crate::Expr::Const(_binding_0) => {
+ full!(v.visit_expr_const_mut(_binding_0));
+ }
+ crate::Expr::Continue(_binding_0) => {
+ full!(v.visit_expr_continue_mut(_binding_0));
+ }
+ crate::Expr::Field(_binding_0) => {
+ v.visit_expr_field_mut(_binding_0);
+ }
+ crate::Expr::ForLoop(_binding_0) => {
+ full!(v.visit_expr_for_loop_mut(_binding_0));
+ }
+ crate::Expr::Group(_binding_0) => {
+ v.visit_expr_group_mut(_binding_0);
+ }
+ crate::Expr::If(_binding_0) => {
+ full!(v.visit_expr_if_mut(_binding_0));
+ }
+ crate::Expr::Index(_binding_0) => {
+ v.visit_expr_index_mut(_binding_0);
+ }
+ crate::Expr::Infer(_binding_0) => {
+ full!(v.visit_expr_infer_mut(_binding_0));
+ }
+ crate::Expr::Let(_binding_0) => {
+ full!(v.visit_expr_let_mut(_binding_0));
+ }
+ crate::Expr::Lit(_binding_0) => {
+ v.visit_expr_lit_mut(_binding_0);
+ }
+ crate::Expr::Loop(_binding_0) => {
+ full!(v.visit_expr_loop_mut(_binding_0));
+ }
+ crate::Expr::Macro(_binding_0) => {
+ v.visit_expr_macro_mut(_binding_0);
+ }
+ crate::Expr::Match(_binding_0) => {
+ full!(v.visit_expr_match_mut(_binding_0));
+ }
+ crate::Expr::MethodCall(_binding_0) => {
+ v.visit_expr_method_call_mut(_binding_0);
+ }
+ crate::Expr::Paren(_binding_0) => {
+ v.visit_expr_paren_mut(_binding_0);
+ }
+ crate::Expr::Path(_binding_0) => {
+ v.visit_expr_path_mut(_binding_0);
+ }
+ crate::Expr::Range(_binding_0) => {
+ full!(v.visit_expr_range_mut(_binding_0));
+ }
+ crate::Expr::Reference(_binding_0) => {
+ v.visit_expr_reference_mut(_binding_0);
+ }
+ crate::Expr::Repeat(_binding_0) => {
+ full!(v.visit_expr_repeat_mut(_binding_0));
+ }
+ crate::Expr::Return(_binding_0) => {
+ full!(v.visit_expr_return_mut(_binding_0));
+ }
+ crate::Expr::Struct(_binding_0) => {
+ v.visit_expr_struct_mut(_binding_0);
+ }
+ crate::Expr::Try(_binding_0) => {
+ full!(v.visit_expr_try_mut(_binding_0));
+ }
+ crate::Expr::TryBlock(_binding_0) => {
+ full!(v.visit_expr_try_block_mut(_binding_0));
+ }
+ crate::Expr::Tuple(_binding_0) => {
+ full!(v.visit_expr_tuple_mut(_binding_0));
+ }
+ crate::Expr::Unary(_binding_0) => {
+ v.visit_expr_unary_mut(_binding_0);
+ }
+ crate::Expr::Unsafe(_binding_0) => {
+ full!(v.visit_expr_unsafe_mut(_binding_0));
+ }
+ crate::Expr::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::Expr::While(_binding_0) => {
+ full!(v.visit_expr_while_mut(_binding_0));
+ }
+ crate::Expr::Yield(_binding_0) => {
+ full!(v.visit_expr_yield_mut(_binding_0));
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_array_mut<V>(v: &mut V, node: &mut crate::ExprArray)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.bracket_token);
+ for mut el in Punctuated::pairs_mut(&mut node.elems) {
+ let it = el.value_mut();
+ v.visit_expr_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_assign_mut<V>(v: &mut V, node: &mut crate::ExprAssign)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.left);
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut *node.right);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_async_mut<V>(v: &mut V, node: &mut crate::ExprAsync)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.async_token);
+ skip!(node.capture);
+ v.visit_block_mut(&mut node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_await_mut<V>(v: &mut V, node: &mut crate::ExprAwait)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.base);
+ skip!(node.dot_token);
+ skip!(node.await_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut crate::ExprBinary)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.left);
+ v.visit_bin_op_mut(&mut node.op);
+ v.visit_expr_mut(&mut *node.right);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_block_mut<V>(v: &mut V, node: &mut crate::ExprBlock)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.label {
+ v.visit_label_mut(it);
+ }
+ v.visit_block_mut(&mut node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_break_mut<V>(v: &mut V, node: &mut crate::ExprBreak)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.break_token);
+ if let Some(it) = &mut node.label {
+ v.visit_lifetime_mut(it);
+ }
+ if let Some(it) = &mut node.expr {
+ v.visit_expr_mut(&mut **it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_call_mut<V>(v: &mut V, node: &mut crate::ExprCall)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.func);
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.args) {
+ let it = el.value_mut();
+ v.visit_expr_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_cast_mut<V>(v: &mut V, node: &mut crate::ExprCast)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.expr);
+ skip!(node.as_token);
+ v.visit_type_mut(&mut *node.ty);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_closure_mut<V>(v: &mut V, node: &mut crate::ExprClosure)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.lifetimes {
+ v.visit_bound_lifetimes_mut(it);
+ }
+ skip!(node.constness);
+ skip!(node.movability);
+ skip!(node.asyncness);
+ skip!(node.capture);
+ skip!(node.or1_token);
+ for mut el in Punctuated::pairs_mut(&mut node.inputs) {
+ let it = el.value_mut();
+ v.visit_pat_mut(it);
+ }
+ skip!(node.or2_token);
+ v.visit_return_type_mut(&mut node.output);
+ v.visit_expr_mut(&mut *node.body);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_const_mut<V>(v: &mut V, node: &mut crate::ExprConst)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.const_token);
+ v.visit_block_mut(&mut node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_continue_mut<V>(v: &mut V, node: &mut crate::ExprContinue)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.continue_token);
+ if let Some(it) = &mut node.label {
+ v.visit_lifetime_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_field_mut<V>(v: &mut V, node: &mut crate::ExprField)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.base);
+ skip!(node.dot_token);
+ v.visit_member_mut(&mut node.member);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_for_loop_mut<V>(v: &mut V, node: &mut crate::ExprForLoop)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.label {
+ v.visit_label_mut(it);
+ }
+ skip!(node.for_token);
+ v.visit_pat_mut(&mut *node.pat);
+ skip!(node.in_token);
+ v.visit_expr_mut(&mut *node.expr);
+ v.visit_block_mut(&mut node.body);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_group_mut<V>(v: &mut V, node: &mut crate::ExprGroup)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.group_token);
+ v.visit_expr_mut(&mut *node.expr);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_if_mut<V>(v: &mut V, node: &mut crate::ExprIf)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.if_token);
+ v.visit_expr_mut(&mut *node.cond);
+ v.visit_block_mut(&mut node.then_branch);
+ if let Some(it) = &mut node.else_branch {
+ skip!((it).0);
+ v.visit_expr_mut(&mut *(it).1);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_index_mut<V>(v: &mut V, node: &mut crate::ExprIndex)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.expr);
+ skip!(node.bracket_token);
+ v.visit_expr_mut(&mut *node.index);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_infer_mut<V>(v: &mut V, node: &mut crate::ExprInfer)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.underscore_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_let_mut<V>(v: &mut V, node: &mut crate::ExprLet)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.let_token);
+ v.visit_pat_mut(&mut *node.pat);
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut *node.expr);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_lit_mut<V>(v: &mut V, node: &mut crate::ExprLit)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_lit_mut(&mut node.lit);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_loop_mut<V>(v: &mut V, node: &mut crate::ExprLoop)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.label {
+ v.visit_label_mut(it);
+ }
+ skip!(node.loop_token);
+ v.visit_block_mut(&mut node.body);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_macro_mut<V>(v: &mut V, node: &mut crate::ExprMacro)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_macro_mut(&mut node.mac);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_match_mut<V>(v: &mut V, node: &mut crate::ExprMatch)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.match_token);
+ v.visit_expr_mut(&mut *node.expr);
+ skip!(node.brace_token);
+ for it in &mut node.arms {
+ v.visit_arm_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_method_call_mut<V>(v: &mut V, node: &mut crate::ExprMethodCall)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.receiver);
+ skip!(node.dot_token);
+ v.visit_ident_mut(&mut node.method);
+ if let Some(it) = &mut node.turbofish {
+ v.visit_angle_bracketed_generic_arguments_mut(it);
+ }
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.args) {
+ let it = el.value_mut();
+ v.visit_expr_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_paren_mut<V>(v: &mut V, node: &mut crate::ExprParen)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.paren_token);
+ v.visit_expr_mut(&mut *node.expr);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_path_mut<V>(v: &mut V, node: &mut crate::ExprPath)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.qself {
+ v.visit_qself_mut(it);
+ }
+ v.visit_path_mut(&mut node.path);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_range_mut<V>(v: &mut V, node: &mut crate::ExprRange)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.start {
+ v.visit_expr_mut(&mut **it);
+ }
+ v.visit_range_limits_mut(&mut node.limits);
+ if let Some(it) = &mut node.end {
+ v.visit_expr_mut(&mut **it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_reference_mut<V>(v: &mut V, node: &mut crate::ExprReference)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.and_token);
+ skip!(node.mutability);
+ v.visit_expr_mut(&mut *node.expr);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_repeat_mut<V>(v: &mut V, node: &mut crate::ExprRepeat)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.bracket_token);
+ v.visit_expr_mut(&mut *node.expr);
+ skip!(node.semi_token);
+ v.visit_expr_mut(&mut *node.len);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_return_mut<V>(v: &mut V, node: &mut crate::ExprReturn)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.return_token);
+ if let Some(it) = &mut node.expr {
+ v.visit_expr_mut(&mut **it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_struct_mut<V>(v: &mut V, node: &mut crate::ExprStruct)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.qself {
+ v.visit_qself_mut(it);
+ }
+ v.visit_path_mut(&mut node.path);
+ skip!(node.brace_token);
+ for mut el in Punctuated::pairs_mut(&mut node.fields) {
+ let it = el.value_mut();
+ v.visit_field_value_mut(it);
+ }
+ skip!(node.dot2_token);
+ if let Some(it) = &mut node.rest {
+ v.visit_expr_mut(&mut **it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_try_mut<V>(v: &mut V, node: &mut crate::ExprTry)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_expr_mut(&mut *node.expr);
+ skip!(node.question_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_try_block_mut<V>(v: &mut V, node: &mut crate::ExprTryBlock)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.try_token);
+ v.visit_block_mut(&mut node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_tuple_mut<V>(v: &mut V, node: &mut crate::ExprTuple)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.elems) {
+ let it = el.value_mut();
+ v.visit_expr_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_expr_unary_mut<V>(v: &mut V, node: &mut crate::ExprUnary)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_un_op_mut(&mut node.op);
+ v.visit_expr_mut(&mut *node.expr);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_unsafe_mut<V>(v: &mut V, node: &mut crate::ExprUnsafe)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.unsafe_token);
+ v.visit_block_mut(&mut node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_while_mut<V>(v: &mut V, node: &mut crate::ExprWhile)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.label {
+ v.visit_label_mut(it);
+ }
+ skip!(node.while_token);
+ v.visit_expr_mut(&mut *node.cond);
+ v.visit_block_mut(&mut node.body);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_expr_yield_mut<V>(v: &mut V, node: &mut crate::ExprYield)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.yield_token);
+ if let Some(it) = &mut node.expr {
+ v.visit_expr_mut(&mut **it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_field_mut<V>(v: &mut V, node: &mut crate::Field)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ v.visit_field_mutability_mut(&mut node.mutability);
+ if let Some(it) = &mut node.ident {
+ v.visit_ident_mut(it);
+ }
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_field_mutability_mut<V>(v: &mut V, node: &mut crate::FieldMutability)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::FieldMutability::None => {}
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_field_pat_mut<V>(v: &mut V, node: &mut crate::FieldPat)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_member_mut(&mut node.member);
+ skip!(node.colon_token);
+ v.visit_pat_mut(&mut *node.pat);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_field_value_mut<V>(v: &mut V, node: &mut crate::FieldValue)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_member_mut(&mut node.member);
+ skip!(node.colon_token);
+ v.visit_expr_mut(&mut node.expr);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_fields_mut<V>(v: &mut V, node: &mut crate::Fields)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Fields::Named(_binding_0) => {
+ v.visit_fields_named_mut(_binding_0);
+ }
+ crate::Fields::Unnamed(_binding_0) => {
+ v.visit_fields_unnamed_mut(_binding_0);
+ }
+ crate::Fields::Unit => {}
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_fields_named_mut<V>(v: &mut V, node: &mut crate::FieldsNamed)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.brace_token);
+ for mut el in Punctuated::pairs_mut(&mut node.named) {
+ let it = el.value_mut();
+ v.visit_field_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_fields_unnamed_mut<V>(v: &mut V, node: &mut crate::FieldsUnnamed)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.unnamed) {
+ let it = el.value_mut();
+ v.visit_field_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_file_mut<V>(v: &mut V, node: &mut crate::File)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.shebang);
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ for it in &mut node.items {
+ v.visit_item_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_fn_arg_mut<V>(v: &mut V, node: &mut crate::FnArg)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::FnArg::Receiver(_binding_0) => {
+ v.visit_receiver_mut(_binding_0);
+ }
+ crate::FnArg::Typed(_binding_0) => {
+ v.visit_pat_type_mut(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_mut<V>(v: &mut V, node: &mut crate::ForeignItem)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::ForeignItem::Fn(_binding_0) => {
+ v.visit_foreign_item_fn_mut(_binding_0);
+ }
+ crate::ForeignItem::Static(_binding_0) => {
+ v.visit_foreign_item_static_mut(_binding_0);
+ }
+ crate::ForeignItem::Type(_binding_0) => {
+ v.visit_foreign_item_type_mut(_binding_0);
+ }
+ crate::ForeignItem::Macro(_binding_0) => {
+ v.visit_foreign_item_macro_mut(_binding_0);
+ }
+ crate::ForeignItem::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_fn_mut<V>(v: &mut V, node: &mut crate::ForeignItemFn)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ v.visit_signature_mut(&mut node.sig);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_macro_mut<V>(v: &mut V, node: &mut crate::ForeignItemMacro)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_macro_mut(&mut node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_static_mut<V>(v: &mut V, node: &mut crate::ForeignItemStatic)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.static_token);
+ v.visit_static_mutability_mut(&mut node.mutability);
+ v.visit_ident_mut(&mut node.ident);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut *node.ty);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_foreign_item_type_mut<V>(v: &mut V, node: &mut crate::ForeignItemType)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.type_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.semi_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_generic_argument_mut<V>(v: &mut V, node: &mut crate::GenericArgument)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::GenericArgument::Lifetime(_binding_0) => {
+ v.visit_lifetime_mut(_binding_0);
+ }
+ crate::GenericArgument::Type(_binding_0) => {
+ v.visit_type_mut(_binding_0);
+ }
+ crate::GenericArgument::Const(_binding_0) => {
+ v.visit_expr_mut(_binding_0);
+ }
+ crate::GenericArgument::AssocType(_binding_0) => {
+ v.visit_assoc_type_mut(_binding_0);
+ }
+ crate::GenericArgument::AssocConst(_binding_0) => {
+ v.visit_assoc_const_mut(_binding_0);
+ }
+ crate::GenericArgument::Constraint(_binding_0) => {
+ v.visit_constraint_mut(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_generic_param_mut<V>(v: &mut V, node: &mut crate::GenericParam)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::GenericParam::Lifetime(_binding_0) => {
+ v.visit_lifetime_param_mut(_binding_0);
+ }
+ crate::GenericParam::Type(_binding_0) => {
+ v.visit_type_param_mut(_binding_0);
+ }
+ crate::GenericParam::Const(_binding_0) => {
+ v.visit_const_param_mut(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_generics_mut<V>(v: &mut V, node: &mut crate::Generics)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.lt_token);
+ for mut el in Punctuated::pairs_mut(&mut node.params) {
+ let it = el.value_mut();
+ v.visit_generic_param_mut(it);
+ }
+ skip!(node.gt_token);
+ if let Some(it) = &mut node.where_clause {
+ v.visit_where_clause_mut(it);
+ }
+}
+pub fn visit_ident_mut<V>(v: &mut V, node: &mut proc_macro2::Ident)
+where
+ V: VisitMut + ?Sized,
+{
+ let mut span = node.span();
+ v.visit_span_mut(&mut span);
+ node.set_span(span);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_mut<V>(v: &mut V, node: &mut crate::ImplItem)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::ImplItem::Const(_binding_0) => {
+ v.visit_impl_item_const_mut(_binding_0);
+ }
+ crate::ImplItem::Fn(_binding_0) => {
+ v.visit_impl_item_fn_mut(_binding_0);
+ }
+ crate::ImplItem::Type(_binding_0) => {
+ v.visit_impl_item_type_mut(_binding_0);
+ }
+ crate::ImplItem::Macro(_binding_0) => {
+ v.visit_impl_item_macro_mut(_binding_0);
+ }
+ crate::ImplItem::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_const_mut<V>(v: &mut V, node: &mut crate::ImplItemConst)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.defaultness);
+ skip!(node.const_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut node.ty);
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut node.expr);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_fn_mut<V>(v: &mut V, node: &mut crate::ImplItemFn)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.defaultness);
+ v.visit_signature_mut(&mut node.sig);
+ v.visit_block_mut(&mut node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_macro_mut<V>(v: &mut V, node: &mut crate::ImplItemMacro)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_macro_mut(&mut node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_item_type_mut<V>(v: &mut V, node: &mut crate::ImplItemType)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.defaultness);
+ skip!(node.type_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.eq_token);
+ v.visit_type_mut(&mut node.ty);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_impl_restriction_mut<V>(v: &mut V, node: &mut crate::ImplRestriction)
+where
+ V: VisitMut + ?Sized,
+{
+ match *node {}
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_index_mut<V>(v: &mut V, node: &mut crate::Index)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.index);
+ v.visit_span_mut(&mut node.span);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_mut<V>(v: &mut V, node: &mut crate::Item)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Item::Const(_binding_0) => {
+ v.visit_item_const_mut(_binding_0);
+ }
+ crate::Item::Enum(_binding_0) => {
+ v.visit_item_enum_mut(_binding_0);
+ }
+ crate::Item::ExternCrate(_binding_0) => {
+ v.visit_item_extern_crate_mut(_binding_0);
+ }
+ crate::Item::Fn(_binding_0) => {
+ v.visit_item_fn_mut(_binding_0);
+ }
+ crate::Item::ForeignMod(_binding_0) => {
+ v.visit_item_foreign_mod_mut(_binding_0);
+ }
+ crate::Item::Impl(_binding_0) => {
+ v.visit_item_impl_mut(_binding_0);
+ }
+ crate::Item::Macro(_binding_0) => {
+ v.visit_item_macro_mut(_binding_0);
+ }
+ crate::Item::Mod(_binding_0) => {
+ v.visit_item_mod_mut(_binding_0);
+ }
+ crate::Item::Static(_binding_0) => {
+ v.visit_item_static_mut(_binding_0);
+ }
+ crate::Item::Struct(_binding_0) => {
+ v.visit_item_struct_mut(_binding_0);
+ }
+ crate::Item::Trait(_binding_0) => {
+ v.visit_item_trait_mut(_binding_0);
+ }
+ crate::Item::TraitAlias(_binding_0) => {
+ v.visit_item_trait_alias_mut(_binding_0);
+ }
+ crate::Item::Type(_binding_0) => {
+ v.visit_item_type_mut(_binding_0);
+ }
+ crate::Item::Union(_binding_0) => {
+ v.visit_item_union_mut(_binding_0);
+ }
+ crate::Item::Use(_binding_0) => {
+ v.visit_item_use_mut(_binding_0);
+ }
+ crate::Item::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_const_mut<V>(v: &mut V, node: &mut crate::ItemConst)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.const_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut *node.ty);
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut *node.expr);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_enum_mut<V>(v: &mut V, node: &mut crate::ItemEnum)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.enum_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.brace_token);
+ for mut el in Punctuated::pairs_mut(&mut node.variants) {
+ let it = el.value_mut();
+ v.visit_variant_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_extern_crate_mut<V>(v: &mut V, node: &mut crate::ItemExternCrate)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.extern_token);
+ skip!(node.crate_token);
+ v.visit_ident_mut(&mut node.ident);
+ if let Some(it) = &mut node.rename {
+ skip!((it).0);
+ v.visit_ident_mut(&mut (it).1);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_fn_mut<V>(v: &mut V, node: &mut crate::ItemFn)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ v.visit_signature_mut(&mut node.sig);
+ v.visit_block_mut(&mut *node.block);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_foreign_mod_mut<V>(v: &mut V, node: &mut crate::ItemForeignMod)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.unsafety);
+ v.visit_abi_mut(&mut node.abi);
+ skip!(node.brace_token);
+ for it in &mut node.items {
+ v.visit_foreign_item_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_impl_mut<V>(v: &mut V, node: &mut crate::ItemImpl)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.defaultness);
+ skip!(node.unsafety);
+ skip!(node.impl_token);
+ v.visit_generics_mut(&mut node.generics);
+ if let Some(it) = &mut node.trait_ {
+ skip!((it).0);
+ v.visit_path_mut(&mut (it).1);
+ skip!((it).2);
+ }
+ v.visit_type_mut(&mut *node.self_ty);
+ skip!(node.brace_token);
+ for it in &mut node.items {
+ v.visit_impl_item_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_macro_mut<V>(v: &mut V, node: &mut crate::ItemMacro)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.ident {
+ v.visit_ident_mut(it);
+ }
+ v.visit_macro_mut(&mut node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_mod_mut<V>(v: &mut V, node: &mut crate::ItemMod)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.unsafety);
+ skip!(node.mod_token);
+ v.visit_ident_mut(&mut node.ident);
+ if let Some(it) = &mut node.content {
+ skip!((it).0);
+ for it in &mut (it).1 {
+ v.visit_item_mut(it);
+ }
+ }
+ skip!(node.semi);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_static_mut<V>(v: &mut V, node: &mut crate::ItemStatic)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.static_token);
+ v.visit_static_mutability_mut(&mut node.mutability);
+ v.visit_ident_mut(&mut node.ident);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut *node.ty);
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut *node.expr);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_struct_mut<V>(v: &mut V, node: &mut crate::ItemStruct)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.struct_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ v.visit_fields_mut(&mut node.fields);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_trait_mut<V>(v: &mut V, node: &mut crate::ItemTrait)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.unsafety);
+ skip!(node.auto_token);
+ if let Some(it) = &mut node.restriction {
+ v.visit_impl_restriction_mut(it);
+ }
+ skip!(node.trait_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.colon_token);
+ for mut el in Punctuated::pairs_mut(&mut node.supertraits) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+ skip!(node.brace_token);
+ for it in &mut node.items {
+ v.visit_trait_item_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_trait_alias_mut<V>(v: &mut V, node: &mut crate::ItemTraitAlias)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.trait_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.eq_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_type_mut<V>(v: &mut V, node: &mut crate::ItemType)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.type_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.eq_token);
+ v.visit_type_mut(&mut *node.ty);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_union_mut<V>(v: &mut V, node: &mut crate::ItemUnion)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.union_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ v.visit_fields_named_mut(&mut node.fields);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_item_use_mut<V>(v: &mut V, node: &mut crate::ItemUse)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_visibility_mut(&mut node.vis);
+ skip!(node.use_token);
+ skip!(node.leading_colon);
+ v.visit_use_tree_mut(&mut node.tree);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_label_mut<V>(v: &mut V, node: &mut crate::Label)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_lifetime_mut(&mut node.name);
+ skip!(node.colon_token);
+}
+pub fn visit_lifetime_mut<V>(v: &mut V, node: &mut crate::Lifetime)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_span_mut(&mut node.apostrophe);
+ v.visit_ident_mut(&mut node.ident);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_lifetime_param_mut<V>(v: &mut V, node: &mut crate::LifetimeParam)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_lifetime_mut(&mut node.lifetime);
+ skip!(node.colon_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_lifetime_mut(it);
+ }
+}
+pub fn visit_lit_mut<V>(v: &mut V, node: &mut crate::Lit)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Lit::Str(_binding_0) => {
+ v.visit_lit_str_mut(_binding_0);
+ }
+ crate::Lit::ByteStr(_binding_0) => {
+ v.visit_lit_byte_str_mut(_binding_0);
+ }
+ crate::Lit::CStr(_binding_0) => {
+ v.visit_lit_cstr_mut(_binding_0);
+ }
+ crate::Lit::Byte(_binding_0) => {
+ v.visit_lit_byte_mut(_binding_0);
+ }
+ crate::Lit::Char(_binding_0) => {
+ v.visit_lit_char_mut(_binding_0);
+ }
+ crate::Lit::Int(_binding_0) => {
+ v.visit_lit_int_mut(_binding_0);
+ }
+ crate::Lit::Float(_binding_0) => {
+ v.visit_lit_float_mut(_binding_0);
+ }
+ crate::Lit::Bool(_binding_0) => {
+ v.visit_lit_bool_mut(_binding_0);
+ }
+ crate::Lit::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut crate::LitBool)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.value);
+ v.visit_span_mut(&mut node.span);
+}
+pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut crate::LitByte)
+where
+ V: VisitMut + ?Sized,
+{}
+pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut crate::LitByteStr)
+where
+ V: VisitMut + ?Sized,
+{}
+pub fn visit_lit_cstr_mut<V>(v: &mut V, node: &mut crate::LitCStr)
+where
+ V: VisitMut + ?Sized,
+{}
+pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut crate::LitChar)
+where
+ V: VisitMut + ?Sized,
+{}
+pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut crate::LitFloat)
+where
+ V: VisitMut + ?Sized,
+{}
+pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut crate::LitInt)
+where
+ V: VisitMut + ?Sized,
+{}
+pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut crate::LitStr)
+where
+ V: VisitMut + ?Sized,
+{}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_local_mut<V>(v: &mut V, node: &mut crate::Local)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.let_token);
+ v.visit_pat_mut(&mut node.pat);
+ if let Some(it) = &mut node.init {
+ v.visit_local_init_mut(it);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_local_init_mut<V>(v: &mut V, node: &mut crate::LocalInit)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut *node.expr);
+ if let Some(it) = &mut node.diverge {
+ skip!((it).0);
+ v.visit_expr_mut(&mut *(it).1);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_macro_mut<V>(v: &mut V, node: &mut crate::Macro)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_path_mut(&mut node.path);
+ skip!(node.bang_token);
+ v.visit_macro_delimiter_mut(&mut node.delimiter);
+ skip!(node.tokens);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_macro_delimiter_mut<V>(v: &mut V, node: &mut crate::MacroDelimiter)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::MacroDelimiter::Paren(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::MacroDelimiter::Brace(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::MacroDelimiter::Bracket(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_member_mut<V>(v: &mut V, node: &mut crate::Member)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Member::Named(_binding_0) => {
+ v.visit_ident_mut(_binding_0);
+ }
+ crate::Member::Unnamed(_binding_0) => {
+ v.visit_index_mut(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_meta_mut<V>(v: &mut V, node: &mut crate::Meta)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Meta::Path(_binding_0) => {
+ v.visit_path_mut(_binding_0);
+ }
+ crate::Meta::List(_binding_0) => {
+ v.visit_meta_list_mut(_binding_0);
+ }
+ crate::Meta::NameValue(_binding_0) => {
+ v.visit_meta_name_value_mut(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_meta_list_mut<V>(v: &mut V, node: &mut crate::MetaList)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_path_mut(&mut node.path);
+ v.visit_macro_delimiter_mut(&mut node.delimiter);
+ skip!(node.tokens);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_meta_name_value_mut<V>(v: &mut V, node: &mut crate::MetaNameValue)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_path_mut(&mut node.path);
+ skip!(node.eq_token);
+ v.visit_expr_mut(&mut node.value);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_parenthesized_generic_arguments_mut<V>(
+ v: &mut V,
+ node: &mut crate::ParenthesizedGenericArguments,
+)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.inputs) {
+ let it = el.value_mut();
+ v.visit_type_mut(it);
+ }
+ v.visit_return_type_mut(&mut node.output);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_mut<V>(v: &mut V, node: &mut crate::Pat)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Pat::Const(_binding_0) => {
+ v.visit_expr_const_mut(_binding_0);
+ }
+ crate::Pat::Ident(_binding_0) => {
+ v.visit_pat_ident_mut(_binding_0);
+ }
+ crate::Pat::Lit(_binding_0) => {
+ v.visit_expr_lit_mut(_binding_0);
+ }
+ crate::Pat::Macro(_binding_0) => {
+ v.visit_expr_macro_mut(_binding_0);
+ }
+ crate::Pat::Or(_binding_0) => {
+ v.visit_pat_or_mut(_binding_0);
+ }
+ crate::Pat::Paren(_binding_0) => {
+ v.visit_pat_paren_mut(_binding_0);
+ }
+ crate::Pat::Path(_binding_0) => {
+ v.visit_expr_path_mut(_binding_0);
+ }
+ crate::Pat::Range(_binding_0) => {
+ v.visit_expr_range_mut(_binding_0);
+ }
+ crate::Pat::Reference(_binding_0) => {
+ v.visit_pat_reference_mut(_binding_0);
+ }
+ crate::Pat::Rest(_binding_0) => {
+ v.visit_pat_rest_mut(_binding_0);
+ }
+ crate::Pat::Slice(_binding_0) => {
+ v.visit_pat_slice_mut(_binding_0);
+ }
+ crate::Pat::Struct(_binding_0) => {
+ v.visit_pat_struct_mut(_binding_0);
+ }
+ crate::Pat::Tuple(_binding_0) => {
+ v.visit_pat_tuple_mut(_binding_0);
+ }
+ crate::Pat::TupleStruct(_binding_0) => {
+ v.visit_pat_tuple_struct_mut(_binding_0);
+ }
+ crate::Pat::Type(_binding_0) => {
+ v.visit_pat_type_mut(_binding_0);
+ }
+ crate::Pat::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::Pat::Wild(_binding_0) => {
+ v.visit_pat_wild_mut(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_ident_mut<V>(v: &mut V, node: &mut crate::PatIdent)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.by_ref);
+ skip!(node.mutability);
+ v.visit_ident_mut(&mut node.ident);
+ if let Some(it) = &mut node.subpat {
+ skip!((it).0);
+ v.visit_pat_mut(&mut *(it).1);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_or_mut<V>(v: &mut V, node: &mut crate::PatOr)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.leading_vert);
+ for mut el in Punctuated::pairs_mut(&mut node.cases) {
+ let it = el.value_mut();
+ v.visit_pat_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_paren_mut<V>(v: &mut V, node: &mut crate::PatParen)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.paren_token);
+ v.visit_pat_mut(&mut *node.pat);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_reference_mut<V>(v: &mut V, node: &mut crate::PatReference)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.and_token);
+ skip!(node.mutability);
+ v.visit_pat_mut(&mut *node.pat);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_rest_mut<V>(v: &mut V, node: &mut crate::PatRest)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.dot2_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_slice_mut<V>(v: &mut V, node: &mut crate::PatSlice)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.bracket_token);
+ for mut el in Punctuated::pairs_mut(&mut node.elems) {
+ let it = el.value_mut();
+ v.visit_pat_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_struct_mut<V>(v: &mut V, node: &mut crate::PatStruct)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.qself {
+ v.visit_qself_mut(it);
+ }
+ v.visit_path_mut(&mut node.path);
+ skip!(node.brace_token);
+ for mut el in Punctuated::pairs_mut(&mut node.fields) {
+ let it = el.value_mut();
+ v.visit_field_pat_mut(it);
+ }
+ if let Some(it) = &mut node.rest {
+ v.visit_pat_rest_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_tuple_mut<V>(v: &mut V, node: &mut crate::PatTuple)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.elems) {
+ let it = el.value_mut();
+ v.visit_pat_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_tuple_struct_mut<V>(v: &mut V, node: &mut crate::PatTupleStruct)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.qself {
+ v.visit_qself_mut(it);
+ }
+ v.visit_path_mut(&mut node.path);
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.elems) {
+ let it = el.value_mut();
+ v.visit_pat_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_type_mut<V>(v: &mut V, node: &mut crate::PatType)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_pat_mut(&mut *node.pat);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut *node.ty);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_pat_wild_mut<V>(v: &mut V, node: &mut crate::PatWild)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.underscore_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_path_mut<V>(v: &mut V, node: &mut crate::Path)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.leading_colon);
+ for mut el in Punctuated::pairs_mut(&mut node.segments) {
+ let it = el.value_mut();
+ v.visit_path_segment_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_path_arguments_mut<V>(v: &mut V, node: &mut crate::PathArguments)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::PathArguments::None => {}
+ crate::PathArguments::AngleBracketed(_binding_0) => {
+ v.visit_angle_bracketed_generic_arguments_mut(_binding_0);
+ }
+ crate::PathArguments::Parenthesized(_binding_0) => {
+ v.visit_parenthesized_generic_arguments_mut(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_path_segment_mut<V>(v: &mut V, node: &mut crate::PathSegment)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_path_arguments_mut(&mut node.arguments);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_predicate_lifetime_mut<V>(v: &mut V, node: &mut crate::PredicateLifetime)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_lifetime_mut(&mut node.lifetime);
+ skip!(node.colon_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_lifetime_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_predicate_type_mut<V>(v: &mut V, node: &mut crate::PredicateType)
+where
+ V: VisitMut + ?Sized,
+{
+ if let Some(it) = &mut node.lifetimes {
+ v.visit_bound_lifetimes_mut(it);
+ }
+ v.visit_type_mut(&mut node.bounded_ty);
+ skip!(node.colon_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_qself_mut<V>(v: &mut V, node: &mut crate::QSelf)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.lt_token);
+ v.visit_type_mut(&mut *node.ty);
+ skip!(node.position);
+ skip!(node.as_token);
+ skip!(node.gt_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_range_limits_mut<V>(v: &mut V, node: &mut crate::RangeLimits)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::RangeLimits::HalfOpen(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::RangeLimits::Closed(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_receiver_mut<V>(v: &mut V, node: &mut crate::Receiver)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.reference {
+ skip!((it).0);
+ if let Some(it) = &mut (it).1 {
+ v.visit_lifetime_mut(it);
+ }
+ }
+ skip!(node.mutability);
+ skip!(node.self_token);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut *node.ty);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_return_type_mut<V>(v: &mut V, node: &mut crate::ReturnType)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::ReturnType::Default => {}
+ crate::ReturnType::Type(_binding_0, _binding_1) => {
+ skip!(_binding_0);
+ v.visit_type_mut(&mut **_binding_1);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_signature_mut<V>(v: &mut V, node: &mut crate::Signature)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.constness);
+ skip!(node.asyncness);
+ skip!(node.unsafety);
+ if let Some(it) = &mut node.abi {
+ v.visit_abi_mut(it);
+ }
+ skip!(node.fn_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.inputs) {
+ let it = el.value_mut();
+ v.visit_fn_arg_mut(it);
+ }
+ if let Some(it) = &mut node.variadic {
+ v.visit_variadic_mut(it);
+ }
+ v.visit_return_type_mut(&mut node.output);
+}
+pub fn visit_span_mut<V>(v: &mut V, node: &mut proc_macro2::Span)
+where
+ V: VisitMut + ?Sized,
+{}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_static_mutability_mut<V>(v: &mut V, node: &mut crate::StaticMutability)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::StaticMutability::Mut(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::StaticMutability::None => {}
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_stmt_mut<V>(v: &mut V, node: &mut crate::Stmt)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Stmt::Local(_binding_0) => {
+ v.visit_local_mut(_binding_0);
+ }
+ crate::Stmt::Item(_binding_0) => {
+ v.visit_item_mut(_binding_0);
+ }
+ crate::Stmt::Expr(_binding_0, _binding_1) => {
+ v.visit_expr_mut(_binding_0);
+ skip!(_binding_1);
+ }
+ crate::Stmt::Macro(_binding_0) => {
+ v.visit_stmt_macro_mut(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_stmt_macro_mut<V>(v: &mut V, node: &mut crate::StmtMacro)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_macro_mut(&mut node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_trait_bound_mut<V>(v: &mut V, node: &mut crate::TraitBound)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.paren_token);
+ v.visit_trait_bound_modifier_mut(&mut node.modifier);
+ if let Some(it) = &mut node.lifetimes {
+ v.visit_bound_lifetimes_mut(it);
+ }
+ v.visit_path_mut(&mut node.path);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_trait_bound_modifier_mut<V>(v: &mut V, node: &mut crate::TraitBoundModifier)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::TraitBoundModifier::None => {}
+ crate::TraitBoundModifier::Maybe(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_mut<V>(v: &mut V, node: &mut crate::TraitItem)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::TraitItem::Const(_binding_0) => {
+ v.visit_trait_item_const_mut(_binding_0);
+ }
+ crate::TraitItem::Fn(_binding_0) => {
+ v.visit_trait_item_fn_mut(_binding_0);
+ }
+ crate::TraitItem::Type(_binding_0) => {
+ v.visit_trait_item_type_mut(_binding_0);
+ }
+ crate::TraitItem::Macro(_binding_0) => {
+ v.visit_trait_item_macro_mut(_binding_0);
+ }
+ crate::TraitItem::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_const_mut<V>(v: &mut V, node: &mut crate::TraitItemConst)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.const_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.colon_token);
+ v.visit_type_mut(&mut node.ty);
+ if let Some(it) = &mut node.default {
+ skip!((it).0);
+ v.visit_expr_mut(&mut (it).1);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_fn_mut<V>(v: &mut V, node: &mut crate::TraitItemFn)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_signature_mut(&mut node.sig);
+ if let Some(it) = &mut node.default {
+ v.visit_block_mut(it);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_macro_mut<V>(v: &mut V, node: &mut crate::TraitItemMacro)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_macro_mut(&mut node.mac);
+ skip!(node.semi_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_trait_item_type_mut<V>(v: &mut V, node: &mut crate::TraitItemType)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ skip!(node.type_token);
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_generics_mut(&mut node.generics);
+ skip!(node.colon_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+ if let Some(it) = &mut node.default {
+ skip!((it).0);
+ v.visit_type_mut(&mut (it).1);
+ }
+ skip!(node.semi_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_mut<V>(v: &mut V, node: &mut crate::Type)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Type::Array(_binding_0) => {
+ v.visit_type_array_mut(_binding_0);
+ }
+ crate::Type::BareFn(_binding_0) => {
+ v.visit_type_bare_fn_mut(_binding_0);
+ }
+ crate::Type::Group(_binding_0) => {
+ v.visit_type_group_mut(_binding_0);
+ }
+ crate::Type::ImplTrait(_binding_0) => {
+ v.visit_type_impl_trait_mut(_binding_0);
+ }
+ crate::Type::Infer(_binding_0) => {
+ v.visit_type_infer_mut(_binding_0);
+ }
+ crate::Type::Macro(_binding_0) => {
+ v.visit_type_macro_mut(_binding_0);
+ }
+ crate::Type::Never(_binding_0) => {
+ v.visit_type_never_mut(_binding_0);
+ }
+ crate::Type::Paren(_binding_0) => {
+ v.visit_type_paren_mut(_binding_0);
+ }
+ crate::Type::Path(_binding_0) => {
+ v.visit_type_path_mut(_binding_0);
+ }
+ crate::Type::Ptr(_binding_0) => {
+ v.visit_type_ptr_mut(_binding_0);
+ }
+ crate::Type::Reference(_binding_0) => {
+ v.visit_type_reference_mut(_binding_0);
+ }
+ crate::Type::Slice(_binding_0) => {
+ v.visit_type_slice_mut(_binding_0);
+ }
+ crate::Type::TraitObject(_binding_0) => {
+ v.visit_type_trait_object_mut(_binding_0);
+ }
+ crate::Type::Tuple(_binding_0) => {
+ v.visit_type_tuple_mut(_binding_0);
+ }
+ crate::Type::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_array_mut<V>(v: &mut V, node: &mut crate::TypeArray)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.bracket_token);
+ v.visit_type_mut(&mut *node.elem);
+ skip!(node.semi_token);
+ v.visit_expr_mut(&mut node.len);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_bare_fn_mut<V>(v: &mut V, node: &mut crate::TypeBareFn)
+where
+ V: VisitMut + ?Sized,
+{
+ if let Some(it) = &mut node.lifetimes {
+ v.visit_bound_lifetimes_mut(it);
+ }
+ skip!(node.unsafety);
+ if let Some(it) = &mut node.abi {
+ v.visit_abi_mut(it);
+ }
+ skip!(node.fn_token);
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.inputs) {
+ let it = el.value_mut();
+ v.visit_bare_fn_arg_mut(it);
+ }
+ if let Some(it) = &mut node.variadic {
+ v.visit_bare_variadic_mut(it);
+ }
+ v.visit_return_type_mut(&mut node.output);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_group_mut<V>(v: &mut V, node: &mut crate::TypeGroup)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.group_token);
+ v.visit_type_mut(&mut *node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_impl_trait_mut<V>(v: &mut V, node: &mut crate::TypeImplTrait)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.impl_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_infer_mut<V>(v: &mut V, node: &mut crate::TypeInfer)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.underscore_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_macro_mut<V>(v: &mut V, node: &mut crate::TypeMacro)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_macro_mut(&mut node.mac);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_never_mut<V>(v: &mut V, node: &mut crate::TypeNever)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.bang_token);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_param_mut<V>(v: &mut V, node: &mut crate::TypeParam)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_ident_mut(&mut node.ident);
+ skip!(node.colon_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+ skip!(node.eq_token);
+ if let Some(it) = &mut node.default {
+ v.visit_type_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_param_bound_mut<V>(v: &mut V, node: &mut crate::TypeParamBound)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::TypeParamBound::Trait(_binding_0) => {
+ v.visit_trait_bound_mut(_binding_0);
+ }
+ crate::TypeParamBound::Lifetime(_binding_0) => {
+ v.visit_lifetime_mut(_binding_0);
+ }
+ crate::TypeParamBound::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_paren_mut<V>(v: &mut V, node: &mut crate::TypeParen)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.paren_token);
+ v.visit_type_mut(&mut *node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_path_mut<V>(v: &mut V, node: &mut crate::TypePath)
+where
+ V: VisitMut + ?Sized,
+{
+ if let Some(it) = &mut node.qself {
+ v.visit_qself_mut(it);
+ }
+ v.visit_path_mut(&mut node.path);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_ptr_mut<V>(v: &mut V, node: &mut crate::TypePtr)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.star_token);
+ skip!(node.const_token);
+ skip!(node.mutability);
+ v.visit_type_mut(&mut *node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_reference_mut<V>(v: &mut V, node: &mut crate::TypeReference)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.and_token);
+ if let Some(it) = &mut node.lifetime {
+ v.visit_lifetime_mut(it);
+ }
+ skip!(node.mutability);
+ v.visit_type_mut(&mut *node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_slice_mut<V>(v: &mut V, node: &mut crate::TypeSlice)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.bracket_token);
+ v.visit_type_mut(&mut *node.elem);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_trait_object_mut<V>(v: &mut V, node: &mut crate::TypeTraitObject)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.dyn_token);
+ for mut el in Punctuated::pairs_mut(&mut node.bounds) {
+ let it = el.value_mut();
+ v.visit_type_param_bound_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_type_tuple_mut<V>(v: &mut V, node: &mut crate::TypeTuple)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.paren_token);
+ for mut el in Punctuated::pairs_mut(&mut node.elems) {
+ let it = el.value_mut();
+ v.visit_type_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_un_op_mut<V>(v: &mut V, node: &mut crate::UnOp)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::UnOp::Deref(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::UnOp::Not(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::UnOp::Neg(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_glob_mut<V>(v: &mut V, node: &mut crate::UseGlob)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.star_token);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_group_mut<V>(v: &mut V, node: &mut crate::UseGroup)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.brace_token);
+ for mut el in Punctuated::pairs_mut(&mut node.items) {
+ let it = el.value_mut();
+ v.visit_use_tree_mut(it);
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_name_mut<V>(v: &mut V, node: &mut crate::UseName)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_ident_mut(&mut node.ident);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_path_mut<V>(v: &mut V, node: &mut crate::UsePath)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_ident_mut(&mut node.ident);
+ skip!(node.colon2_token);
+ v.visit_use_tree_mut(&mut *node.tree);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_rename_mut<V>(v: &mut V, node: &mut crate::UseRename)
+where
+ V: VisitMut + ?Sized,
+{
+ v.visit_ident_mut(&mut node.ident);
+ skip!(node.as_token);
+ v.visit_ident_mut(&mut node.rename);
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_use_tree_mut<V>(v: &mut V, node: &mut crate::UseTree)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::UseTree::Path(_binding_0) => {
+ v.visit_use_path_mut(_binding_0);
+ }
+ crate::UseTree::Name(_binding_0) => {
+ v.visit_use_name_mut(_binding_0);
+ }
+ crate::UseTree::Rename(_binding_0) => {
+ v.visit_use_rename_mut(_binding_0);
+ }
+ crate::UseTree::Glob(_binding_0) => {
+ v.visit_use_glob_mut(_binding_0);
+ }
+ crate::UseTree::Group(_binding_0) => {
+ v.visit_use_group_mut(_binding_0);
+ }
+ }
+}
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub fn visit_variadic_mut<V>(v: &mut V, node: &mut crate::Variadic)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ if let Some(it) = &mut node.pat {
+ v.visit_pat_mut(&mut *(it).0);
+ skip!((it).1);
+ }
+ skip!(node.dots);
+ skip!(node.comma);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_variant_mut<V>(v: &mut V, node: &mut crate::Variant)
+where
+ V: VisitMut + ?Sized,
+{
+ for it in &mut node.attrs {
+ v.visit_attribute_mut(it);
+ }
+ v.visit_ident_mut(&mut node.ident);
+ v.visit_fields_mut(&mut node.fields);
+ if let Some(it) = &mut node.discriminant {
+ skip!((it).0);
+ v.visit_expr_mut(&mut (it).1);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_vis_restricted_mut<V>(v: &mut V, node: &mut crate::VisRestricted)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.pub_token);
+ skip!(node.paren_token);
+ skip!(node.in_token);
+ v.visit_path_mut(&mut *node.path);
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_visibility_mut<V>(v: &mut V, node: &mut crate::Visibility)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::Visibility::Public(_binding_0) => {
+ skip!(_binding_0);
+ }
+ crate::Visibility::Restricted(_binding_0) => {
+ v.visit_vis_restricted_mut(_binding_0);
+ }
+ crate::Visibility::Inherited => {}
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_where_clause_mut<V>(v: &mut V, node: &mut crate::WhereClause)
+where
+ V: VisitMut + ?Sized,
+{
+ skip!(node.where_token);
+ for mut el in Punctuated::pairs_mut(&mut node.predicates) {
+ let it = el.value_mut();
+ v.visit_where_predicate_mut(it);
+ }
+}
+#[cfg(any(feature = "derive", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+pub fn visit_where_predicate_mut<V>(v: &mut V, node: &mut crate::WherePredicate)
+where
+ V: VisitMut + ?Sized,
+{
+ match node {
+ crate::WherePredicate::Lifetime(_binding_0) => {
+ v.visit_predicate_lifetime_mut(_binding_0);
+ }
+ crate::WherePredicate::Type(_binding_0) => {
+ v.visit_predicate_type_mut(_binding_0);
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/generics.rs b/rust/hw/char/pl011/vendor/syn/src/generics.rs
new file mode 100644
index 0000000000..c755151d4b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/generics.rs
@@ -0,0 +1,1286 @@
+use crate::attr::Attribute;
+use crate::expr::Expr;
+use crate::ident::Ident;
+use crate::lifetime::Lifetime;
+use crate::path::Path;
+use crate::punctuated::{Iter, IterMut, Punctuated};
+use crate::token;
+use crate::ty::Type;
+use proc_macro2::TokenStream;
+#[cfg(all(feature = "printing", feature = "extra-traits"))]
+use std::fmt::{self, Debug};
+#[cfg(all(feature = "printing", feature = "extra-traits"))]
+use std::hash::{Hash, Hasher};
+
+ast_struct! {
+ /// Lifetimes and type parameters attached to a declaration of a function,
+ /// enum, trait, etc.
+ ///
+ /// This struct represents two distinct optional syntactic elements,
+ /// [generic parameters] and [where clause]. In some locations of the
+ /// grammar, there may be other tokens in between these two things.
+ ///
+    /// [generic parameters]: https://doc.rust-lang.org/stable/reference/items/generics.html#generic-parameters
+    /// [where clause]: https://doc.rust-lang.org/stable/reference/items/generics.html#where-clauses
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Generics {
+ pub lt_token: Option<Token![<]>,
+ pub params: Punctuated<GenericParam, Token![,]>,
+ pub gt_token: Option<Token![>]>,
+ pub where_clause: Option<WhereClause>,
+ }
+}
+
+ast_enum_of_structs! {
+    /// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
+ /// `'a: 'b`, `const LEN: usize`.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum GenericParam {
+ /// A lifetime parameter: `'a: 'b + 'c + 'd`.
+ Lifetime(LifetimeParam),
+
+ /// A generic type parameter: `T: Into<String>`.
+ Type(TypeParam),
+
+ /// A const generic parameter: `const LENGTH: usize`.
+ Const(ConstParam),
+ }
+}
+
+ast_struct! {
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct LifetimeParam {
+ pub attrs: Vec<Attribute>,
+ pub lifetime: Lifetime,
+ pub colon_token: Option<Token![:]>,
+ pub bounds: Punctuated<Lifetime, Token![+]>,
+ }
+}
+
+ast_struct! {
+ /// A generic type parameter: `T: Into<String>`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeParam {
+ pub attrs: Vec<Attribute>,
+ pub ident: Ident,
+ pub colon_token: Option<Token![:]>,
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ pub eq_token: Option<Token![=]>,
+ pub default: Option<Type>,
+ }
+}
+
+ast_struct! {
+ /// A const generic parameter: `const LENGTH: usize`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ConstParam {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Type,
+ pub eq_token: Option<Token![=]>,
+ pub default: Option<Expr>,
+ }
+}
+
+impl Default for Generics {
+ fn default() -> Self {
+ Generics {
+ lt_token: None,
+ params: Punctuated::new(),
+ gt_token: None,
+ where_clause: None,
+ }
+ }
+}
+
+impl Generics {
+ /// Returns an
+ /// <code
+ /// style="padding-right:0;">Iterator<Item = &</code><a
+ /// href="struct.LifetimeParam.html"><code
+ /// style="padding-left:0;padding-right:0;">LifetimeParam</code></a><code
+ /// style="padding-left:0;">></code>
+ /// over the lifetime parameters in `self.params`.
+ pub fn lifetimes(&self) -> Lifetimes {
+ Lifetimes(self.params.iter())
+ }
+
+ /// Returns an
+ /// <code
+ /// style="padding-right:0;">Iterator<Item = &mut </code><a
+ /// href="struct.LifetimeParam.html"><code
+ /// style="padding-left:0;padding-right:0;">LifetimeParam</code></a><code
+ /// style="padding-left:0;">></code>
+ /// over the lifetime parameters in `self.params`.
+ pub fn lifetimes_mut(&mut self) -> LifetimesMut {
+ LifetimesMut(self.params.iter_mut())
+ }
+
+ /// Returns an
+ /// <code
+ /// style="padding-right:0;">Iterator<Item = &</code><a
+ /// href="struct.TypeParam.html"><code
+ /// style="padding-left:0;padding-right:0;">TypeParam</code></a><code
+ /// style="padding-left:0;">></code>
+ /// over the type parameters in `self.params`.
+ pub fn type_params(&self) -> TypeParams {
+ TypeParams(self.params.iter())
+ }
+
+ /// Returns an
+ /// <code
+ /// style="padding-right:0;">Iterator<Item = &mut </code><a
+ /// href="struct.TypeParam.html"><code
+ /// style="padding-left:0;padding-right:0;">TypeParam</code></a><code
+ /// style="padding-left:0;">></code>
+ /// over the type parameters in `self.params`.
+ pub fn type_params_mut(&mut self) -> TypeParamsMut {
+ TypeParamsMut(self.params.iter_mut())
+ }
+
+ /// Returns an
+ /// <code
+ /// style="padding-right:0;">Iterator<Item = &</code><a
+ /// href="struct.ConstParam.html"><code
+ /// style="padding-left:0;padding-right:0;">ConstParam</code></a><code
+ /// style="padding-left:0;">></code>
+ /// over the constant parameters in `self.params`.
+ pub fn const_params(&self) -> ConstParams {
+ ConstParams(self.params.iter())
+ }
+
+ /// Returns an
+ /// <code
+ /// style="padding-right:0;">Iterator<Item = &mut </code><a
+ /// href="struct.ConstParam.html"><code
+ /// style="padding-left:0;padding-right:0;">ConstParam</code></a><code
+ /// style="padding-left:0;">></code>
+ /// over the constant parameters in `self.params`.
+ pub fn const_params_mut(&mut self) -> ConstParamsMut {
+ ConstParamsMut(self.params.iter_mut())
+ }
+
+    /// Initializes an empty `where`-clause if there is not one present already.
+ pub fn make_where_clause(&mut self) -> &mut WhereClause {
+ self.where_clause.get_or_insert_with(|| WhereClause {
+ where_token: <Token![where]>::default(),
+ predicates: Punctuated::new(),
+ })
+ }
+}
+
+pub struct Lifetimes<'a>(Iter<'a, GenericParam>);
+
+impl<'a> Iterator for Lifetimes<'a> {
+ type Item = &'a LifetimeParam;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let next = match self.0.next() {
+ Some(item) => item,
+ None => return None,
+ };
+ if let GenericParam::Lifetime(lifetime) = next {
+ Some(lifetime)
+ } else {
+ self.next()
+ }
+ }
+}
+
+pub struct LifetimesMut<'a>(IterMut<'a, GenericParam>);
+
+impl<'a> Iterator for LifetimesMut<'a> {
+ type Item = &'a mut LifetimeParam;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let next = match self.0.next() {
+ Some(item) => item,
+ None => return None,
+ };
+ if let GenericParam::Lifetime(lifetime) = next {
+ Some(lifetime)
+ } else {
+ self.next()
+ }
+ }
+}
+
+pub struct TypeParams<'a>(Iter<'a, GenericParam>);
+
+impl<'a> Iterator for TypeParams<'a> {
+ type Item = &'a TypeParam;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let next = match self.0.next() {
+ Some(item) => item,
+ None => return None,
+ };
+ if let GenericParam::Type(type_param) = next {
+ Some(type_param)
+ } else {
+ self.next()
+ }
+ }
+}
+
+pub struct TypeParamsMut<'a>(IterMut<'a, GenericParam>);
+
+impl<'a> Iterator for TypeParamsMut<'a> {
+ type Item = &'a mut TypeParam;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let next = match self.0.next() {
+ Some(item) => item,
+ None => return None,
+ };
+ if let GenericParam::Type(type_param) = next {
+ Some(type_param)
+ } else {
+ self.next()
+ }
+ }
+}
+
+pub struct ConstParams<'a>(Iter<'a, GenericParam>);
+
+impl<'a> Iterator for ConstParams<'a> {
+ type Item = &'a ConstParam;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let next = match self.0.next() {
+ Some(item) => item,
+ None => return None,
+ };
+ if let GenericParam::Const(const_param) = next {
+ Some(const_param)
+ } else {
+ self.next()
+ }
+ }
+}
+
+pub struct ConstParamsMut<'a>(IterMut<'a, GenericParam>);
+
+impl<'a> Iterator for ConstParamsMut<'a> {
+ type Item = &'a mut ConstParam;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let next = match self.0.next() {
+ Some(item) => item,
+ None => return None,
+ };
+ if let GenericParam::Const(const_param) = next {
+ Some(const_param)
+ } else {
+ self.next()
+ }
+ }
+}
+
+/// Returned by `Generics::split_for_impl`.
+#[cfg(feature = "printing")]
+#[cfg_attr(
+ docsrs,
+    doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
+)]
+pub struct ImplGenerics<'a>(&'a Generics);
+
+/// Returned by `Generics::split_for_impl`.
+#[cfg(feature = "printing")]
+#[cfg_attr(
+ docsrs,
+    doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
+)]
+pub struct TypeGenerics<'a>(&'a Generics);
+
+/// Returned by `TypeGenerics::as_turbofish`.
+#[cfg(feature = "printing")]
+#[cfg_attr(
+ docsrs,
+    doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
+)]
+pub struct Turbofish<'a>(&'a Generics);
+
+#[cfg(feature = "printing")]
+impl Generics {
+ /// Split a type's generics into the pieces required for impl'ing a trait
+ /// for that type.
+ ///
+ /// ```
+ /// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
+ /// #
+ /// # let generics: syn::Generics = Default::default();
+ /// # let name = Ident::new("MyType", Span::call_site());
+ /// #
+    /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ /// quote! {
+ /// impl #impl_generics MyTrait for #name #ty_generics #where_clause {
+ /// // ...
+ /// }
+ /// }
+ /// # ;
+ /// ```
+ #[cfg_attr(
+ docsrs,
+        doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
+ )]
+    pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
+ (
+ ImplGenerics(self),
+ TypeGenerics(self),
+ self.where_clause.as_ref(),
+ )
+ }
+}
+
+#[cfg(feature = "printing")]
+macro_rules! generics_wrapper_impls {
+ ($ty:ident) => {
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl<'a> Clone for $ty<'a> {
+ fn clone(&self) -> Self {
+ $ty(self.0)
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl<'a> Debug for $ty<'a> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_tuple(stringify!($ty))
+ .field(self.0)
+ .finish()
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl<'a> Eq for $ty<'a> {}
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl<'a> PartialEq for $ty<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl<'a> Hash for $ty<'a> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.0.hash(state);
+ }
+ }
+ };
+}
+
+#[cfg(feature = "printing")]
+generics_wrapper_impls!(ImplGenerics);
+#[cfg(feature = "printing")]
+generics_wrapper_impls!(TypeGenerics);
+#[cfg(feature = "printing")]
+generics_wrapper_impls!(Turbofish);
+
+#[cfg(feature = "printing")]
+impl<'a> TypeGenerics<'a> {
+ /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
+ pub fn as_turbofish(&self) -> Turbofish {
+ Turbofish(self.0)
+ }
+}
+
+ast_struct! {
+ /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct BoundLifetimes {
+ pub for_token: Token![for],
+ pub lt_token: Token![<],
+ pub lifetimes: Punctuated<GenericParam, Token![,]>,
+ pub gt_token: Token![>],
+ }
+}
+
+impl Default for BoundLifetimes {
+ fn default() -> Self {
+ BoundLifetimes {
+ for_token: Default::default(),
+ lt_token: Default::default(),
+ lifetimes: Punctuated::new(),
+ gt_token: Default::default(),
+ }
+ }
+}
+
+impl LifetimeParam {
+ pub fn new(lifetime: Lifetime) -> Self {
+ LifetimeParam {
+ attrs: Vec::new(),
+ lifetime,
+ colon_token: None,
+ bounds: Punctuated::new(),
+ }
+ }
+}
+
+impl From<Ident> for TypeParam {
+ fn from(ident: Ident) -> Self {
+ TypeParam {
+ attrs: vec![],
+ ident,
+ colon_token: None,
+ bounds: Punctuated::new(),
+ eq_token: None,
+ default: None,
+ }
+ }
+}
+
+ast_enum_of_structs! {
+ /// A trait or lifetime used as a bound on a type parameter.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum TypeParamBound {
+ Trait(TraitBound),
+ Lifetime(Lifetime),
+ Verbatim(TokenStream),
+ }
+}
+
+ast_struct! {
+ /// A trait used as a bound on a type parameter.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TraitBound {
+ pub paren_token: Option<token::Paren>,
+ pub modifier: TraitBoundModifier,
+ /// The `for<'a>` in `for<'a> Foo<&'a T>`
+ pub lifetimes: Option<BoundLifetimes>,
+ /// The `Foo<&'a T>` in `for<'a> Foo<&'a T>`
+ pub path: Path,
+ }
+}
+
+ast_enum! {
+ /// A modifier on a trait bound, currently only used for the `?` in
+ /// `?Sized`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum TraitBoundModifier {
+ None,
+ Maybe(Token![?]),
+ }
+}
+
+ast_struct! {
+ /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
+ /// 'static`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct WhereClause {
+ pub where_token: Token![where],
+ pub predicates: Punctuated<WherePredicate, Token![,]>,
+ }
+}
+
+ast_enum_of_structs! {
+ /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum WherePredicate {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ Lifetime(PredicateLifetime),
+
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ Type(PredicateType),
+ }
+}
+
+ast_struct! {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct PredicateLifetime {
+ pub lifetime: Lifetime,
+ pub colon_token: Token![:],
+ pub bounds: Punctuated<Lifetime, Token![+]>,
+ }
+}
+
+ast_struct! {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct PredicateType {
+ /// Any lifetimes from a `for` binding
+ pub lifetimes: Option<BoundLifetimes>,
+ /// The type being bounded
+ pub bounded_ty: Type,
+ pub colon_token: Token![:],
+ /// Trait and lifetime bounds (`Clone+Send+'static`)
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::Attribute;
+ use crate::error::Result;
+ use crate::ext::IdentExt as _;
+ use crate::generics::{
+        BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeParam, PredicateLifetime,
+        PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound, WhereClause,
+ WherePredicate,
+ };
+ use crate::ident::Ident;
+ use crate::lifetime::Lifetime;
+ use crate::parse::{Parse, ParseStream};
+    use crate::path::{self, ParenthesizedGenericArguments, Path, PathArguments};
+ use crate::punctuated::Punctuated;
+ use crate::token;
+ use crate::ty::Type;
+ use crate::verbatim;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Generics {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if !input.peek(Token![<]) {
+ return Ok(Generics::default());
+ }
+
+ let lt_token: Token![<] = input.parse()?;
+
+ let mut params = Punctuated::new();
+ loop {
+ if input.peek(Token![>]) {
+ break;
+ }
+
+ let attrs = input.call(Attribute::parse_outer)?;
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Lifetime) {
+ params.push_value(GenericParam::Lifetime(LifetimeParam {
+ attrs,
+ ..input.parse()?
+ }));
+ } else if lookahead.peek(Ident) {
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+ ..input.parse()?
+ }));
+ } else if lookahead.peek(Token![const]) {
+ params.push_value(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+ }));
+ } else if input.peek(Token![_]) {
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+ ident: input.call(Ident::parse_any)?,
+ colon_token: None,
+ bounds: Punctuated::new(),
+ eq_token: None,
+ default: None,
+ }));
+ } else {
+ return Err(lookahead.error());
+ }
+
+ if input.peek(Token![>]) {
+ break;
+ }
+ let punct = input.parse()?;
+ params.push_punct(punct);
+ }
+
+ let gt_token: Token![>] = input.parse()?;
+
+ Ok(Generics {
+ lt_token: Some(lt_token),
+ params,
+ gt_token: Some(gt_token),
+ where_clause: None,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for GenericParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident) {
+ Ok(GenericParam::Type(TypeParam {
+ attrs,
+ ..input.parse()?
+ }))
+ } else if lookahead.peek(Lifetime) {
+ Ok(GenericParam::Lifetime(LifetimeParam {
+ attrs,
+ ..input.parse()?
+ }))
+ } else if lookahead.peek(Token![const]) {
+ Ok(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+ }))
+ } else {
+ Err(lookahead.error())
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LifetimeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let has_colon;
+ Ok(LifetimeParam {
+ attrs: input.call(Attribute::parse_outer)?,
+ lifetime: input.parse()?,
+ colon_token: {
+ if input.peek(Token![:]) {
+ has_colon = true;
+ Some(input.parse()?)
+ } else {
+ has_colon = false;
+ None
+ }
+ },
+ bounds: {
+ let mut bounds = Punctuated::new();
+ if has_colon {
+ loop {
+ if input.peek(Token![,]) || input.peek(Token![>]) {
+ break;
+ }
+ let value = input.parse()?;
+ bounds.push_value(value);
+ if !input.peek(Token![+]) {
+ break;
+ }
+ let punct = input.parse()?;
+ bounds.push_punct(punct);
+ }
+ }
+ bounds
+ },
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for BoundLifetimes {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(BoundLifetimes {
+ for_token: input.parse()?,
+ lt_token: input.parse()?,
+ lifetimes: {
+ let mut lifetimes = Punctuated::new();
+ while !input.peek(Token![>]) {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let lifetime: Lifetime = input.parse()?;
+                        lifetimes.push_value(GenericParam::Lifetime(LifetimeParam {
+ attrs,
+ lifetime,
+ colon_token: None,
+ bounds: Punctuated::new(),
+ }));
+ if input.peek(Token![>]) {
+ break;
+ }
+ lifetimes.push_punct(input.parse()?);
+ }
+ lifetimes
+ },
+ gt_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Option<BoundLifetimes> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Token![for]) {
+ input.parse().map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let ident: Ident = input.parse()?;
+ let colon_token: Option<Token![:]> = input.parse()?;
+
+ let mut bounds = Punctuated::new();
+ if colon_token.is_some() {
+ loop {
+                    if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
+ break;
+ }
+ let value: TypeParamBound = input.parse()?;
+ bounds.push_value(value);
+ if !input.peek(Token![+]) {
+ break;
+ }
+ let punct: Token![+] = input.parse()?;
+ bounds.push_punct(punct);
+ }
+ }
+
+ let eq_token: Option<Token![=]> = input.parse()?;
+ let default = if eq_token.is_some() {
+ Some(input.parse::<Type>()?)
+ } else {
+ None
+ };
+
+ Ok(TypeParam {
+ attrs,
+ ident,
+ colon_token,
+ bounds,
+ eq_token,
+ default,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeParamBound {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Lifetime) {
+ return input.parse().map(TypeParamBound::Lifetime);
+ }
+
+ let begin = input.fork();
+
+ let content;
+ let (paren_token, content) = if input.peek(token::Paren) {
+ (Some(parenthesized!(content in input)), &content)
+ } else {
+ (None, input)
+ };
+
+ let is_tilde_const =
+                cfg!(feature = "full") && content.peek(Token![~]) && content.peek2(Token![const]);
+ if is_tilde_const {
+ content.parse::<Token![~]>()?;
+ content.parse::<Token![const]>()?;
+ }
+
+ let mut bound: TraitBound = content.parse()?;
+ bound.paren_token = paren_token;
+
+ if is_tilde_const {
+ Ok(TypeParamBound::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ Ok(TypeParamBound::Trait(bound))
+ }
+ }
+ }
+
+ impl TypeParamBound {
+ pub(crate) fn parse_multiple(
+ input: ParseStream,
+ allow_plus: bool,
+ ) -> Result<Punctuated<Self, Token![+]>> {
+ let mut bounds = Punctuated::new();
+ loop {
+ bounds.push_value(input.parse()?);
+ if !(allow_plus && input.peek(Token![+])) {
+ break;
+ }
+ bounds.push_punct(input.parse()?);
+ if !(input.peek(Ident::peek_any)
+ || input.peek(Token![::])
+ || input.peek(Token![?])
+ || input.peek(Lifetime)
+ || input.peek(token::Paren)
+ || input.peek(Token![~]))
+ {
+ break;
+ }
+ }
+ Ok(bounds)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TraitBound {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let modifier: TraitBoundModifier = input.parse()?;
+ let lifetimes: Option<BoundLifetimes> = input.parse()?;
+
+ let mut path: Path = input.parse()?;
+ if path.segments.last().unwrap().arguments.is_empty()
+                && (input.peek(token::Paren) || input.peek(Token![::]) && input.peek3(token::Paren))
+ {
+ input.parse::<Option<Token![::]>>()?;
+ let args: ParenthesizedGenericArguments = input.parse()?;
+ let parenthesized = PathArguments::Parenthesized(args);
+ path.segments.last_mut().unwrap().arguments = parenthesized;
+ }
+
+ Ok(TraitBound {
+ paren_token: None,
+ modifier,
+ lifetimes,
+ path,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TraitBoundModifier {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Token![?]) {
+ input.parse().map(TraitBoundModifier::Maybe)
+ } else {
+ Ok(TraitBoundModifier::None)
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ConstParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut default = None;
+ Ok(ConstParam {
+ attrs: input.call(Attribute::parse_outer)?,
+ const_token: input.parse()?,
+ ident: input.parse()?,
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: {
+ if input.peek(Token![=]) {
+ let eq_token = input.parse()?;
+ default = Some(path::parsing::const_argument(input)?);
+ Some(eq_token)
+ } else {
+ None
+ }
+ },
+ default,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for WhereClause {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(WhereClause {
+ where_token: input.parse()?,
+ predicates: {
+ let mut predicates = Punctuated::new();
+ loop {
+ if input.is_empty()
+ || input.peek(token::Brace)
+ || input.peek(Token![,])
+ || input.peek(Token![;])
+ || input.peek(Token![:]) && !input.peek(Token![::])
+ || input.peek(Token![=])
+ {
+ break;
+ }
+ let value = input.parse()?;
+ predicates.push_value(value);
+ if !input.peek(Token![,]) {
+ break;
+ }
+ let punct = input.parse()?;
+ predicates.push_punct(punct);
+ }
+ predicates
+ },
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Option<WhereClause> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Token![where]) {
+ input.parse().map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for WherePredicate {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Lifetime) && input.peek2(Token![:]) {
+ Ok(WherePredicate::Lifetime(PredicateLifetime {
+ lifetime: input.parse()?,
+ colon_token: input.parse()?,
+ bounds: {
+ let mut bounds = Punctuated::new();
+ loop {
+ if input.is_empty()
+ || input.peek(token::Brace)
+ || input.peek(Token![,])
+ || input.peek(Token![;])
+ || input.peek(Token![:])
+ || input.peek(Token![=])
+ {
+ break;
+ }
+ let value = input.parse()?;
+ bounds.push_value(value);
+ if !input.peek(Token![+]) {
+ break;
+ }
+ let punct = input.parse()?;
+ bounds.push_punct(punct);
+ }
+ bounds
+ },
+ }))
+ } else {
+ Ok(WherePredicate::Type(PredicateType {
+ lifetimes: input.parse()?,
+ bounded_ty: input.parse()?,
+ colon_token: input.parse()?,
+ bounds: {
+ let mut bounds = Punctuated::new();
+ loop {
+ if input.is_empty()
+ || input.peek(token::Brace)
+ || input.peek(Token![,])
+ || input.peek(Token![;])
+                            || input.peek(Token![:]) && !input.peek(Token![::])
+ || input.peek(Token![=])
+ {
+ break;
+ }
+ let value = input.parse()?;
+ bounds.push_value(value);
+ if !input.peek(Token![+]) {
+ break;
+ }
+ let punct = input.parse()?;
+ bounds.push_punct(punct);
+ }
+ bounds
+ },
+ }))
+ }
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+pub(crate) mod printing {
+ use crate::attr::FilterAttrs;
+ #[cfg(feature = "full")]
+ use crate::expr;
+ use crate::expr::Expr;
+ #[cfg(feature = "full")]
+ use crate::fixup::FixupContext;
+ use crate::generics::{
+        BoundLifetimes, ConstParam, GenericParam, Generics, ImplGenerics, LifetimeParam,
+        PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, Turbofish, TypeGenerics,
+ TypeParam, WhereClause,
+ };
+ use crate::print::TokensOrDefault;
+ use crate::token;
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Generics {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if self.params.is_empty() {
+ return;
+ }
+
+ TokensOrDefault(&self.lt_token).to_tokens(tokens);
+
+ // Print lifetimes before types and consts, regardless of their
+ // order in self.params.
+ let mut trailing_or_empty = true;
+ for param in self.params.pairs() {
+ if let GenericParam::Lifetime(_) = **param.value() {
+ param.to_tokens(tokens);
+ trailing_or_empty = param.punct().is_some();
+ }
+ }
+ for param in self.params.pairs() {
+ match param.value() {
+ GenericParam::Type(_) | GenericParam::Const(_) => {
+ if !trailing_or_empty {
+ <Token![,]>::default().to_tokens(tokens);
+ trailing_or_empty = true;
+ }
+ param.to_tokens(tokens);
+ }
+ GenericParam::Lifetime(_) => {}
+ }
+ }
+
+ TokensOrDefault(&self.gt_token).to_tokens(tokens);
+ }
+ }
+
+ impl<'a> ToTokens for ImplGenerics<'a> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if self.0.params.is_empty() {
+ return;
+ }
+
+ TokensOrDefault(&self.0.lt_token).to_tokens(tokens);
+
+ // Print lifetimes before types and consts, regardless of their
+ // order in self.params.
+ let mut trailing_or_empty = true;
+ for param in self.0.params.pairs() {
+ if let GenericParam::Lifetime(_) = **param.value() {
+ param.to_tokens(tokens);
+ trailing_or_empty = param.punct().is_some();
+ }
+ }
+ for param in self.0.params.pairs() {
+ if let GenericParam::Lifetime(_) = **param.value() {
+ continue;
+ }
+ if !trailing_or_empty {
+ <Token![,]>::default().to_tokens(tokens);
+ trailing_or_empty = true;
+ }
+ match param.value() {
+ GenericParam::Lifetime(_) => unreachable!(),
+ GenericParam::Type(param) => {
+ // Leave off the type parameter defaults
+ tokens.append_all(param.attrs.outer());
+ param.ident.to_tokens(tokens);
+ if !param.bounds.is_empty() {
+                        TokensOrDefault(&param.colon_token).to_tokens(tokens);
+ param.bounds.to_tokens(tokens);
+ }
+ }
+ GenericParam::Const(param) => {
+ // Leave off the const parameter defaults
+ tokens.append_all(param.attrs.outer());
+ param.const_token.to_tokens(tokens);
+ param.ident.to_tokens(tokens);
+ param.colon_token.to_tokens(tokens);
+ param.ty.to_tokens(tokens);
+ }
+ }
+ param.punct().to_tokens(tokens);
+ }
+
+ TokensOrDefault(&self.0.gt_token).to_tokens(tokens);
+ }
+ }
+
+ impl<'a> ToTokens for TypeGenerics<'a> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if self.0.params.is_empty() {
+ return;
+ }
+
+ TokensOrDefault(&self.0.lt_token).to_tokens(tokens);
+
+ // Print lifetimes before types and consts, regardless of their
+ // order in self.params.
+ let mut trailing_or_empty = true;
+ for param in self.0.params.pairs() {
+ if let GenericParam::Lifetime(def) = *param.value() {
+ // Leave off the lifetime bounds and attributes
+ def.lifetime.to_tokens(tokens);
+ param.punct().to_tokens(tokens);
+ trailing_or_empty = param.punct().is_some();
+ }
+ }
+ for param in self.0.params.pairs() {
+ if let GenericParam::Lifetime(_) = **param.value() {
+ continue;
+ }
+ if !trailing_or_empty {
+ <Token![,]>::default().to_tokens(tokens);
+ trailing_or_empty = true;
+ }
+ match param.value() {
+ GenericParam::Lifetime(_) => unreachable!(),
+ GenericParam::Type(param) => {
+ // Leave off the type parameter defaults
+ param.ident.to_tokens(tokens);
+ }
+ GenericParam::Const(param) => {
+ // Leave off the const parameter defaults
+ param.ident.to_tokens(tokens);
+ }
+ }
+ param.punct().to_tokens(tokens);
+ }
+
+ TokensOrDefault(&self.0.gt_token).to_tokens(tokens);
+ }
+ }
+
+ impl<'a> ToTokens for Turbofish<'a> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if !self.0.params.is_empty() {
+ <Token![::]>::default().to_tokens(tokens);
+ TypeGenerics(self.0).to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for BoundLifetimes {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.for_token.to_tokens(tokens);
+ self.lt_token.to_tokens(tokens);
+ self.lifetimes.to_tokens(tokens);
+ self.gt_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LifetimeParam {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.lifetime.to_tokens(tokens);
+ if !self.bounds.is_empty() {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeParam {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.ident.to_tokens(tokens);
+ if !self.bounds.is_empty() {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+ default.to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TraitBound {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ let to_tokens = |tokens: &mut TokenStream| {
+ self.modifier.to_tokens(tokens);
+ self.lifetimes.to_tokens(tokens);
+ self.path.to_tokens(tokens);
+ };
+ match &self.paren_token {
+ Some(paren) => paren.surround(tokens, to_tokens),
+ None => to_tokens(tokens),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TraitBoundModifier {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ TraitBoundModifier::None => {}
+ TraitBoundModifier::Maybe(t) => t.to_tokens(tokens),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ConstParam {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.const_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+ print_const_argument(default, tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for WhereClause {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if !self.predicates.is_empty() {
+ self.where_token.to_tokens(tokens);
+ self.predicates.to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PredicateLifetime {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.lifetime.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PredicateType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.lifetimes.to_tokens(tokens);
+ self.bounded_ty.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ }
+
+ pub(crate) fn print_const_argument(expr: &Expr, tokens: &mut TokenStream) {
+ match expr {
+ Expr::Lit(expr) => expr.to_tokens(tokens),
+
+ Expr::Path(expr)
+ if expr.attrs.is_empty()
+ && expr.qself.is_none()
+ && expr.path.get_ident().is_some() =>
+ {
+ expr.to_tokens(tokens);
+ }
+
+ #[cfg(feature = "full")]
+ Expr::Block(expr) => expr.to_tokens(tokens),
+
+ #[cfg(not(feature = "full"))]
+ Expr::Verbatim(expr) => expr.to_tokens(tokens),
+
+ // ERROR CORRECTION: Add braces to make sure that the
+ // generated code is valid.
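+ //
+ // Illustrative sketch, not part of upstream syn: a const argument
+ // expression such as `N + 1` is neither a literal, a bare path, nor a
+ // block, so this fallback arm prints it inside braces to keep the output
+ // parseable, e.g.
+ //
+ //     Foo::<{ N + 1 }>   // printed with the added braces (valid)
+ //     Foo::<N + 1>       // what unbraced printing would emit (invalid)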
+ _ => token::Brace::default().surround(tokens, |tokens| {
+ #[cfg(feature = "full")]
+ expr::printing::print_expr(expr, tokens, FixupContext::new_stmt());
+
+ #[cfg(not(feature = "full"))]
+ expr.to_tokens(tokens);
+ }),
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/group.rs b/rust/hw/char/pl011/vendor/syn/src/group.rs
new file mode 100644
index 0000000000..b742927eef
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/group.rs
@@ -0,0 +1,291 @@
+use crate::error::Result;
+use crate::parse::ParseBuffer;
+use crate::token;
+use proc_macro2::extra::DelimSpan;
+use proc_macro2::Delimiter;
+
+// Not public API.
+#[doc(hidden)]
+pub struct Parens<'a> {
+ #[doc(hidden)]
+ pub token: token::Paren,
+ #[doc(hidden)]
+ pub content: ParseBuffer<'a>,
+}
+
+// Not public API.
+#[doc(hidden)]
+pub struct Braces<'a> {
+ #[doc(hidden)]
+ pub token: token::Brace,
+ #[doc(hidden)]
+ pub content: ParseBuffer<'a>,
+}
+
+// Not public API.
+#[doc(hidden)]
+pub struct Brackets<'a> {
+ #[doc(hidden)]
+ pub token: token::Bracket,
+ #[doc(hidden)]
+ pub content: ParseBuffer<'a>,
+}
+
+// Not public API.
+#[cfg(any(feature = "full", feature = "derive"))]
+#[doc(hidden)]
+pub struct Group<'a> {
+ #[doc(hidden)]
+ pub token: token::Group,
+ #[doc(hidden)]
+ pub content: ParseBuffer<'a>,
+}
+
+// Not public API.
+#[doc(hidden)]
+pub fn parse_parens<'a>(input: &ParseBuffer<'a>) -> Result<Parens<'a>> {
+ parse_delimited(input, Delimiter::Parenthesis).map(|(span, content)| Parens {
+ token: token::Paren(span),
+ content,
+ })
+}
+
+// Not public API.
+#[doc(hidden)]
+pub fn parse_braces<'a>(input: &ParseBuffer<'a>) -> Result<Braces<'a>> {
+ parse_delimited(input, Delimiter::Brace).map(|(span, content)| Braces {
+ token: token::Brace(span),
+ content,
+ })
+}
+
+// Not public API.
+#[doc(hidden)]
+pub fn parse_brackets<'a>(input: &ParseBuffer<'a>) -> Result<Brackets<'a>> {
+ parse_delimited(input, Delimiter::Bracket).map(|(span, content)| Brackets {
+ token: token::Bracket(span),
+ content,
+ })
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+pub(crate) fn parse_group<'a>(input: &ParseBuffer<'a>) -> Result<Group<'a>> {
+ parse_delimited(input, Delimiter::None).map(|(span, content)| Group {
+ token: token::Group(span.join()),
+ content,
+ })
+}
+
+fn parse_delimited<'a>(
+ input: &ParseBuffer<'a>,
+ delimiter: Delimiter,
+) -> Result<(DelimSpan, ParseBuffer<'a>)> {
+ input.step(|cursor| {
+ if let Some((content, span, rest)) = cursor.group(delimiter) {
+ let scope = crate::buffer::close_span_of_group(*cursor);
+ let nested = crate::parse::advance_step_cursor(cursor, content);
+ let unexpected = crate::parse::get_unexpected(input);
+ let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
+ Ok(((span, content), rest))
+ } else {
+ let message = match delimiter {
+ Delimiter::Parenthesis => "expected parentheses",
+ Delimiter::Brace => "expected curly braces",
+ Delimiter::Bracket => "expected square brackets",
+ Delimiter::None => "expected invisible group",
+ };
+ Err(cursor.error(message))
+ }
+ })
+}
+
+/// Parse a set of parentheses and expose their content to subsequent parsers.
+///
+/// # Example
+///
+/// ```
+/// # use quote::quote;
+/// #
+/// use syn::{parenthesized, token, Ident, Result, Token, Type};
+/// use syn::parse::{Parse, ParseStream};
+/// use syn::punctuated::Punctuated;
+///
+/// // Parse a simplified tuple struct syntax like:
+/// //
+/// // struct S(A, B);
+/// struct TupleStruct {
+/// struct_token: Token![struct],
+/// ident: Ident,
+/// paren_token: token::Paren,
+/// fields: Punctuated<Type, Token![,]>,
+/// semi_token: Token![;],
+/// }
+///
+/// impl Parse for TupleStruct {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let content;
+/// Ok(TupleStruct {
+/// struct_token: input.parse()?,
+/// ident: input.parse()?,
+/// paren_token: parenthesized!(content in input),
+/// fields: content.parse_terminated(Type::parse, Token![,])?,
+/// semi_token: input.parse()?,
+/// })
+/// }
+/// }
+/// #
+/// # fn main() {
+/// # let input = quote! {
+/// # struct S(A, B);
+/// # };
+/// # syn::parse2::<TupleStruct>(input).unwrap();
+/// # }
+/// ```
+#[macro_export]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+macro_rules! parenthesized {
+ ($content:ident in $cursor:expr) => {
+ match $crate::__private::parse_parens(&$cursor) {
+ $crate::__private::Ok(parens) => {
+ $content = parens.content;
+ parens.token
+ }
+ $crate::__private::Err(error) => {
+ return $crate::__private::Err(error);
+ }
+ }
+ };
+}
+
+/// Parse a set of curly braces and expose their content to subsequent parsers.
+///
+/// # Example
+///
+/// ```
+/// # use quote::quote;
+/// #
+/// use syn::{braced, token, Ident, Result, Token, Type};
+/// use syn::parse::{Parse, ParseStream};
+/// use syn::punctuated::Punctuated;
+///
+/// // Parse a simplified struct syntax like:
+/// //
+/// // struct S {
+/// // a: A,
+/// // b: B,
+/// // }
+/// struct Struct {
+/// struct_token: Token![struct],
+/// ident: Ident,
+/// brace_token: token::Brace,
+/// fields: Punctuated<Field, Token![,]>,
+/// }
+///
+/// struct Field {
+/// name: Ident,
+/// colon_token: Token![:],
+/// ty: Type,
+/// }
+///
+/// impl Parse for Struct {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let content;
+/// Ok(Struct {
+/// struct_token: input.parse()?,
+/// ident: input.parse()?,
+/// brace_token: braced!(content in input),
+/// fields: content.parse_terminated(Field::parse, Token![,])?,
+/// })
+/// }
+/// }
+///
+/// impl Parse for Field {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// Ok(Field {
+/// name: input.parse()?,
+/// colon_token: input.parse()?,
+/// ty: input.parse()?,
+/// })
+/// }
+/// }
+/// #
+/// # fn main() {
+/// # let input = quote! {
+/// # struct S {
+/// # a: A,
+/// # b: B,
+/// # }
+/// # };
+/// # syn::parse2::<Struct>(input).unwrap();
+/// # }
+/// ```
+#[macro_export]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+macro_rules! braced {
+ ($content:ident in $cursor:expr) => {
+ match $crate::__private::parse_braces(&$cursor) {
+ $crate::__private::Ok(braces) => {
+ $content = braces.content;
+ braces.token
+ }
+ $crate::__private::Err(error) => {
+ return $crate::__private::Err(error);
+ }
+ }
+ };
+}
+
+/// Parse a set of square brackets and expose their content to subsequent
+/// parsers.
+///
+/// # Example
+///
+/// ```
+/// # use quote::quote;
+/// #
+/// use proc_macro2::TokenStream;
+/// use syn::{bracketed, token, Result, Token};
+/// use syn::parse::{Parse, ParseStream};
+///
+/// // Parse an outer attribute like:
+/// //
+/// // #[repr(C, packed)]
+/// struct OuterAttribute {
+/// pound_token: Token![#],
+/// bracket_token: token::Bracket,
+/// content: TokenStream,
+/// }
+///
+/// impl Parse for OuterAttribute {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let content;
+/// Ok(OuterAttribute {
+/// pound_token: input.parse()?,
+/// bracket_token: bracketed!(content in input),
+/// content: content.parse()?,
+/// })
+/// }
+/// }
+/// #
+/// # fn main() {
+/// # let input = quote! {
+/// # #[repr(C, packed)]
+/// # };
+/// # syn::parse2::<OuterAttribute>(input).unwrap();
+/// # }
+/// ```
+#[macro_export]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+macro_rules! bracketed {
+ ($content:ident in $cursor:expr) => {
+ match $crate::__private::parse_brackets(&$cursor) {
+ $crate::__private::Ok(brackets) => {
+ $content = brackets.content;
+ brackets.token
+ }
+ $crate::__private::Err(error) => {
+ return $crate::__private::Err(error);
+ }
+ }
+ };
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/ident.rs b/rust/hw/char/pl011/vendor/syn/src/ident.rs
new file mode 100644
index 0000000000..8a8e8a50d9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/ident.rs
@@ -0,0 +1,108 @@
+#[cfg(feature = "parsing")]
+use crate::lookahead;
+
+pub use proc_macro2::Ident;
+
+#[cfg(feature = "parsing")]
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
+ match marker {}
+ }
+}
+
+macro_rules! ident_from_token {
+ ($token:ident) => {
+ impl From<Token![$token]> for Ident {
+ fn from(token: Token![$token]) -> Ident {
+ Ident::new(stringify!($token), token.span)
+ }
+ }
+ };
+}
+
+ident_from_token!(self);
+ident_from_token!(Self);
+ident_from_token!(super);
+ident_from_token!(crate);
+ident_from_token!(extern);
+
+impl From<Token![_]> for Ident {
+ fn from(token: Token![_]) -> Ident {
+ Ident::new("_", token.span)
+ }
+}
+
+pub(crate) fn xid_ok(symbol: &str) -> bool {
+ let mut chars = symbol.chars();
+ let first = chars.next().unwrap();
+ if !(first == '_' || unicode_ident::is_xid_start(first)) {
+ return false;
+ }
+ for ch in chars {
+ if !unicode_ident::is_xid_continue(ch) {
+ return false;
+ }
+ }
+ true
+}
+
+#[cfg(feature = "parsing")]
+mod parsing {
+ use crate::buffer::Cursor;
+ use crate::error::Result;
+ use crate::parse::{Parse, ParseStream};
+ use crate::token::Token;
+ use proc_macro2::Ident;
+
+ fn accept_as_ident(ident: &Ident) -> bool {
+ match ident.to_string().as_str() {
+ "_" |
+ // Based on https://doc.rust-lang.org/1.65.0/reference/keywords.html
+ "abstract" | "as" | "async" | "await" | "become" | "box" | "break"
|
+ "const" | "continue" | "crate" | "do" | "dyn" | "else" | "enum" |
+ "extern" | "false" | "final" | "fn" | "for" | "if" | "impl" | "in"
|
+ "let" | "loop" | "macro" | "match" | "mod" | "move" | "mut" |
+ "override" | "priv" | "pub" | "ref" | "return" | "Self" | "self" |
+ "static" | "struct" | "super" | "trait" | "true" | "try" | "type" |
+ "typeof" | "unsafe" | "unsized" | "use" | "virtual" | "where" |
+ "while" | "yield" => false,
+ _ => true,
+ }
+ }
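+
+ // Illustrative sketch, not part of upstream syn: with this filter,
+ // `syn::parse_str::<Ident>("while")` fails with "expected identifier,
+ // found keyword `while`", whereas a raw identifier such as `r#while`
+ // parses, because `Ident::to_string` keeps the `r#` prefix and therefore
+ // never matches the keyword list. Callers that do want to accept keywords
+ // use `Ident::parse_any` from `syn::ext::IdentExt` instead of this
+ // `Parse` impl.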
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Ident {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| {
+ if let Some((ident, rest)) = cursor.ident() {
+ if accept_as_ident(&ident) {
+ Ok((ident, rest))
+ } else {
+ Err(cursor.error(format_args!(
+ "expected identifier, found keyword `{}`",
+ ident,
+ )))
+ }
+ } else {
+ Err(cursor.error("expected identifier"))
+ }
+ })
+ }
+ }
+
+ impl Token for Ident {
+ fn peek(cursor: Cursor) -> bool {
+ if let Some((ident, _rest)) = cursor.ident() {
+ accept_as_ident(&ident)
+ } else {
+ false
+ }
+ }
+
+ fn display() -> &'static str {
+ "identifier"
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/item.rs b/rust/hw/char/pl011/vendor/syn/src/item.rs
new file mode 100644
index 0000000000..fa87b42b96
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/item.rs
@@ -0,0 +1,3441 @@
+use crate::attr::Attribute;
+use crate::data::{Fields, FieldsNamed, Variant};
+use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+use crate::expr::Expr;
+use crate::generics::{Generics, TypeParamBound};
+use crate::ident::Ident;
+use crate::lifetime::Lifetime;
+use crate::mac::Macro;
+use crate::pat::{Pat, PatType};
+use crate::path::Path;
+use crate::punctuated::Punctuated;
+use crate::restriction::Visibility;
+use crate::stmt::Block;
+use crate::token;
+use crate::ty::{Abi, ReturnType, Type};
+use proc_macro2::TokenStream;
+#[cfg(feature = "parsing")]
+use std::mem;
+
+ast_enum_of_structs! {
+ /// Things that can appear directly inside of a module or scope.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ #[non_exhaustive]
+ pub enum Item {
+ /// A constant item: `const MAX: u16 = 65535`.
+ Const(ItemConst),
+
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ Enum(ItemEnum),
+
+ /// An `extern crate` item: `extern crate serde`.
+ ExternCrate(ItemExternCrate),
+
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ...
+ /// }`.
+ Fn(ItemFn),
+
+ /// A block of foreign items: `extern "C" { ... }`.
+ ForeignMod(ItemForeignMod),
+
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ Impl(ItemImpl),
+
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ Macro(ItemMacro),
+
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ Mod(ItemMod),
+
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ Static(ItemStatic),
+
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ Struct(ItemStruct),
+
+ /// A trait definition: `pub trait Iterator { ... }`.
+ Trait(ItemTrait),
+
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ TraitAlias(ItemTraitAlias),
+
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ Type(ItemType),
+
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ Union(ItemUnion),
+
+ /// A use declaration: `use std::collections::HashMap`.
+ Use(ItemUse),
+
+ /// Tokens forming an item not interpreted by Syn.
+ Verbatim(TokenStream),
+
+ // For testing exhaustiveness in downstream code, use the following idiom:
+ //
+ // match item {
+ // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+ //
+ // Item::Const(item) => {...}
+ // Item::Enum(item) => {...}
+ // ...
+ // Item::Verbatim(item) => {...}
+ //
+ // _ => { /* some sane fallback */ }
+ // }
+ //
+ // This way we fail your tests but don't break your library when adding
+ // a variant. You will be notified by a test failure when a variant is
+ // added, so that you can add code to handle it, but your library will
+ // continue to compile and work for downstream users in the interim.
+ }
+}
+
+ast_struct! {
+ /// A constant item: `const MAX: u16 = 65535`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ pub eq_token: Token![=],
+ pub expr: Box<Expr>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemEnum {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub enum_token: Token![enum],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub brace_token: token::Brace,
+ pub variants: Punctuated<Variant, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// An `extern crate` item: `extern crate serde`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemExternCrate {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub extern_token: Token![extern],
+ pub crate_token: Token![crate],
+ pub ident: Ident,
+ pub rename: Option<(Token![as], Ident)>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub sig: Signature,
+ pub block: Box<Block>,
+ }
+}
+
+ast_struct! {
+ /// A block of foreign items: `extern "C" { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemForeignMod {
+ pub attrs: Vec<Attribute>,
+ pub unsafety: Option<Token![unsafe]>,
+ pub abi: Abi,
+ pub brace_token: token::Brace,
+ pub items: Vec<ForeignItem>,
+ }
+}
+
+ast_struct! {
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemImpl {
+ pub attrs: Vec<Attribute>,
+ pub defaultness: Option<Token![default]>,
+ pub unsafety: Option<Token![unsafe]>,
+ pub impl_token: Token![impl],
+ pub generics: Generics,
+ /// Trait this impl implements.
+ pub trait_: Option<(Option<Token![!]>, Path, Token![for])>,
+ /// The Self type of the impl.
+ pub self_ty: Box<Type>,
+ pub brace_token: token::Brace,
+ pub items: Vec<ImplItem>,
+ }
+}
+
+ast_struct! {
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemMacro {
+ pub attrs: Vec<Attribute>,
+ /// The `example` in `macro_rules! example { ... }`.
+ pub ident: Option<Ident>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+}
+
+ast_struct! {
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemMod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub unsafety: Option<Token![unsafe]>,
+ pub mod_token: Token![mod],
+ pub ident: Ident,
+ pub content: Option<(token::Brace, Vec<Item>)>,
+ pub semi: Option<Token![;]>,
+ }
+}
+
+ast_struct! {
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub static_token: Token![static],
+ pub mutability: StaticMutability,
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ pub eq_token: Token![=],
+ pub expr: Box<Expr>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemStruct {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub struct_token: Token![struct],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub fields: Fields,
+ pub semi_token: Option<Token![;]>,
+ }
+}
+
+ast_struct! {
+ /// A trait definition: `pub trait Iterator { ... }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemTrait {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub unsafety: Option<Token![unsafe]>,
+ pub auto_token: Option<Token![auto]>,
+ pub restriction: Option<ImplRestriction>,
+ pub trait_token: Token![trait],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub colon_token: Option<Token![:]>,
+ pub supertraits: Punctuated<TypeParamBound, Token![+]>,
+ pub brace_token: token::Brace,
+ pub items: Vec<TraitItem>,
+ }
+}
+
+ast_struct! {
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemTraitAlias {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub trait_token: Token![trait],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub eq_token: Token![=],
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub eq_token: Token![=],
+ pub ty: Box<Type>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemUnion {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub union_token: Token![union],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub fields: FieldsNamed,
+ }
+}
+
+ast_struct! {
+ /// A use declaration: `use std::collections::HashMap`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ItemUse {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub use_token: Token![use],
+ pub leading_colon: Option<Token![::]>,
+ pub tree: UseTree,
+ pub semi_token: Token![;],
+ }
+}
+
+impl Item {
+ #[cfg(feature = "parsing")]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+ Item::Const(ItemConst { attrs, .. })
+ | Item::Enum(ItemEnum { attrs, .. })
+ | Item::ExternCrate(ItemExternCrate { attrs, .. })
+ | Item::Fn(ItemFn { attrs, .. })
+ | Item::ForeignMod(ItemForeignMod { attrs, .. })
+ | Item::Impl(ItemImpl { attrs, .. })
+ | Item::Macro(ItemMacro { attrs, .. })
+ | Item::Mod(ItemMod { attrs, .. })
+ | Item::Static(ItemStatic { attrs, .. })
+ | Item::Struct(ItemStruct { attrs, .. })
+ | Item::Trait(ItemTrait { attrs, .. })
+ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
+ | Item::Type(ItemType { attrs, .. })
+ | Item::Union(ItemUnion { attrs, .. })
+ | Item::Use(ItemUse { attrs, .. }) => mem::replace(attrs, new),
+ Item::Verbatim(_) => Vec::new(),
+ }
+ }
+}
+
+impl From<DeriveInput> for Item {
+ fn from(input: DeriveInput) -> Item {
+ match input.data {
+ Data::Struct(data) => Item::Struct(ItemStruct {
+ attrs: input.attrs,
+ vis: input.vis,
+ struct_token: data.struct_token,
+ ident: input.ident,
+ generics: input.generics,
+ fields: data.fields,
+ semi_token: data.semi_token,
+ }),
+ Data::Enum(data) => Item::Enum(ItemEnum {
+ attrs: input.attrs,
+ vis: input.vis,
+ enum_token: data.enum_token,
+ ident: input.ident,
+ generics: input.generics,
+ brace_token: data.brace_token,
+ variants: data.variants,
+ }),
+ Data::Union(data) => Item::Union(ItemUnion {
+ attrs: input.attrs,
+ vis: input.vis,
+ union_token: data.union_token,
+ ident: input.ident,
+ generics: input.generics,
+ fields: data.fields,
+ }),
+ }
+ }
+}
+
+impl From<ItemStruct> for DeriveInput {
+ fn from(input: ItemStruct) -> DeriveInput {
+ DeriveInput {
+ attrs: input.attrs,
+ vis: input.vis,
+ ident: input.ident,
+ generics: input.generics,
+ data: Data::Struct(DataStruct {
+ struct_token: input.struct_token,
+ fields: input.fields,
+ semi_token: input.semi_token,
+ }),
+ }
+ }
+}
+
+impl From<ItemEnum> for DeriveInput {
+ fn from(input: ItemEnum) -> DeriveInput {
+ DeriveInput {
+ attrs: input.attrs,
+ vis: input.vis,
+ ident: input.ident,
+ generics: input.generics,
+ data: Data::Enum(DataEnum {
+ enum_token: input.enum_token,
+ brace_token: input.brace_token,
+ variants: input.variants,
+ }),
+ }
+ }
+}
+
+impl From<ItemUnion> for DeriveInput {
+ fn from(input: ItemUnion) -> DeriveInput {
+ DeriveInput {
+ attrs: input.attrs,
+ vis: input.vis,
+ ident: input.ident,
+ generics: input.generics,
+ data: Data::Union(DataUnion {
+ union_token: input.union_token,
+ fields: input.fields,
+ }),
+ }
+ }
+}
+
+ast_enum_of_structs! {
+ /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub enum UseTree {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ Path(UsePath),
+
+ /// An identifier imported by a `use` item: `HashMap`.
+ Name(UseName),
+
+ /// A renamed identifier imported by a `use` item: `HashMap as Map`.
+ Rename(UseRename),
+
+ /// A glob import in a `use` item: `*`.
+ Glob(UseGlob),
+
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ Group(UseGroup),
+ }
+}
+
+ast_struct! {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct UsePath {
+ pub ident: Ident,
+ pub colon2_token: Token![::],
+ pub tree: Box<UseTree>,
+ }
+}
+
+ast_struct! {
+ /// An identifier imported by a `use` item: `HashMap`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct UseName {
+ pub ident: Ident,
+ }
+}
+
+ast_struct! {
+ /// A renamed identifier imported by a `use` item: `HashMap as Map`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct UseRename {
+ pub ident: Ident,
+ pub as_token: Token![as],
+ pub rename: Ident,
+ }
+}
+
+ast_struct! {
+ /// A glob import in a `use` item: `*`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct UseGlob {
+ pub star_token: Token![*],
+ }
+}
+
+ast_struct! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct UseGroup {
+ pub brace_token: token::Brace,
+ pub items: Punctuated<UseTree, Token![,]>,
+ }
+}
+
+ast_enum_of_structs! {
+ /// An item within an `extern` block.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ #[non_exhaustive]
+ pub enum ForeignItem {
+ /// A foreign function in an `extern` block.
+ Fn(ForeignItemFn),
+
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ Static(ForeignItemStatic),
+
+ /// A foreign type in an `extern` block: `type void`.
+ Type(ForeignItemType),
+
+ /// A macro invocation within an extern block.
+ Macro(ForeignItemMacro),
+
+ /// Tokens in an `extern` block not interpreted by Syn.
+ Verbatim(TokenStream),
+
+ // For testing exhaustiveness in downstream code, use the following idiom:
+ //
+ // match item {
+ // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+ //
+ // ForeignItem::Fn(item) => {...}
+ // ForeignItem::Static(item) => {...}
+ // ...
+ // ForeignItem::Verbatim(item) => {...}
+ //
+ // _ => { /* some sane fallback */ }
+ // }
+ //
+ // This way we fail your tests but don't break your library when adding
+ // a variant. You will be notified by a test failure when a variant is
+ // added, so that you can add code to handle it, but your library will
+ // continue to compile and work for downstream users in the interim.
+ }
+}
+
+ast_struct! {
+ /// A foreign function in an `extern` block.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ForeignItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub sig: Signature,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ForeignItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub static_token: Token![static],
+ pub mutability: StaticMutability,
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A foreign type in an `extern` block: `type void`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ForeignItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A macro invocation within an extern block.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ForeignItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+}
+
+ast_enum_of_structs! {
+ /// An item declaration within the definition of a trait.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ #[non_exhaustive]
+ pub enum TraitItem {
+ /// An associated constant within the definition of a trait.
+ Const(TraitItemConst),
+
+ /// An associated function within the definition of a trait.
+ Fn(TraitItemFn),
+
+ /// An associated type within the definition of a trait.
+ Type(TraitItemType),
+
+ /// A macro invocation within the definition of a trait.
+ Macro(TraitItemMacro),
+
+ /// Tokens within the definition of a trait not interpreted by Syn.
+ Verbatim(TokenStream),
+
+ // For testing exhaustiveness in downstream code, use the following idiom:
+ //
+ // match item {
+ // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+ //
+ // TraitItem::Const(item) => {...}
+ // TraitItem::Fn(item) => {...}
+ // ...
+ // TraitItem::Verbatim(item) => {...}
+ //
+ // _ => { /* some sane fallback */ }
+ // }
+ //
+ // This way we fail your tests but don't break your library when adding
+ // a variant. You will be notified by a test failure when a variant is
+ // added, so that you can add code to handle it, but your library will
+ // continue to compile and work for downstream users in the interim.
+ }
+}
+
+ast_struct! {
+ /// An associated constant within the definition of a trait.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct TraitItemConst {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub colon_token: Token![:],
+ pub ty: Type,
+ pub default: Option<(Token![=], Expr)>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// An associated function within the definition of a trait.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct TraitItemFn {
+ pub attrs: Vec<Attribute>,
+ pub sig: Signature,
+ pub default: Option<Block>,
+ pub semi_token: Option<Token![;]>,
+ }
+}
+
+ast_struct! {
+ /// An associated type within the definition of a trait.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct TraitItemType {
+ pub attrs: Vec<Attribute>,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub colon_token: Option<Token![:]>,
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ pub default: Option<(Token![=], Type)>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A macro invocation within the definition of a trait.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct TraitItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+}
+
+ast_enum_of_structs! {
+ /// An item within an impl block.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ #[non_exhaustive]
+ pub enum ImplItem {
+ /// An associated constant within an impl block.
+ Const(ImplItemConst),
+
+ /// An associated function within an impl block.
+ Fn(ImplItemFn),
+
+ /// An associated type within an impl block.
+ Type(ImplItemType),
+
+ /// A macro invocation within an impl block.
+ Macro(ImplItemMacro),
+
+ /// Tokens within an impl block not interpreted by Syn.
+ Verbatim(TokenStream),
+
+ // For testing exhaustiveness in downstream code, use the following idiom:
+ //
+ // match item {
+ // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+ //
+ // ImplItem::Const(item) => {...}
+ // ImplItem::Fn(item) => {...}
+ // ...
+ // ImplItem::Verbatim(item) => {...}
+ //
+ // _ => { /* some sane fallback */ }
+ // }
+ //
+ // This way we fail your tests but don't break your library when adding
+ // a variant. You will be notified by a test failure when a variant is
+ // added, so that you can add code to handle it, but your library will
+ // continue to compile and work for downstream users in the interim.
+ }
+}
+
+ast_struct! {
+ /// An associated constant within an impl block.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ImplItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub defaultness: Option<Token![default]>,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub colon_token: Token![:],
+ pub ty: Type,
+ pub eq_token: Token![=],
+ pub expr: Expr,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// An associated function within an impl block.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ImplItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub defaultness: Option<Token![default]>,
+ pub sig: Signature,
+ pub block: Block,
+ }
+}
+
+ast_struct! {
+ /// An associated type within an impl block.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ImplItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub defaultness: Option<Token![default]>,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub eq_token: Token![=],
+ pub ty: Type,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// A macro invocation within an impl block.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct ImplItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+}
+
+ast_struct! {
+ /// A function signature in a trait or implementation: `unsafe fn
+ /// initialize(&self)`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct Signature {
+ pub constness: Option<Token![const]>,
+ pub asyncness: Option<Token![async]>,
+ pub unsafety: Option<Token![unsafe]>,
+ pub abi: Option<Abi>,
+ pub fn_token: Token![fn],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub paren_token: token::Paren,
+ pub inputs: Punctuated<FnArg, Token![,]>,
+ pub variadic: Option<Variadic>,
+ pub output: ReturnType,
+ }
+}
+
+impl Signature {
+ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
+ pub fn receiver(&self) -> Option<&Receiver> {
+ let arg = self.inputs.first()?;
+ match arg {
+ FnArg::Receiver(receiver) => Some(receiver),
+ FnArg::Typed(_) => None,
+ }
+ }
+}
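+
+// Illustrative sketch, not part of upstream syn: with the `full` feature
+// enabled, a procedural macro can use `Signature::receiver` to tell methods
+// apart from associated functions, e.g.
+//
+//     let f: syn::ImplItemFn = syn::parse_quote! { fn reset(&mut self) {} };
+//     assert!(f.sig.receiver().is_some());
+//
+//     let g: syn::ImplItemFn = syn::parse_quote! { fn new() -> Self { todo!() } };
+//     assert!(g.sig.receiver().is_none());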
+
+ast_enum_of_structs! {
+ /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub enum FnArg {
+ /// The `self` argument of an associated method.
+ Receiver(Receiver),
+
+ /// A function argument accepted by pattern and type.
+ Typed(PatType),
+ }
+}
+
+ast_struct! {
+ /// The `self` argument of an associated method.
+ ///
+ /// If `colon_token` is present, the receiver is written with an explicit
+ /// type such as `self: Box<Self>`. If `colon_token` is absent, the receiver
+ /// is written in shorthand such as `self` or `&self` or `&mut self`. In the
+ /// shorthand case, the type in `ty` is reconstructed as one of `Self`,
+ /// `&Self`, or `&mut Self`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct Receiver {
+ pub attrs: Vec<Attribute>,
+ pub reference: Option<(Token![&], Option<Lifetime>)>,
+ pub mutability: Option<Token![mut]>,
+ pub self_token: Token![self],
+ pub colon_token: Option<Token![:]>,
+ pub ty: Box<Type>,
+ }
+}
+
+impl Receiver {
+ pub fn lifetime(&self) -> Option<&Lifetime> {
+ self.reference.as_ref()?.1.as_ref()
+ }
+}
+
+ast_struct! {
+ /// The variadic argument of a foreign function.
+ ///
+ /// ```rust
+ /// # struct c_char;
+ /// # struct c_int;
+ /// #
+ /// extern "C" {
+ /// fn printf(format: *const c_char, ...) -> c_int;
+ /// // ^^^
+ /// }
+ /// ```
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct Variadic {
+ pub attrs: Vec<Attribute>,
+ pub pat: Option<(Box<Pat>, Token![:])>,
+ pub dots: Token![...],
+ pub comma: Option<Token![,]>,
+ }
+}
+
+ast_enum! {
+ /// The mutability of an `Item::Static` or `ForeignItem::Static`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ #[non_exhaustive]
+ pub enum StaticMutability {
+ Mut(Token![mut]),
+ None,
+ }
+}
+
+ast_enum! {
+ /// Unused, but reserved for RFC 3323 restrictions.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ #[non_exhaustive]
+ pub enum ImplRestriction {}
+
+
+ // TODO: https://rust-lang.github.io/rfcs/3323-restrictions.html
+ //
+ // pub struct ImplRestriction {
+ // pub impl_token: Token![impl],
+ // pub paren_token: token::Paren,
+ // pub in_token: Option<Token![in]>,
+ // pub path: Box<Path>,
+ // }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::{self, Attribute};
+ use crate::derive;
+ use crate::error::{Error, Result};
+ use crate::expr::Expr;
+ use crate::ext::IdentExt as _;
+ use crate::generics::{Generics, TypeParamBound};
+ use crate::ident::Ident;
+ use crate::item::{
+ FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType,
+ ImplItem, ImplItemConst, ImplItemFn, ImplItemMacro, ImplItemType, Item, ItemConst,
+ ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMod,
+ ItemStatic, ItemStruct, ItemTrait, ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver,
+ Signature, StaticMutability, TraitItem, TraitItemConst, TraitItemFn, TraitItemMacro,
+ TraitItemType, UseGlob, UseGroup, UseName, UsePath, UseRename, UseTree, Variadic,
+ };
+ use crate::lifetime::Lifetime;
+ use crate::lit::LitStr;
+ use crate::mac::{self, Macro};
+ use crate::parse::discouraged::Speculative as _;
+ use crate::parse::{Parse, ParseBuffer, ParseStream};
+ use crate::pat::{Pat, PatType, PatWild};
+ use crate::path::Path;
+ use crate::punctuated::Punctuated;
+ use crate::restriction::Visibility;
+ use crate::stmt::Block;
+ use crate::token;
+ use crate::ty::{Abi, ReturnType, Type, TypePath, TypeReference};
+ use crate::verbatim;
+ use proc_macro2::TokenStream;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Item {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let begin = input.fork();
+ let attrs = input.call(Attribute::parse_outer)?;
+ parse_rest_of_item(begin, attrs, input)
+ }
+ }
+
+ pub(crate) fn parse_rest_of_item(
+ begin: ParseBuffer,
+ mut attrs: Vec<Attribute>,
+ input: ParseStream,
+ ) -> Result<Item> {
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
+ let vis: Visibility = input.parse()?;
+ let sig: Signature = input.parse()?;
+ if input.peek(Token![;]) {
+ input.parse::<Token![;]>()?;
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
+ }
+ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+ ahead.parse::<LitStr>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else {
+ Err(lookahead.error())
+ }
+ } else {
+ Err(lookahead.error())
+ }
+ } else if lookahead.peek(Token![use]) {
+ let allow_crate_root_in_path = true;
+ match parse_item_use(input, allow_crate_root_in_path)? {
+ Some(item_use) => Ok(Item::Use(item_use)),
+ None => Ok(Item::Verbatim(verbatim::between(&begin, input))),
+ }
+ } else if lookahead.peek(Token![static]) {
+ let vis = input.parse()?;
+ let static_token = input.parse()?;
+ let mutability = input.parse()?;
+ let ident = input.parse()?;
+ if input.peek(Token![=]) {
+ input.parse::<Token![=]>()?;
+ input.parse::<Expr>()?;
+ input.parse::<Token![;]>()?;
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ let colon_token = input.parse()?;
+ let ty = input.parse()?;
+ if input.peek(Token![;]) {
+ input.parse::<Token![;]>()?;
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ Ok(Item::Static(ItemStatic {
+ attrs: Vec::new(),
+ vis,
+ static_token,
+ mutability,
+ ident,
+ colon_token,
+ ty,
+ eq_token: input.parse()?,
+ expr: input.parse()?,
+ semi_token: input.parse()?,
+ }))
+ }
+ }
+ } else if lookahead.peek(Token![const]) {
+ let vis = input.parse()?;
+ let const_token: Token![const] = input.parse()?;
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+ let mut generics: Generics = input.parse()?;
+ let colon_token = input.parse()?;
+ let ty = input.parse()?;
+ let value = if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
+ let expr: Expr = input.parse()?;
+ Some((eq_token, expr))
+ } else {
+ None
+ };
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ match value {
+ Some((eq_token, expr))
+ if generics.lt_token.is_none() && generics.where_clause.is_none() =>
+ {
+ Ok(Item::Const(ItemConst {
+ attrs: Vec::new(),
+ vis,
+ const_token,
+ ident,
+ generics,
+ colon_token,
+ ty,
+ eq_token,
+ expr: Box::new(expr),
+ semi_token,
+ }))
+ }
+ _ => Ok(Item::Verbatim(verbatim::between(&begin, input))),
+ }
+ } else if lookahead.peek(Token![unsafe]) {
+ ahead.parse::<Token![unsafe]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![trait])
+ || lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+ {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
+ let allow_verbatim_impl = true;
+ if let Some(item) = parse_impl(input, allow_verbatim_impl)? {
+ Ok(Item::Impl(item))
+ } else {
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ }
+ } else if lookahead.peek(Token![extern]) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else {
+ Err(lookahead.error())
+ }
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+ parse_item_type(begin, input)
+ } else if lookahead.peek(Token![struct]) {
+ input.parse().map(Item::Struct)
+ } else if lookahead.peek(Token![enum]) {
+ input.parse().map(Item::Enum)
+ } else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
+ input.parse().map(Item::Union)
+ } else if lookahead.peek(Token![trait]) {
+ input.call(parse_trait_or_trait_alias)
+ } else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+ let allow_verbatim_impl = true;
+ if let Some(item) = parse_impl(input, allow_verbatim_impl)? {
+ Ok(Item::Impl(item))
+ } else {
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ }
+ } else if lookahead.peek(Token![macro]) {
+ input.advance_to(&ahead);
+ parse_macro2(begin, vis, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(Item::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+ attrs.extend(item.replace_attrs(Vec::new()));
+ item.replace_attrs(attrs);
+ Ok(item)
+ }
+
+ struct FlexibleItemType {
+ vis: Visibility,
+ defaultness: Option<Token![default]>,
+ type_token: Token![type],
+ ident: Ident,
+ generics: Generics,
+ colon_token: Option<Token![:]>,
+ bounds: Punctuated<TypeParamBound, Token![+]>,
+ ty: Option<(Token![=], Type)>,
+ semi_token: Token![;],
+ }
+
+ enum TypeDefaultness {
+ Optional,
+ Disallowed,
+ }
+
+ enum WhereClauseLocation {
+ // type Ty<T> where T: 'static = T;
+ BeforeEq,
+ // type Ty<T> = T where T: 'static;
+ AfterEq,
+ // TODO: goes away once the migration period on rust-lang/rust#89122 is over
+ Both,
+ }
+
+ impl FlexibleItemType {
+ fn parse(
+ input: ParseStream,
+ allow_defaultness: TypeDefaultness,
+ where_clause_location: WhereClauseLocation,
+ ) -> Result<Self> {
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = match allow_defaultness {
+ TypeDefaultness::Optional => input.parse()?,
+ TypeDefaultness::Disallowed => None,
+ };
+ let type_token: Token![type] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let mut generics: Generics = input.parse()?;
+ let (colon_token, bounds) = Self::parse_optional_bounds(input)?;
+
+ match where_clause_location {
+ WhereClauseLocation::BeforeEq | WhereClauseLocation::Both => {
+ generics.where_clause = input.parse()?;
+ }
+ WhereClauseLocation::AfterEq => {}
+ }
+
+ let ty = Self::parse_optional_definition(input)?;
+
+ match where_clause_location {
+ WhereClauseLocation::AfterEq | WhereClauseLocation::Both
+ if generics.where_clause.is_none() =>
+ {
+ generics.where_clause = input.parse()?;
+ }
+ _ => {}
+ }
+
+ let semi_token: Token![;] = input.parse()?;
+
+ Ok(FlexibleItemType {
+ vis,
+ defaultness,
+ type_token,
+ ident,
+ generics,
+ colon_token,
+ bounds,
+ ty,
+ semi_token,
+ })
+ }
+
+ fn parse_optional_bounds(
+ input: ParseStream,
+ ) -> Result<(Option<Token![:]>, Punctuated<TypeParamBound, Token![+]>)> {
+ let colon_token: Option<Token![:]> = input.parse()?;
+
+ let mut bounds = Punctuated::new();
+ if colon_token.is_some() {
+ loop {
+ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
+ break;
+ }
+ bounds.push_value(input.parse::<TypeParamBound>()?);
+ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
+ break;
+ }
+ bounds.push_punct(input.parse::<Token![+]>()?);
+ }
+ }
+
+ Ok((colon_token, bounds))
+ }
+
+ fn parse_optional_definition(input: ParseStream) -> Result<Option<(Token![=], Type)>> {
+ let eq_token: Option<Token![=]> = input.parse()?;
+ if let Some(eq_token) = eq_token {
+ let definition: Type = input.parse()?;
+ Ok(Some((eq_token, definition)))
+ } else {
+ Ok(None)
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let path = input.call(Path::parse_mod_style)?;
+ let bang_token: Token![!] = input.parse()?;
+ let ident: Option<Ident> = if input.peek(Token![try]) {
+ input.call(Ident::parse_any).map(Some)
+ } else {
+ input.parse()
+ }?;
+ let (delimiter, tokens) = input.call(mac::parse_delimiter)?;
+ let semi_token: Option<Token![;]> = if !delimiter.is_brace() {
+ Some(input.parse()?)
+ } else {
+ None
+ };
+ Ok(ItemMacro {
+ attrs,
+ ident,
+ mac: Macro {
+ path,
+ bang_token,
+ delimiter,
+ tokens,
+ },
+ semi_token,
+ })
+ }
+ }
+
+ fn parse_macro2(begin: ParseBuffer, _vis: Visibility, input: ParseStream) -> Result<Item> {
+ input.parse::<Token![macro]>()?;
+ input.parse::<Ident>()?;
+
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(token::Paren) {
+ let paren_content;
+ parenthesized!(paren_content in input);
+ paren_content.parse::<TokenStream>()?;
+ lookahead = input.lookahead1();
+ }
+
+ if lookahead.peek(token::Brace) {
+ let brace_content;
+ braced!(brace_content in input);
+ brace_content.parse::<TokenStream>()?;
+ } else {
+ return Err(lookahead.error());
+ }
+
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemExternCrate {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ItemExternCrate {
+ attrs: input.call(Attribute::parse_outer)?,
+ vis: input.parse()?,
+ extern_token: input.parse()?,
+ crate_token: input.parse()?,
+ ident: {
+ if input.peek(Token![self]) {
+ input.call(Ident::parse_any)?
+ } else {
+ input.parse()?
+ }
+ },
+ rename: {
+ if input.peek(Token![as]) {
+ let as_token: Token![as] = input.parse()?;
+ let rename: Ident = if input.peek(Token![_]) {
+ Ident::from(input.parse::<Token![_]>()?)
+ } else {
+ input.parse()?
+ };
+ Some((as_token, rename))
+ } else {
+ None
+ }
+ },
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemUse {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_crate_root_in_path = false;
+ parse_item_use(input, allow_crate_root_in_path).map(Option::unwrap)
+ }
+ }
+
+ fn parse_item_use(
+ input: ParseStream,
+ allow_crate_root_in_path: bool,
+ ) -> Result<Option<ItemUse>> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let use_token: Token![use] = input.parse()?;
+ let leading_colon: Option<Token![::]> = input.parse()?;
+ let tree = parse_use_tree(input, allow_crate_root_in_path && leading_colon.is_none())?;
+ let semi_token: Token![;] = input.parse()?;
+
+ let tree = match tree {
+ Some(tree) => tree,
+ None => return Ok(None),
+ };
+
+ Ok(Some(ItemUse {
+ attrs,
+ vis,
+ use_token,
+ leading_colon,
+ tree,
+ semi_token,
+ }))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for UseTree {
+ fn parse(input: ParseStream) -> Result<UseTree> {
+ let allow_crate_root_in_path = false;
+ parse_use_tree(input, allow_crate_root_in_path).map(Option::unwrap)
+ }
+ }
+
+ fn parse_use_tree(
+ input: ParseStream,
+ allow_crate_root_in_path: bool,
+ ) -> Result<Option<UseTree>> {
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![try])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ if input.peek(Token![::]) {
+ Ok(Some(UseTree::Path(UsePath {
+ ident,
+ colon2_token: input.parse()?,
+ tree: Box::new(input.parse()?),
+ })))
+ } else if input.peek(Token![as]) {
+ Ok(Some(UseTree::Rename(UseRename {
+ ident,
+ as_token: input.parse()?,
+ rename: {
+ if input.peek(Ident) {
+ input.parse()?
+ } else if input.peek(Token![_]) {
+ Ident::from(input.parse::<Token![_]>()?)
+ } else {
+ return Err(input.error("expected identifier or
underscore"));
+ }
+ },
+ })))
+ } else {
+ Ok(Some(UseTree::Name(UseName { ident })))
+ }
+ } else if lookahead.peek(Token![*]) {
+ Ok(Some(UseTree::Glob(UseGlob {
+ star_token: input.parse()?,
+ })))
+ } else if lookahead.peek(token::Brace) {
+ let content;
+ let brace_token = braced!(content in input);
+ let mut items = Punctuated::new();
+ let mut has_any_crate_root_in_path = false;
+ loop {
+ if content.is_empty() {
+ break;
+ }
+ let this_tree_starts_with_crate_root =
+ allow_crate_root_in_path && content.parse::<Option<Token![::]>>()?.is_some();
+ has_any_crate_root_in_path |= this_tree_starts_with_crate_root;
+ match parse_use_tree(
+ &content,
+ allow_crate_root_in_path && !this_tree_starts_with_crate_root,
+ )? {
+ Some(tree) => items.push_value(tree),
+ None => has_any_crate_root_in_path = true,
+ }
+ if content.is_empty() {
+ break;
+ }
+ let comma: Token![,] = content.parse()?;
+ items.push_punct(comma);
+ }
+ if has_any_crate_root_in_path {
+ Ok(None)
+ } else {
+ Ok(Some(UseTree::Group(UseGroup { brace_token, items })))
+ }
+ } else {
+ Err(lookahead.error())
+ }
+ }
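+
+ // Illustrative sketch, not part of upstream syn: a use-tree group may start
+ // a subtree at the crate root, e.g. `use {::std::io, ::core::fmt};`. UseTree
+ // has no variant for that leading `::`, so this helper reports it as
+ // `Ok(None)` and `parse_rest_of_item` falls back to `Item::Verbatim` for
+ // the whole item; parsing the same input directly as `ItemUse` is an error.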
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemStatic {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ItemStatic {
+ attrs: input.call(Attribute::parse_outer)?,
+ vis: input.parse()?,
+ static_token: input.parse()?,
+ mutability: input.parse()?,
+ ident: input.parse()?,
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+ expr: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemConst {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let const_token: Token![const] = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let eq_token: Token![=] = input.parse()?;
+ let expr: Expr = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+
+ Ok(ItemConst {
+ attrs,
+ vis,
+ const_token,
+ ident,
+ generics: Generics::default(),
+ colon_token,
+ ty: Box::new(ty),
+ eq_token,
+ expr: Box::new(expr),
+ semi_token,
+ })
+ }
+ }
+
+ fn peek_signature(input: ParseStream) -> bool {
+ let fork = input.fork();
+ fork.parse::<Option<Token![const]>>().is_ok()
+ && fork.parse::<Option<Token![async]>>().is_ok()
+ && fork.parse::<Option<Token![unsafe]>>().is_ok()
+ && fork.parse::<Option<Abi>>().is_ok()
+ && fork.peek(Token![fn])
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Signature {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let constness: Option<Token![const]> = input.parse()?;
+ let asyncness: Option<Token![async]> = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let abi: Option<Abi> = input.parse()?;
+ let fn_token: Token![fn] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let mut generics: Generics = input.parse()?;
+
+ let content;
+ let paren_token = parenthesized!(content in input);
+ let (inputs, variadic) = parse_fn_args(&content)?;
+
+ let output: ReturnType = input.parse()?;
+ generics.where_clause = input.parse()?;
+
+ Ok(Signature {
+ constness,
+ asyncness,
+ unsafety,
+ abi,
+ fn_token,
+ ident,
+ generics,
+ paren_token,
+ inputs,
+ variadic,
+ output,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let sig: Signature = input.parse()?;
+ parse_rest_of_fn(input, outer_attrs, vis, sig)
+ }
+ }
+
+ fn parse_rest_of_fn(
+ input: ParseStream,
+ mut attrs: Vec<Attribute>,
+ vis: Visibility,
+ sig: Signature,
+ ) -> Result<ItemFn> {
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ItemFn {
+ attrs,
+ vis,
+ sig,
+ block: Box::new(Block { brace_token, stmts }),
+ })
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for FnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_variadic = false;
+ let attrs = input.call(Attribute::parse_outer)?;
+ match parse_fn_arg_or_variadic(input, attrs, allow_variadic)? {
+ FnArgOrVariadic::FnArg(arg) => Ok(arg),
+ FnArgOrVariadic::Variadic(_) => unreachable!(),
+ }
+ }
+ }
+
+ enum FnArgOrVariadic {
+ FnArg(FnArg),
+ Variadic(Variadic),
+ }
+
+ fn parse_fn_arg_or_variadic(
+ input: ParseStream,
+ attrs: Vec<Attribute>,
+ allow_variadic: bool,
+ ) -> Result<FnArgOrVariadic> {
+ let ahead = input.fork();
+ if let Ok(mut receiver) = ahead.parse::<Receiver>() {
+ input.advance_to(&ahead);
+ receiver.attrs = attrs;
+ return Ok(FnArgOrVariadic::FnArg(FnArg::Receiver(receiver)));
+ }
+
+ // Hack to parse pre-2018 syntax in
+ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
+ // because the rest of the test case is valuable.
+ if input.peek(Ident) && input.peek2(Token![<]) {
+ let span = input.fork().parse::<Ident>()?.span();
+ return Ok(FnArgOrVariadic::FnArg(FnArg::Typed(PatType {
+ attrs,
+ pat: Box::new(Pat::Wild(PatWild {
+ attrs: Vec::new(),
+ underscore_token: Token![_](span),
+ })),
+ colon_token: Token![:](span),
+ ty: input.parse()?,
+ })));
+ }
+
+ let pat = Box::new(Pat::parse_single(input)?);
+ let colon_token: Token![:] = input.parse()?;
+
+ if allow_variadic {
+ if let Some(dots) = input.parse::<Option<Token![...]>>()? {
+ return Ok(FnArgOrVariadic::Variadic(Variadic {
+ attrs,
+ pat: Some((pat, colon_token)),
+ dots,
+ comma: None,
+ }));
+ }
+ }
+
+ Ok(FnArgOrVariadic::FnArg(FnArg::Typed(PatType {
+ attrs,
+ pat,
+ colon_token,
+ ty: input.parse()?,
+ })))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Receiver {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let reference = if input.peek(Token![&]) {
+ let ampersand: Token![&] = input.parse()?;
+ let lifetime: Option<Lifetime> = input.parse()?;
+ Some((ampersand, lifetime))
+ } else {
+ None
+ };
+ let mutability: Option<Token![mut]> = input.parse()?;
+ let self_token: Token![self] = input.parse()?;
+ let colon_token: Option<Token![:]> = if reference.is_some() {
+ None
+ } else {
+ input.parse()?
+ };
+ let ty: Type = if colon_token.is_some() {
+ input.parse()?
+ } else {
+ let mut ty = Type::Path(TypePath {
+ qself: None,
+ path: Path::from(Ident::new("Self", self_token.span)),
+ });
+ if let Some((ampersand, lifetime)) = reference.as_ref() {
+ ty = Type::Reference(TypeReference {
+ and_token: Token![&](ampersand.span),
+ lifetime: lifetime.clone(),
+ mutability: mutability.as_ref().map(|m| Token![mut](m.span)),
+ elem: Box::new(ty),
+ });
+ }
+ ty
+ };
+ Ok(Receiver {
+ attrs: Vec::new(),
+ reference,
+ mutability,
+ self_token,
+ colon_token,
+ ty: Box::new(ty),
+ })
+ }
+ }
+
+ fn parse_fn_args(
+ input: ParseStream,
+ ) -> Result<(Punctuated<FnArg, Token![,]>, Option<Variadic>)> {
+ let mut args = Punctuated::new();
+ let mut variadic = None;
+ let mut has_receiver = false;
+
+ while !input.is_empty() {
+ let attrs = input.call(Attribute::parse_outer)?;
+
+ if let Some(dots) = input.parse::<Option<Token![...]>>()? {
+ variadic = Some(Variadic {
+ attrs,
+ pat: None,
+ dots,
+ comma: if input.is_empty() {
+ None
+ } else {
+ Some(input.parse()?)
+ },
+ });
+ break;
+ }
+
+ let allow_variadic = true;
+ let arg = match parse_fn_arg_or_variadic(input, attrs, allow_variadic)? {
+ FnArgOrVariadic::FnArg(arg) => arg,
+ FnArgOrVariadic::Variadic(arg) => {
+ variadic = Some(Variadic {
+ comma: if input.is_empty() {
+ None
+ } else {
+ Some(input.parse()?)
+ },
+ ..arg
+ });
+ break;
+ }
+ };
+
+ match &arg {
+ FnArg::Receiver(receiver) if has_receiver => {
+ return Err(Error::new(
+ receiver.self_token.span,
+ "unexpected second method receiver",
+ ));
+ }
+ FnArg::Receiver(receiver) if !args.is_empty() => {
+ return Err(Error::new(
+ receiver.self_token.span,
+ "unexpected method receiver",
+ ));
+ }
+ FnArg::Receiver(_) => has_receiver = true,
+ FnArg::Typed(_) => {}
+ }
+ args.push_value(arg);
+
+ if input.is_empty() {
+ break;
+ }
+
+ let comma: Token![,] = input.parse()?;
+ args.push_punct(comma);
+ }
+
+ Ok((args, variadic))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemMod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let mod_token: Token![mod] = input.parse()?;
+ let ident: Ident = if input.peek(Token![try]) {
+ input.call(Ident::parse_any)
+ } else {
+ input.parse()
+ }?;
+
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Token![;]) {
+ Ok(ItemMod {
+ attrs,
+ vis,
+ unsafety,
+ mod_token,
+ ident,
+ content: None,
+ semi: Some(input.parse()?),
+ })
+ } else if lookahead.peek(token::Brace) {
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemMod {
+ attrs,
+ vis,
+ unsafety,
+ mod_token,
+ ident,
+ content: Some((brace_token, items)),
+ semi: None,
+ })
+ } else {
+ Err(lookahead.error())
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemForeignMod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let abi: Abi = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemForeignMod {
+ attrs,
+ unsafety,
+ abi,
+ brace_token,
+ items,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ForeignItem {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
+ let vis: Visibility = input.parse()?;
+ let sig: Signature = input.parse()?;
+ if input.peek(token::Brace) {
+ let content;
+ braced!(content in input);
+ content.call(Attribute::parse_inner)?;
+ content.call(Block::parse_within)?;
+
+ Ok(ForeignItem::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ Ok(ForeignItem::Fn(ForeignItemFn {
+ attrs: Vec::new(),
+ vis,
+ sig,
+ semi_token: input.parse()?,
+ }))
+ }
+ } else if lookahead.peek(Token![static]) {
+ let vis = input.parse()?;
+ let static_token = input.parse()?;
+ let mutability = input.parse()?;
+ let ident = input.parse()?;
+ let colon_token = input.parse()?;
+ let ty = input.parse()?;
+ if input.peek(Token![=]) {
+ input.parse::<Token![=]>()?;
+ input.parse::<Expr>()?;
+ input.parse::<Token![;]>()?;
+ Ok(ForeignItem::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ Ok(ForeignItem::Static(ForeignItemStatic {
+ attrs: Vec::new(),
+ vis,
+ static_token,
+ mutability,
+ ident,
+ colon_token,
+ ty,
+ semi_token: input.parse()?,
+ }))
+ }
+ } else if lookahead.peek(Token![type]) {
+ parse_foreign_item_type(begin, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(ForeignItem::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+ let item_attrs = match &mut item {
+ ForeignItem::Fn(item) => &mut item.attrs,
+ ForeignItem::Static(item) => &mut item.attrs,
+ ForeignItem::Type(item) => &mut item.attrs,
+ ForeignItem::Macro(item) => &mut item.attrs,
+ ForeignItem::Verbatim(_) => return Ok(item),
+ };
+ attrs.append(item_attrs);
+ *item_attrs = attrs;
+
+ Ok(item)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ForeignItemFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let sig: Signature = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ Ok(ForeignItemFn {
+ attrs,
+ vis,
+ sig,
+ semi_token,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ForeignItemStatic {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ForeignItemStatic {
+ attrs: input.call(Attribute::parse_outer)?,
+ vis: input.parse()?,
+ static_token: input.parse()?,
+ mutability: input.parse()?,
+ ident: input.parse()?,
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ForeignItemType {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ForeignItemType {
+ attrs: input.call(Attribute::parse_outer)?,
+ vis: input.parse()?,
+ type_token: input.parse()?,
+ ident: input.parse()?,
+ generics: {
+ let mut generics: Generics = input.parse()?;
+ generics.where_clause = input.parse()?;
+ generics
+ },
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
+ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
+ let FlexibleItemType {
+ vis,
+ defaultness: _,
+ type_token,
+ ident,
+ generics,
+ colon_token,
+ bounds: _,
+ ty,
+ semi_token,
+ } = FlexibleItemType::parse(
+ input,
+ TypeDefaultness::Disallowed,
+ WhereClauseLocation::Both,
+ )?;
+
+ if colon_token.is_some() || ty.is_some() {
+ Ok(ForeignItem::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ Ok(ForeignItem::Type(ForeignItemType {
+ attrs: Vec::new(),
+ vis,
+ type_token,
+ ident,
+ generics,
+ semi_token,
+ }))
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ForeignItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let mac: Macro = input.parse()?;
+ let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
+ None
+ } else {
+ Some(input.parse()?)
+ };
+ Ok(ForeignItemMacro {
+ attrs,
+ mac,
+ semi_token,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemType {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ItemType {
+ attrs: input.call(Attribute::parse_outer)?,
+ vis: input.parse()?,
+ type_token: input.parse()?,
+ ident: input.parse()?,
+ generics: {
+ let mut generics: Generics = input.parse()?;
+ generics.where_clause = input.parse()?;
+ generics
+ },
+ eq_token: input.parse()?,
+ ty: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
+ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
+ let FlexibleItemType {
+ vis,
+ defaultness: _,
+ type_token,
+ ident,
+ generics,
+ colon_token,
+ bounds: _,
+ ty,
+ semi_token,
+ } = FlexibleItemType::parse(
+ input,
+ TypeDefaultness::Disallowed,
+ WhereClauseLocation::BeforeEq,
+ )?;
+
+ let (eq_token, ty) = match ty {
+ Some(ty) if colon_token.is_none() => ty,
+ _ => return Ok(Item::Verbatim(verbatim::between(&begin, input))),
+ };
+
+ Ok(Item::Type(ItemType {
+ attrs: Vec::new(),
+ vis,
+ type_token,
+ ident,
+ generics,
+ eq_token,
+ ty: Box::new(ty),
+ semi_token,
+ }))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemStruct {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis = input.parse::<Visibility>()?;
+ let struct_token = input.parse::<Token![struct]>()?;
+ let ident = input.parse::<Ident>()?;
+ let generics = input.parse::<Generics>()?;
+ let (where_clause, fields, semi_token) = derive::parsing::data_struct(input)?;
+ Ok(ItemStruct {
+ attrs,
+ vis,
+ struct_token,
+ ident,
+ generics: Generics {
+ where_clause,
+ ..generics
+ },
+ fields,
+ semi_token,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemEnum {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis = input.parse::<Visibility>()?;
+ let enum_token = input.parse::<Token![enum]>()?;
+ let ident = input.parse::<Ident>()?;
+ let generics = input.parse::<Generics>()?;
+ let (where_clause, brace_token, variants) = derive::parsing::data_enum(input)?;
+ Ok(ItemEnum {
+ attrs,
+ vis,
+ enum_token,
+ ident,
+ generics: Generics {
+ where_clause,
+ ..generics
+ },
+ brace_token,
+ variants,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemUnion {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis = input.parse::<Visibility>()?;
+ let union_token = input.parse::<Token![union]>()?;
+ let ident = input.parse::<Ident>()?;
+ let generics = input.parse::<Generics>()?;
+ let (where_clause, fields) = derive::parsing::data_union(input)?;
+ Ok(ItemUnion {
+ attrs,
+ vis,
+ union_token,
+ ident,
+ generics: Generics {
+ where_clause,
+ ..generics
+ },
+ fields,
+ })
+ }
+ }
+
+ fn parse_trait_or_trait_alias(input: ParseStream) -> Result<Item> {
+ let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
+ let lookahead = input.lookahead1();
+ if lookahead.peek(token::Brace)
+ || lookahead.peek(Token![:])
+ || lookahead.peek(Token![where])
+ {
+ let unsafety = None;
+ let auto_token = None;
+ parse_rest_of_trait(
+ input,
+ attrs,
+ vis,
+ unsafety,
+ auto_token,
+ trait_token,
+ ident,
+ generics,
+ )
+ .map(Item::Trait)
+ } else if lookahead.peek(Token![=]) {
+ parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
+ .map(Item::TraitAlias)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let auto_token: Option<Token![auto]> = input.parse()?;
+ let trait_token: Token![trait] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+ outer_attrs,
+ vis,
+ unsafety,
+ auto_token,
+ trait_token,
+ ident,
+ generics,
+ )
+ }
+ }
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+ mut attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+ trait_token: Token![trait],
+ ident: Ident,
+ mut generics: Generics,
+ ) -> Result<ItemTrait> {
+ let colon_token: Option<Token![:]> = input.parse()?;
+
+ let mut supertraits = Punctuated::new();
+ if colon_token.is_some() {
+ loop {
+ if input.peek(Token![where]) || input.peek(token::Brace) {
+ break;
+ }
+ supertraits.push_value(input.parse()?);
+ if input.peek(Token![where]) || input.peek(token::Brace) {
+ break;
+ }
+ supertraits.push_punct(input.parse()?);
+ }
+ }
+
+ generics.where_clause = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemTrait {
+ attrs,
+ vis,
+ unsafety,
+ auto_token,
+ restriction: None,
+ trait_token,
+ ident,
+ generics,
+ colon_token,
+ supertraits,
+ brace_token,
+ items,
+ })
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemTraitAlias {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
+ parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
+ }
+ }
+
+ fn parse_start_of_trait_alias(
+ input: ParseStream,
+ ) -> Result<(Vec<Attribute>, Visibility, Token![trait], Ident, Generics)> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let trait_token: Token![trait] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let generics: Generics = input.parse()?;
+ Ok((attrs, vis, trait_token, ident, generics))
+ }
+
+ fn parse_rest_of_trait_alias(
+ input: ParseStream,
+ attrs: Vec<Attribute>,
+ vis: Visibility,
+ trait_token: Token![trait],
+ ident: Ident,
+ mut generics: Generics,
+ ) -> Result<ItemTraitAlias> {
+ let eq_token: Token![=] = input.parse()?;
+
+ let mut bounds = Punctuated::new();
+ loop {
+ if input.peek(Token![where]) || input.peek(Token![;]) {
+ break;
+ }
+ bounds.push_value(input.parse()?);
+ if input.peek(Token![where]) || input.peek(Token![;]) {
+ break;
+ }
+ bounds.push_punct(input.parse()?);
+ }
+
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+
+ Ok(ItemTraitAlias {
+ attrs,
+ vis,
+ trait_token,
+ ident,
+ generics,
+ eq_token,
+ bounds,
+ semi_token,
+ })
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TraitItem {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+ let ahead = input.fork();
+
+ let lookahead = ahead.lookahead1();
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
+ input.parse().map(TraitItem::Fn)
+ } else if lookahead.peek(Token![const]) {
+ let const_token: Token![const] = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.advance_to(&ahead);
+ let ident = input.call(Ident::parse_any)?;
+ let mut generics: Generics = input.parse()?;
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let default = if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
+ let expr: Expr = input.parse()?;
+ Some((eq_token, expr))
+ } else {
+ None
+ };
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ if generics.lt_token.is_none() && generics.where_clause.is_none() {
+ Ok(TraitItem::Const(TraitItemConst {
+ attrs: Vec::new(),
+ const_token,
+ ident,
+ generics,
+ colon_token,
+ ty,
+ default,
+ semi_token,
+ }))
+ } else {
+ return Ok(TraitItem::Verbatim(verbatim::between(&begin, input)));
+ }
+ } else if lookahead.peek(Token![async])
+ || lookahead.peek(Token![unsafe])
+ || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![fn])
+ {
+ input.parse().map(TraitItem::Fn)
+ } else {
+ Err(lookahead.error())
+ }
+ } else if lookahead.peek(Token![type]) {
+ parse_trait_item_type(begin.fork(), input)
+ } else if vis.is_inherited()
+ && defaultness.is_none()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(TraitItem::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+ match (vis, defaultness) {
+ (Visibility::Inherited, None) => {}
+ _ => return Ok(TraitItem::Verbatim(verbatim::between(&begin, input))),
+ }
+
+ let item_attrs = match &mut item {
+ TraitItem::Const(item) => &mut item.attrs,
+ TraitItem::Fn(item) => &mut item.attrs,
+ TraitItem::Type(item) => &mut item.attrs,
+ TraitItem::Macro(item) => &mut item.attrs,
+ TraitItem::Verbatim(_) => unreachable!(),
+ };
+ attrs.append(item_attrs);
+ *item_attrs = attrs;
+ Ok(item)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TraitItemConst {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let const_token: Token![const] = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let default = if input.peek(Token![=]) {
+ let eq_token: Token![=] = input.parse()?;
+ let default: Expr = input.parse()?;
+ Some((eq_token, default))
+ } else {
+ None
+ };
+ let semi_token: Token![;] = input.parse()?;
+
+ Ok(TraitItemConst {
+ attrs,
+ const_token,
+ ident,
+ generics: Generics::default(),
+ colon_token,
+ ty,
+ default,
+ semi_token,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TraitItemFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let sig: Signature = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let (brace_token, stmts, semi_token) = if lookahead.peek(token::Brace) {
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+ let stmts = content.call(Block::parse_within)?;
+ (Some(brace_token), stmts, None)
+ } else if lookahead.peek(Token![;]) {
+ let semi_token: Token![;] = input.parse()?;
+ (None, Vec::new(), Some(semi_token))
+ } else {
+ return Err(lookahead.error());
+ };
+
+ Ok(TraitItemFn {
+ attrs,
+ sig,
+ default: brace_token.map(|brace_token| Block { brace_token, stmts }),
+ semi_token,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TraitItemType {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let type_token: Token![type] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let mut generics: Generics = input.parse()?;
+ let (colon_token, bounds) = FlexibleItemType::parse_optional_bounds(input)?;
+ let default = FlexibleItemType::parse_optional_definition(input)?;
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ Ok(TraitItemType {
+ attrs,
+ type_token,
+ ident,
+ generics,
+ colon_token,
+ bounds,
+ default,
+ semi_token,
+ })
+ }
+ }
+
+ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
+ let FlexibleItemType {
+ vis,
+ defaultness: _,
+ type_token,
+ ident,
+ generics,
+ colon_token,
+ bounds,
+ ty,
+ semi_token,
+ } = FlexibleItemType::parse(
+ input,
+ TypeDefaultness::Disallowed,
+ WhereClauseLocation::AfterEq,
+ )?;
+
+ if vis.is_some() {
+ Ok(TraitItem::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ Ok(TraitItem::Type(TraitItemType {
+ attrs: Vec::new(),
+ type_token,
+ ident,
+ generics,
+ colon_token,
+ bounds,
+ default: ty,
+ semi_token,
+ }))
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TraitItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let mac: Macro = input.parse()?;
+ let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
+ None
+ } else {
+ Some(input.parse()?)
+ };
+ Ok(TraitItemMacro {
+ attrs,
+ mac,
+ semi_token,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ItemImpl {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_verbatim_impl = false;
+ parse_impl(input, allow_verbatim_impl).map(Option::unwrap)
+ }
+ }
+
+ fn parse_impl(input: ParseStream, allow_verbatim_impl: bool) -> Result<Option<ItemImpl>> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let has_visibility = allow_verbatim_impl && input.parse::<Visibility>()?.is_some();
+ let defaultness: Option<Token![default]> = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let impl_token: Token![impl] = input.parse()?;
+
+ let has_generics = input.peek(Token![<])
+ && (input.peek2(Token![>])
+ || input.peek2(Token![#])
+ || (input.peek2(Ident) || input.peek2(Lifetime))
+ && (input.peek3(Token![:])
+ || input.peek3(Token![,])
+ || input.peek3(Token![>])
+ || input.peek3(Token![=]))
+ || input.peek2(Token![const]));
+ let mut generics: Generics = if has_generics {
+ input.parse()?
+ } else {
+ Generics::default()
+ };
+
+ let is_const_impl = allow_verbatim_impl
+ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
+ if is_const_impl {
+ input.parse::<Option<Token![?]>>()?;
+ input.parse::<Token![const]>()?;
+ }
+
+ let begin = input.fork();
+ let polarity = if input.peek(Token![!]) && !input.peek2(token::Brace) {
+ Some(input.parse::<Token![!]>()?)
+ } else {
+ None
+ };
+
+ #[cfg(not(feature = "printing"))]
+ let first_ty_span = input.span();
+ let mut first_ty: Type = input.parse()?;
+ let self_ty: Type;
+ let trait_;
+
+ let is_impl_for = input.peek(Token![for]);
+ if is_impl_for {
+ let for_token: Token![for] = input.parse()?;
+ let mut first_ty_ref = &first_ty;
+ while let Type::Group(ty) = first_ty_ref {
+ first_ty_ref = &ty.elem;
+ }
+ if let Type::Path(TypePath { qself: None, .. }) = first_ty_ref {
+ while let Type::Group(ty) = first_ty {
+ first_ty = *ty.elem;
+ }
+ if let Type::Path(TypePath { qself: None, path }) = first_ty {
+ trait_ = Some((polarity, path, for_token));
+ } else {
+ unreachable!();
+ }
+ } else if !allow_verbatim_impl {
+ #[cfg(feature = "printing")]
+ return Err(Error::new_spanned(first_ty_ref, "expected trait path"));
+ #[cfg(not(feature = "printing"))]
+ return Err(Error::new(first_ty_span, "expected trait path"));
+ } else {
+ trait_ = None;
+ }
+ self_ty = input.parse()?;
+ } else {
+ trait_ = None;
+ self_ty = if polarity.is_none() {
+ first_ty
+ } else {
+ Type::Verbatim(verbatim::between(&begin, input))
+ };
+ }
+
+ generics.where_clause = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ attr::parsing::parse_inner(&content, &mut attrs)?;
+
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ if has_visibility || is_const_impl || is_impl_for && trait_.is_none() {
+ Ok(None)
+ } else {
+ Ok(Some(ItemImpl {
+ attrs,
+ defaultness,
+ unsafety,
+ impl_token,
+ generics,
+ trait_,
+ self_ty: Box::new(self_ty),
+ brace_token,
+ items,
+ }))
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ImplItem {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let mut lookahead = ahead.lookahead1();
+ let defaultness = if lookahead.peek(Token![default]) && !ahead.peek2(Token![!]) {
+ let defaultness: Token![default] = ahead.parse()?;
+ lookahead = ahead.lookahead1();
+ Some(defaultness)
+ } else {
+ None
+ };
+
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
+ let allow_omitted_body = true;
+ if let Some(item) = parse_impl_item_fn(input, allow_omitted_body)? {
+ Ok(ImplItem::Fn(item))
+ } else {
+ Ok(ImplItem::Verbatim(verbatim::between(&begin, input)))
+ }
+ } else if lookahead.peek(Token![const]) {
+ input.advance_to(&ahead);
+ let const_token: Token![const] = input.parse()?;
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+ let mut generics: Generics = input.parse()?;
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let value = if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
+ let expr: Expr = input.parse()?;
+ Some((eq_token, expr))
+ } else {
+ None
+ };
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ return match value {
+ Some((eq_token, expr))
+ if generics.lt_token.is_none() && generics.where_clause.is_none() =>
+ {
+ Ok(ImplItem::Const(ImplItemConst {
+ attrs,
+ vis,
+ defaultness,
+ const_token,
+ ident,
+ generics,
+ colon_token,
+ ty,
+ eq_token,
+ expr,
+ semi_token,
+ }))
+ }
+ _ => Ok(ImplItem::Verbatim(verbatim::between(&begin, input))),
+ };
+ } else if lookahead.peek(Token![type]) {
+ parse_impl_item_type(begin, input)
+ } else if vis.is_inherited()
+ && defaultness.is_none()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(ImplItem::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+ {
+ let item_attrs = match &mut item {
+ ImplItem::Const(item) => &mut item.attrs,
+ ImplItem::Fn(item) => &mut item.attrs,
+ ImplItem::Type(item) => &mut item.attrs,
+ ImplItem::Macro(item) => &mut item.attrs,
+ ImplItem::Verbatim(_) => return Ok(item),
+ };
+ attrs.append(item_attrs);
+ *item_attrs = attrs;
+ }
+
+ Ok(item)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ImplItemConst {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+ let const_token: Token![const] = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let eq_token: Token![=] = input.parse()?;
+ let expr: Expr = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+
+ Ok(ImplItemConst {
+ attrs,
+ vis,
+ defaultness,
+ const_token,
+ ident,
+ generics: Generics::default(),
+ colon_token,
+ ty,
+ eq_token,
+ expr,
+ semi_token,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ImplItemFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_omitted_body = false;
+ parse_impl_item_fn(input, allow_omitted_body).map(Option::unwrap)
+ }
+ }
+
+ fn parse_impl_item_fn(
+ input: ParseStream,
+ allow_omitted_body: bool,
+ ) -> Result<Option<ImplItemFn>> {
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+ let sig: Signature = input.parse()?;
+
+ // Accept functions without a body in an impl block because rustc's
+ // *parser* does not reject them (the compilation error is emitted later
+ // than parsing) and it can be useful for macro DSLs.
+ if allow_omitted_body && input.parse::<Option<Token![;]>>()?.is_some() {
+ return Ok(None);
+ }
+
+ let content;
+ let brace_token = braced!(content in input);
+ attrs.extend(content.call(Attribute::parse_inner)?);
+ let block = Block {
+ brace_token,
+ stmts: content.call(Block::parse_within)?,
+ };
+
+ Ok(Some(ImplItemFn {
+ attrs,
+ vis,
+ defaultness,
+ sig,
+ block,
+ }))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ImplItemType {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+ let type_token: Token![type] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let mut generics: Generics = input.parse()?;
+ let eq_token: Token![=] = input.parse()?;
+ let ty: Type = input.parse()?;
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ Ok(ImplItemType {
+ attrs,
+ vis,
+ defaultness,
+ type_token,
+ ident,
+ generics,
+ eq_token,
+ ty,
+ semi_token,
+ })
+ }
+ }
+
+ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
+ let FlexibleItemType {
+ vis,
+ defaultness,
+ type_token,
+ ident,
+ generics,
+ colon_token,
+ bounds: _,
+ ty,
+ semi_token,
+ } = FlexibleItemType::parse(
+ input,
+ TypeDefaultness::Optional,
+ WhereClauseLocation::AfterEq,
+ )?;
+
+ let (eq_token, ty) = match ty {
+ Some(ty) if colon_token.is_none() => ty,
+ _ => return Ok(ImplItem::Verbatim(verbatim::between(&begin, input))),
+ };
+
+ Ok(ImplItem::Type(ImplItemType {
+ attrs: Vec::new(),
+ vis,
+ defaultness,
+ type_token,
+ ident,
+ generics,
+ eq_token,
+ ty,
+ semi_token,
+ }))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ImplItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let mac: Macro = input.parse()?;
+ let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
+ None
+ } else {
+ Some(input.parse()?)
+ };
+ Ok(ImplItemMacro {
+ attrs,
+ mac,
+ semi_token,
+ })
+ }
+ }
+
+ impl Visibility {
+ fn is_inherited(&self) -> bool {
+ match self {
+ Visibility::Inherited => true,
+ _ => false,
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for StaticMutability {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut_token: Option<Token![mut]> = input.parse()?;
+ Ok(mut_token.map_or(StaticMutability::None, StaticMutability::Mut))
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::attr::FilterAttrs;
+ use crate::data::Fields;
+ use crate::item::{
+ ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType, ImplItemConst,
+ ImplItemFn, ImplItemMacro, ImplItemType, ItemConst, ItemEnum, ItemExternCrate, ItemFn,
+ ItemForeignMod, ItemImpl, ItemMacro, ItemMod, ItemStatic, ItemStruct, ItemTrait,
+ ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver, Signature, StaticMutability,
+ TraitItemConst, TraitItemFn, TraitItemMacro, TraitItemType, UseGlob, UseGroup, UseName,
+ UsePath, UseRename, Variadic,
+ };
+ use crate::mac::MacroDelimiter;
+ use crate::print::TokensOrDefault;
+ use crate::ty::Type;
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemExternCrate {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.extern_token.to_tokens(tokens);
+ self.crate_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ if let Some((as_token, rename)) = &self.rename {
+ as_token.to_tokens(tokens);
+ rename.to_tokens(tokens);
+ }
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemUse {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.use_token.to_tokens(tokens);
+ self.leading_colon.to_tokens(tokens);
+ self.tree.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemStatic {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.static_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.expr.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemConst {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.const_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.expr.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemFn {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemMod {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.unsafety.to_tokens(tokens);
+ self.mod_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ if let Some((brace, items)) = &self.content {
+ brace.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(items);
+ });
+ } else {
+ TokensOrDefault(&self.semi).to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemForeignMod {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.unsafety.to_tokens(tokens);
+ self.abi.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.items);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.type_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemEnum {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.enum_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ self.variants.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.struct_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ match &self.fields {
+ Fields::Named(fields) => {
+ self.generics.where_clause.to_tokens(tokens);
+ fields.to_tokens(tokens);
+ }
+ Fields::Unnamed(fields) => {
+ fields.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ TokensOrDefault(&self.semi_token).to_tokens(tokens);
+ }
+ Fields::Unit => {
+ self.generics.where_clause.to_tokens(tokens);
+ TokensOrDefault(&self.semi_token).to_tokens(tokens);
+ }
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemUnion {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.union_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ self.fields.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemTrait {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.unsafety.to_tokens(tokens);
+ self.auto_token.to_tokens(tokens);
+ self.trait_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ if !self.supertraits.is_empty() {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.supertraits.to_tokens(tokens);
+ }
+ self.generics.where_clause.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.items);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemTraitAlias {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.trait_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemImpl {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.defaultness.to_tokens(tokens);
+ self.unsafety.to_tokens(tokens);
+ self.impl_token.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ if let Some((polarity, path, for_token)) = &self.trait_ {
+ polarity.to_tokens(tokens);
+ path.to_tokens(tokens);
+ for_token.to_tokens(tokens);
+ }
+ self.self_ty.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.items);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ItemMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.mac.path.to_tokens(tokens);
+ self.mac.bang_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ match &self.mac.delimiter {
+ MacroDelimiter::Paren(paren) => {
+ paren.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens));
+ }
+ MacroDelimiter::Brace(brace) => {
+ brace.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens));
+ }
+ MacroDelimiter::Bracket(bracket) => {
+ bracket.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens));
+ }
+ }
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for UsePath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.ident.to_tokens(tokens);
+ self.colon2_token.to_tokens(tokens);
+ self.tree.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for UseName {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.ident.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for UseRename {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.ident.to_tokens(tokens);
+ self.as_token.to_tokens(tokens);
+ self.rename.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for UseGlob {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.star_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for UseGroup {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.brace_token.surround(tokens, |tokens| {
+ self.items.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TraitItemConst {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.const_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ if let Some((eq_token, default)) = &self.default {
+ eq_token.to_tokens(tokens);
+ default.to_tokens(tokens);
+ }
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TraitItemFn {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.sig.to_tokens(tokens);
+ match &self.default {
+ Some(block) => {
+ block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&block.stmts);
+ });
+ }
+ None => {
+ TokensOrDefault(&self.semi_token).to_tokens(tokens);
+ }
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TraitItemType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.type_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ if !self.bounds.is_empty() {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ if let Some((eq_token, default)) = &self.default {
+ eq_token.to_tokens(tokens);
+ default.to_tokens(tokens);
+ }
+ self.generics.where_clause.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TraitItemMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ImplItemConst {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.const_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.expr.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ImplItemFn {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ImplItemType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.type_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ImplItemMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ForeignItemFn {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ForeignItemStatic {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.static_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ForeignItemType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.type_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ForeignItemMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Signature {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.constness.to_tokens(tokens);
+ self.asyncness.to_tokens(tokens);
+ self.unsafety.to_tokens(tokens);
+ self.abi.to_tokens(tokens);
+ self.fn_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+ self.inputs.to_tokens(tokens);
+ if let Some(variadic) = &self.variadic {
+ if !self.inputs.empty_or_trailing() {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ variadic.to_tokens(tokens);
+ }
+ });
+ self.output.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Receiver {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ if let Some((ampersand, lifetime)) = &self.reference {
+ ampersand.to_tokens(tokens);
+ lifetime.to_tokens(tokens);
+ }
+ self.mutability.to_tokens(tokens);
+ self.self_token.to_tokens(tokens);
+ if let Some(colon_token) = &self.colon_token {
+ colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ } else {
+ let consistent = match (&self.reference, &self.mutability, &*self.ty) {
+ (Some(_), mutability, Type::Reference(ty)) => {
+ mutability.is_some() == ty.mutability.is_some()
+ && match &*ty.elem {
+ Type::Path(ty) => ty.qself.is_none() && ty.path.is_ident("Self"),
+ _ => false,
+ }
+ }
+ (None, _, Type::Path(ty)) => ty.qself.is_none() && ty.path.is_ident("Self"),
+ _ => false,
+ };
+ if !consistent {
+ <Token![:]>::default().to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ }
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Variadic {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ if let Some((pat, colon)) = &self.pat {
+ pat.to_tokens(tokens);
+ colon.to_tokens(tokens);
+ }
+ self.dots.to_tokens(tokens);
+ self.comma.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for StaticMutability {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ StaticMutability::None => {}
+ StaticMutability::Mut(mut_token) => mut_token.to_tokens(tokens),
+ }
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/lib.rs b/rust/hw/char/pl011/vendor/syn/src/lib.rs
new file mode 100644
index 0000000000..a8372e8079
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/lib.rs
@@ -0,0 +1,1019 @@
+//! [![github]](https://github.com/dtolnay/syn) [![crates-io]](https://crates.io/crates/syn) [![docs-rs]](crate)
+//!
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
+//!
+//! <br>
+//!
+//! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
+//! tree of Rust source code.
+//!
+//! Currently this library is geared toward use in Rust procedural macros, but
+//! contains some APIs that may be useful more generally.
+//!
+//! - **Data structures** — Syn provides a complete syntax tree that can
+//! represent any valid Rust source code. The syntax tree is rooted at
+//! [`syn::File`] which represents a full source file, but there are other
+//! entry points that may be useful to procedural macros including
+//! [`syn::Item`], [`syn::Expr`] and [`syn::Type`].
+//!
+//! - **Derives** — Of particular interest to derive macros is
+//! [`syn::DeriveInput`] which is any of the three legal input items to a
+//! derive macro. An example below shows using this type in a library that can
+//! derive implementations of a user-defined trait.
+//!
+//! - **Parsing** — Parsing in Syn is built around [parser functions] with the
+//! signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined
+//! by Syn is individually parsable and may be used as a building block for
+//! custom syntaxes, or you may dream up your own brand new syntax without
+//! involving any of our syntax tree types (see the sketch below this list).
+//!
+//! - **Location information** — Every token parsed by Syn is associated with a
+//! `Span` that tracks line and column information back to the source of that
+//! token. These spans allow a procedural macro to display detailed error
+//! messages pointing to all the right places in the user's code. There is an
+//! example of this below.
+//!
+//! - **Feature flags** — Functionality is aggressively feature gated so your
+//! procedural macros enable only what they need, and do not pay in compile
+//! time for all the rest.
+//!
+//! [`syn::File`]: File
+//! [`syn::Item`]: Item
+//! [`syn::Expr`]: Expr
+//! [`syn::Type`]: Type
+//! [`syn::DeriveInput`]: DeriveInput
+//! [parser functions]: mod@parse
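+//!
+//! As a rough illustration of that parser-function style (a sketch only: the
+//! `KeyValue` type and its fields are invented for this example and are not
+//! part of Syn's API):
+//!
+//! ```
+//! use syn::parse::{Parse, ParseStream};
+//! use syn::{Ident, LitInt, Result, Token};
+//!
+//! // Parses input of the form `max = 10`.
+//! struct KeyValue {
+//!     key: Ident,
+//!     eq_token: Token![=],
+//!     value: LitInt,
+//! }
+//!
+//! impl Parse for KeyValue {
+//!     fn parse(input: ParseStream) -> Result<Self> {
+//!         Ok(KeyValue {
+//!             key: input.parse()?,
+//!             eq_token: input.parse()?,
+//!             value: input.parse()?,
+//!         })
+//!     }
+//! }
+//!
+//! fn main() -> Result<()> {
+//!     let kv: KeyValue = syn::parse_str("max = 10")?;
+//!     assert_eq!(kv.value.base10_parse::<u32>()?, 10);
+//!     Ok(())
+//! }
+//! ```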
+//!
+//! <br>
+//!
+//! # Example of a derive macro
+//!
+//! The canonical derive macro using Syn looks like this. We write an ordinary
+//! Rust function tagged with a `proc_macro_derive` attribute and the name of
+//! the trait we are deriving. Any time that derive appears in the user's code,
+//! the Rust compiler passes their data structure as tokens into our macro. We
+//! get to execute arbitrary Rust code to figure out what to do with those
+//! tokens, then hand some tokens back to the compiler to compile into the
+//! user's crate.
+//!
+//! [`TokenStream`]: proc_macro::TokenStream
+//!
+//! ```toml
+//! [dependencies]
+//! syn = "2.0"
+//! quote = "1.0"
+//!
+//! [lib]
+//! proc-macro = true
+//! ```
+//!
+//! ```
+//! # extern crate proc_macro;
+//! #
+//! use proc_macro::TokenStream;
+//! use quote::quote;
+//! use syn::{parse_macro_input, DeriveInput};
+//!
+//! # const IGNORE_TOKENS: &str = stringify! {
+//! #[proc_macro_derive(MyMacro)]
+//! # };
+//! pub fn my_macro(input: TokenStream) -> TokenStream {
+//! // Parse the input tokens into a syntax tree
+//! let input = parse_macro_input!(input as DeriveInput);
+//!
+//! // Build the output, possibly using quasi-quotation
+//! let expanded = quote! {
+//! // ...
+//! };
+//!
+//! // Hand the output tokens back to the compiler
+//! TokenStream::from(expanded)
+//! }
+//! ```
+//!
+//! The [`heapsize`] example directory shows a complete working implementation
+//! of a derive macro. The example derives a `HeapSize` trait which computes an
+//! estimate of the amount of heap memory owned by a value.
+//!
+//! [`heapsize`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize
+//!
+//! ```
+//! pub trait HeapSize {
+//! /// Total number of bytes of heap memory owned by `self`.
+//! fn heap_size_of_children(&self) -> usize;
+//! }
+//! ```
+//!
+//! The derive macro allows users to write `#[derive(HeapSize)]` on data
+//! structures in their program.
+//!
+//! ```
+//! # const IGNORE_TOKENS: &str = stringify! {
+//! #[derive(HeapSize)]
+//! # };
+//! struct Demo<'a, T: ?Sized> {
+//! a: Box<T>,
+//! b: u8,
+//! c: &'a str,
+//! d: String,
+//! }
+//! ```
+//!
+//! <p><br></p>
+//!
+//! # Spans and error reporting
+//!
+//! The token-based procedural macro API provides great control over where the
+//! compiler's error messages are displayed in user code. Consider the error the
+//! user sees if one of their field types does not implement `HeapSize`.
+//!
+//! ```
+//! # const IGNORE_TOKENS: &str = stringify! {
+//! #[derive(HeapSize)]
+//! # };
+//! struct Broken {
+//! ok: String,
+//! bad: std::thread::Thread,
+//! }
+//! ```
+//!
+//! By tracking span information all the way through the expansion of a
+//! procedural macro as shown in the `heapsize` example, token-based macros in
+//! Syn are able to trigger errors that directly pinpoint the source of the
+//! problem.
+//!
+//! ```text
+//! error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
+//! --> src/main.rs:7:5
+//! |
+//! 7 | bad: std::thread::Thread,
+//! | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `Thread`
+//! ```
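+//!
+//! One way to produce such a targeted error from inside a macro is to attach
+//! it to the span of the offending tokens. This is only a sketch: the helper
+//! name and the message below are illustrative, not the exact code used by
+//! the heapsize example.
+//!
+//! ```
+//! use proc_macro2::TokenStream;
+//!
+//! // Build a compile error that points at the field's type rather than at
+//! // the macro invocation site.
+//! fn heap_size_error(field_ty: &syn::Type) -> TokenStream {
+//!     syn::Error::new_spanned(field_ty, "the trait `HeapSize` is not implemented here")
+//!         .to_compile_error()
+//! }
+//! ```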
+//!
+//! <br>
+//!
+//! # Parsing a custom syntax
+//!
+//! The [`lazy-static`] example directory shows the implementation of a
+//! `functionlike!(...)` procedural macro in which the input tokens are parsed
+//! using Syn's parsing API.
+//!
+//! [`lazy-static`]: https://github.com/dtolnay/syn/tree/master/examples/lazy-static
+//!
+//! The example reimplements the popular `lazy_static` crate from crates.io as a
+//! procedural macro.
+//!
+//! ```
+//! # macro_rules! lazy_static {
+//! # ($($tt:tt)*) => {}
+//! # }
+//! #
+//! lazy_static! {
+//! static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
+//! }
+//! ```
+//!
+//! The implementation shows how to trigger custom warnings and error messages
+//! on the macro input.
+//!
+//! ```text
+//! warning: come on, pick a more creative name
+//! --> src/main.rs:10:16
+//! |
+//! 10 | static ref FOO: String = "lazy_static".to_owned();
+//! | ^^^
+//! ```
+//!
+//! <br>
+//!
+//! # Testing
+//!
+//! When testing macros, we often care not just that the macro can be used
+//! successfully but also that when the macro is provided with invalid input it
+//! produces maximally helpful error messages. Consider using the [`trybuild`]
+//! crate to write tests for errors that are emitted by your macro or errors
+//! detected by the Rust compiler in the expanded code following misuse of the
+//! macro. Such tests help avoid regressions from later refactors that
+//! mistakenly make an error no longer trigger or be less helpful than it used
+//! to be.
+//!
+//! [`trybuild`]: https://github.com/dtolnay/trybuild
+//!
+//! <br>
+//!
+//! # Debugging
+//!
+//! When developing a procedural macro it can be helpful to look at what the
+//! generated code looks like. Use `cargo rustc -- -Zunstable-options
+//! --pretty=expanded` or the [`cargo expand`] subcommand.
+//!
+//! [`cargo expand`]: https://github.com/dtolnay/cargo-expand
+//!
+//! To show the expanded code for some crate that uses your procedural macro,
+//! run `cargo expand` from that crate. To show the expanded code for one of
+//! your own test cases, run `cargo expand --test the_test_case` where the last
+//! argument is the name of the test file without the `.rs` extension.
+//!
+//! This write-up by Brandon W Maister discusses debugging in more detail:
+//! [Debugging Rust's new Custom Derive system][debugging].
+//!
+//! [debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
+//!
+//! <br>
+//!
+//! # Optional features
+//!
+//! Syn puts a lot of functionality behind optional features in order to
+//! optimize compile time for the most common use cases. The following features
+//! are available.
+//!
+//! - **`derive`** *(enabled by default)* — Data structures for representing the
+//! possible input to a derive macro, including structs and enums and types.
+//! - **`full`** — Data structures for representing the syntax tree of all valid
+//! Rust source code, including items and expressions.
+//! - **`parsing`** *(enabled by default)* — Ability to parse input tokens into
+//! a syntax tree node of a chosen type.
+//! - **`printing`** *(enabled by default)* — Ability to print a syntax tree
+//! node as tokens of Rust source code.
+//! - **`visit`** — Trait for traversing a syntax tree.
+//! - **`visit-mut`** — Trait for traversing and mutating in place a syntax
+//! tree.
+//! - **`fold`** — Trait for transforming an owned syntax tree.
+//! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
+//! types.
+//! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
+//! types.
+//! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the
+//! dynamic library libproc_macro from rustc toolchain.
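+//!
+//! For example, a crate that needs to parse whole items and walk them would
+//! enable the non-default `full` and `visit` features in addition to the
+//! defaults, mirroring the dependency comments shown in the examples below
+//! (the version number is illustrative):
+//!
+//! ```
+//! // [dependencies]
+//! // syn = { version = "2.0", features = ["full", "visit"] }
+//! ```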
+
+// Syn types in rustdoc of other crates get linked to here.
+#![doc(html_root_url = "https://docs.rs/syn/2.0.66")]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![deny(unsafe_op_in_unsafe_fn)]
+#![allow(non_camel_case_types)]
+#![cfg_attr(not(check_cfg), allow(unexpected_cfgs))]
+#![allow(
+ clippy::bool_to_int_with_if,
+ clippy::cast_lossless,
+ clippy::cast_possible_truncation,
+ clippy::cast_possible_wrap,
+ clippy::cast_ptr_alignment,
+ clippy::default_trait_access,
+ clippy::derivable_impls,
+ clippy::diverging_sub_expression,
+ clippy::doc_markdown,
+ clippy::expl_impl_clone_on_copy,
+ clippy::explicit_auto_deref,
+ clippy::if_not_else,
+ clippy::inherent_to_string,
+ clippy::into_iter_without_iter,
+ clippy::items_after_statements,
+ clippy::large_enum_variant,
+    clippy::let_underscore_untyped, // https://github.com/rust-lang/rust-clippy/issues/10410
+ clippy::manual_assert,
+ clippy::manual_let_else,
+ clippy::manual_map,
+ clippy::match_like_matches_macro,
+ clippy::match_on_vec_items,
+ clippy::match_same_arms,
+    clippy::match_wildcard_for_single_variants, // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
+ clippy::missing_errors_doc,
+ clippy::missing_panics_doc,
+ clippy::module_name_repetitions,
+ clippy::must_use_candidate,
+ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::never_loop,
+ clippy::range_plus_one,
+ clippy::redundant_else,
+ clippy::return_self_not_must_use,
+ clippy::similar_names,
+ clippy::single_match_else,
+ clippy::struct_excessive_bools,
+ clippy::too_many_arguments,
+ clippy::too_many_lines,
+ clippy::trivially_copy_pass_by_ref,
+    clippy::unconditional_recursion, // https://github.com/rust-lang/rust-clippy/issues/12133
+ clippy::uninhabited_references,
+ clippy::uninlined_format_args,
+ clippy::unnecessary_box_returns,
+ clippy::unnecessary_unwrap,
+ clippy::used_underscore_binding,
+ clippy::wildcard_imports,
+)]
+
+#[cfg(feature = "proc-macro")]
+extern crate proc_macro;
+
+#[macro_use]
+mod macros;
+
+#[cfg(feature = "parsing")]
+#[macro_use]
+mod group;
+
+#[macro_use]
+pub mod token;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod attr;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue};
+
+mod bigint;
+
+#[cfg(feature = "parsing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+pub mod buffer;
+
+#[cfg(any(
+ all(feature = "parsing", feature = "full"),
+ all(feature = "printing", any(feature = "full", feature = "derive")),
+))]
+mod classify;
+
+mod custom_keyword;
+
+mod custom_punctuation;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod data;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant};
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod derive;
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+
+mod drops;
+
+mod error;
+pub use crate::error::{Error, Result};
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod expr;
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub use crate::expr::{Arm, Label, RangeLimits};
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::expr::{
+    Expr, ExprBinary, ExprCall, ExprCast, ExprField, ExprIndex, ExprLit, ExprMacro, ExprMethodCall,
+    ExprParen, ExprPath, ExprReference, ExprStruct, ExprUnary, FieldValue, Index, Member,
+};
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub use crate::expr::{
+    ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprClosure, ExprConst,
+    ExprContinue, ExprForLoop, ExprGroup, ExprIf, ExprInfer, ExprLet, ExprLoop, ExprMatch,
+    ExprRange, ExprRepeat, ExprReturn, ExprTry, ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile,
+ ExprYield,
+};
+
+#[cfg(feature = "parsing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+pub mod ext;
+
+#[cfg(feature = "full")]
+mod file;
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub use crate::file::File;
+
+#[cfg(all(feature = "full", feature = "printing"))]
+mod fixup;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod generics;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::generics::{
+    BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeParam, PredicateLifetime,
+    PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound, WhereClause,
+ WherePredicate,
+};
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+#[cfg_attr(
+ docsrs,
+    doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
+)]
+pub use crate::generics::{ImplGenerics, Turbofish, TypeGenerics};
+
+mod ident;
+#[doc(inline)]
+pub use crate::ident::Ident;
+
+#[cfg(feature = "full")]
+mod item;
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub use crate::item::{
+    FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType,
+    ImplItem, ImplItemConst, ImplItemFn, ImplItemMacro, ImplItemType, ImplRestriction, Item,
+    ItemConst, ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMod,
+    ItemStatic, ItemStruct, ItemTrait, ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver,
+    Signature, StaticMutability, TraitItem, TraitItemConst, TraitItemFn, TraitItemMacro,
+    TraitItemType, UseGlob, UseGroup, UseName, UsePath, UseRename, UseTree, Variadic,
+};
+
+mod lifetime;
+#[doc(inline)]
+pub use crate::lifetime::Lifetime;
+
+mod lit;
+#[doc(hidden)] // https://github.com/dtolnay/syn/issues/1566
+pub use crate::lit::StrStyle;
+#[doc(inline)]
+pub use crate::lit::{
+    Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitInt, LitStr,
+};
+
+#[cfg(feature = "parsing")]
+mod lookahead;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod mac;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::mac::{Macro, MacroDelimiter};
+
+#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
+#[cfg_attr(
+ docsrs,
+    doc(cfg(all(feature = "parsing", any(feature = "full", feature = "derive"))))
+)]
+pub mod meta;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod op;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::op::{BinOp, UnOp};
+
+#[cfg(feature = "parsing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+pub mod parse;
+
+#[cfg(all(feature = "parsing", feature = "proc-macro"))]
+mod parse_macro_input;
+
+#[cfg(all(feature = "parsing", feature = "printing"))]
+mod parse_quote;
+
+#[cfg(feature = "full")]
+mod pat;
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub use crate::pat::{
+    FieldPat, Pat, PatConst, PatIdent, PatLit, PatMacro, PatOr, PatParen, PatPath, PatRange,
+    PatReference, PatRest, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatType, PatWild,
+};
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod path;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::path::{
+    AngleBracketedGenericArguments, AssocConst, AssocType, Constraint, GenericArgument,
+ ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
+};
+
+#[cfg(all(
+ any(feature = "full", feature = "derive"),
+ any(feature = "parsing", feature = "printing")
+))]
+mod precedence;
+
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+mod print;
+
+pub mod punctuated;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod restriction;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::restriction::{FieldMutability, VisRestricted, Visibility};
+
+mod sealed;
+
+mod span;
+
+#[cfg(all(feature = "parsing", feature = "printing"))]
+#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))]
+pub mod spanned;
+
+#[cfg(feature = "full")]
+mod stmt;
+#[cfg(feature = "full")]
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+pub use crate::stmt::{Block, Local, LocalInit, Stmt, StmtMacro};
+
+mod thread;
+
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))]
+mod tt;
+
+#[cfg(any(feature = "full", feature = "derive"))]
+mod ty;
+#[cfg(any(feature = "full", feature = "derive"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+pub use crate::ty::{
+    Abi, BareFnArg, BareVariadic, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup,
+    TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference,
+ TypeSlice, TypeTraitObject, TypeTuple,
+};
+
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
+mod verbatim;
+
+#[cfg(all(feature = "parsing", feature = "full"))]
+mod whitespace;
+
+mod gen {
+ /// Syntax tree traversal to transform the nodes of an owned syntax tree.
+ ///
+ /// Each method of the [`Fold`] trait is a hook that can be overridden to
+    /// customize the behavior when transforming the corresponding type of node.
+ /// By default, every method recursively visits the substructure of the
+ /// input by invoking the right visitor method of each of its fields.
+ ///
+ /// [`Fold`]: fold::Fold
+ ///
+ /// ```
+ /// # use syn::{Attribute, BinOp, Expr, ExprBinary};
+ /// #
+ /// pub trait Fold {
+ /// /* ... */
+ ///
+ /// fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary {
+ /// fold_expr_binary(self, node)
+ /// }
+ ///
+ /// /* ... */
+ /// # fn fold_attribute(&mut self, node: Attribute) -> Attribute;
+ /// # fn fold_expr(&mut self, node: Expr) -> Expr;
+ /// # fn fold_bin_op(&mut self, node: BinOp) -> BinOp;
+ /// }
+ ///
+ /// pub fn fold_expr_binary<V>(v: &mut V, node: ExprBinary) -> ExprBinary
+ /// where
+ /// V: Fold + ?Sized,
+ /// {
+ /// ExprBinary {
+ /// attrs: node
+ /// .attrs
+ /// .into_iter()
+ /// .map(|attr| v.fold_attribute(attr))
+ /// .collect(),
+ /// left: Box::new(v.fold_expr(*node.left)),
+ /// op: v.fold_bin_op(node.op),
+ /// right: Box::new(v.fold_expr(*node.right)),
+ /// }
+ /// }
+ ///
+ /// /* ... */
+ /// ```
+ ///
+ /// <br>
+ ///
+ /// # Example
+ ///
+    /// This fold inserts parentheses to fully parenthesize any expression.
+ ///
+ /// ```
+ /// // [dependencies]
+ /// // quote = "1.0"
+ /// // syn = { version = "2.0", features = ["fold", "full"] }
+ ///
+ /// use quote::quote;
+ /// use syn::fold::{fold_expr, Fold};
+ /// use syn::{token, Expr, ExprParen};
+ ///
+ /// struct ParenthesizeEveryExpr;
+ ///
+ /// impl Fold for ParenthesizeEveryExpr {
+ /// fn fold_expr(&mut self, expr: Expr) -> Expr {
+ /// Expr::Paren(ExprParen {
+ /// attrs: Vec::new(),
+ /// expr: Box::new(fold_expr(self, expr)),
+ /// paren_token: token::Paren::default(),
+ /// })
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let code = quote! { a() + b(1) * c.d };
+ /// let expr: Expr = syn::parse2(code).unwrap();
+ /// let parenthesized = ParenthesizeEveryExpr.fold_expr(expr);
+ /// println!("{}", quote!(#parenthesized));
+ ///
+ /// // Output: (((a)()) + (((b)((1))) * ((c).d)))
+ /// }
+ /// ```
+ #[cfg(feature = "fold")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "fold")))]
+ #[rustfmt::skip]
+ pub mod fold;
+
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+ /// Each method of the [`Visit`] trait is a hook that can be overridden to
+ /// customize the behavior when visiting the corresponding type of node. By
+ /// default, every method recursively visits the substructure of the input
+ /// by invoking the right visitor method of each of its fields.
+ ///
+ /// [`Visit`]: visit::Visit
+ ///
+ /// ```
+ /// # use syn::{Attribute, BinOp, Expr, ExprBinary};
+ /// #
+ /// pub trait Visit<'ast> {
+ /// /* ... */
+ ///
+ /// fn visit_expr_binary(&mut self, node: &'ast ExprBinary) {
+ /// visit_expr_binary(self, node);
+ /// }
+ ///
+ /// /* ... */
+ /// # fn visit_attribute(&mut self, node: &'ast Attribute);
+ /// # fn visit_expr(&mut self, node: &'ast Expr);
+ /// # fn visit_bin_op(&mut self, node: &'ast BinOp);
+ /// }
+ ///
+ /// pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast ExprBinary)
+ /// where
+ /// V: Visit<'ast> + ?Sized,
+ /// {
+ /// for attr in &node.attrs {
+ /// v.visit_attribute(attr);
+ /// }
+ /// v.visit_expr(&*node.left);
+ /// v.visit_bin_op(&node.op);
+ /// v.visit_expr(&*node.right);
+ /// }
+ ///
+ /// /* ... */
+ /// ```
+ ///
+ /// <br>
+ ///
+ /// # Example
+ ///
+ /// This visitor will print the name of every freestanding function in the
+ /// syntax tree, including nested functions.
+ ///
+ /// ```
+ /// // [dependencies]
+ /// // quote = "1.0"
+ /// // syn = { version = "2.0", features = ["full", "visit"] }
+ ///
+ /// use quote::quote;
+ /// use syn::visit::{self, Visit};
+ /// use syn::{File, ItemFn};
+ ///
+ /// struct FnVisitor;
+ ///
+ /// impl<'ast> Visit<'ast> for FnVisitor {
+ /// fn visit_item_fn(&mut self, node: &'ast ItemFn) {
+ /// println!("Function with name={}", node.sig.ident);
+ ///
+ /// // Delegate to the default impl to visit any nested functions.
+ /// visit::visit_item_fn(self, node);
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let code = quote! {
+ /// pub fn f() {
+ /// fn g() {}
+ /// }
+ /// };
+ ///
+ /// let syntax_tree: File = syn::parse2(code).unwrap();
+ /// FnVisitor.visit_file(&syntax_tree);
+ /// }
+ /// ```
+ ///
+ /// The `'ast` lifetime on the input references means that the syntax tree
+    /// outlives the complete recursive visit call, so the visitor is allowed to
+ /// hold on to references into the syntax tree.
+ ///
+ /// ```
+ /// use quote::quote;
+ /// use syn::visit::{self, Visit};
+ /// use syn::{File, ItemFn};
+ ///
+ /// struct FnVisitor<'ast> {
+ /// functions: Vec<&'ast ItemFn>,
+ /// }
+ ///
+ /// impl<'ast> Visit<'ast> for FnVisitor<'ast> {
+ /// fn visit_item_fn(&mut self, node: &'ast ItemFn) {
+ /// self.functions.push(node);
+ /// visit::visit_item_fn(self, node);
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let code = quote! {
+ /// pub fn f() {
+ /// fn g() {}
+ /// }
+ /// };
+ ///
+ /// let syntax_tree: File = syn::parse2(code).unwrap();
+ /// let mut visitor = FnVisitor { functions: Vec::new() };
+ /// visitor.visit_file(&syntax_tree);
+ /// for f in visitor.functions {
+ /// println!("Function with name={}", f.sig.ident);
+ /// }
+ /// }
+ /// ```
+ #[cfg(feature = "visit")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "visit")))]
+ #[rustfmt::skip]
+ pub mod visit;
+
+ /// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
+ /// place.
+ ///
+ /// Each method of the [`VisitMut`] trait is a hook that can be overridden
+ /// to customize the behavior when mutating the corresponding type of node.
+ /// By default, every method recursively visits the substructure of the
+ /// input by invoking the right visitor method of each of its fields.
+ ///
+ /// [`VisitMut`]: visit_mut::VisitMut
+ ///
+ /// ```
+ /// # use syn::{Attribute, BinOp, Expr, ExprBinary};
+ /// #
+ /// pub trait VisitMut {
+ /// /* ... */
+ ///
+ /// fn visit_expr_binary_mut(&mut self, node: &mut ExprBinary) {
+ /// visit_expr_binary_mut(self, node);
+ /// }
+ ///
+ /// /* ... */
+ /// # fn visit_attribute_mut(&mut self, node: &mut Attribute);
+ /// # fn visit_expr_mut(&mut self, node: &mut Expr);
+ /// # fn visit_bin_op_mut(&mut self, node: &mut BinOp);
+ /// }
+ ///
+ /// pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut ExprBinary)
+ /// where
+ /// V: VisitMut + ?Sized,
+ /// {
+ /// for attr in &mut node.attrs {
+ /// v.visit_attribute_mut(attr);
+ /// }
+ /// v.visit_expr_mut(&mut *node.left);
+ /// v.visit_bin_op_mut(&mut node.op);
+ /// v.visit_expr_mut(&mut *node.right);
+ /// }
+ ///
+ /// /* ... */
+ /// ```
+ ///
+ /// <br>
+ ///
+ /// # Example
+ ///
+    /// This mut visitor replaces occurrences of u256-suffixed integer literals
+ /// like `999u256` with a macro invocation `bigint::u256!(999)`.
+ ///
+ /// ```
+ /// // [dependencies]
+ /// // quote = "1.0"
+ /// // syn = { version = "2.0", features = ["full", "visit-mut"] }
+ ///
+ /// use quote::quote;
+ /// use syn::visit_mut::{self, VisitMut};
+ /// use syn::{parse_quote, Expr, File, Lit, LitInt};
+ ///
+ /// struct BigintReplace;
+ ///
+ /// impl VisitMut for BigintReplace {
+ /// fn visit_expr_mut(&mut self, node: &mut Expr) {
+ /// if let Expr::Lit(expr) = &node {
+ /// if let Lit::Int(int) = &expr.lit {
+ /// if int.suffix() == "u256" {
+ /// let digits = int.base10_digits();
+    ///                     let unsuffixed: LitInt = syn::parse_str(digits).unwrap();
+ /// *node = parse_quote!(bigint::u256!(#unsuffixed));
+ /// return;
+ /// }
+ /// }
+ /// }
+ ///
+ /// // Delegate to the default impl to visit nested expressions.
+ /// visit_mut::visit_expr_mut(self, node);
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let code = quote! {
+ /// fn main() {
+ /// let _ = 999u256;
+ /// }
+ /// };
+ ///
+ /// let mut syntax_tree: File = syn::parse2(code).unwrap();
+ /// BigintReplace.visit_file_mut(&mut syntax_tree);
+ /// println!("{}", quote!(#syntax_tree));
+ /// }
+ /// ```
+ #[cfg(feature = "visit-mut")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "visit-mut")))]
+ #[rustfmt::skip]
+ pub mod visit_mut;
+
+ #[cfg(feature = "clone-impls")]
+ #[rustfmt::skip]
+ mod clone;
+
+ #[cfg(feature = "extra-traits")]
+ #[rustfmt::skip]
+ mod debug;
+
+ #[cfg(feature = "extra-traits")]
+ #[rustfmt::skip]
+ mod eq;
+
+ #[cfg(feature = "extra-traits")]
+ #[rustfmt::skip]
+ mod hash;
+}
+
+#[cfg(feature = "fold")]
+#[cfg_attr(docsrs, doc(cfg(feature = "fold")))]
+pub use crate::gen::fold;
+
+#[cfg(feature = "visit")]
+#[cfg_attr(docsrs, doc(cfg(feature = "visit")))]
+pub use crate::gen::visit;
+
+#[cfg(feature = "visit-mut")]
+#[cfg_attr(docsrs, doc(cfg(feature = "visit-mut")))]
+pub use crate::gen::visit_mut;
+
+// Not public API.
+#[doc(hidden)]
+#[path = "export.rs"]
+pub mod __private;
+
+/// Parse tokens of source code into the chosen syntax tree node.
+///
+/// This is preferred over parsing a string because tokens are able to preserve
+/// information about where in the user's code they were originally written (the
+/// "span" of the token), possibly allowing the compiler to produce better error
+/// messages.
+///
+/// This function parses a `proc_macro::TokenStream` which is the type used for
+/// interop with the compiler in a procedural macro. To parse a
+/// `proc_macro2::TokenStream`, use [`syn::parse2`] instead.
+///
+/// [`syn::parse2`]: parse2
+///
+/// # Examples
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// use proc_macro::TokenStream;
+/// use quote::quote;
+/// use syn::DeriveInput;
+///
+/// # const IGNORE_TOKENS: &str = stringify! {
+/// #[proc_macro_derive(MyMacro)]
+/// # };
+/// pub fn my_macro(input: TokenStream) -> TokenStream {
+/// // Parse the tokens into a syntax tree
+/// let ast: DeriveInput = syn::parse(input).unwrap();
+///
+/// // Build the output, possibly using quasi-quotation
+/// let expanded = quote! {
+/// /* ... */
+/// };
+///
+/// // Convert into a token stream and return it
+/// expanded.into()
+/// }
+/// ```
+#[cfg(all(feature = "parsing", feature = "proc-macro"))]
+#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
+pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
+ parse::Parser::parse(T::parse, tokens)
+}
+
+/// Parse a proc-macro2 token stream into the chosen syntax tree node.
+///
+/// This function will check that the input is fully parsed. If there are
+/// any unparsed tokens at the end of the stream, an error is returned.
+///
+/// This function parses a `proc_macro2::TokenStream` which is commonly useful
+/// when the input comes from a node of the Syn syntax tree, for example the
+/// body tokens of a [`Macro`] node. When in a procedural macro parsing the
+/// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`]
+/// instead.
+///
+/// [`syn::parse`]: parse()
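+///
+/// # Examples
+///
+/// A minimal sketch: parse a token stream built with `quote!` rather than one
+/// received from the compiler. `ItemFn` requires the `"full"` feature; the
+/// function shown is purely illustrative.
+///
+/// ```
+/// use quote::quote;
+/// use syn::ItemFn;
+///
+/// let tokens = quote! {
+///     fn answer() -> u32 { 42 }
+/// };
+///
+/// // parse2 takes a proc_macro2::TokenStream, so it also works outside of a
+/// // procedural macro, e.g. in build scripts and unit tests.
+/// let function: ItemFn = syn::parse2(tokens).unwrap();
+/// assert_eq!(function.sig.ident, "answer");
+/// ```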
+#[cfg(feature = "parsing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ parse::Parser::parse2(T::parse, tokens)
+}
+
+/// Parse a string of Rust code into the chosen syntax tree node.
+///
+/// # Hygiene
+///
+/// Every span in the resulting syntax tree will be set to resolve at the macro
+/// call site.
+///
+/// # Examples
+///
+/// ```
+/// use syn::{Expr, Result};
+///
+/// fn run() -> Result<()> {
+/// let code = "assert_eq!(u8::max_value(), 255)";
+/// let expr = syn::parse_str::<Expr>(code)?;
+/// println!("{:#?}", expr);
+/// Ok(())
+/// }
+/// #
+/// # run().unwrap();
+/// ```
+#[cfg(feature = "parsing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ parse::Parser::parse_str(T::parse, s)
+}
+
+/// Parse the content of a file of Rust code.
+///
+/// This is different from `syn::parse_str::<File>(content)` in two ways:
+///
+/// - It discards a leading byte order mark `\u{FEFF}` if the file has one.
+/// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
+///
+/// If present, either of these would be an error using `from_str`.
+///
+/// # Examples
+///
+/// ```no_run
+/// use std::error::Error;
+/// use std::fs::File;
+/// use std::io::Read;
+///
+/// fn run() -> Result<(), Box<dyn Error>> {
+/// let mut file = File::open("path/to/code.rs")?;
+/// let mut content = String::new();
+/// file.read_to_string(&mut content)?;
+///
+/// let ast = syn::parse_file(&content)?;
+/// if let Some(shebang) = ast.shebang {
+/// println!("{}", shebang);
+/// }
+/// println!("{} items", ast.items.len());
+///
+/// Ok(())
+/// }
+/// #
+/// # run().unwrap();
+/// ```
+#[cfg(all(feature = "parsing", feature = "full"))]
+#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "full"))))]
+pub fn parse_file(mut content: &str) -> Result<File> {
+ // Strip the BOM if it is present
+ const BOM: &str = "\u{feff}";
+ if content.starts_with(BOM) {
+ content = &content[BOM.len()..];
+ }
+
+ let mut shebang = None;
+ if content.starts_with("#!") {
+ let rest = whitespace::skip(&content[2..]);
+ if !rest.starts_with('[') {
+ if let Some(idx) = content.find('\n') {
+ shebang = Some(content[..idx].to_string());
+ content = &content[idx..];
+ } else {
+ shebang = Some(content.to_string());
+ content = "";
+ }
+ }
+ }
+
+ let mut file: File = parse_str(content)?;
+ file.shebang = shebang;
+ Ok(file)
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/lifetime.rs b/rust/hw/char/pl011/vendor/syn/src/lifetime.rs
new file mode 100644
index 0000000000..cc189d1ac9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/lifetime.rs
@@ -0,0 +1,156 @@
+#[cfg(feature = "parsing")]
+use crate::lookahead;
+use proc_macro2::{Ident, Span};
+use std::cmp::Ordering;
+use std::fmt::{self, Display};
+use std::hash::{Hash, Hasher};
+
+/// A Rust lifetime: `'a`.
+///
+/// Lifetime names must conform to the following rules:
+///
+/// - Must start with an apostrophe.
+/// - Must not consist of just an apostrophe: `'`.
+/// - Character after the apostrophe must be `_` or a Unicode code point with
+/// the XID_Start property.
+/// - All following characters must be Unicode code points with the XID_Continue
+/// property.
+pub struct Lifetime {
+ pub apostrophe: Span,
+ pub ident: Ident,
+}
+
+impl Lifetime {
+ /// # Panics
+ ///
+ /// Panics if the lifetime does not conform to the bulleted rules above.
+ ///
+ /// # Invocation
+ ///
+ /// ```
+ /// # use proc_macro2::Span;
+ /// # use syn::Lifetime;
+ /// #
+ /// # fn f() -> Lifetime {
+ /// Lifetime::new("'a", Span::call_site())
+ /// # }
+ /// ```
+ pub fn new(symbol: &str, span: Span) -> Self {
+ if !symbol.starts_with('\'') {
+ panic!(
+                "lifetime name must start with apostrophe as in \"'a\", got {:?}",
+ symbol
+ );
+ }
+
+ if symbol == "'" {
+ panic!("lifetime name must not be empty");
+ }
+
+ if !crate::ident::xid_ok(&symbol[1..]) {
+ panic!("{:?} is not a valid lifetime name", symbol);
+ }
+
+ Lifetime {
+ apostrophe: span,
+ ident: Ident::new(&symbol[1..], span),
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ self.apostrophe
+ .join(self.ident.span())
+ .unwrap_or(self.apostrophe)
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.apostrophe = span;
+ self.ident.set_span(span);
+ }
+}
+
+impl Display for Lifetime {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ "'".fmt(formatter)?;
+ self.ident.fmt(formatter)
+ }
+}
+
+impl Clone for Lifetime {
+ fn clone(&self) -> Self {
+ Lifetime {
+ apostrophe: self.apostrophe,
+ ident: self.ident.clone(),
+ }
+ }
+}
+
+impl PartialEq for Lifetime {
+ fn eq(&self, other: &Lifetime) -> bool {
+ self.ident.eq(&other.ident)
+ }
+}
+
+impl Eq for Lifetime {}
+
+impl PartialOrd for Lifetime {
+ fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for Lifetime {
+ fn cmp(&self, other: &Lifetime) -> Ordering {
+ self.ident.cmp(&other.ident)
+ }
+}
+
+impl Hash for Lifetime {
+ fn hash<H: Hasher>(&self, h: &mut H) {
+ self.ident.hash(h);
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
+ match marker {}
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::error::Result;
+ use crate::lifetime::Lifetime;
+ use crate::parse::{Parse, ParseStream};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Lifetime {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| {
+ cursor
+ .lifetime()
+ .ok_or_else(|| cursor.error("expected lifetime"))
+ })
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::lifetime::Lifetime;
+ use proc_macro2::{Punct, Spacing, TokenStream};
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Lifetime {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ let mut apostrophe = Punct::new('\'', Spacing::Joint);
+ apostrophe.set_span(self.apostrophe);
+ tokens.append(apostrophe);
+ self.ident.to_tokens(tokens);
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/lit.rs b/rust/hw/char/pl011/vendor/syn/src/lit.rs
new file mode 100644
index 0000000000..a37aa2a9e7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/lit.rs
@@ -0,0 +1,1830 @@
+#[cfg(feature = "parsing")]
+use crate::lookahead;
+#[cfg(feature = "parsing")]
+use crate::parse::{Parse, Parser};
+use crate::{Error, Result};
+use proc_macro2::{Ident, Literal, Span};
+#[cfg(feature = "parsing")]
+use proc_macro2::{TokenStream, TokenTree};
+use std::ffi::{CStr, CString};
+use std::fmt::{self, Display};
+#[cfg(feature = "extra-traits")]
+use std::hash::{Hash, Hasher};
+use std::str::{self, FromStr};
+
+ast_enum_of_structs! {
+ /// A Rust literal such as a string or integer or boolean.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
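+    ///
+    /// A brief sketch of dispatching on the enum; since it is marked
+    /// `#[non_exhaustive]`, downstream matches need a wildcard arm:
+    ///
+    /// ```
+    /// use syn::Lit;
+    ///
+    /// fn describe(lit: &Lit) -> &'static str {
+    ///     match lit {
+    ///         Lit::Str(_) => "string",
+    ///         Lit::Int(_) => "integer",
+    ///         Lit::Bool(_) => "boolean",
+    ///         _ => "some other literal",
+    ///     }
+    /// }
+    /// ```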
+ #[non_exhaustive]
+ pub enum Lit {
+ /// A UTF-8 string literal: `"foo"`.
+ Str(LitStr),
+
+ /// A byte string literal: `b"foo"`.
+ ByteStr(LitByteStr),
+
+ /// A nul-terminated C-string literal: `c"foo"`.
+ CStr(LitCStr),
+
+ /// A byte literal: `b'f'`.
+ Byte(LitByte),
+
+ /// A character literal: `'a'`.
+ Char(LitChar),
+
+ /// An integer literal: `1` or `1u16`.
+ Int(LitInt),
+
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinite or NaN.
+ Float(LitFloat),
+
+ /// A boolean literal: `true` or `false`.
+ Bool(LitBool),
+
+ /// A raw token literal not interpreted by Syn.
+ Verbatim(Literal),
+ }
+}
+
+ast_struct! {
+ /// A UTF-8 string literal: `"foo"`.
+ pub struct LitStr {
+ repr: Box<LitRepr>,
+ }
+}
+
+ast_struct! {
+ /// A byte string literal: `b"foo"`.
+ pub struct LitByteStr {
+ repr: Box<LitRepr>,
+ }
+}
+
+ast_struct! {
+ /// A nul-terminated C-string literal: `c"foo"`.
+ pub struct LitCStr {
+ repr: Box<LitRepr>,
+ }
+}
+
+ast_struct! {
+ /// A byte literal: `b'f'`.
+ pub struct LitByte {
+ repr: Box<LitRepr>,
+ }
+}
+
+ast_struct! {
+ /// A character literal: `'a'`.
+ pub struct LitChar {
+ repr: Box<LitRepr>,
+ }
+}
+
+struct LitRepr {
+ token: Literal,
+ suffix: Box<str>,
+}
+
+ast_struct! {
+ /// An integer literal: `1` or `1u16`.
+ pub struct LitInt {
+ repr: Box<LitIntRepr>,
+ }
+}
+
+struct LitIntRepr {
+ token: Literal,
+ digits: Box<str>,
+ suffix: Box<str>,
+}
+
+ast_struct! {
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinite or NaN.
+ pub struct LitFloat {
+ repr: Box<LitFloatRepr>,
+ }
+}
+
+struct LitFloatRepr {
+ token: Literal,
+ digits: Box<str>,
+ suffix: Box<str>,
+}
+
+ast_struct! {
+ /// A boolean literal: `true` or `false`.
+ pub struct LitBool {
+ pub value: bool,
+ pub span: Span,
+ }
+}
+
+impl LitStr {
+ pub fn new(value: &str, span: Span) -> Self {
+ let mut token = Literal::string(value);
+ token.set_span(span);
+ LitStr {
+ repr: Box::new(LitRepr {
+ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> String {
+ let repr = self.repr.token.to_string();
+ let (value, _suffix) = value::parse_lit_str(&repr);
+ String::from(value)
+ }
+
+ /// Parse a syntax tree node from the content of this string literal.
+ ///
+ /// All spans in the syntax tree will point to the span of this `LitStr`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{Attribute, Error, Expr, Lit, Meta, Path, Result};
+ ///
+ /// // Parses the path from an attribute that looks like:
+ /// //
+ /// // #[path = "a::b::c"]
+ /// //
+ /// // or returns `None` if the input is some other attribute.
+ /// fn get_path(attr: &Attribute) -> Result<Option<Path>> {
+ /// if !attr.path().is_ident("path") {
+ /// return Ok(None);
+ /// }
+ ///
+ /// if let Meta::NameValue(meta) = &attr.meta {
+ /// if let Expr::Lit(expr) = &meta.value {
+ /// if let Lit::Str(lit_str) = &expr.lit {
+ /// return lit_str.parse().map(Some);
+ /// }
+ /// }
+ /// }
+ ///
+ /// let message = "expected #[path = \"...\"]";
+ /// Err(Error::new_spanned(attr, message))
+ /// }
+ /// ```
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse<T: Parse>(&self) -> Result<T> {
+ self.parse_with(T::parse)
+ }
+
+ /// Invoke parser on the content of this string literal.
+ ///
+ /// All spans in the syntax tree will point to the span of this `LitStr`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # use proc_macro2::Span;
+ /// # use syn::{LitStr, Result};
+ /// #
+ /// # fn main() -> Result<()> {
+ /// # let lit_str = LitStr::new("a::b::c", Span::call_site());
+ /// #
+ /// # const IGNORE: &str = stringify! {
+ /// let lit_str: LitStr = /* ... */;
+ /// # };
+ ///
+ /// // Parse a string literal like "a::b::c" into a Path, not allowing
+ /// // generic arguments on any of the path segments.
+ /// let basic_path = lit_str.parse_with(syn::Path::parse_mod_style)?;
+ /// #
+ /// # Ok(())
+ /// # }
+ /// ```
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+ use proc_macro2::Group;
+
+ // Token stream with every span replaced by the given one.
+        fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
+ stream
+ .into_iter()
+ .map(|token| respan_token_tree(token, span))
+ .collect()
+ }
+
+ // Token tree with every span replaced by the given one.
+ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+ let stream = respan_token_stream(g.stream(), span);
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+ other => other.set_span(span),
+ }
+ token
+ }
+
+        // Parse string literal into a token stream with every span equal to the
+ // original literal's span.
+ let span = self.span();
+ let mut tokens = TokenStream::from_str(&self.value())?;
+ tokens = respan_token_stream(tokens, span);
+
+ let result = crate::parse::parse_scoped(parser, span, tokens)?;
+
+ let suffix = self.suffix();
+ if !suffix.is_empty() {
+ return Err(Error::new(
+ self.span(),
+ format!("unexpected suffix `{}` on string literal", suffix),
+ ));
+ }
+
+ Ok(result)
+ }
+
+ pub fn span(&self) -> Span {
+ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.repr.token.set_span(span);
+ }
+
+ pub fn suffix(&self) -> &str {
+ &self.repr.suffix
+ }
+
+ pub fn token(&self) -> Literal {
+ self.repr.token.clone()
+ }
+}
+
+impl LitByteStr {
+ pub fn new(value: &[u8], span: Span) -> Self {
+ let mut token = Literal::byte_string(value);
+ token.set_span(span);
+ LitByteStr {
+ repr: Box::new(LitRepr {
+ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> Vec<u8> {
+ let repr = self.repr.token.to_string();
+ let (value, _suffix) = value::parse_lit_byte_str(&repr);
+ value
+ }
+
+ pub fn span(&self) -> Span {
+ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.repr.token.set_span(span);
+ }
+
+ pub fn suffix(&self) -> &str {
+ &self.repr.suffix
+ }
+
+ pub fn token(&self) -> Literal {
+ self.repr.token.clone()
+ }
+}
+
+impl LitCStr {
+ pub fn new(value: &CStr, span: Span) -> Self {
+ let mut token = Literal::c_string(value);
+ token.set_span(span);
+ LitCStr {
+ repr: Box::new(LitRepr {
+ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> CString {
+ let repr = self.repr.token.to_string();
+ let (value, _suffix) = value::parse_lit_c_str(&repr);
+ value
+ }
+
+ pub fn span(&self) -> Span {
+ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.repr.token.set_span(span);
+ }
+
+ pub fn suffix(&self) -> &str {
+ &self.repr.suffix
+ }
+
+ pub fn token(&self) -> Literal {
+ self.repr.token.clone()
+ }
+}
+
+impl LitByte {
+ pub fn new(value: u8, span: Span) -> Self {
+ let mut token = Literal::u8_suffixed(value);
+ token.set_span(span);
+ LitByte {
+ repr: Box::new(LitRepr {
+ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> u8 {
+ let repr = self.repr.token.to_string();
+ let (value, _suffix) = value::parse_lit_byte(&repr);
+ value
+ }
+
+ pub fn span(&self) -> Span {
+ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.repr.token.set_span(span);
+ }
+
+ pub fn suffix(&self) -> &str {
+ &self.repr.suffix
+ }
+
+ pub fn token(&self) -> Literal {
+ self.repr.token.clone()
+ }
+}
+
+impl LitChar {
+ pub fn new(value: char, span: Span) -> Self {
+ let mut token = Literal::character(value);
+ token.set_span(span);
+ LitChar {
+ repr: Box::new(LitRepr {
+ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> char {
+ let repr = self.repr.token.to_string();
+ let (value, _suffix) = value::parse_lit_char(&repr);
+ value
+ }
+
+ pub fn span(&self) -> Span {
+ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.repr.token.set_span(span);
+ }
+
+ pub fn suffix(&self) -> &str {
+ &self.repr.suffix
+ }
+
+ pub fn token(&self) -> Literal {
+ self.repr.token.clone()
+ }
+}
+
+impl LitInt {
+ pub fn new(repr: &str, span: Span) -> Self {
+ let (digits, suffix) = match value::parse_lit_int(repr) {
+ Some(parse) => parse,
+ None => panic!("not an integer literal: `{}`", repr),
+ };
+
+ let mut token: Literal = repr.parse().unwrap();
+ token.set_span(span);
+ LitInt {
+ repr: Box::new(LitIntRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ }
+ }
+
+ pub fn base10_digits(&self) -> &str {
+ &self.repr.digits
+ }
+
+ /// Parses the literal into a selected number type.
+ ///
+ /// This is equivalent to `lit.base10_digits().parse()` except that the
+    /// resulting errors will be correctly spanned to point to the literal token
+ /// in the macro input.
+ ///
+ /// ```
+ /// use syn::LitInt;
+ /// use syn::parse::{Parse, ParseStream, Result};
+ ///
+ /// struct Port {
+ /// value: u16,
+ /// }
+ ///
+ /// impl Parse for Port {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let lit: LitInt = input.parse()?;
+ /// let value = lit.base10_parse::<u16>()?;
+ /// Ok(Port { value })
+ /// }
+ /// }
+ /// ```
+ pub fn base10_parse<N>(&self) -> Result<N>
+ where
+ N: FromStr,
+ N::Err: Display,
+ {
+ self.base10_digits()
+ .parse()
+ .map_err(|err| Error::new(self.span(), err))
+ }
+
+ pub fn suffix(&self) -> &str {
+ &self.repr.suffix
+ }
+
+ pub fn span(&self) -> Span {
+ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.repr.token.set_span(span);
+ }
+
+ pub fn token(&self) -> Literal {
+ self.repr.token.clone()
+ }
+}
+
+impl From<Literal> for LitInt {
+ fn from(token: Literal) -> Self {
+ let repr = token.to_string();
+ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
+ LitInt {
+ repr: Box::new(LitIntRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ }
+ } else {
+ panic!("not an integer literal: `{}`", repr);
+ }
+ }
+}
+
+impl Display for LitInt {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.repr.token.fmt(formatter)
+ }
+}
+
+impl LitFloat {
+ pub fn new(repr: &str, span: Span) -> Self {
+ let (digits, suffix) = match value::parse_lit_float(repr) {
+ Some(parse) => parse,
+ None => panic!("not a float literal: `{}`", repr),
+ };
+
+ let mut token: Literal = repr.parse().unwrap();
+ token.set_span(span);
+ LitFloat {
+ repr: Box::new(LitFloatRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ }
+ }
+
+ pub fn base10_digits(&self) -> &str {
+ &self.repr.digits
+ }
+
+ pub fn base10_parse<N>(&self) -> Result<N>
+ where
+ N: FromStr,
+ N::Err: Display,
+ {
+ self.base10_digits()
+ .parse()
+ .map_err(|err| Error::new(self.span(), err))
+ }
+
+ pub fn suffix(&self) -> &str {
+ &self.repr.suffix
+ }
+
+ pub fn span(&self) -> Span {
+ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.repr.token.set_span(span);
+ }
+
+ pub fn token(&self) -> Literal {
+ self.repr.token.clone()
+ }
+}
+
+impl From<Literal> for LitFloat {
+ fn from(token: Literal) -> Self {
+ let repr = token.to_string();
+ if let Some((digits, suffix)) = value::parse_lit_float(&repr) {
+ LitFloat {
+ repr: Box::new(LitFloatRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ }
+ } else {
+ panic!("not a float literal: `{}`", repr);
+ }
+ }
+}
+
+impl Display for LitFloat {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.repr.token.fmt(formatter)
+ }
+}
+
+impl LitBool {
+ pub fn new(value: bool, span: Span) -> Self {
+ LitBool { value, span }
+ }
+
+ pub fn value(&self) -> bool {
+ self.value
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+
+ pub fn token(&self) -> Ident {
+ let s = if self.value { "true" } else { "false" };
+ Ident::new(s, self.span)
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+mod debug_impls {
+    use crate::lit::{LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitInt, LitStr};
+ use std::fmt::{self, Debug};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitStr {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitStr")
+ }
+ }
+
+ impl LitStr {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitByteStr {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitByteStr")
+ }
+ }
+
+ impl LitByteStr {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitCStr {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitCStr")
+ }
+ }
+
+ impl LitCStr {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitByte {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitByte")
+ }
+ }
+
+ impl LitByte {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitChar {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitChar")
+ }
+ }
+
+ impl LitChar {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitInt {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitInt")
+ }
+ }
+
+ impl LitInt {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitFloat {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitFloat")
+ }
+ }
+
+ impl LitFloat {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for LitBool {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.debug(formatter, "LitBool")
+ }
+ }
+
+ impl LitBool {
+        pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
+ formatter
+ .debug_struct(name)
+ .field("value", &self.value)
+ .finish()
+ }
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for LitRepr {
+ fn clone(&self) -> Self {
+ LitRepr {
+ token: self.token.clone(),
+ suffix: self.suffix.clone(),
+ }
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for LitIntRepr {
+ fn clone(&self) -> Self {
+ LitIntRepr {
+ token: self.token.clone(),
+ digits: self.digits.clone(),
+ suffix: self.suffix.clone(),
+ }
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for LitFloatRepr {
+ fn clone(&self) -> Self {
+ LitFloatRepr {
+ token: self.token.clone(),
+ digits: self.digits.clone(),
+ suffix: self.suffix.clone(),
+ }
+ }
+}
+
+macro_rules! lit_extra_traits {
+ ($ty:ident) => {
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl Clone for $ty {
+ fn clone(&self) -> Self {
+ $ty {
+ repr: self.repr.clone(),
+ }
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+ self.repr.token.to_string() == other.repr.token.to_string()
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Hash for $ty {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ self.repr.token.to_string().hash(state);
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn $ty(marker: lookahead::TokenMarker) -> $ty {
+ match marker {}
+ }
+ }
+ };
+}
+
+lit_extra_traits!(LitStr);
+lit_extra_traits!(LitByteStr);
+lit_extra_traits!(LitCStr);
+lit_extra_traits!(LitByte);
+lit_extra_traits!(LitChar);
+lit_extra_traits!(LitInt);
+lit_extra_traits!(LitFloat);
+
+#[cfg(feature = "parsing")]
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
+ match marker {}
+ }
+}
+
+/// The style of a string literal, either plain quoted or a raw string like
+/// `r##"data"##`.
+#[doc(hidden)] // https://github.com/dtolnay/syn/issues/1566
+pub enum StrStyle {
+ /// An ordinary string like `"data"`.
+ Cooked,
+ /// A raw string like `r##"data"##`.
+ ///
+ /// The unsigned integer is the number of `#` symbols used.
+ Raw(usize),
+}
+
+#[cfg(feature = "parsing")]
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ match marker {}
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::buffer::Cursor;
+ use crate::error::Result;
+ use crate::lit::{
+ value, Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat,
LitFloatRepr, LitInt,
+ LitIntRepr, LitStr,
+ };
+ use crate::parse::{Parse, ParseStream};
+ use proc_macro2::{Literal, Punct};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Lit {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| {
+ if let Some((lit, rest)) = cursor.literal() {
+ return Ok((Lit::new(lit), rest));
+ }
+
+ if let Some((ident, rest)) = cursor.ident() {
+ let value = ident == "true";
+ if value || ident == "false" {
+ let lit_bool = LitBool {
+ value,
+ span: ident.span(),
+ };
+ return Ok((Lit::Bool(lit_bool), rest));
+ }
+ }
+
+ if let Some((punct, rest)) = cursor.punct() {
+ if punct.as_char() == '-' {
+                        if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
+ return Ok((lit, rest));
+ }
+ }
+ }
+
+ Err(cursor.error("expected literal"))
+ })
+ }
+ }
+
+    fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
+ let (lit, rest) = cursor.literal()?;
+
+ let mut span = neg.span();
+ span = span.join(lit.span()).unwrap_or(span);
+
+ let mut repr = lit.to_string();
+ repr.insert(0, '-');
+
+ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
+ let mut token: Literal = repr.parse().unwrap();
+ token.set_span(span);
+ return Some((
+ Lit::Int(LitInt {
+ repr: Box::new(LitIntRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ }),
+ rest,
+ ));
+ }
+
+ let (digits, suffix) = value::parse_lit_float(&repr)?;
+ let mut token: Literal = repr.parse().unwrap();
+ token.set_span(span);
+ Some((
+ Lit::Float(LitFloat {
+ repr: Box::new(LitFloatRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ }),
+ rest,
+ ))
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::Str(lit)) => Ok(lit),
+ _ => Err(head.error("expected string literal")),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitByteStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::ByteStr(lit)) => Ok(lit),
+ _ => Err(head.error("expected byte string literal")),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitCStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::CStr(lit)) => Ok(lit),
+ _ => Err(head.error("expected C string literal")),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitByte {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::Byte(lit)) => Ok(lit),
+ _ => Err(head.error("expected byte literal")),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitChar {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::Char(lit)) => Ok(lit),
+ _ => Err(head.error("expected character literal")),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitInt {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::Int(lit)) => Ok(lit),
+ _ => Err(head.error("expected integer literal")),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitFloat {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::Float(lit)) => Ok(lit),
+ _ => Err(head.error("expected floating point literal")),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for LitBool {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse() {
+ Ok(Lit::Bool(lit)) => Ok(lit),
+ _ => Err(head.error("expected boolean literal")),
+ }
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+    use crate::lit::{LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitInt, LitStr};
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitByteStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitCStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitByte {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitChar {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitInt {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitFloat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for LitBool {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(self.token());
+ }
+ }
+}
+
+mod value {
+ use crate::bigint::BigInt;
+ use crate::lit::{
+ Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat,
LitFloatRepr, LitInt,
+ LitIntRepr, LitRepr, LitStr,
+ };
+ use proc_macro2::{Literal, Span};
+ use std::ascii;
+ use std::char;
+ use std::ffi::CString;
+ use std::ops::{Index, RangeFrom};
+
+ impl Lit {
+ /// Interpret a Syn literal from a proc-macro2 literal.
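+        ///
+        /// A small usage sketch: the variant is chosen from the literal's
+        /// textual representation.
+        ///
+        /// ```
+        /// use proc_macro2::Literal;
+        /// use syn::Lit;
+        ///
+        /// // A proc-macro2 string literal is classified as Lit::Str.
+        /// let lit = Lit::new(Literal::string("hello"));
+        /// assert!(matches!(lit, Lit::Str(_)));
+        /// ```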
+ pub fn new(token: Literal) -> Self {
+ let repr = token.to_string();
+
+ match byte(&repr, 0) {
+ // "...", r"...", r#"..."#
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_str(&repr);
+ return Lit::Str(LitStr {
+ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'b' => match byte(&repr, 1) {
+                    // b"...", br"...", br#"..."#
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_byte_str(&repr);
+ return Lit::ByteStr(LitByteStr {
+ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ // b'...'
+ b'\'' => {
+ let (_, suffix) = parse_lit_byte(&repr);
+ return Lit::Byte(LitByte {
+ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ _ => {}
+ },
+ // c"...", cr"...", cr#"..."#
+ b'c' => {
+ let (_, suffix) = parse_lit_c_str(&repr);
+ return Lit::CStr(LitCStr {
+ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ // '...'
+ b'\'' => {
+ let (_, suffix) = parse_lit_char(&repr);
+ return Lit::Char(LitChar {
+ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'0'..=b'9' | b'-' => {
+ // 0, 123, 0xFF, 0o77, 0b11
+ if let Some((digits, suffix)) = parse_lit_int(&repr) {
+ return Lit::Int(LitInt {
+ repr: Box::new(LitIntRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ });
+ }
+ // 1.0, 1e-1, 1e+1
+ if let Some((digits, suffix)) = parse_lit_float(&repr) {
+ return Lit::Float(LitFloat {
+ repr: Box::new(LitFloatRepr {
+ token,
+ digits,
+ suffix,
+ }),
+ });
+ }
+ }
+ // true, false
+ b't' | b'f' => {
+ if repr == "true" || repr == "false" {
+ return Lit::Bool(LitBool {
+ value: repr == "true",
+ span: token.span(),
+ });
+ }
+ }
+ b'(' if repr == "(/*ERROR*/)" => return Lit::Verbatim(token),
+ _ => {}
+ }
+
+ panic!("unrecognized literal: `{}`", repr);
+ }
+
+ pub fn suffix(&self) -> &str {
+ match self {
+ Lit::Str(lit) => lit.suffix(),
+ Lit::ByteStr(lit) => lit.suffix(),
+ Lit::CStr(lit) => lit.suffix(),
+ Lit::Byte(lit) => lit.suffix(),
+ Lit::Char(lit) => lit.suffix(),
+ Lit::Int(lit) => lit.suffix(),
+ Lit::Float(lit) => lit.suffix(),
+ Lit::Bool(_) | Lit::Verbatim(_) => "",
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ match self {
+ Lit::Str(lit) => lit.span(),
+ Lit::ByteStr(lit) => lit.span(),
+ Lit::CStr(lit) => lit.span(),
+ Lit::Byte(lit) => lit.span(),
+ Lit::Char(lit) => lit.span(),
+ Lit::Int(lit) => lit.span(),
+ Lit::Float(lit) => lit.span(),
+ Lit::Bool(lit) => lit.span,
+ Lit::Verbatim(lit) => lit.span(),
+ }
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ match self {
+ Lit::Str(lit) => lit.set_span(span),
+ Lit::ByteStr(lit) => lit.set_span(span),
+ Lit::CStr(lit) => lit.set_span(span),
+ Lit::Byte(lit) => lit.set_span(span),
+ Lit::Char(lit) => lit.set_span(span),
+ Lit::Int(lit) => lit.set_span(span),
+ Lit::Float(lit) => lit.set_span(span),
+ Lit::Bool(lit) => lit.span = span,
+ Lit::Verbatim(lit) => lit.set_span(span),
+ }
+ }
+ }
+
+ /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
+ /// past the end of the input buffer.
+ pub(crate) fn byte<S: AsRef<[u8]> + ?Sized>(s: &S, idx: usize) -> u8 {
+ let s = s.as_ref();
+ if idx < s.len() {
+ s[idx]
+ } else {
+ 0
+ }
+ }
+
+ fn next_chr(s: &str) -> char {
+ s.chars().next().unwrap_or('\0')
+ }
+
+ // Returns (content, suffix).
+ pub(crate) fn parse_lit_str(s: &str) -> (Box<str>, Box<str>) {
+ match byte(s, 0) {
+ b'"' => parse_lit_str_cooked(s),
+ b'r' => parse_lit_str_raw(s),
+ _ => unreachable!(),
+ }
+ }
+
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+ fn parse_lit_str_cooked(mut s: &str) -> (Box<str>, Box<str>) {
+ assert_eq!(byte(s, 0), b'"');
+ s = &s[1..];
+
+ let mut content = String::new();
+ 'outer: loop {
+ let ch = match byte(s, 0) {
+ b'"' => break,
+ b'\\' => {
+ let b = byte(s, 1);
+ s = &s[2..];
+ match b {
+ b'x' => {
+ let (byte, rest) = backslash_x(s);
+ s = rest;
+                            assert!(byte <= 0x7F, "invalid \\x byte in string literal");
+ char::from_u32(u32::from(byte)).unwrap()
+ }
+ b'u' => {
+ let (ch, rest) = backslash_u(s);
+ s = rest;
+ ch
+ }
+ b'n' => '\n',
+ b'r' => '\r',
+ b't' => '\t',
+ b'\\' => '\\',
+ b'0' => '\0',
+ b'\'' => '\'',
+ b'"' => '"',
+ b'\r' | b'\n' => loop {
+ let b = byte(s, 0);
+ match b {
+ b' ' | b'\t' | b'\n' | b'\r' => s = &s[1..],
+ _ => continue 'outer,
+ }
+ },
+ b => panic!(
+                            "unexpected byte '{}' after \\ character in string literal",
+ ascii::escape_default(b),
+ ),
+ }
+ }
+ b'\r' => {
+                    assert_eq!(byte(s, 1), b'\n', "bare CR not allowed in string");
+ s = &s[2..];
+ '\n'
+ }
+ _ => {
+ let ch = next_chr(s);
+ s = &s[ch.len_utf8()..];
+ ch
+ }
+ };
+ content.push(ch);
+ }
+
+ assert!(s.starts_with('"'));
+ let content = content.into_boxed_str();
+ let suffix = s[1..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+ fn parse_lit_str_raw(mut s: &str) -> (Box<str>, Box<str>) {
+ assert_eq!(byte(s, 0), b'r');
+ s = &s[1..];
+
+ let mut pounds = 0;
+ while byte(s, pounds) == b'#' {
+ pounds += 1;
+ }
+ assert_eq!(byte(s, pounds), b'"');
+ let close = s.rfind('"').unwrap();
+ for end in s[close + 1..close + 1 + pounds].bytes() {
+ assert_eq!(end, b'#');
+ }
+
+ let content = s[pounds + 1..close].to_owned().into_boxed_str();
+ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+ // Returns (content, suffix).
+ pub(crate) fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ match byte(s, 1) {
+ b'"' => parse_lit_byte_str_cooked(s),
+ b'r' => parse_lit_byte_str_raw(s),
+ _ => unreachable!(),
+ }
+ }
+
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+        // We're going to want to have slices which don't respect codepoint boundaries.
+ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+ let b = byte(v, 1);
+ v = &v[2..];
+ match b {
+ b'x' => {
+ let (b, rest) = backslash_x(v);
+ v = rest;
+ b
+ }
+ b'n' => b'\n',
+ b'r' => b'\r',
+ b't' => b'\t',
+ b'\\' => b'\\',
+ b'0' => b'\0',
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+ let byte = byte(v, 0);
+ if matches!(byte, b' ' | b'\t' | b'\n' | b'\r') {
+ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+ },
+ b => panic!(
+                            "unexpected byte '{}' after \\ character in byte-string literal",
+ ascii::escape_default(b),
+ ),
+ }
+ }
+ b'\r' => {
+                    assert_eq!(byte(v, 1), b'\n', "bare CR not allowed in string");
+ v = &v[2..];
+ b'\n'
+ }
+ b => {
+ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+ assert_eq!(byte(v, 0), b'"');
+ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
+ (out, suffix)
+ }
+
+ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ let (value, suffix) = parse_lit_str_raw(&s[1..]);
+ (String::from(value).into_bytes(), suffix)
+ }
+
+ // Returns (content, suffix).
+ pub(crate) fn parse_lit_c_str(s: &str) -> (CString, Box<str>) {
+ assert_eq!(byte(s, 0), b'c');
+ match byte(s, 1) {
+ b'"' => parse_lit_c_str_cooked(s),
+ b'r' => parse_lit_c_str_raw(s),
+ _ => unreachable!(),
+ }
+ }
+
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+ fn parse_lit_c_str_cooked(mut s: &str) -> (CString, Box<str>) {
+ assert_eq!(byte(s, 0), b'c');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+        // We're going to want to have slices which don't respect codepoint boundaries.
+ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+ let b = byte(v, 1);
+ v = &v[2..];
+ match b {
+ b'x' => {
+ let (b, rest) = backslash_x(v);
+                            assert!(b != 0, "\\x00 is not allowed in C-string literal");
+ v = rest;
+ b
+ }
+ b'u' => {
+ let (ch, rest) = backslash_u(v);
+                            assert!(ch != '\0', "\\u{{0}} is not allowed in C-string literal");
+ v = rest;
+                            out.extend_from_slice(ch.encode_utf8(&mut [0u8; 4]).as_bytes());
+ continue 'outer;
+ }
+ b'n' => b'\n',
+ b'r' => b'\r',
+ b't' => b'\t',
+ b'\\' => b'\\',
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+ let byte = byte(v, 0);
+ if matches!(byte, b' ' | b'\t' | b'\n' | b'\r') {
+ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+ },
+ b => panic!(
+                            "unexpected byte '{}' after \\ character in byte-string literal",
+ ascii::escape_default(b),
+ ),
+ }
+ }
+ b'\r' => {
+                    assert_eq!(byte(v, 1), b'\n', "bare CR not allowed in string");
+ v = &v[2..];
+ b'\n'
+ }
+ b => {
+ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+ assert_eq!(byte(v, 0), b'"');
+ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
+ (CString::new(out).unwrap(), suffix)
+ }
+
+ fn parse_lit_c_str_raw(s: &str) -> (CString, Box<str>) {
+ assert_eq!(byte(s, 0), b'c');
+ let (value, suffix) = parse_lit_str_raw(&s[1..]);
+ (CString::new(String::from(value)).unwrap(), suffix)
+ }
+
+ // Returns (value, suffix).
+ pub(crate) fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'\'');
+
+        // We're going to want to have slices which don't respect codepoint boundaries.
+ let mut v = s[2..].as_bytes();
+
+ let b = match byte(v, 0) {
+ b'\\' => {
+ let b = byte(v, 1);
+ v = &v[2..];
+ match b {
+ b'x' => {
+ let (b, rest) = backslash_x(v);
+ v = rest;
+ b
+ }
+ b'n' => b'\n',
+ b'r' => b'\r',
+ b't' => b'\t',
+ b'\\' => b'\\',
+ b'0' => b'\0',
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b => panic!(
+                    "unexpected byte '{}' after \\ character in byte literal",
+ ascii::escape_default(b),
+ ),
+ }
+ }
+ b => {
+ v = &v[1..];
+ b
+ }
+ };
+
+ assert_eq!(byte(v, 0), b'\'');
+ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
+ (b, suffix)
+ }
+
+ // Returns (value, suffix).
+ pub(crate) fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
+ assert_eq!(byte(s, 0), b'\'');
+ s = &s[1..];
+
+ let ch = match byte(s, 0) {
+ b'\\' => {
+ let b = byte(s, 1);
+ s = &s[2..];
+ match b {
+ b'x' => {
+ let (byte, rest) = backslash_x(s);
+ s = rest;
+                    assert!(byte <= 0x7F, "invalid \\x byte in character literal");
+ char::from_u32(u32::from(byte)).unwrap()
+ }
+ b'u' => {
+ let (ch, rest) = backslash_u(s);
+ s = rest;
+ ch
+ }
+ b'n' => '\n',
+ b'r' => '\r',
+ b't' => '\t',
+ b'\\' => '\\',
+ b'0' => '\0',
+ b'\'' => '\'',
+ b'"' => '"',
+ b => panic!(
+                    "unexpected byte '{}' after \\ character in character literal",
+ ascii::escape_default(b),
+ ),
+ }
+ }
+ _ => {
+ let ch = next_chr(s);
+ s = &s[ch.len_utf8()..];
+ ch
+ }
+ };
+ assert_eq!(byte(s, 0), b'\'');
+ let suffix = s[1..].to_owned().into_boxed_str();
+ (ch, suffix)
+ }
+
+ fn backslash_x<S>(s: &S) -> (u8, &S)
+ where
+ S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
+ {
+ let mut ch = 0;
+ let b0 = byte(s, 0);
+ let b1 = byte(s, 1);
+ ch += 0x10
+ * match b0 {
+ b'0'..=b'9' => b0 - b'0',
+ b'a'..=b'f' => 10 + (b0 - b'a'),
+ b'A'..=b'F' => 10 + (b0 - b'A'),
+ _ => panic!("unexpected non-hex character after \\x"),
+ };
+ ch += match b1 {
+ b'0'..=b'9' => b1 - b'0',
+ b'a'..=b'f' => 10 + (b1 - b'a'),
+ b'A'..=b'F' => 10 + (b1 - b'A'),
+ _ => panic!("unexpected non-hex character after \\x"),
+ };
+ (ch, &s[2..])
+ }
+
+ fn backslash_u<S>(mut s: &S) -> (char, &S)
+ where
+ S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
+ {
+ if byte(s, 0) != b'{' {
+ panic!("{}", "expected { after \\u");
+ }
+ s = &s[1..];
+
+ let mut ch = 0;
+ let mut digits = 0;
+ loop {
+ let b = byte(s, 0);
+ let digit = match b {
+ b'0'..=b'9' => b - b'0',
+ b'a'..=b'f' => 10 + b - b'a',
+ b'A'..=b'F' => 10 + b - b'A',
+ b'_' if digits > 0 => {
+ s = &s[1..];
+ continue;
+ }
+ b'}' if digits == 0 => panic!("invalid empty unicode escape"),
+ b'}' => break,
+ _ => panic!("unexpected non-hex character after \\u"),
+ };
+ if digits == 6 {
+            panic!("overlong unicode escape (must have at most 6 hex digits)");
+ }
+ ch *= 0x10;
+ ch += u32::from(digit);
+ digits += 1;
+ s = &s[1..];
+ }
+ assert!(byte(s, 0) == b'}');
+ s = &s[1..];
+
+ if let Some(ch) = char::from_u32(ch) {
+ (ch, s)
+ } else {
+ panic!("character code {:x} is not a valid unicode character", ch);
+ }
+ }
+
+ // Returns base 10 digits and suffix.
+ pub(crate) fn parse_lit_int(mut s: &str) -> Option<(Box<str>, Box<str>)> {
+ let negative = byte(s, 0) == b'-';
+ if negative {
+ s = &s[1..];
+ }
+
+ let base = match (byte(s, 0), byte(s, 1)) {
+ (b'0', b'x') => {
+ s = &s[2..];
+ 16
+ }
+ (b'0', b'o') => {
+ s = &s[2..];
+ 8
+ }
+ (b'0', b'b') => {
+ s = &s[2..];
+ 2
+ }
+ (b'0'..=b'9', _) => 10,
+ _ => return None,
+ };
+
+ let mut value = BigInt::new();
+ let mut has_digit = false;
+ 'outer: loop {
+ let b = byte(s, 0);
+ let digit = match b {
+ b'0'..=b'9' => b - b'0',
+ b'a'..=b'f' if base > 10 => b - b'a' + 10,
+ b'A'..=b'F' if base > 10 => b - b'A' + 10,
+ b'_' => {
+ s = &s[1..];
+ continue;
+ }
+ // If looking at a floating point literal, we don't want to
+ // consider it an integer.
+ b'.' if base == 10 => return None,
+ b'e' | b'E' if base == 10 => {
+ let mut has_exp = false;
+ for (i, b) in s[1..].bytes().enumerate() {
+ match b {
+ b'_' => {}
+ b'-' | b'+' => return None,
+ b'0'..=b'9' => has_exp = true,
+ _ => {
+ let suffix = &s[1 + i..];
+ if has_exp && crate::ident::xid_ok(suffix) {
+ return None;
+ } else {
+ break 'outer;
+ }
+ }
+ }
+ }
+ if has_exp {
+ return None;
+ } else {
+ break;
+ }
+ }
+ _ => break,
+ };
+
+ if digit >= base {
+ return None;
+ }
+
+ has_digit = true;
+ value *= base;
+ value += digit;
+ s = &s[1..];
+ }
+
+ if !has_digit {
+ return None;
+ }
+
+ let suffix = s;
+ if suffix.is_empty() || crate::ident::xid_ok(suffix) {
+ let mut repr = value.to_string();
+ if negative {
+ repr.insert(0, '-');
+ }
+ Some((repr.into_boxed_str(), suffix.to_owned().into_boxed_str()))
+ } else {
+ None
+ }
+ }
+
+ // Returns base 10 digits and suffix.
+ pub(crate) fn parse_lit_float(input: &str) -> Option<(Box<str>, Box<str>)> {
+    // Rust's floating point literals are very similar to the ones parsed by
+ // the standard library, except that rust's literals can contain
+ // ignorable underscores. Let's remove those underscores.
+
+ let mut bytes = input.to_owned().into_bytes();
+
+ let start = (*bytes.first()? == b'-') as usize;
+ match bytes.get(start)? {
+ b'0'..=b'9' => {}
+ _ => return None,
+ }
+
+ let mut read = start;
+ let mut write = start;
+ let mut has_dot = false;
+ let mut has_e = false;
+ let mut has_sign = false;
+ let mut has_exponent = false;
+ while read < bytes.len() {
+ match bytes[read] {
+ b'_' => {
+ // Don't increase write
+ read += 1;
+ continue;
+ }
+ b'0'..=b'9' => {
+ if has_e {
+ has_exponent = true;
+ }
+ bytes[write] = bytes[read];
+ }
+ b'.' => {
+ if has_e || has_dot {
+ return None;
+ }
+ has_dot = true;
+ bytes[write] = b'.';
+ }
+ b'e' | b'E' => {
+ match bytes[read + 1..]
+ .iter()
+ .find(|b| **b != b'_')
+ .unwrap_or(&b'\0')
+ {
+ b'-' | b'+' | b'0'..=b'9' => {}
+ _ => break,
+ }
+ if has_e {
+ if has_exponent {
+ break;
+ } else {
+ return None;
+ }
+ }
+ has_e = true;
+ bytes[write] = b'e';
+ }
+ b'-' | b'+' => {
+ if has_sign || has_exponent || !has_e {
+ return None;
+ }
+ has_sign = true;
+ if bytes[read] == b'-' {
+ bytes[write] = bytes[read];
+ } else {
+ // Omit '+'
+ read += 1;
+ continue;
+ }
+ }
+ _ => break,
+ }
+ read += 1;
+ write += 1;
+ }
+
+ if has_e && !has_exponent {
+ return None;
+ }
+
+ let mut digits = String::from_utf8(bytes).unwrap();
+ let suffix = digits.split_off(read);
+ digits.truncate(write);
+ if suffix.is_empty() || crate::ident::xid_ok(&suffix) {
+ Some((digits.into_boxed_str(), suffix.into_boxed_str()))
+ } else {
+ None
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/lookahead.rs b/rust/hw/char/pl011/vendor/syn/src/lookahead.rs
new file mode 100644
index 0000000000..2ca1471472
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/lookahead.rs
@@ -0,0 +1,169 @@
+use crate::buffer::Cursor;
+use crate::error::{self, Error};
+use crate::sealed::lookahead::Sealed;
+use crate::span::IntoSpans;
+use crate::token::Token;
+use proc_macro2::{Delimiter, Span};
+use std::cell::RefCell;
+
+/// Support for checking the next token in a stream to decide how to parse.
+///
+/// An important advantage over [`ParseStream::peek`] is that here we
+/// automatically construct an appropriate error message based on the token
+/// alternatives that get peeked. If you are producing your own error message,
+/// go ahead and use `ParseStream::peek` instead.
+///
+/// Use [`ParseStream::lookahead1`] to construct this object.
+///
+/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
+/// [`ParseStream::lookahead1`]: crate::parse::ParseBuffer::lookahead1
+///
+/// Consuming tokens from the source stream after constructing a lookahead
+/// object does not also advance the lookahead object.
+///
+/// # Example
+///
+/// ```
+/// use syn::{ConstParam, Ident, Lifetime, LifetimeParam, Result, Token, TypeParam};
+/// use syn::parse::{Parse, ParseStream};
+///
+/// // A generic parameter, a single one of the comma-separated elements inside
+/// // angle brackets in:
+/// //
+/// // fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... }
+/// //
+/// // On invalid input, lookahead gives us a reasonable error message.
+/// //
+/// // error: expected one of: identifier, lifetime, `const`
+/// // |
+/// // 5 | fn f<!Sized>() {}
+/// // | ^
+/// enum GenericParam {
+/// Type(TypeParam),
+/// Lifetime(LifetimeParam),
+/// Const(ConstParam),
+/// }
+///
+/// impl Parse for GenericParam {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let lookahead = input.lookahead1();
+/// if lookahead.peek(Ident) {
+/// input.parse().map(GenericParam::Type)
+/// } else if lookahead.peek(Lifetime) {
+/// input.parse().map(GenericParam::Lifetime)
+/// } else if lookahead.peek(Token![const]) {
+/// input.parse().map(GenericParam::Const)
+/// } else {
+/// Err(lookahead.error())
+/// }
+/// }
+/// }
+/// ```
+pub struct Lookahead1<'a> {
+ scope: Span,
+ cursor: Cursor<'a>,
+ comparisons: RefCell<Vec<&'static str>>,
+}
+
+pub(crate) fn new(scope: Span, cursor: Cursor) -> Lookahead1 {
+ Lookahead1 {
+ scope,
+ cursor,
+ comparisons: RefCell::new(Vec::new()),
+ }
+}
+
+fn peek_impl(
+ lookahead: &Lookahead1,
+ peek: fn(Cursor) -> bool,
+ display: fn() -> &'static str,
+) -> bool {
+ if peek(lookahead.cursor) {
+ return true;
+ }
+ lookahead.comparisons.borrow_mut().push(display());
+ false
+}
+
+impl<'a> Lookahead1<'a> {
+ /// Looks at the next token in the parse stream to determine whether it
+ /// matches the requested type of token.
+ ///
+ /// # Syntax
+ ///
+ /// Note that this method does not use turbofish syntax. Pass the peek type
+ /// inside of parentheses.
+ ///
+ /// - `input.peek(Token![struct])`
+ /// - `input.peek(Token![==])`
+ /// - `input.peek(Ident)` *(does not accept keywords)*
+ /// - `input.peek(Ident::peek_any)`
+ /// - `input.peek(Lifetime)`
+ /// - `input.peek(token::Brace)`
+ pub fn peek<T: Peek>(&self, token: T) -> bool {
+ let _ = token;
+ peek_impl(self, T::Token::peek, T::Token::display)
+ }
+
+ /// Triggers an error at the current position of the parse stream.
+ ///
+ /// The error message will identify all of the expected token types that
+ /// have been peeked against this lookahead instance.
+ pub fn error(self) -> Error {
+ let comparisons = self.comparisons.into_inner();
+ match comparisons.len() {
+ 0 => {
+ if self.cursor.eof() {
+ Error::new(self.scope, "unexpected end of input")
+ } else {
+ Error::new(self.cursor.span(), "unexpected token")
+ }
+ }
+ 1 => {
+ let message = format!("expected {}", comparisons[0]);
+ error::new_at(self.scope, self.cursor, message)
+ }
+ 2 => {
+                let message = format!("expected {} or {}", comparisons[0], comparisons[1]);
+ error::new_at(self.scope, self.cursor, message)
+ }
+ _ => {
+ let join = comparisons.join(", ");
+ let message = format!("expected one of: {}", join);
+ error::new_at(self.scope, self.cursor, message)
+ }
+ }
+ }
+}
+
+/// Types that can be parsed by looking at just one token.
+///
+/// Use [`ParseStream::peek`] to peek one of these types in a parse stream
+/// without consuming it from the stream.
+///
+/// This trait is sealed and cannot be implemented for types outside of Syn.
+///
+/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
+pub trait Peek: Sealed {
+ // Not public API.
+ #[doc(hidden)]
+ type Token: Token;
+}
+
+impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Peek for F {
+ type Token = T;
+}
+
+pub enum TokenMarker {}
+
+impl<S> IntoSpans<S> for TokenMarker {
+ fn into_spans(self) -> S {
+ match self {}
+ }
+}
+
+pub(crate) fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool {
+ cursor.group(delimiter).is_some()
+}
+
+impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Sealed for F {}
diff --git a/rust/hw/char/pl011/vendor/syn/src/mac.rs b/rust/hw/char/pl011/vendor/syn/src/mac.rs
new file mode 100644
index 0000000000..7e1876c648
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/mac.rs
@@ -0,0 +1,223 @@
+#[cfg(feature = "parsing")]
+use crate::error::Result;
+#[cfg(feature = "parsing")]
+use crate::parse::{Parse, ParseStream, Parser};
+use crate::path::Path;
+use crate::token::{Brace, Bracket, Paren};
+use proc_macro2::extra::DelimSpan;
+#[cfg(feature = "parsing")]
+use proc_macro2::Delimiter;
+use proc_macro2::TokenStream;
+#[cfg(feature = "parsing")]
+use proc_macro2::TokenTree;
+
+ast_struct! {
+ /// A macro invocation: `println!("{}", mac)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Macro {
+ pub path: Path,
+ pub bang_token: Token![!],
+ pub delimiter: MacroDelimiter,
+ pub tokens: TokenStream,
+ }
+}
+
+ast_enum! {
+    /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum MacroDelimiter {
+ Paren(Paren),
+ Brace(Brace),
+ Bracket(Bracket),
+ }
+}
+
+impl MacroDelimiter {
+ pub fn span(&self) -> &DelimSpan {
+ match self {
+ MacroDelimiter::Paren(token) => &token.span,
+ MacroDelimiter::Brace(token) => &token.span,
+ MacroDelimiter::Bracket(token) => &token.span,
+ }
+ }
+
+    #[cfg(all(feature = "full", any(feature = "parsing", feature = "printing")))]
+ pub(crate) fn is_brace(&self) -> bool {
+ match self {
+ MacroDelimiter::Brace(_) => true,
+ MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => false,
+ }
+ }
+}
+
+impl Macro {
+ /// Parse the tokens within the macro invocation's delimiters into a syntax
+ /// tree.
+ ///
+ /// This is equivalent to `syn::parse2::<T>(mac.tokens)` except that it
+ /// produces a more useful span when `tokens` is empty.
+ ///
+ /// # Example
+ ///
+ /// ```
+    /// use syn::{parse_quote, Expr, ExprLit, Ident, Lit, LitStr, Macro, Token};
+ /// use syn::ext::IdentExt;
+ /// use syn::parse::{Error, Parse, ParseStream, Result};
+ /// use syn::punctuated::Punctuated;
+ ///
+ /// // The arguments expected by libcore's format_args macro, and as a
+ /// // result most other formatting and printing macros like println.
+ /// //
+ /// // println!("{} is {number:.prec$}", "x", prec=5, number=0.01)
+ /// struct FormatArgs {
+ /// format_string: Expr,
+ /// positional_args: Vec<Expr>,
+ /// named_args: Vec<(Ident, Expr)>,
+ /// }
+ ///
+ /// impl Parse for FormatArgs {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let format_string: Expr;
+ /// let mut positional_args = Vec::new();
+ /// let mut named_args = Vec::new();
+ ///
+ /// format_string = input.parse()?;
+ /// while !input.is_empty() {
+ /// input.parse::<Token![,]>()?;
+ /// if input.is_empty() {
+ /// break;
+ /// }
+ /// if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
+ /// while !input.is_empty() {
+ /// let name: Ident = input.call(Ident::parse_any)?;
+ /// input.parse::<Token![=]>()?;
+ /// let value: Expr = input.parse()?;
+ /// named_args.push((name, value));
+ /// if input.is_empty() {
+ /// break;
+ /// }
+ /// input.parse::<Token![,]>()?;
+ /// }
+ /// break;
+ /// }
+ /// positional_args.push(input.parse()?);
+ /// }
+ ///
+ /// Ok(FormatArgs {
+ /// format_string,
+ /// positional_args,
+ /// named_args,
+ /// })
+ /// }
+ /// }
+ ///
+ /// // Extract the first argument, the format string literal, from an
+ /// // invocation of a formatting or printing macro.
+ /// fn get_format_string(m: &Macro) -> Result<LitStr> {
+ /// let args: FormatArgs = m.parse_body()?;
+ /// match args.format_string {
+ /// Expr::Lit(ExprLit { lit: Lit::Str(lit), .. }) => Ok(lit),
+ /// other => {
+ /// // First argument was not a string literal expression.
+ /// // Maybe something like: println!(concat!(...), ...)
+    ///             Err(Error::new_spanned(other, "format string must be a string literal"))
+ /// }
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let invocation = parse_quote! {
+ /// println!("{:?}", Instant::now())
+ /// };
+ /// let lit = get_format_string(&invocation).unwrap();
+ /// assert_eq!(lit.value(), "{:?}");
+ /// }
+ /// ```
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_body<T: Parse>(&self) -> Result<T> {
+ self.parse_body_with(T::parse)
+ }
+
+ /// Parse the tokens within the macro invocation's delimiters using the
+ /// given parser.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+ let scope = self.delimiter.span().close();
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
+ input.step(|cursor| {
+ if let Some((TokenTree::Group(g), rest)) = cursor.token_tree() {
+ let span = g.delim_span();
+ let delimiter = match g.delimiter() {
+ Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
+ Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
+ Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
+ Delimiter::None => {
+ return Err(cursor.error("expected delimiter"));
+ }
+ };
+ Ok(((delimiter, g.stream()), rest))
+ } else {
+ Err(cursor.error("expected delimiter"))
+ }
+ })
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::error::Result;
+ use crate::mac::{parse_delimiter, Macro};
+ use crate::parse::{Parse, ParseStream};
+ use crate::path::Path;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Macro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let tokens;
+ Ok(Macro {
+ path: input.call(Path::parse_mod_style)?,
+ bang_token: input.parse()?,
+ delimiter: {
+ let (delimiter, content) = parse_delimiter(input)?;
+ tokens = content;
+ delimiter
+ },
+ tokens,
+ })
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::mac::{Macro, MacroDelimiter};
+ use crate::token;
+ use proc_macro2::{Delimiter, TokenStream};
+ use quote::ToTokens;
+
+ impl MacroDelimiter {
+        pub(crate) fn surround(&self, tokens: &mut TokenStream, inner: TokenStream) {
+ let (delim, span) = match self {
+                MacroDelimiter::Paren(paren) => (Delimiter::Parenthesis, paren.span),
+ MacroDelimiter::Brace(brace) => (Delimiter::Brace, brace.span),
+                MacroDelimiter::Bracket(bracket) => (Delimiter::Bracket, bracket.span),
+ };
+ token::printing::delim(delim, span.join(), tokens, inner);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Macro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.path.to_tokens(tokens);
+ self.bang_token.to_tokens(tokens);
+ self.delimiter.surround(tokens, self.tokens.clone());
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/macros.rs b/rust/hw/char/pl011/vendor/syn/src/macros.rs
new file mode 100644
index 0000000000..2b6708d495
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/macros.rs
@@ -0,0 +1,166 @@
+#[cfg_attr(
+ not(any(feature = "full", feature = "derive")),
+ allow(unknown_lints, unused_macro_rules)
+)]
+macro_rules! ast_struct {
+ (
+ $(#[$attr:meta])*
+ $pub:ident $struct:ident $name:ident #full $body:tt
+ ) => {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(struct $struct);
+
+ #[cfg(feature = "full")]
+ $(#[$attr])* $pub $struct $name $body
+
+ #[cfg(not(feature = "full"))]
+ $(#[$attr])* $pub $struct $name {
+ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
+ }
+
+ #[cfg(all(not(feature = "full"), feature = "printing"))]
+ impl ::quote::ToTokens for $name {
+ fn to_tokens(&self, _: &mut ::proc_macro2::TokenStream) {
+ unreachable!()
+ }
+ }
+ };
+
+ (
+ $(#[$attr:meta])*
+ $pub:ident $struct:ident $name:ident $body:tt
+ ) => {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(struct $struct);
+
+ $(#[$attr])* $pub $struct $name $body
+ };
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+macro_rules! ast_enum {
+ (
+ $(#[$enum_attr:meta])*
+ $pub:ident $enum:ident $name:ident $body:tt
+ ) => {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+ $(#[$enum_attr])* $pub $enum $name $body
+ };
+}
+
+macro_rules! ast_enum_of_structs {
+ (
+ $(#[$enum_attr:meta])*
+ $pub:ident $enum:ident $name:ident $body:tt
+ ) => {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+ $(#[$enum_attr])* $pub $enum $name $body
+
+ ast_enum_of_structs_impl!($name $body);
+
+ #[cfg(feature = "printing")]
+ generate_to_tokens!(() tokens $name $body);
+ };
+}
+
+macro_rules! ast_enum_of_structs_impl {
+ (
+ $name:ident {
+ $(
+ $(#[cfg $cfg_attr:tt])*
+ $(#[doc $($doc_attr:tt)*])*
+ $variant:ident $( ($member:ident) )*,
+ )*
+ }
+ ) => {
+ $($(
+ ast_enum_from_struct!($name::$variant, $member);
+ )*)*
+ };
+}
+
+macro_rules! ast_enum_from_struct {
+ // No From<TokenStream> for verbatim variants.
+ ($name:ident::Verbatim, $member:ident) => {};
+
+ ($name:ident::$variant:ident, $member:ident) => {
+ impl From<$member> for $name {
+ fn from(e: $member) -> $name {
+ $name::$variant(e)
+ }
+ }
+ };
+}
+
+#[cfg(feature = "printing")]
+macro_rules! generate_to_tokens {
+ (
+ ($($arms:tt)*) $tokens:ident $name:ident {
+ $(#[cfg $cfg_attr:tt])*
+ $(#[doc $($doc_attr:tt)*])*
+ $variant:ident,
+ $($next:tt)*
+ }
+ ) => {
+ generate_to_tokens!(
+ ($($arms)* $(#[cfg $cfg_attr])* $name::$variant => {})
+ $tokens $name { $($next)* }
+ );
+ };
+
+ (
+ ($($arms:tt)*) $tokens:ident $name:ident {
+ $(#[cfg $cfg_attr:tt])*
+ $(#[doc $($doc_attr:tt)*])*
+ $variant:ident($member:ident),
+ $($next:tt)*
+ }
+ ) => {
+ generate_to_tokens!(
+            ($($arms)* $(#[cfg $cfg_attr])* $name::$variant(_e) => _e.to_tokens($tokens),)
+ $tokens $name { $($next)* }
+ );
+ };
+
+ (($($arms:tt)*) $tokens:ident $name:ident {}) => {
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ::quote::ToTokens for $name {
+ fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) {
+ match self {
+ $($arms)*
+ }
+ }
+ }
+ };
+}
+
+// Rustdoc bug: does not respect the doc(hidden) on some items.
+#[cfg(all(doc, feature = "parsing"))]
+macro_rules! pub_if_not_doc {
+ ($(#[$m:meta])* $pub:ident $($item:tt)*) => {
+ check_keyword_matches!(pub $pub);
+
+ $(#[$m])*
+ $pub(crate) $($item)*
+ };
+}
+
+#[cfg(all(not(doc), feature = "parsing"))]
+macro_rules! pub_if_not_doc {
+ ($(#[$m:meta])* $pub:ident $($item:tt)*) => {
+ check_keyword_matches!(pub $pub);
+
+ $(#[$m])*
+ $pub $($item)*
+ };
+}
+
+macro_rules! check_keyword_matches {
+ (enum enum) => {};
+ (pub pub) => {};
+ (struct struct) => {};
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/meta.rs b/rust/hw/char/pl011/vendor/syn/src/meta.rs
new file mode 100644
index 0000000000..ffeeb2629f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/meta.rs
@@ -0,0 +1,427 @@
+//! Facility for interpreting structured content inside of an `Attribute`.
+
+use crate::error::{Error, Result};
+use crate::ext::IdentExt as _;
+use crate::lit::Lit;
+use crate::parse::{ParseStream, Parser};
+use crate::path::{Path, PathSegment};
+use crate::punctuated::Punctuated;
+use proc_macro2::Ident;
+use std::fmt::Display;
+
+/// Make a parser that is usable with `parse_macro_input!` in a
+/// `#[proc_macro_attribute]` macro.
+///
+/// *Warning:* When parsing attribute args **other than** the
+/// `proc_macro::TokenStream` input of a `proc_macro_attribute`, you do **not**
+/// need this function. In several cases your callers will get worse error
+/// messages if you use this function, because the surrounding delimiter's span
+/// is concealed from attribute macros by rustc. Use
+/// [`Attribute::parse_nested_meta`] instead.
+///
+/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
+///
+/// # Example
+///
+/// This example implements an attribute macro whose invocations look like this:
+///
+/// ```
+/// # const IGNORE: &str = stringify! {
+/// #[tea(kind = "EarlGrey", hot)]
+/// struct Picard {...}
+/// # };
+/// ```
+///
+/// The "parameters" supported by the attribute are:
+///
+/// - `kind = "..."`
+/// - `hot`
+/// - `with(sugar, milk, ...)`, a comma-separated list of ingredients
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// use proc_macro::TokenStream;
+/// use syn::{parse_macro_input, LitStr, Path};
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro_attribute]
+/// # };
+/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
+/// let mut kind: Option<LitStr> = None;
+/// let mut hot: bool = false;
+/// let mut with: Vec<Path> = Vec::new();
+/// let tea_parser = syn::meta::parser(|meta| {
+/// if meta.path.is_ident("kind") {
+/// kind = Some(meta.value()?.parse()?);
+/// Ok(())
+/// } else if meta.path.is_ident("hot") {
+/// hot = true;
+/// Ok(())
+/// } else if meta.path.is_ident("with") {
+/// meta.parse_nested_meta(|meta| {
+/// with.push(meta.path);
+/// Ok(())
+/// })
+/// } else {
+/// Err(meta.error("unsupported tea property"))
+/// }
+/// });
+///
+/// parse_macro_input!(args with tea_parser);
+/// eprintln!("kind={kind:?} hot={hot} with={with:?}");
+///
+/// /* ... */
+/// # TokenStream::new()
+/// }
+/// ```
+///
+/// The `syn::meta` library will take care of dealing with the commas including
+/// trailing commas, and producing sensible error messages on unexpected input.
+///
+/// ```console
+/// error: expected `,`
+/// --> src/main.rs:3:37
+/// |
+/// 3 | #[tea(kind = "EarlGrey", with(sugar = "lol", milk))]
+/// | ^
+/// ```
+///
+/// # Example
+///
+/// Same as above but we factor out most of the logic into a separate function.
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// use proc_macro::TokenStream;
+/// use syn::meta::ParseNestedMeta;
+/// use syn::parse::{Parser, Result};
+/// use syn::{parse_macro_input, LitStr, Path};
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro_attribute]
+/// # };
+/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
+/// let mut attrs = TeaAttributes::default();
+/// let tea_parser = syn::meta::parser(|meta| attrs.parse(meta));
+/// parse_macro_input!(args with tea_parser);
+///
+/// /* ... */
+/// # TokenStream::new()
+/// }
+///
+/// #[derive(Default)]
+/// struct TeaAttributes {
+/// kind: Option<LitStr>,
+/// hot: bool,
+/// with: Vec<Path>,
+/// }
+///
+/// impl TeaAttributes {
+/// fn parse(&mut self, meta: ParseNestedMeta) -> Result<()> {
+/// if meta.path.is_ident("kind") {
+/// self.kind = Some(meta.value()?.parse()?);
+/// Ok(())
+/// } else /* just like in last example */
+/// # { unimplemented!() }
+///
+/// }
+/// }
+/// ```
+pub fn parser(logic: impl FnMut(ParseNestedMeta) -> Result<()>) -> impl Parser<Output = ()> {
+ |input: ParseStream| {
+ if input.is_empty() {
+ Ok(())
+ } else {
+ parse_nested_meta(input, logic)
+ }
+ }
+}
+
+/// Context for parsing a single property in the conventional syntax for
+/// structured attributes.
+///
+/// # Examples
+///
+/// Refer to usage examples on the following two entry-points:
+///
+/// - [`Attribute::parse_nested_meta`] if you have an entire `Attribute` to
+/// parse. Always use this if possible. Generally this is able to produce
+/// better error messages because `Attribute` holds span information for all
+/// of the delimiters therein.
+///
+/// - [`syn::meta::parser`] if you are implementing a `proc_macro_attribute`
+/// macro and parsing the arguments to the attribute macro, i.e. the ones
+/// written in the same attribute that dispatched the macro invocation. Rustc
+/// does not pass span information for the surrounding delimiters into the
+/// attribute macro invocation in this situation, so error messages might be
+/// less precise.
+///
+/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
+/// [`syn::meta::parser`]: crate::meta::parser
+#[non_exhaustive]
+pub struct ParseNestedMeta<'a> {
+ pub path: Path,
+ pub input: ParseStream<'a>,
+}
+
+impl<'a> ParseNestedMeta<'a> {
+ /// Used when parsing `key = "value"` syntax.
+ ///
+ /// All it does is advance `meta.input` past the `=` sign in the input. You
+ /// could accomplish the same effect by writing
+ /// `meta.parse::<Token![=]>()?`, so at most it is a minor convenience to
+ /// use `meta.value()?`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{parse_quote, Attribute, LitStr};
+ ///
+ /// let attr: Attribute = parse_quote! {
+ /// #[tea(kind = "EarlGrey")]
+ /// };
+ /// // conceptually:
+ /// if attr.path().is_ident("tea") { // this parses the `tea`
+ /// attr.parse_nested_meta(|meta| { // this parses the `(`
+ /// if meta.path.is_ident("kind") { // this parses the `kind`
+ /// let value = meta.value()?; // this parses the `=`
+    ///             let s: LitStr = value.parse()?; // this parses `"EarlGrey"`
+ /// if s.value() == "EarlGrey" {
+ /// // ...
+ /// }
+ /// Ok(())
+ /// } else {
+ /// Err(meta.error("unsupported attribute"))
+ /// }
+ /// })?;
+ /// }
+ /// # anyhow::Ok(())
+ /// ```
+ pub fn value(&self) -> Result<ParseStream<'a>> {
+ self.input.parse::<Token![=]>()?;
+ Ok(self.input)
+ }
+
+ /// Used when parsing `list(...)` syntax **if** the content inside the
+ /// nested parentheses is also expected to conform to Rust's structured
+ /// attribute convention.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{parse_quote, Attribute};
+ ///
+ /// let attr: Attribute = parse_quote! {
+ /// #[tea(with(sugar, milk))]
+ /// };
+ ///
+ /// if attr.path().is_ident("tea") {
+ /// attr.parse_nested_meta(|meta| {
+ /// if meta.path.is_ident("with") {
+ /// meta.parse_nested_meta(|meta| { // <---
+ /// if meta.path.is_ident("sugar") {
+ /// // Here we can go even deeper if needed.
+ /// Ok(())
+ /// } else if meta.path.is_ident("milk") {
+ /// Ok(())
+ /// } else {
+ /// Err(meta.error("unsupported ingredient"))
+ /// }
+ /// })
+ /// } else {
+ /// Err(meta.error("unsupported tea property"))
+ /// }
+ /// })?;
+ /// }
+ /// # anyhow::Ok(())
+ /// ```
+ ///
+ /// # Counterexample
+ ///
+ /// If you don't need `parse_nested_meta`'s help in parsing the content
+ /// written within the nested parentheses, keep in mind that you can always
+    /// just parse it yourself from the exposed ParseStream. Rust syntax permits
+ /// arbitrary tokens within those parentheses so for the crazier stuff,
+ /// `parse_nested_meta` is not what you want.
+ ///
+ /// ```
+ /// use syn::{parenthesized, parse_quote, Attribute, LitInt};
+ ///
+ /// let attr: Attribute = parse_quote! {
+ /// #[repr(align(32))]
+ /// };
+ ///
+ /// let mut align: Option<LitInt> = None;
+ /// if attr.path().is_ident("repr") {
+ /// attr.parse_nested_meta(|meta| {
+ /// if meta.path.is_ident("align") {
+ /// let content;
+ /// parenthesized!(content in meta.input);
+ /// align = Some(content.parse()?);
+ /// Ok(())
+ /// } else {
+ /// Err(meta.error("unsupported repr"))
+ /// }
+ /// })?;
+ /// }
+ /// # anyhow::Ok(())
+ /// ```
+ pub fn parse_nested_meta(
+ &self,
+ logic: impl FnMut(ParseNestedMeta) -> Result<()>,
+ ) -> Result<()> {
+ let content;
+ parenthesized!(content in self.input);
+ parse_nested_meta(&content, logic)
+ }
+
+ /// Report that the attribute's content did not conform to expectations.
+ ///
+ /// The span of the resulting error will cover `meta.path` *and* everything
+ /// that has been parsed so far since it.
+ ///
+ /// There are 2 ways you might call this. First, if `meta.path` is not
+ /// something you recognize:
+ ///
+ /// ```
+ /// # use syn::Attribute;
+ /// #
+ /// # fn example(attr: &Attribute) -> syn::Result<()> {
+ /// attr.parse_nested_meta(|meta| {
+ /// if meta.path.is_ident("kind") {
+ /// // ...
+ /// Ok(())
+ /// } else {
+ /// Err(meta.error("unsupported tea property"))
+ /// }
+ /// })?;
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// In this case, it behaves exactly like
+ /// `syn::Error::new_spanned(&meta.path, "message...")`.
+ ///
+ /// ```console
+ /// error: unsupported tea property
+ /// --> src/main.rs:3:26
+ /// |
+ /// 3 | #[tea(kind = "EarlGrey", wat = "foo")]
+ /// | ^^^
+ /// ```
+ ///
+ /// More usefully, the second place is if you've already parsed a value but
+ /// have decided not to accept the value:
+ ///
+ /// ```
+ /// # use syn::Attribute;
+ /// #
+ /// # fn example(attr: &Attribute) -> syn::Result<()> {
+ /// use syn::Expr;
+ ///
+ /// attr.parse_nested_meta(|meta| {
+ /// if meta.path.is_ident("kind") {
+ /// let expr: Expr = meta.value()?.parse()?;
+ /// match expr {
+ /// Expr::Lit(expr) => /* ... */
+ /// # unimplemented!(),
+ /// Expr::Path(expr) => /* ... */
+ /// # unimplemented!(),
+ /// Expr::Macro(expr) => /* ... */
+ /// # unimplemented!(),
+    ///         _ => Err(meta.error("tea kind must be a string literal, path, or macro")),
+ /// }
+ /// } else /* as above */
+ /// # { unimplemented!() }
+ ///
+ /// })?;
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// ```console
+ /// error: tea kind must be a string literal, path, or macro
+ /// --> src/main.rs:3:7
+ /// |
+ /// 3 | #[tea(kind = async { replicator.await })]
+ /// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ /// ```
+ ///
+ /// Often you may want to use `syn::Error::new_spanned` even in this
+ /// situation. In the above code, that would be:
+ ///
+ /// ```
+ /// # use syn::{Error, Expr};
+ /// #
+ /// # fn example(expr: Expr) -> syn::Result<()> {
+ /// match expr {
+ /// Expr::Lit(expr) => /* ... */
+ /// # unimplemented!(),
+ /// Expr::Path(expr) => /* ... */
+ /// # unimplemented!(),
+ /// Expr::Macro(expr) => /* ... */
+ /// # unimplemented!(),
+    ///     _ => Err(Error::new_spanned(expr, "unsupported expression type for `kind`")),
+ /// }
+ /// # }
+ /// ```
+ ///
+ /// ```console
+ /// error: unsupported expression type for `kind`
+ /// --> src/main.rs:3:14
+ /// |
+ /// 3 | #[tea(kind = async { replicator.await })]
+ /// | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ /// ```
+ pub fn error(&self, msg: impl Display) -> Error {
+ let start_span = self.path.segments[0].ident.span();
+ let end_span = self.input.cursor().prev_span();
+ crate::error::new2(start_span, end_span, msg)
+ }
+}
+
+pub(crate) fn parse_nested_meta(
+ input: ParseStream,
+ mut logic: impl FnMut(ParseNestedMeta) -> Result<()>,
+) -> Result<()> {
+ loop {
+ let path = input.call(parse_meta_path)?;
+ logic(ParseNestedMeta { path, input })?;
+ if input.is_empty() {
+ return Ok(());
+ }
+ input.parse::<Token![,]>()?;
+ if input.is_empty() {
+ return Ok(());
+ }
+ }
+}
+
+// Like Path::parse_mod_style, but accepts keywords in the path.
+fn parse_meta_path(input: ParseStream) -> Result<Path> {
+ Ok(Path {
+ leading_colon: input.parse()?,
+ segments: {
+ let mut segments = Punctuated::new();
+ if input.peek(Ident::peek_any) {
+ let ident = Ident::parse_any(input)?;
+ segments.push_value(PathSegment::from(ident));
+ } else if input.is_empty() {
+ return Err(input.error("expected nested attribute"));
+ } else if input.peek(Lit) {
+                return Err(input.error("unexpected literal in nested attribute, expected ident"));
+ } else {
+                return Err(input.error("unexpected token in nested attribute, expected ident"));
+ }
+ while input.peek(Token![::]) {
+ let punct = input.parse()?;
+ segments.push_punct(punct);
+ let ident = Ident::parse_any(input)?;
+ segments.push_value(PathSegment::from(ident));
+ }
+ segments
+ },
+ })
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/op.rs b/rust/hw/char/pl011/vendor/syn/src/op.rs
new file mode 100644
index 0000000000..575d9faa12
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/op.rs
@@ -0,0 +1,219 @@
+ast_enum! {
+ /// A binary operator: `+`, `+=`, `&`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum BinOp {
+ /// The `+` operator (addition)
+ Add(Token![+]),
+ /// The `-` operator (subtraction)
+ Sub(Token![-]),
+ /// The `*` operator (multiplication)
+ Mul(Token![*]),
+ /// The `/` operator (division)
+ Div(Token![/]),
+ /// The `%` operator (modulus)
+ Rem(Token![%]),
+ /// The `&&` operator (logical and)
+ And(Token![&&]),
+ /// The `||` operator (logical or)
+ Or(Token![||]),
+ /// The `^` operator (bitwise xor)
+ BitXor(Token![^]),
+ /// The `&` operator (bitwise and)
+ BitAnd(Token![&]),
+ /// The `|` operator (bitwise or)
+ BitOr(Token![|]),
+ /// The `<<` operator (shift left)
+ Shl(Token![<<]),
+ /// The `>>` operator (shift right)
+ Shr(Token![>>]),
+ /// The `==` operator (equality)
+ Eq(Token![==]),
+ /// The `<` operator (less than)
+ Lt(Token![<]),
+ /// The `<=` operator (less than or equal to)
+ Le(Token![<=]),
+ /// The `!=` operator (not equal to)
+ Ne(Token![!=]),
+ /// The `>=` operator (greater than or equal to)
+ Ge(Token![>=]),
+ /// The `>` operator (greater than)
+ Gt(Token![>]),
+ /// The `+=` operator
+ AddAssign(Token![+=]),
+ /// The `-=` operator
+ SubAssign(Token![-=]),
+ /// The `*=` operator
+ MulAssign(Token![*=]),
+ /// The `/=` operator
+ DivAssign(Token![/=]),
+ /// The `%=` operator
+ RemAssign(Token![%=]),
+ /// The `^=` operator
+ BitXorAssign(Token![^=]),
+ /// The `&=` operator
+ BitAndAssign(Token![&=]),
+ /// The `|=` operator
+ BitOrAssign(Token![|=]),
+ /// The `<<=` operator
+ ShlAssign(Token![<<=]),
+ /// The `>>=` operator
+ ShrAssign(Token![>>=]),
+ }
+}
+
+ast_enum! {
+ /// A unary operator: `*`, `!`, `-`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum UnOp {
+ /// The `*` operator for dereferencing
+ Deref(Token![*]),
+ /// The `!` operator for logical inversion
+ Not(Token![!]),
+ /// The `-` operator for negation
+ Neg(Token![-]),
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::error::Result;
+ use crate::op::{BinOp, UnOp};
+ use crate::parse::{Parse, ParseStream};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for BinOp {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Token![+=]) {
+ input.parse().map(BinOp::AddAssign)
+ } else if input.peek(Token![-=]) {
+ input.parse().map(BinOp::SubAssign)
+ } else if input.peek(Token![*=]) {
+ input.parse().map(BinOp::MulAssign)
+ } else if input.peek(Token![/=]) {
+ input.parse().map(BinOp::DivAssign)
+ } else if input.peek(Token![%=]) {
+ input.parse().map(BinOp::RemAssign)
+ } else if input.peek(Token![^=]) {
+ input.parse().map(BinOp::BitXorAssign)
+ } else if input.peek(Token![&=]) {
+ input.parse().map(BinOp::BitAndAssign)
+ } else if input.peek(Token![|=]) {
+ input.parse().map(BinOp::BitOrAssign)
+ } else if input.peek(Token![<<=]) {
+ input.parse().map(BinOp::ShlAssign)
+ } else if input.peek(Token![>>=]) {
+ input.parse().map(BinOp::ShrAssign)
+ } else if input.peek(Token![&&]) {
+ input.parse().map(BinOp::And)
+ } else if input.peek(Token![||]) {
+ input.parse().map(BinOp::Or)
+ } else if input.peek(Token![<<]) {
+ input.parse().map(BinOp::Shl)
+ } else if input.peek(Token![>>]) {
+ input.parse().map(BinOp::Shr)
+ } else if input.peek(Token![==]) {
+ input.parse().map(BinOp::Eq)
+ } else if input.peek(Token![<=]) {
+ input.parse().map(BinOp::Le)
+ } else if input.peek(Token![!=]) {
+ input.parse().map(BinOp::Ne)
+ } else if input.peek(Token![>=]) {
+ input.parse().map(BinOp::Ge)
+ } else if input.peek(Token![+]) {
+ input.parse().map(BinOp::Add)
+ } else if input.peek(Token![-]) {
+ input.parse().map(BinOp::Sub)
+ } else if input.peek(Token![*]) {
+ input.parse().map(BinOp::Mul)
+ } else if input.peek(Token![/]) {
+ input.parse().map(BinOp::Div)
+ } else if input.peek(Token![%]) {
+ input.parse().map(BinOp::Rem)
+ } else if input.peek(Token![^]) {
+ input.parse().map(BinOp::BitXor)
+ } else if input.peek(Token![&]) {
+ input.parse().map(BinOp::BitAnd)
+ } else if input.peek(Token![|]) {
+ input.parse().map(BinOp::BitOr)
+ } else if input.peek(Token![<]) {
+ input.parse().map(BinOp::Lt)
+ } else if input.peek(Token![>]) {
+ input.parse().map(BinOp::Gt)
+ } else {
+ Err(input.error("expected binary operator"))
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for UnOp {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Token![*]) {
+ input.parse().map(UnOp::Deref)
+ } else if lookahead.peek(Token![!]) {
+ input.parse().map(UnOp::Not)
+ } else if lookahead.peek(Token![-]) {
+ input.parse().map(UnOp::Neg)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::op::{BinOp, UnOp};
+ use proc_macro2::TokenStream;
+ use quote::ToTokens;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for BinOp {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ BinOp::Add(t) => t.to_tokens(tokens),
+ BinOp::Sub(t) => t.to_tokens(tokens),
+ BinOp::Mul(t) => t.to_tokens(tokens),
+ BinOp::Div(t) => t.to_tokens(tokens),
+ BinOp::Rem(t) => t.to_tokens(tokens),
+ BinOp::And(t) => t.to_tokens(tokens),
+ BinOp::Or(t) => t.to_tokens(tokens),
+ BinOp::BitXor(t) => t.to_tokens(tokens),
+ BinOp::BitAnd(t) => t.to_tokens(tokens),
+ BinOp::BitOr(t) => t.to_tokens(tokens),
+ BinOp::Shl(t) => t.to_tokens(tokens),
+ BinOp::Shr(t) => t.to_tokens(tokens),
+ BinOp::Eq(t) => t.to_tokens(tokens),
+ BinOp::Lt(t) => t.to_tokens(tokens),
+ BinOp::Le(t) => t.to_tokens(tokens),
+ BinOp::Ne(t) => t.to_tokens(tokens),
+ BinOp::Ge(t) => t.to_tokens(tokens),
+ BinOp::Gt(t) => t.to_tokens(tokens),
+ BinOp::AddAssign(t) => t.to_tokens(tokens),
+ BinOp::SubAssign(t) => t.to_tokens(tokens),
+ BinOp::MulAssign(t) => t.to_tokens(tokens),
+ BinOp::DivAssign(t) => t.to_tokens(tokens),
+ BinOp::RemAssign(t) => t.to_tokens(tokens),
+ BinOp::BitXorAssign(t) => t.to_tokens(tokens),
+ BinOp::BitAndAssign(t) => t.to_tokens(tokens),
+ BinOp::BitOrAssign(t) => t.to_tokens(tokens),
+ BinOp::ShlAssign(t) => t.to_tokens(tokens),
+ BinOp::ShrAssign(t) => t.to_tokens(tokens),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for UnOp {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ UnOp::Deref(t) => t.to_tokens(tokens),
+ UnOp::Not(t) => t.to_tokens(tokens),
+ UnOp::Neg(t) => t.to_tokens(tokens),
+ }
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/parse.rs b/rust/hw/char/pl011/vendor/syn/src/parse.rs
new file mode 100644
index 0000000000..a80a914760
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/parse.rs
@@ -0,0 +1,1397 @@
+//! Parsing interface for parsing a token stream into a syntax tree node.
+//!
+//! Parsing in Syn is built on parser functions that take in a [`ParseStream`]
+//! and produce a [`Result<T>`] where `T` is some syntax tree node. Underlying
+//! these parser functions is a lower level mechanism built around the
+//! [`Cursor`] type. `Cursor` is a cheaply copyable cursor over a range of
+//! tokens in a token stream.
+//!
+//! [`Result<T>`]: Result
+//! [`Cursor`]: crate::buffer::Cursor
+//!
+//! # Example
+//!
+//! Here is a snippet of parsing code to get a feel for the style of the
+//! library. We define data structures for a subset of Rust syntax including
+//! enums (not shown) and structs, then provide implementations of the [`Parse`]
+//! trait to parse these syntax tree data structures from a token stream.
+//!
+//! Once `Parse` impls have been defined, they can be called conveniently from a
+//! procedural macro through [`parse_macro_input!`] as shown at the bottom of
+//! the snippet. If the caller provides syntactically invalid input to the
+//! procedural macro, they will receive a helpful compiler error message
+//! pointing out the exact token that triggered the failure to parse.
+//!
+//! [`parse_macro_input!`]: crate::parse_macro_input!
+//!
+//! ```
+//! # extern crate proc_macro;
+//! #
+//! use proc_macro::TokenStream;
+//! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+//! use syn::parse::{Parse, ParseStream};
+//! use syn::punctuated::Punctuated;
+//!
+//! enum Item {
+//! Struct(ItemStruct),
+//! Enum(ItemEnum),
+//! }
+//!
+//! struct ItemStruct {
+//! struct_token: Token![struct],
+//! ident: Ident,
+//! brace_token: token::Brace,
+//! fields: Punctuated<Field, Token![,]>,
+//! }
+//! #
+//! # enum ItemEnum {}
+//!
+//! impl Parse for Item {
+//! fn parse(input: ParseStream) -> Result<Self> {
+//! let lookahead = input.lookahead1();
+//! if lookahead.peek(Token![struct]) {
+//! input.parse().map(Item::Struct)
+//! } else if lookahead.peek(Token![enum]) {
+//! input.parse().map(Item::Enum)
+//! } else {
+//! Err(lookahead.error())
+//! }
+//! }
+//! }
+//!
+//! impl Parse for ItemStruct {
+//! fn parse(input: ParseStream) -> Result<Self> {
+//! let content;
+//! Ok(ItemStruct {
+//! struct_token: input.parse()?,
+//! ident: input.parse()?,
+//! brace_token: braced!(content in input),
+//!             fields: content.parse_terminated(Field::parse_named, Token![,])?,
+//! })
+//! }
+//! }
+//! #
+//! # impl Parse for ItemEnum {
+//! # fn parse(input: ParseStream) -> Result<Self> {
+//! # unimplemented!()
+//! # }
+//! # }
+//!
+//! # const IGNORE: &str = stringify! {
+//! #[proc_macro]
+//! # };
+//! pub fn my_macro(tokens: TokenStream) -> TokenStream {
+//! let input = parse_macro_input!(tokens as Item);
+//!
+//! /* ... */
+//! # TokenStream::new()
+//! }
+//! ```
+//!
+//! # The `syn::parse*` functions
+//!
+//! The [`syn::parse`], [`syn::parse2`], and [`syn::parse_str`] functions serve
+//! as an entry point for parsing syntax tree nodes that can be parsed in an
+//! obvious default way. These functions can return any syntax tree node that
+//! implements the [`Parse`] trait, which includes most types in Syn.
+//!
+//! [`syn::parse`]: crate::parse()
+//! [`syn::parse2`]: crate::parse2()
+//! [`syn::parse_str`]: crate::parse_str()
+//!
+//! ```
+//! use syn::Type;
+//!
+//! # fn run_parser() -> syn::Result<()> {
+//! let t: Type = syn::parse_str("std::collections::HashMap<String, Value>")?;
+//! # Ok(())
+//! # }
+//! #
+//! # run_parser().unwrap();
+//! ```
+//!
+//! The [`parse_quote!`] macro also uses this approach.
+//!
+//! [`parse_quote!`]: crate::parse_quote!
+//!
+//! # The `Parser` trait
+//!
+//! Some types can be parsed in several ways depending on context. For example
+//! an [`Attribute`] can be either "outer" like `#[...]` or "inner" like
+//! `#![...]` and parsing the wrong one would be a bug. Similarly [`Punctuated`]
+//! may or may not allow trailing punctuation, and parsing it the wrong way
+//! would either reject valid input or accept invalid input.
+//!
+//! [`Attribute`]: crate::Attribute
+//! [`Punctuated`]: crate::punctuated
+//!
+//! The `Parse` trait is not implemented in these cases because there is no good
+//! behavior to consider the default.
+//!
+//! ```compile_fail
+//! # extern crate proc_macro;
+//! #
+//! # use syn::punctuated::Punctuated;
+//! # use syn::{PathSegment, Result, Token};
+//! #
+//! # fn f(tokens: proc_macro::TokenStream) -> Result<()> {
+//! #
+//! // Can't parse `Punctuated` without knowing whether trailing punctuation
+//! // should be allowed in this context.
+//! let path: Punctuated<PathSegment, Token![::]> = syn::parse(tokens)?;
+//! #
+//! # Ok(())
+//! # }
+//! ```
+//!
+//! In these cases the types provide a choice of parser functions rather than a
+//! single `Parse` implementation, and those parser functions can be invoked
+//! through the [`Parser`] trait.
+//!
+//!
+//! ```
+//! # extern crate proc_macro;
+//! #
+//! use proc_macro::TokenStream;
+//! use syn::parse::Parser;
+//! use syn::punctuated::Punctuated;
+//! use syn::{Attribute, Expr, PathSegment, Result, Token};
+//!
+//! fn call_some_parser_methods(input: TokenStream) -> Result<()> {
+//!     // Parse a nonempty sequence of path segments separated by `::` punctuation
+//! // with no trailing punctuation.
+//! let tokens = input.clone();
+//!     let parser = Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty;
+//! let _path = parser.parse(tokens)?;
+//!
+//!     // Parse a possibly empty sequence of expressions terminated by commas with
+//! // an optional trailing punctuation.
+//! let tokens = input.clone();
+//! let parser = Punctuated::<Expr, Token![,]>::parse_terminated;
+//! let _args = parser.parse(tokens)?;
+//!
+//! // Parse zero or more outer attributes but not inner attributes.
+//! let tokens = input.clone();
+//! let parser = Attribute::parse_outer;
+//! let _attrs = parser.parse(tokens)?;
+//!
+//! Ok(())
+//! }
+//! ```
+
+#[path = "discouraged.rs"]
+pub mod discouraged;
+
+use crate::buffer::{Cursor, TokenBuffer};
+use crate::error;
+use crate::lookahead;
+use crate::punctuated::Punctuated;
+use crate::token::Token;
+use proc_macro2::{Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree};
+#[cfg(feature = "printing")]
+use quote::ToTokens;
+use std::cell::Cell;
+use std::fmt::{self, Debug, Display};
+#[cfg(feature = "extra-traits")]
+use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
+use std::mem;
+use std::ops::Deref;
+use std::panic::{RefUnwindSafe, UnwindSafe};
+use std::rc::Rc;
+use std::str::FromStr;
+
+pub use crate::error::{Error, Result};
+pub use crate::lookahead::{Lookahead1, Peek};
+
+/// Parsing interface implemented by all types that can be parsed in a default
+/// way from a token stream.
+///
+/// Refer to the [module documentation] for details about implementing and using
+/// the `Parse` trait.
+///
+/// [module documentation]: self
+pub trait Parse: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+}
+
+/// Input to a Syn parser function.
+///
+/// See the methods of this type under the documentation of [`ParseBuffer`]. For
+/// an overview of parsing in Syn, refer to the [module documentation].
+///
+/// [module documentation]: self
+pub type ParseStream<'a> = &'a ParseBuffer<'a>;
+
+/// Cursor position within a buffered token stream.
+///
+/// This type is more commonly used through the type alias [`ParseStream`] which
+/// is an alias for `&ParseBuffer`.
+///
+/// `ParseStream` is the input type for all parser functions in Syn. They have
+/// the signature `fn(ParseStream) -> Result<T>`.
+///
+/// ## Calling a parser function
+///
+/// There is no public way to construct a `ParseBuffer`. Instead, if you are
+/// looking to invoke a parser function that requires `ParseStream` as input,
+/// you will need to go through one of the public parsing entry points.
+///
+/// - The [`parse_macro_input!`] macro if parsing input of a procedural macro;
+/// - One of [the `syn::parse*` functions][syn-parse]; or
+/// - A method of the [`Parser`] trait.
+///
+/// [`parse_macro_input!`]: crate::parse_macro_input!
+/// [syn-parse]: self#the-synparse-functions
+pub struct ParseBuffer<'a> {
+ scope: Span,
+ // Instead of Cell<Cursor<'a>> so that ParseBuffer<'a> is covariant in 'a.
+ // The rest of the code in this module needs to be careful that only a
+ // cursor derived from this `cell` is ever assigned to this `cell`.
+ //
+ // Cell<Cursor<'a>> cannot be covariant in 'a because then we could take a
+ // ParseBuffer<'a>, upcast to ParseBuffer<'short> for some lifetime shorter
+ // than 'a, and then assign a Cursor<'short> into the Cell.
+ //
+ // By extension, it would not be safe to expose an API that accepts a
+ // Cursor<'a> and trusts that it lives as long as the cursor currently in
+ // the cell.
+ cell: Cell<Cursor<'static>>,
+ marker: PhantomData<Cursor<'a>>,
+ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
+}
+
+impl<'a> Drop for ParseBuffer<'a> {
+ fn drop(&mut self) {
+        if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
+ let (inner, old_span) = inner_unexpected(self);
+ if old_span.is_none() {
+ inner.set(Unexpected::Some(unexpected_span));
+ }
+ }
+ }
+}
+
+impl<'a> Display for ParseBuffer<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.cursor().token_stream(), f)
+ }
+}
+
+impl<'a> Debug for ParseBuffer<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.cursor().token_stream(), f)
+ }
+}
+
+impl<'a> UnwindSafe for ParseBuffer<'a> {}
+impl<'a> RefUnwindSafe for ParseBuffer<'a> {}
+
+/// Cursor state associated with speculative parsing.
+///
+/// This type is the input of the closure provided to [`ParseStream::step`].
+///
+/// [`ParseStream::step`]: ParseBuffer::step
+///
+/// # Example
+///
+/// ```
+/// use proc_macro2::TokenTree;
+/// use syn::Result;
+/// use syn::parse::ParseStream;
+///
+/// // This function advances the stream past the next occurrence of `@`. If
+/// // no `@` is present in the stream, the stream position is unchanged and
+/// // an error is returned.
+/// fn skip_past_next_at(input: ParseStream) -> Result<()> {
+/// input.step(|cursor| {
+/// let mut rest = *cursor;
+/// while let Some((tt, next)) = rest.token_tree() {
+/// match &tt {
+/// TokenTree::Punct(punct) if punct.as_char() == '@' => {
+/// return Ok(((), next));
+/// }
+/// _ => rest = next,
+/// }
+/// }
+/// Err(cursor.error("no `@` was found after this point"))
+/// })
+/// }
+/// #
+/// # fn remainder_after_skipping_past_next_at(
+/// # input: ParseStream,
+/// # ) -> Result<proc_macro2::TokenStream> {
+/// # skip_past_next_at(input)?;
+/// # input.parse()
+/// # }
+/// #
+/// # use syn::parse::Parser;
+/// # let remainder = remainder_after_skipping_past_next_at
+/// # .parse_str("a @ b c")
+/// # .unwrap();
+/// # assert_eq!(remainder.to_string(), "b c");
+/// ```
+pub struct StepCursor<'c, 'a> {
+ scope: Span,
+ // This field is covariant in 'c.
+ cursor: Cursor<'c>,
+ // This field is contravariant in 'c. Together these make StepCursor
+ // invariant in 'c. Also covariant in 'a. The user cannot cast 'c to a
+ // different lifetime but can upcast into a StepCursor with a shorter
+ // lifetime 'a.
+ //
+ // As long as we only ever construct a StepCursor for which 'c outlives 'a,
+ // this means if ever a StepCursor<'c, 'a> exists we are guaranteed that 'c
+ // outlives 'a.
+ marker: PhantomData<fn(Cursor<'c>) -> Cursor<'a>>,
+}
+
+impl<'c, 'a> Deref for StepCursor<'c, 'a> {
+ type Target = Cursor<'c>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.cursor
+ }
+}
+
+impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
+
+impl<'c, 'a> Clone for StepCursor<'c, 'a> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<'c, 'a> StepCursor<'c, 'a> {
+ /// Triggers an error at the current position of the parse stream.
+ ///
+ /// The `ParseStream::step` invocation will return this same error without
+ /// advancing the stream state.
+ pub fn error<T: Display>(self, message: T) -> Error {
+ error::new_at(self.scope, self.cursor, message)
+ }
+}
+
+pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<'c>) -> Cursor<'a> {
+ // Refer to the comments within the StepCursor definition. We use the
+ // fact that a StepCursor<'c, 'a> exists as proof that 'c outlives 'a.
+ // Cursor is covariant in its lifetime parameter so we can cast a
+ // Cursor<'c> to one with the shorter lifetime Cursor<'a>.
+ let _ = proof;
+ unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
+}
+
+pub(crate) fn new_parse_buffer(
+ scope: Span,
+ cursor: Cursor,
+ unexpected: Rc<Cell<Unexpected>>,
+) -> ParseBuffer {
+ ParseBuffer {
+ scope,
+ // See comment on `cell` in the struct definition.
+ cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
+ marker: PhantomData,
+ unexpected: Cell::new(Some(unexpected)),
+ }
+}
+
+pub(crate) enum Unexpected {
+ None,
+ Some(Span),
+ Chain(Rc<Cell<Unexpected>>),
+}
+
+impl Default for Unexpected {
+ fn default() -> Self {
+ Unexpected::None
+ }
+}
+
+impl Clone for Unexpected {
+ fn clone(&self) -> Self {
+ match self {
+ Unexpected::None => Unexpected::None,
+ Unexpected::Some(span) => Unexpected::Some(*span),
+ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
+ }
+ }
+}
+
+// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
+// swapping in a None is cheap.
+fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
+ let prev = cell.take();
+ let ret = prev.clone();
+ cell.set(prev);
+ ret
+}
+
+fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
+ let mut unexpected = get_unexpected(buffer);
+ loop {
+ match cell_clone(&unexpected) {
+ Unexpected::None => return (unexpected, None),
+ Unexpected::Some(span) => return (unexpected, Some(span)),
+ Unexpected::Chain(next) => unexpected = next,
+ }
+ }
+}
+
+pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
+ cell_clone(&buffer.unexpected).unwrap()
+}
+
+fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
+ if cursor.eof() {
+ return None;
+ }
+ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
+ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
+ return Some(unexpected);
+ }
+ cursor = rest;
+ }
+ if cursor.eof() {
+ None
+ } else {
+ Some(cursor.span())
+ }
+}
+
+impl<'a> ParseBuffer<'a> {
+ /// Parses a syntax tree node of type `T`, advancing the position of our
+ /// parse stream past it.
+ pub fn parse<T: Parse>(&self) -> Result<T> {
+ T::parse(self)
+ }
+
+ /// Calls the given parser function to parse a syntax tree node of type `T`
+ /// from this stream.
+ ///
+ /// # Example
+ ///
+ /// The parser below invokes [`Attribute::parse_outer`] to parse a vector of
+ /// zero or more outer attributes.
+ ///
+ /// [`Attribute::parse_outer`]: crate::Attribute::parse_outer
+ ///
+ /// ```
+ /// use syn::{Attribute, Ident, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // Parses a unit struct with attributes.
+ /// //
+ /// // #[path = "s.tmpl"]
+ /// // struct S;
+ /// struct UnitStruct {
+ /// attrs: Vec<Attribute>,
+ /// struct_token: Token![struct],
+ /// name: Ident,
+ /// semi_token: Token![;],
+ /// }
+ ///
+ /// impl Parse for UnitStruct {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// Ok(UnitStruct {
+ /// attrs: input.call(Attribute::parse_outer)?,
+ /// struct_token: input.parse()?,
+ /// name: input.parse()?,
+ /// semi_token: input.parse()?,
+ /// })
+ /// }
+ /// }
+ /// ```
+ pub fn call<T>(&self, function: fn(ParseStream) -> Result<T>) -> Result<T> {
+ function(self)
+ }
+
+ /// Looks at the next token in the parse stream to determine whether it
+ /// matches the requested type of token.
+ ///
+ /// Does not advance the position of the parse stream.
+ ///
+ /// # Syntax
+ ///
+ /// Note that this method does not use turbofish syntax. Pass the peek type
+ /// inside of parentheses.
+ ///
+ /// - `input.peek(Token![struct])`
+ /// - `input.peek(Token![==])`
+ /// - `input.peek(syn::Ident)` *(does not accept keywords)*
+ /// - `input.peek(syn::Ident::peek_any)`
+ /// - `input.peek(Lifetime)`
+ /// - `input.peek(token::Brace)`
+ ///
+ /// # Example
+ ///
+ /// In this example we finish parsing the list of supertraits when the next
+ /// token in the input is either `where` or an opening curly brace.
+ ///
+ /// ```
+ /// use syn::{braced, token, Generics, Ident, Result, Token, TypeParamBound};
+ /// use syn::parse::{Parse, ParseStream};
+ /// use syn::punctuated::Punctuated;
+ ///
+ /// // Parses a trait definition containing no associated items.
+ /// //
+ /// // trait Marker<'de, T>: A + B<'de> where Box<T>: Clone {}
+ /// struct MarkerTrait {
+ /// trait_token: Token![trait],
+ /// ident: Ident,
+ /// generics: Generics,
+ /// colon_token: Option<Token![:]>,
+ /// supertraits: Punctuated<TypeParamBound, Token![+]>,
+ /// brace_token: token::Brace,
+ /// }
+ ///
+ /// impl Parse for MarkerTrait {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let trait_token: Token![trait] = input.parse()?;
+ /// let ident: Ident = input.parse()?;
+ /// let mut generics: Generics = input.parse()?;
+ /// let colon_token: Option<Token![:]> = input.parse()?;
+ ///
+ /// let mut supertraits = Punctuated::new();
+ /// if colon_token.is_some() {
+ /// loop {
+ /// supertraits.push_value(input.parse()?);
+ /// if input.peek(Token![where]) || input.peek(token::Brace) {
+ /// break;
+ /// }
+ /// supertraits.push_punct(input.parse()?);
+ /// }
+ /// }
+ ///
+ /// generics.where_clause = input.parse()?;
+ /// let content;
+ /// let empty_brace_token = braced!(content in input);
+ ///
+ /// Ok(MarkerTrait {
+ /// trait_token,
+ /// ident,
+ /// generics,
+ /// colon_token,
+ /// supertraits,
+ /// brace_token: empty_brace_token,
+ /// })
+ /// }
+ /// }
+ /// ```
+ pub fn peek<T: Peek>(&self, token: T) -> bool {
+ let _ = token;
+ T::Token::peek(self.cursor())
+ }
+
+ /// Looks at the second-next token in the parse stream.
+ ///
+ /// This is commonly useful as a way to implement contextual keywords.
+ ///
+ /// # Example
+ ///
+ /// This example needs to use `peek2` because the symbol `union` is not a
+ /// keyword in Rust. We can't use just `peek` and decide to parse a union if
+ /// the very next token is `union`, because someone is free to write a `mod
+ /// union` and a macro invocation that looks like `union::some_macro! { ...
+ /// }`. In other words `union` is a contextual keyword.
+ ///
+ /// ```
+ /// use syn::{Ident, ItemUnion, Macro, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // Parses either a union or a macro invocation.
+ /// enum UnionOrMacro {
+ /// // union MaybeUninit<T> { uninit: (), value: T }
+ /// Union(ItemUnion),
+ /// // lazy_static! { ... }
+ /// Macro(Macro),
+ /// }
+ ///
+ /// impl Parse for UnionOrMacro {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// if input.peek(Token![union]) && input.peek2(Ident) {
+ /// input.parse().map(UnionOrMacro::Union)
+ /// } else {
+ /// input.parse().map(UnionOrMacro::Macro)
+ /// }
+ /// }
+ /// }
+ /// ```
+ pub fn peek2<T: Peek>(&self, token: T) -> bool {
+ fn peek2(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool {
+ buffer.cursor().skip().map_or(false, peek)
+ }
+
+ let _ = token;
+ peek2(self, T::Token::peek)
+ }
+
+ /// Looks at the third-next token in the parse stream.
+ pub fn peek3<T: Peek>(&self, token: T) -> bool {
+ fn peek3(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool {
+ buffer
+ .cursor()
+ .skip()
+ .and_then(Cursor::skip)
+ .map_or(false, peek)
+ }
+
+ let _ = token;
+ peek3(self, T::Token::peek)
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # use quote::quote;
+ /// #
+ /// use syn::{parenthesized, token, Ident, Result, Token, Type};
+ /// use syn::parse::{Parse, ParseStream};
+ /// use syn::punctuated::Punctuated;
+ ///
+ /// // Parse a simplified tuple struct syntax like:
+ /// //
+ /// // struct S(A, B);
+ /// struct TupleStruct {
+ /// struct_token: Token![struct],
+ /// ident: Ident,
+ /// paren_token: token::Paren,
+ /// fields: Punctuated<Type, Token![,]>,
+ /// semi_token: Token![;],
+ /// }
+ ///
+ /// impl Parse for TupleStruct {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let content;
+ /// Ok(TupleStruct {
+ /// struct_token: input.parse()?,
+ /// ident: input.parse()?,
+ /// paren_token: parenthesized!(content in input),
+ /// fields: content.parse_terminated(Type::parse, Token![,])?,
+ /// semi_token: input.parse()?,
+ /// })
+ /// }
+ /// }
+ /// #
+ /// # let input = quote! {
+ /// # struct S(A, B);
+ /// # };
+ /// # syn::parse2::<TupleStruct>(input).unwrap();
+ /// ```
+ ///
+ /// # See also
+ ///
+ /// If your separator is anything more complicated than an invocation of the
+ /// `Token!` macro, this method won't be applicable and you can instead
+ /// directly use `Punctuated`'s parser functions: [`parse_terminated`],
+ /// [`parse_separated_nonempty`] etc.
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ ///
+ /// ```
+ /// use syn::{custom_keyword, Expr, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ /// use syn::punctuated::Punctuated;
+ ///
+ /// mod kw {
+ /// syn::custom_keyword!(fin);
+ /// }
+ ///
+ /// struct Fin(kw::fin, Token![;]);
+ ///
+ /// impl Parse for Fin {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// Ok(Self(input.parse()?, input.parse()?))
+ /// }
+ /// }
+ ///
+ /// struct Thing {
+ /// steps: Punctuated<Expr, Fin>,
+ /// }
+ ///
+ /// impl Parse for Thing {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// # if true {
+ /// Ok(Thing {
+ /// steps: Punctuated::parse_terminated(input)?,
+ /// })
+ /// # } else {
+ /// // or equivalently, this means the same thing:
+ /// # Ok(Thing {
+ /// steps: input.call(Punctuated::parse_terminated)?,
+ /// # })
+ /// # }
+ /// }
+ /// }
+ /// ```
+ pub fn parse_terminated<T, P>(
+ &self,
+ parser: fn(ParseStream) -> Result<T>,
+ separator: P,
+ ) -> Result<Punctuated<T, P::Token>>
+ where
+ P: Peek,
+ P::Token: Parse,
+ {
+ let _ = separator;
+ Punctuated::parse_terminated_with(self, parser)
+ }
+
+ /// Returns whether there are no more tokens remaining in this stream.
+ ///
+ /// This method returns true at the end of the content of a set of
+ /// delimiters, as well as at the very end of the complete macro input.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{braced, token, Ident, Item, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // Parses a Rust `mod m { ... }` containing zero or more items.
+ /// struct Mod {
+ /// mod_token: Token![mod],
+ /// name: Ident,
+ /// brace_token: token::Brace,
+ /// items: Vec<Item>,
+ /// }
+ ///
+ /// impl Parse for Mod {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let content;
+ /// Ok(Mod {
+ /// mod_token: input.parse()?,
+ /// name: input.parse()?,
+ /// brace_token: braced!(content in input),
+ /// items: {
+ /// let mut items = Vec::new();
+ /// while !content.is_empty() {
+ /// items.push(content.parse()?);
+ /// }
+ /// items
+ /// },
+ /// })
+ /// }
+ /// }
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.cursor().eof()
+ }
+
+ /// Constructs a helper for peeking at the next token in this stream and
+ /// building an error message if it is not one of a set of expected tokens.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{ConstParam, Ident, Lifetime, LifetimeParam, Result, Token, TypeParam};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // A generic parameter, a single one of the comma-separated elements inside
+ /// // angle brackets in:
+ /// //
+ /// // fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... }
+ /// //
+ /// // On invalid input, lookahead gives us a reasonable error message.
+ /// //
+ /// // error: expected one of: identifier, lifetime, `const`
+ /// // |
+ /// // 5 | fn f<!Sized>() {}
+ /// // | ^
+ /// enum GenericParam {
+ /// Type(TypeParam),
+ /// Lifetime(LifetimeParam),
+ /// Const(ConstParam),
+ /// }
+ ///
+ /// impl Parse for GenericParam {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let lookahead = input.lookahead1();
+ /// if lookahead.peek(Ident) {
+ /// input.parse().map(GenericParam::Type)
+ /// } else if lookahead.peek(Lifetime) {
+ /// input.parse().map(GenericParam::Lifetime)
+ /// } else if lookahead.peek(Token![const]) {
+ /// input.parse().map(GenericParam::Const)
+ /// } else {
+ /// Err(lookahead.error())
+ /// }
+ /// }
+ /// }
+ /// ```
+ pub fn lookahead1(&self) -> Lookahead1<'a> {
+ lookahead::new(self.scope, self.cursor())
+ }
+
+ /// Forks a parse stream so that parsing tokens out of either the original
+ /// or the fork does not advance the position of the other.
+ ///
+ /// # Performance
+ ///
+ /// Forking a parse stream is a cheap fixed amount of work and does not
+ /// involve copying token buffers. Where you might hit performance problems
+ /// is if your macro ends up parsing a large amount of content more than
+ /// once.
+ ///
+ /// ```
+ /// # use syn::{Expr, Result};
+ /// # use syn::parse::ParseStream;
+ /// #
+ /// # fn bad(input: ParseStream) -> Result<Expr> {
+ /// // Do not do this.
+ /// if input.fork().parse::<Expr>().is_ok() {
+ /// return input.parse::<Expr>();
+ /// }
+ /// # unimplemented!()
+ /// # }
+ /// ```
+ ///
+ /// As a rule, avoid parsing an unbounded amount of tokens out of a forked
+ /// parse stream. Only use a fork when the amount of work performed against
+ /// the fork is small and bounded.
+ ///
+ /// When complex speculative parsing against the forked stream is
+ /// unavoidable, use [`parse::discouraged::Speculative`] to advance the
+ /// original stream once the fork's parse is determined to have been
+ /// successful.
+ ///
+ /// For a lower level way to perform speculative parsing at the token level,
+ /// consider using [`ParseStream::step`] instead.
+ ///
+ /// [`parse::discouraged::Speculative`]: discouraged::Speculative
+ /// [`ParseStream::step`]: ParseBuffer::step
+ ///
+ /// # Example
+ ///
+ /// The parse implementation shown here parses possibly restricted `pub`
+ /// visibilities.
+ ///
+ /// - `pub`
+ /// - `pub(crate)`
+ /// - `pub(self)`
+ /// - `pub(super)`
+ /// - `pub(in some::path)`
+ ///
+ /// To handle the case of visibilities inside of tuple structs, the parser
+ /// needs to distinguish parentheses that specify visibility restrictions
+ /// from parentheses that form part of a tuple type.
+ ///
+ /// ```
+ /// # struct A;
+ /// # struct B;
+ /// # struct C;
+ /// #
+ /// struct S(pub(crate) A, pub (B, C));
+ /// ```
+ ///
+ /// In this example input the first tuple struct element of `S` has
+ /// `pub(crate)` visibility while the second tuple struct element has `pub`
+ /// visibility; the parentheses around `(B, C)` are part of the type rather
+ /// than part of a visibility restriction.
+ ///
+ /// The parser uses a forked parse stream to check the first token inside of
+ /// parentheses after the `pub` keyword. This is a small bounded amount of
+ /// work performed against the forked parse stream.
+ ///
+ /// ```
+ /// use syn::{parenthesized, token, Ident, Path, Result, Token};
+ /// use syn::ext::IdentExt;
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// struct PubVisibility {
+ /// pub_token: Token![pub],
+ /// restricted: Option<Restricted>,
+ /// }
+ ///
+ /// struct Restricted {
+ /// paren_token: token::Paren,
+ /// in_token: Option<Token![in]>,
+ /// path: Path,
+ /// }
+ ///
+ /// impl Parse for PubVisibility {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let pub_token: Token![pub] = input.parse()?;
+ ///
+ /// if input.peek(token::Paren) {
+ /// let ahead = input.fork();
+ /// let mut content;
+ /// parenthesized!(content in ahead);
+ ///
+ /// if content.peek(Token![crate])
+ /// || content.peek(Token![self])
+ /// || content.peek(Token![super])
+ /// {
+ /// return Ok(PubVisibility {
+ /// pub_token,
+ /// restricted: Some(Restricted {
+ /// paren_token: parenthesized!(content in input),
+ /// in_token: None,
+ /// path: Path::from(content.call(Ident::parse_any)?),
+ /// }),
+ /// });
+ /// } else if content.peek(Token![in]) {
+ /// return Ok(PubVisibility {
+ /// pub_token,
+ /// restricted: Some(Restricted {
+ /// paren_token: parenthesized!(content in input),
+ /// in_token: Some(content.parse()?),
+ /// path: content.call(Path::parse_mod_style)?,
+ /// }),
+ /// });
+ /// }
+ /// }
+ ///
+ /// Ok(PubVisibility {
+ /// pub_token,
+ /// restricted: None,
+ /// })
+ /// }
+ /// }
+ /// ```
+ pub fn fork(&self) -> Self {
+ ParseBuffer {
+ scope: self.scope,
+ cell: self.cell.clone(),
+ marker: PhantomData,
+ // Not the parent's unexpected. Nothing cares whether the clone
+ // parses all the way unless we `advance_to`.
+ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
+ }
+ }
+
+ /// Triggers an error at the current position of the parse stream.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{Expr, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // Some kind of loop: `while` or `for` or `loop`.
+ /// struct Loop {
+ /// expr: Expr,
+ /// }
+ ///
+ /// impl Parse for Loop {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// if input.peek(Token![while])
+ /// || input.peek(Token![for])
+ /// || input.peek(Token![loop])
+ /// {
+ /// Ok(Loop {
+ /// expr: input.parse()?,
+ /// })
+ /// } else {
+ /// Err(input.error("expected some kind of loop"))
+ /// }
+ /// }
+ /// }
+ /// ```
+ pub fn error<T: Display>(&self, message: T) -> Error {
+ error::new_at(self.scope, self.cursor(), message)
+ }
+
+ /// Speculatively parses tokens from this parse stream, advancing the
+ /// position of this stream only if parsing succeeds.
+ ///
+ /// This is a powerful low-level API used for defining the `Parse` impls of
+ /// the basic built-in token types. It is not something that will be used
+ /// widely outside of the Syn codebase.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use proc_macro2::TokenTree;
+ /// use syn::Result;
+ /// use syn::parse::ParseStream;
+ ///
+ /// // This function advances the stream past the next occurrence of `@`. If
+ /// // no `@` is present in the stream, the stream position is unchanged and
+ /// // an error is returned.
+ /// fn skip_past_next_at(input: ParseStream) -> Result<()> {
+ /// input.step(|cursor| {
+ /// let mut rest = *cursor;
+ /// while let Some((tt, next)) = rest.token_tree() {
+ /// match &tt {
+ /// TokenTree::Punct(punct) if punct.as_char() == '@' => {
+ /// return Ok(((), next));
+ /// }
+ /// _ => rest = next,
+ /// }
+ /// }
+ /// Err(cursor.error("no `@` was found after this point"))
+ /// })
+ /// }
+ /// #
+ /// # fn remainder_after_skipping_past_next_at(
+ /// # input: ParseStream,
+ /// # ) -> Result<proc_macro2::TokenStream> {
+ /// # skip_past_next_at(input)?;
+ /// # input.parse()
+ /// # }
+ /// #
+ /// # use syn::parse::Parser;
+ /// # let remainder = remainder_after_skipping_past_next_at
+ /// # .parse_str("a @ b c")
+ /// # .unwrap();
+ /// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+ pub fn step<F, R>(&self, function: F) -> Result<R>
+ where
+ F: for<'c> FnOnce(StepCursor<'c, 'a>) -> Result<(R, Cursor<'c>)>,
+ {
+ // Since the user's function is required to work for any 'c, we know
+ // that the Cursor<'c> they return is either derived from the input
+ // StepCursor<'c, 'a> or from a Cursor<'static>.
+ //
+ // It would not be legal to write this function without the invariant
+ // lifetime 'c in StepCursor<'c, 'a>. If this function were written only
+ // in terms of 'a, the user could take our ParseBuffer<'a>, upcast it to
+ // a ParseBuffer<'short> for some lifetime shorter than 'a, invoke
+ // `step` on their ParseBuffer<'short> with a closure that returns
+ // Cursor<'short>, and we would wrongly write that Cursor<'short> into
+ // the Cell intended to hold Cursor<'a>.
+ //
+ // In some cases it may be necessary for R to contain a Cursor<'a>.
+ // Within Syn we solve this using `advance_step_cursor` which uses the
+ // existence of a StepCursor<'c, 'a> as proof that it is safe to cast
+ // from Cursor<'c> to Cursor<'a>. If needed outside of Syn, it would be
+ // safe to expose that API as a method on StepCursor.
+ let (node, rest) = function(StepCursor {
+ scope: self.scope,
+ cursor: self.cell.get(),
+ marker: PhantomData,
+ })?;
+ self.cell.set(rest);
+ Ok(node)
+ }
+
+ /// Returns the `Span` of the next token in the parse stream, or
+ /// `Span::call_site()` if this parse stream has completely exhausted its
+ /// input `TokenStream`.
+ pub fn span(&self) -> Span {
+ let cursor = self.cursor();
+ if cursor.eof() {
+ self.scope
+ } else {
+ crate::buffer::open_span_of_group(cursor)
+ }
+ }
+
+ /// Provides low-level access to the token representation underlying this
+ /// parse stream.
+ ///
+ /// Cursors are immutable so no operations you perform against the cursor
+ /// will affect the state of this parse stream.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use proc_macro2::TokenStream;
+ /// use syn::buffer::Cursor;
+ /// use syn::parse::{ParseStream, Result};
+ ///
+ /// // Run a parser that returns T, but get its output as TokenStream instead of T.
+ /// // This works without T needing to implement ToTokens.
+ /// fn recognize_token_stream<T>(
+ /// recognizer: fn(ParseStream) -> Result<T>,
+ /// ) -> impl Fn(ParseStream) -> Result<TokenStream> {
+ /// move |input| {
+ /// let begin = input.cursor();
+ /// recognizer(input)?;
+ /// let end = input.cursor();
+ /// Ok(tokens_between(begin, end))
+ /// }
+ /// }
+ ///
+ /// // Collect tokens between two cursors as a TokenStream.
+ /// fn tokens_between(begin: Cursor, end: Cursor) -> TokenStream {
+ /// assert!(begin <= end);
+ ///
+ /// let mut cursor = begin;
+ /// let mut tokens = TokenStream::new();
+ /// while cursor < end {
+ /// let (token, next) = cursor.token_tree().unwrap();
+ /// tokens.extend(std::iter::once(token));
+ /// cursor = next;
+ /// }
+ /// tokens
+ /// }
+ ///
+ /// fn main() {
+ /// use quote::quote;
+ /// use syn::parse::{Parse, Parser};
+ /// use syn::Token;
+ ///
+ /// // Parse syn::Type as a TokenStream, surrounded by angle brackets.
+ /// fn example(input: ParseStream) -> Result<TokenStream> {
+ /// let _langle: Token![<] = input.parse()?;
+ /// let ty = recognize_token_stream(syn::Type::parse)(input)?;
+ /// let _rangle: Token![>] = input.parse()?;
+ /// Ok(ty)
+ /// }
+ ///
+ /// let tokens = quote! { <fn() -> u8> };
+ /// println!("{}", example.parse2(tokens).unwrap());
+ /// }
+ /// ```
+ pub fn cursor(&self) -> Cursor<'a> {
+ self.cell.get()
+ }
+
+ fn check_unexpected(&self) -> Result<()> {
+ match inner_unexpected(self).1 {
+ Some(span) => Err(Error::new(span, "unexpected token")),
+ None => Ok(()),
+ }
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl<T: Parse> Parse for Box<T> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.parse().map(Box::new)
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl<T: Parse + Token> Parse for Option<T> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if T::peek(input.cursor()) {
+ Ok(Some(input.parse()?))
+ } else {
+ Ok(None)
+ }
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl Parse for TokenStream {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| Ok((cursor.token_stream(), Cursor::empty())))
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl Parse for TokenTree {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| match cursor.token_tree() {
+ Some((tt, rest)) => Ok((tt, rest)),
+ None => Err(cursor.error("expected token tree")),
+ })
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl Parse for Group {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| {
+ if let Some((group, rest)) = cursor.any_group_token() {
+ if group.delimiter() != Delimiter::None {
+ return Ok((group, rest));
+ }
+ }
+ Err(cursor.error("expected group token"))
+ })
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl Parse for Punct {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| match cursor.punct() {
+ Some((punct, rest)) => Ok((punct, rest)),
+ None => Err(cursor.error("expected punctuation token")),
+ })
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl Parse for Literal {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| match cursor.literal() {
+ Some((literal, rest)) => Ok((literal, rest)),
+ None => Err(cursor.error("expected literal token")),
+ })
+ }
+}
+
+/// Parser that can parse Rust tokens into a particular syntax tree node.
+///
+/// Refer to the [module documentation] for details about parsing in Syn.
+///
+/// [module documentation]: self
+pub trait Parser: Sized {
+ type Output;
+
+ /// Parse a proc-macro2 token stream into the chosen syntax tree node.
+ ///
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ fn parse2(self, tokens: TokenStream) -> Result<Self::Output>;
+
+ /// Parse tokens of source code into the chosen syntax tree node.
+ ///
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ #[cfg(feature = "proc-macro")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))]
+ fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> {
+ self.parse2(proc_macro2::TokenStream::from(tokens))
+ }
+
+ /// Parse a string of Rust code into the chosen syntax tree node.
+ ///
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the string, an error is returned.
+ ///
+ /// # Hygiene
+ ///
+ /// Every span in the resulting syntax tree will be set to resolve at the
+ /// macro call site.
+ fn parse_str(self, s: &str) -> Result<Self::Output> {
+ self.parse2(proc_macro2::TokenStream::from_str(s)?)
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let _ = scope;
+ self.parse2(tokens)
+ }
+}
+
+fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
+ let scope = Span::call_site();
+ let cursor = tokens.begin();
+ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ new_parse_buffer(scope, cursor, unexpected)
+}
+
+impl<F, T> Parser for F
+where
+ F: FnOnce(ParseStream) -> Result<T>,
+{
+ type Output = T;
+
+ fn parse2(self, tokens: TokenStream) -> Result<T> {
+ let buf = TokenBuffer::new2(tokens);
+ let state = tokens_to_parse_buffer(&buf);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
+ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+ Ok(node)
+ }
+ }
+
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let buf = TokenBuffer::new2(tokens);
+ let cursor = buf.begin();
+ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let state = new_parse_buffer(scope, cursor, unexpected);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
+ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+ Ok(node)
+ }
+ }
+}
+
+pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
+ f.__parse_scoped(scope, tokens)
+}
+
+/// An empty syntax tree node that consumes no tokens when parsed.
+///
+/// This is useful for attribute macros that want to ensure they are not
+/// provided any attribute args.
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// use proc_macro::TokenStream;
+/// use syn::parse_macro_input;
+/// use syn::parse::Nothing;
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro_attribute]
+/// # };
+/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
+/// parse_macro_input!(args as Nothing);
+///
+/// /* ... */
+/// # TokenStream::new()
+/// }
+/// ```
+///
+/// ```text
+/// error: unexpected token
+/// --> src/main.rs:3:19
+/// |
+/// 3 | #[my_attr(asdf)]
+/// | ^^^^
+/// ```
+pub struct Nothing;
+
+impl Parse for Nothing {
+ fn parse(_input: ParseStream) -> Result<Self> {
+ Ok(Nothing)
+ }
+}
+
+#[cfg(feature = "printing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+impl ToTokens for Nothing {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ let _ = tokens;
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for Nothing {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Copy for Nothing {}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for Nothing {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("Nothing")
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Eq for Nothing {}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for Nothing {
+ fn eq(&self, _other: &Self) -> bool {
+ true
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for Nothing {
+ fn hash<H: Hasher>(&self, _state: &mut H) {}
+}
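
For context, the parse.rs module above revolves around parser functions of
the form `fn(ParseStream) -> Result<T>` and the blanket `Parser` impl for
functions and closures of that shape. A minimal sketch of how the vendored
API is driven from outside, assuming the vendored copy matches upstream syn
2.x with the "parsing" feature enabled; `key_value` is an illustrative
parser, not part of this patch:

    use syn::parse::{ParseStream, Parser};
    use syn::{Ident, Result, Token};

    // A free function with the conventional parser signature. The blanket
    // `impl Parser for F where F: FnOnce(ParseStream) -> Result<T>` lets it
    // be invoked through `parse_str`, `parse2`, or `parse`.
    fn key_value(input: ParseStream) -> Result<(Ident, Ident)> {
        let key: Ident = input.parse()?;
        let _eq: Token![=] = input.parse()?;
        let value: Ident = input.parse()?;
        Ok((key, value))
    }

    fn main() {
        let (k, v) = key_value.parse_str("alpha = beta").unwrap();
        assert_eq!(k, "alpha");
        assert_eq!(v, "beta");
    }

`parse_str` and `parse2` also enforce that the whole input is consumed,
using the same unexpected-token bookkeeping shown in the `Parser` impl above.
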
diff --git a/rust/hw/char/pl011/vendor/syn/src/parse_macro_input.rs b/rust/hw/char/pl011/vendor/syn/src/parse_macro_input.rs
new file mode 100644
index 0000000000..f0660aedd7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/parse_macro_input.rs
@@ -0,0 +1,128 @@
+/// Parse the input TokenStream of a macro, triggering a compile error if the
+/// tokens fail to parse.
+///
+/// Refer to the [`parse` module] documentation for more details about parsing
+/// in Syn.
+///
+/// [`parse` module]: mod@crate::parse
+///
+/// <br>
+///
+/// # Intended usage
+///
+/// This macro must be called from a function that returns
+/// `proc_macro::TokenStream`. Usually this will be your proc macro entry point,
+/// the function that has the #\[proc_macro\] / #\[proc_macro_derive\] /
+/// #\[proc_macro_attribute\] attribute.
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// use proc_macro::TokenStream;
+/// use syn::{parse_macro_input, Result};
+/// use syn::parse::{Parse, ParseStream};
+///
+/// struct MyMacroInput {
+/// /* ... */
+/// }
+///
+/// impl Parse for MyMacroInput {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// /* ... */
+/// # Ok(MyMacroInput {})
+/// }
+/// }
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro]
+/// # };
+/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
+/// let input = parse_macro_input!(tokens as MyMacroInput);
+///
+/// /* ... */
+/// # TokenStream::new()
+/// }
+/// ```
+///
+/// <br>
+///
+/// # Usage with Parser
+///
+/// This macro can also be used with the [`Parser` trait] for types that have
+/// multiple ways that they can be parsed.
+///
+/// [`Parser` trait]: crate::parse::Parser
+///
+/// ```
+/// # extern crate proc_macro;
+/// #
+/// # use proc_macro::TokenStream;
+/// # use syn::{parse_macro_input, Result};
+/// # use syn::parse::ParseStream;
+/// #
+/// # struct MyMacroInput {}
+/// #
+/// impl MyMacroInput {
+/// fn parse_alternate(input: ParseStream) -> Result<Self> {
+/// /* ... */
+/// # Ok(MyMacroInput {})
+/// }
+/// }
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro]
+/// # };
+/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
+/// let input = parse_macro_input!(tokens with MyMacroInput::parse_alternate);
+///
+/// /* ... */
+/// # TokenStream::new()
+/// }
+/// ```
+///
+/// <br>
+///
+/// # Expansion
+///
+/// `parse_macro_input!($variable as $Type)` expands to something like:
+///
+/// ```no_run
+/// # extern crate proc_macro;
+/// #
+/// # macro_rules! doc_test {
+/// # ($variable:ident as $Type:ty) => {
+/// match syn::parse::<$Type>($variable) {
+/// Ok(syntax_tree) => syntax_tree,
+/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
+/// }
+/// # };
+/// # }
+/// #
+/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+/// # let _ = doc_test!(input as syn::Ident);
+/// # proc_macro::TokenStream::new()
+/// # }
+/// ```
+#[macro_export]
+#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
+macro_rules! parse_macro_input {
+ ($tokenstream:ident as $ty:ty) => {
+ match $crate::parse::<$ty>($tokenstream) {
+ $crate::__private::Ok(data) => data,
+ $crate::__private::Err(err) => {
+ return $crate::__private::TokenStream::from(err.to_compile_error());
+ }
+ }
+ };
+ ($tokenstream:ident with $parser:path) => {
+ match $crate::parse::Parser::parse($parser, $tokenstream) {
+ $crate::__private::Ok(data) => data,
+ $crate::__private::Err(err) => {
+ return $crate::__private::TokenStream::from(err.to_compile_error());
+ }
+ }
+ };
+ ($tokenstream:ident) => {
+ $crate::parse_macro_input!($tokenstream as _)
+ };
+}
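
Besides the `as` and `with` forms documented above, the macro's last rule
expands to `as _`, so the target type can be inferred from the binding. A
minimal sketch, assuming a proc-macro crate with upstream syn and quote
semantics (the `shout` macro is illustrative only, not part of this patch):

    use proc_macro::TokenStream;
    use syn::parse_macro_input;

    #[proc_macro]
    pub fn shout(tokens: TokenStream) -> TokenStream {
        // Equivalent to `parse_macro_input!(tokens as syn::Ident)`; the
        // target type comes from the annotated binding.
        let name: syn::Ident = parse_macro_input!(tokens);
        let upper = syn::Ident::new(&name.to_string().to_uppercase(), name.span());
        quote::quote!(#upper).into()
    }

On a parse failure the expansion converts the error with `to_compile_error()`
and returns early, so the macro emits an ordinary compile error instead of
panicking.
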
diff --git a/rust/hw/char/pl011/vendor/syn/src/parse_quote.rs b/rust/hw/char/pl011/vendor/syn/src/parse_quote.rs
new file mode 100644
index 0000000000..c4f47e16d1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/parse_quote.rs
@@ -0,0 +1,210 @@
+/// Quasi-quotation macro that accepts input like the [`quote!`] macro but uses
+/// type inference to figure out a return type for those tokens.
+///
+/// [`quote!`]: https://docs.rs/quote/1.0/quote/index.html
+///
+/// The return type can be any syntax tree node that implements the [`Parse`]
+/// trait.
+///
+/// [`Parse`]: crate::parse::Parse
+///
+/// ```
+/// use quote::quote;
+/// use syn::{parse_quote, Stmt};
+///
+/// fn main() {
+/// let name = quote!(v);
+/// let ty = quote!(u8);
+///
+/// let stmt: Stmt = parse_quote! {
+/// let #name: #ty = Default::default();
+/// };
+///
+/// println!("{:#?}", stmt);
+/// }
+/// ```
+///
+/// *This macro is available only if Syn is built with both the `"parsing"` and
+/// `"printing"` features.*
+///
+/// # Example
+///
+/// The following helper function adds a bound `T: HeapSize` to every type
+/// parameter `T` in the input generics.
+///
+/// ```
+/// use syn::{parse_quote, Generics, GenericParam};
+///
+/// // Add a bound `T: HeapSize` to every type parameter T.
+/// fn add_trait_bounds(mut generics: Generics) -> Generics {
+/// for param in &mut generics.params {
+/// if let GenericParam::Type(type_param) = param {
+/// type_param.bounds.push(parse_quote!(HeapSize));
+/// }
+/// }
+/// generics
+/// }
+/// ```
+///
+/// # Special cases
+///
+/// This macro can parse the following additional types as a special case even
+/// though they do not implement the `Parse` trait.
+///
+/// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]`
+/// or inner like `#![...]`
+/// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
+/// `P` with optional trailing punctuation
+/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
+///
+/// [`Vec<Stmt>`]: Block::parse_within
+///
+/// # Panics
+///
+/// Panics if the tokens fail to parse as the expected syntax tree type. The
+/// caller is responsible for ensuring that the input tokens are syntactically
+/// valid.
+#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))]
+#[macro_export]
+macro_rules! parse_quote {
+ ($($tt:tt)*) => {
+ $crate::__private::parse_quote($crate::__private::quote::quote!($($tt)*))
+ };
+}
+
+/// This macro is [`parse_quote!`] + [`quote_spanned!`][quote::quote_spanned].
+///
+/// Please refer to each of their documentation.
+///
+/// # Example
+///
+/// ```
+/// use quote::{quote, quote_spanned};
+/// use syn::spanned::Spanned;
+/// use syn::{parse_quote_spanned, ReturnType, Signature};
+///
+/// // Changes `fn()` to `fn() -> Pin<Box<dyn Future<Output = ()>>>`,
+/// // and `fn() -> T` to `fn() -> Pin<Box<dyn Future<Output = T>>>`,
+/// // without introducing any call_site() spans.
+/// fn make_ret_pinned_future(sig: &mut Signature) {
+/// let ret = match &sig.output {
+/// ReturnType::Default => quote_spanned!(sig.paren_token.span=> ()),
+/// ReturnType::Type(_, ret) => quote!(#ret),
+/// };
+/// sig.output = parse_quote_spanned! {ret.span()=>
+/// -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = #ret>>>
+/// };
+/// }
+/// ```
+#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))]
+#[macro_export]
+macro_rules! parse_quote_spanned {
+ ($span:expr=> $($tt:tt)*) => {
+ $crate::__private::parse_quote($crate::__private::quote::quote_spanned!($span=> $($tt)*))
+ };
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Can parse any type that implements Parse.
+
+use crate::error::Result;
+use crate::parse::{Parse, ParseStream, Parser};
+use proc_macro2::TokenStream;
+
+// Not public API.
+#[doc(hidden)]
+pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T {
+ let parser = T::parse;
+ match parser.parse2(token_stream) {
+ Ok(t) => t,
+ Err(err) => panic!("{}", err),
+ }
+}
+
+#[doc(hidden)]
+pub trait ParseQuote: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+}
+
+impl<T: Parse> ParseQuote for T {
+ fn parse(input: ParseStream) -> Result<Self> {
+ <T as Parse>::parse(input)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Any other types that we want `parse_quote!` to be able to parse.
+
+use crate::punctuated::Punctuated;
+#[cfg(any(feature = "full", feature = "derive"))]
+use crate::{attr, Attribute, Field, FieldMutability, Ident, Type, Visibility};
+#[cfg(feature = "full")]
+use crate::{Block, Pat, Stmt};
+
+#[cfg(any(feature = "full", feature = "derive"))]
+impl ParseQuote for Attribute {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Token![#]) && input.peek2(Token![!]) {
+ attr::parsing::single_parse_inner(input)
+ } else {
+ attr::parsing::single_parse_outer(input)
+ }
+ }
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+impl ParseQuote for Field {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+
+ let ident: Option<Ident>;
+ let colon_token: Option<Token![:]>;
+ let is_named = input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]);
+ if is_named {
+ ident = Some(input.parse()?);
+ colon_token = Some(input.parse()?);
+ } else {
+ ident = None;
+ colon_token = None;
+ }
+
+ let ty: Type = input.parse()?;
+
+ Ok(Field {
+ attrs,
+ vis,
+ mutability: FieldMutability::None,
+ ident,
+ colon_token,
+ ty,
+ })
+ }
+}
+
+#[cfg(feature = "full")]
+impl ParseQuote for Pat {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Pat::parse_multi_with_leading_vert(input)
+ }
+}
+
+#[cfg(feature = "full")]
+impl ParseQuote for Box<Pat> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ <Pat as ParseQuote>::parse(input).map(Box::new)
+ }
+}
+
+impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Self::parse_terminated(input)
+ }
+}
+
+#[cfg(feature = "full")]
+impl ParseQuote for Vec<Stmt> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Block::parse_within(input)
+ }
+}
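
Of the special cases listed above, the `Punctuated<T, P>` impl is the one
most easily missed, since `Punctuated` itself does not implement `Parse`. A
small sketch, assuming the vendored copy matches upstream syn 2.x with the
"derive" (or "full") and "printing" features; the predicate list is purely
illustrative:

    use syn::punctuated::Punctuated;
    use syn::{parse_quote, Token, WherePredicate};

    fn main() {
        // Parsed through the ParseQuote impl for Punctuated<T, P>: zero or
        // more T separated by P, with optional trailing punctuation.
        let preds: Punctuated<WherePredicate, Token![,]> =
            parse_quote!(T: Clone + Send, U: Default,);
        assert_eq!(preds.len(), 2);
    }

Routing these extras through the crate-private `ParseQuote` trait lets
`parse_quote!` commit to one specific behaviour (here
`Punctuated::parse_terminated`) without giving `Punctuated` a public `Parse`
impl that would have to choose between several plausible parse modes.
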
diff --git a/rust/hw/char/pl011/vendor/syn/src/pat.rs b/rust/hw/char/pl011/vendor/syn/src/pat.rs
new file mode 100644
index 0000000000..e647f2f4d0
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/pat.rs
@@ -0,0 +1,953 @@
+use crate::attr::Attribute;
+use crate::expr::Member;
+use crate::ident::Ident;
+use crate::path::{Path, QSelf};
+use crate::punctuated::Punctuated;
+use crate::token;
+use crate::ty::Type;
+use proc_macro2::TokenStream;
+
+pub use crate::expr::{
+ ExprConst as PatConst, ExprLit as PatLit, ExprMacro as PatMacro, ExprPath as PatPath,
+ ExprRange as PatRange,
+};
+
+ast_enum_of_structs! {
+ /// A pattern in a local binding, function signature, match expression, or
+ /// various other places.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ #[non_exhaustive]
+ pub enum Pat {
+ /// A const block: `const { ... }`.
+ Const(PatConst),
+
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ Ident(PatIdent),
+
+ /// A literal pattern: `0`.
+ Lit(PatLit),
+
+ /// A macro in pattern position.
+ Macro(PatMacro),
+
+ /// A pattern that matches any one of a set of cases.
+ Or(PatOr),
+
+ /// A parenthesized pattern: `(A | B)`.
+ Paren(PatParen),
+
+ /// A path pattern like `Color::Red`, optionally qualified with a
+ /// self-type.
+ ///
+ /// Unqualified path patterns can legally refer to variants, structs,
+ /// constants or associated constants. Qualified path patterns like
+ /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
+ /// associated constants.
+ Path(PatPath),
+
+ /// A range pattern: `1..=2`.
+ Range(PatRange),
+
+ /// A reference pattern: `&mut var`.
+ Reference(PatReference),
+
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`.
+ Rest(PatRest),
+
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ Slice(PatSlice),
+
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ Struct(PatStruct),
+
+ /// A tuple pattern: `(a, b)`.
+ Tuple(PatTuple),
+
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ TupleStruct(PatTupleStruct),
+
+ /// A type ascription pattern: `foo: f64`.
+ Type(PatType),
+
+ /// Tokens in pattern position not interpreted by Syn.
+ Verbatim(TokenStream),
+
+ /// A pattern that matches any value: `_`.
+ Wild(PatWild),
+
+ // For testing exhaustiveness in downstream code, use the following idiom:
+ //
+ // match pat {
+ // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+ //
+ // Pat::Box(pat) => {...}
+ // Pat::Ident(pat) => {...}
+ // ...
+ // Pat::Wild(pat) => {...}
+ //
+ // _ => { /* some sane fallback */ }
+ // }
+ //
+ // This way we fail your tests but don't break your library when adding
+ // a variant. You will be notified by a test failure when a variant is
+ // added, so that you can add code to handle it, but your library will
+ // continue to compile and work for downstream users in the interim.
+ }
+}
+
+ast_struct! {
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ ///
+ /// It may also be a unit struct or struct variant (e.g. `None`), or a
+ /// constant; these cannot be distinguished syntactically.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatIdent {
+ pub attrs: Vec<Attribute>,
+ pub by_ref: Option<Token![ref]>,
+ pub mutability: Option<Token![mut]>,
+ pub ident: Ident,
+ pub subpat: Option<(Token![@], Box<Pat>)>,
+ }
+}
+
+ast_struct! {
+ /// A pattern that matches any one of a set of cases.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatOr {
+ pub attrs: Vec<Attribute>,
+ pub leading_vert: Option<Token![|]>,
+ pub cases: Punctuated<Pat, Token![|]>,
+ }
+}
+
+ast_struct! {
+ /// A parenthesized pattern: `(A | B)`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub pat: Box<Pat>,
+ }
+}
+
+ast_struct! {
+ /// A reference pattern: `&mut var`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+ pub mutability: Option<Token![mut]>,
+ pub pat: Box<Pat>,
+ }
+}
+
+ast_struct! {
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatRest {
+ pub attrs: Vec<Attribute>,
+ pub dot2_token: Token![..],
+ }
+}
+
+ast_struct! {
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatSlice {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub elems: Punctuated<Pat, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatStruct {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ pub brace_token: token::Brace,
+ pub fields: Punctuated<FieldPat, Token![,]>,
+ pub rest: Option<PatRest>,
+ }
+}
+
+ast_struct! {
+ /// A tuple pattern: `(a, b)`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatTuple {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Pat, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatTupleStruct {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Pat, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// A type ascription pattern: `foo: f64`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatType {
+ pub attrs: Vec<Attribute>,
+ pub pat: Box<Pat>,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ }
+}
+
+ast_struct! {
+ /// A pattern that matches any value: `_`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct PatWild {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+ }
+}
+
+ast_struct! {
+ /// A single field in a struct pattern.
+ ///
+ /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
+ /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct FieldPat {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+ pub colon_token: Option<Token![:]>,
+ pub pat: Box<Pat>,
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::Attribute;
+ use crate::error::{self, Result};
+ use crate::expr::{
+ Expr, ExprConst, ExprLit, ExprMacro, ExprPath, ExprRange, Member, RangeLimits,
+ };
+ use crate::ext::IdentExt as _;
+ use crate::ident::Ident;
+ use crate::lit::Lit;
+ use crate::mac::{self, Macro};
+ use crate::parse::{Parse, ParseBuffer, ParseStream};
+ use crate::pat::{
+ FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct,
+ PatTuple, PatTupleStruct, PatType, PatWild,
+ };
+ use crate::path::{self, Path, QSelf};
+ use crate::punctuated::Punctuated;
+ use crate::stmt::Block;
+ use crate::token;
+ use crate::verbatim;
+ use proc_macro2::TokenStream;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Pat {
+ /// Parse a pattern that does _not_ involve `|` at the top level.
+ ///
+ /// This parser matches the behavior of the `$:pat_param` macro_rules
+ /// matcher, and on editions prior to Rust 2021, the behavior of
+ /// `$:pat`.
+ ///
+ /// In Rust syntax, some examples of where this syntax would occur are
+ /// in the argument pattern of functions and closures. Patterns using
+ /// `|` are not allowed to occur in these positions.
+ ///
+ /// ```compile_fail
+ /// fn f(Some(_) | None: Option<T>) {
+ /// let _ = |Some(_) | None: Option<T>| {};
+ /// // ^^^^^^^^^^^^^^^^^^^^^^^^^??? :(
+ /// }
+ /// ```
+ ///
+ /// ```console
+ /// error: top-level or-patterns are not allowed in function parameters
+ /// --> src/main.rs:1:6
+ /// |
+ /// 1 | fn f(Some(_) | None: Option<T>) {
+ /// | ^^^^^^^^^^^^^^ help: wrap the pattern in parentheses: `(Some(_) | None)`
+ /// ```
+ pub fn parse_single(input: ParseStream) -> Result<Self> {
+ let begin = input.fork();
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ && (input.peek2(Token![::])
+ || input.peek2(Token![!])
+ || input.peek2(token::Brace)
+ || input.peek2(token::Paren)
+ || input.peek2(Token![..]))
+ || input.peek(Token![self]) && input.peek2(Token![::])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+ || input.peek(Token![crate])
+ {
+ pat_path_or_macro_or_struct_or_range(input)
+ } else if lookahead.peek(Token![_]) {
+ input.call(pat_wild).map(Pat::Wild)
+ } else if input.peek(Token![box]) {
+ pat_box(begin, input)
+ } else if input.peek(Token![-]) || lookahead.peek(Lit) || lookahead.peek(Token![const])
+ {
+ pat_lit_or_range(input)
+ } else if lookahead.peek(Token![ref])
+ || lookahead.peek(Token![mut])
+ || input.peek(Token![self])
+ || input.peek(Ident)
+ {
+ input.call(pat_ident).map(Pat::Ident)
+ } else if lookahead.peek(Token![&]) {
+ input.call(pat_reference).map(Pat::Reference)
+ } else if lookahead.peek(token::Paren) {
+ input.call(pat_paren_or_tuple)
+ } else if lookahead.peek(token::Bracket) {
+ input.call(pat_slice).map(Pat::Slice)
+ } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
+ pat_range_half_open(input)
+ } else if lookahead.peek(Token![const]) {
+ input.call(pat_const).map(Pat::Verbatim)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+
+ /// Parse a pattern, possibly involving `|`, but not a leading `|`.
+ pub fn parse_multi(input: ParseStream) -> Result<Self> {
+ multi_pat_impl(input, None)
+ }
+
+ /// Parse a pattern, possibly involving `|`, possibly including a
+ /// leading `|`.
+ ///
+ /// This parser matches the behavior of the Rust 2021 edition's `$:pat`
+ /// macro_rules matcher.
+ ///
+ /// In Rust syntax, an example of where this syntax would occur is in
+ /// the pattern of a `match` arm, where the language permits an optional
+ /// leading `|`, although it is not idiomatic to write one there in
+ /// handwritten code.
+ ///
+ /// ```
+ /// # let wat = None;
+ /// match wat {
+ /// | None | Some(false) => {}
+ /// | Some(true) => {}
+ /// }
+ /// ```
+ ///
+ /// The compiler accepts it only to facilitate some situations in
+ /// macro-generated code where a macro author might need to write:
+ ///
+ /// ```
+ /// # macro_rules! doc {
+ /// # ($value:expr, ($($conditions1:pat),*), ($($conditions2:pat),*), $then:expr) => {
+ /// match $value {
+ /// $(| $conditions1)* $(| $conditions2)* => $then
+ /// }
+ /// # };
+ /// # }
+ /// #
+ /// # doc!(true, (true), (false), {});
+ /// # doc!(true, (), (true, false), {});
+ /// # doc!(true, (true, false), (), {});
+ /// ```
+ ///
+ /// Expressing the same thing correctly in the case that either one (but
+ /// not both) of `$conditions1` and `$conditions2` might be empty,
+ /// without leading `|`, is complex.
+ ///
+ /// Use [`Pat::parse_multi`] instead if you are not intending to support
+ /// macro-generated macro input.
+ pub fn parse_multi_with_leading_vert(input: ParseStream) -> Result<Self> {
+ let leading_vert: Option<Token![|]> = input.parse()?;
+ multi_pat_impl(input, leading_vert)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for PatType {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(PatType {
+ attrs: Vec::new(),
+ pat: Box::new(Pat::parse_single(input)?),
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ })
+ }
+ }
+
+ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
+ let mut pat = Pat::parse_single(input)?;
+ if leading_vert.is_some()
+ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+ {
+ let mut cases = Punctuated::new();
+ cases.push_value(pat);
+ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
+ let punct = input.parse()?;
+ cases.push_punct(punct);
+ let pat = Pat::parse_single(input)?;
+ cases.push_value(pat);
+ }
+ pat = Pat::Or(PatOr {
+ attrs: Vec::new(),
+ leading_vert,
+ cases,
+ });
+ }
+ Ok(pat)
+ }
+
+ fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ if qself.is_none()
+ && input.peek(Token![!])
+ && !input.peek(Token![!=])
+ && path.is_mod_style()
+ {
+ let bang_token: Token![!] = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+ return Ok(Pat::Macro(ExprMacro {
+ attrs: Vec::new(),
+ mac: Macro {
+ path,
+ bang_token,
+ delimiter,
+ tokens,
+ },
+ }));
+ }
+
+ if input.peek(token::Brace) {
+ pat_struct(input, qself, path).map(Pat::Struct)
+ } else if input.peek(token::Paren) {
+ pat_tuple_struct(input, qself, path).map(Pat::TupleStruct)
+ } else if input.peek(Token![..]) {
+ pat_range(input, qself, path)
+ } else {
+ Ok(Pat::Path(ExprPath {
+ attrs: Vec::new(),
+ qself,
+ path,
+ }))
+ }
+ }
+
+ fn pat_wild(input: ParseStream) -> Result<PatWild> {
+ Ok(PatWild {
+ attrs: Vec::new(),
+ underscore_token: input.parse()?,
+ })
+ }
+
+ fn pat_box(begin: ParseBuffer, input: ParseStream) -> Result<Pat> {
+ input.parse::<Token![box]>()?;
+ Pat::parse_single(input)?;
+ Ok(Pat::Verbatim(verbatim::between(&begin, input)))
+ }
+
+ fn pat_ident(input: ParseStream) -> Result<PatIdent> {
+ Ok(PatIdent {
+ attrs: Vec::new(),
+ by_ref: input.parse()?,
+ mutability: input.parse()?,
+ ident: {
+ if input.peek(Token![self]) {
+ input.call(Ident::parse_any)?
+ } else {
+ input.parse()?
+ }
+ },
+ subpat: {
+ if input.peek(Token![@]) {
+ let at_token: Token![@] = input.parse()?;
+ let subpat = Pat::parse_single(input)?;
+ Some((at_token, Box::new(subpat)))
+ } else {
+ None
+ }
+ },
+ })
+ }
+
+ fn pat_tuple_struct(
+ input: ParseStream,
+ qself: Option<QSelf>,
+ path: Path,
+ ) -> Result<PatTupleStruct> {
+ let content;
+ let paren_token = parenthesized!(content in input);
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+ let value = Pat::parse_multi_with_leading_vert(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+ }
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ }
+
+ Ok(PatTupleStruct {
+ attrs: Vec::new(),
+ qself,
+ path,
+ paren_token,
+ elems,
+ })
+ }
+
+ fn pat_struct(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatStruct> {
+ let content;
+ let brace_token = braced!(content in input);
+
+ let mut fields = Punctuated::new();
+ let mut rest = None;
+ while !content.is_empty() {
+ let attrs = content.call(Attribute::parse_outer)?;
+ if content.peek(Token![..]) {
+ rest = Some(PatRest {
+ attrs,
+ dot2_token: content.parse()?,
+ });
+ break;
+ }
+ let mut value = content.call(field_pat)?;
+ value.attrs = attrs;
+ fields.push_value(value);
+ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+ Ok(PatStruct {
+ attrs: Vec::new(),
+ qself,
+ path,
+ brace_token,
+ fields,
+ rest,
+ })
+ }
+
+ fn field_pat(input: ParseStream) -> Result<FieldPat> {
+ let begin = input.fork();
+ let boxed: Option<Token![box]> = input.parse()?;
+ let by_ref: Option<Token![ref]> = input.parse()?;
+ let mutability: Option<Token![mut]> = input.parse()?;
+
+ let member = if boxed.is_some() || by_ref.is_some() || mutability.is_some() {
+ input.parse().map(Member::Named)
+ } else {
+ input.parse()
+ }?;
+
+ if boxed.is_none() && by_ref.is_none() && mutability.is_none() && input.peek(Token![:])
+ || !member.is_named()
+ {
+ return Ok(FieldPat {
+ attrs: Vec::new(),
+ member,
+ colon_token: Some(input.parse()?),
+ pat: Box::new(Pat::parse_multi_with_leading_vert(input)?),
+ });
+ }
+
+ let ident = match member {
+ Member::Named(ident) => ident,
+ Member::Unnamed(_) => unreachable!(),
+ };
+
+ let pat = if boxed.is_some() {
+ Pat::Verbatim(verbatim::between(&begin, input))
+ } else {
+ Pat::Ident(PatIdent {
+ attrs: Vec::new(),
+ by_ref,
+ mutability,
+ ident: ident.clone(),
+ subpat: None,
+ })
+ };
+
+ Ok(FieldPat {
+ attrs: Vec::new(),
+ member: Member::Named(ident),
+ colon_token: None,
+ pat: Box::new(pat),
+ })
+ }
+
+ fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<Pat> {
+ let limits = RangeLimits::parse_obsolete(input)?;
+ let end = input.call(pat_range_bound)?;
+ if let (RangeLimits::Closed(_), None) = (&limits, &end) {
+ return Err(input.error("expected range upper bound"));
+ }
+ Ok(Pat::Range(ExprRange {
+ attrs: Vec::new(),
+ start: Some(Box::new(Expr::Path(ExprPath {
+ attrs: Vec::new(),
+ qself,
+ path,
+ }))),
+ limits,
+ end: end.map(PatRangeBound::into_expr),
+ }))
+ }
+
+ fn pat_range_half_open(input: ParseStream) -> Result<Pat> {
+ let limits: RangeLimits = input.parse()?;
+ let end = input.call(pat_range_bound)?;
+ if end.is_some() {
+ Ok(Pat::Range(ExprRange {
+ attrs: Vec::new(),
+ start: None,
+ limits,
+ end: end.map(PatRangeBound::into_expr),
+ }))
+ } else {
+ match limits {
+ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
+ attrs: Vec::new(),
+ dot2_token,
+ })),
+ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
+ }
+ }
+ }
+
+ fn pat_paren_or_tuple(input: ParseStream) -> Result<Pat> {
+ let content;
+ let paren_token = parenthesized!(content in input);
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+ let value = Pat::parse_multi_with_leading_vert(&content)?;
+ if content.is_empty() {
+ if elems.is_empty() && !matches!(value, Pat::Rest(_)) {
+ return Ok(Pat::Paren(PatParen {
+ attrs: Vec::new(),
+ paren_token,
+ pat: Box::new(value),
+ }));
+ }
+ elems.push_value(value);
+ break;
+ }
+ elems.push_value(value);
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ }
+
+ Ok(Pat::Tuple(PatTuple {
+ attrs: Vec::new(),
+ paren_token,
+ elems,
+ }))
+ }
+
+ fn pat_reference(input: ParseStream) -> Result<PatReference> {
+ Ok(PatReference {
+ attrs: Vec::new(),
+ and_token: input.parse()?,
+ mutability: input.parse()?,
+ pat: Box::new(Pat::parse_single(input)?),
+ })
+ }
+
+ fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
+ let start = input.call(pat_range_bound)?.unwrap();
+ if input.peek(Token![..]) {
+ let limits = RangeLimits::parse_obsolete(input)?;
+ let end = input.call(pat_range_bound)?;
+ if let (RangeLimits::Closed(_), None) = (&limits, &end) {
+ return Err(input.error("expected range upper bound"));
+ }
+ Ok(Pat::Range(ExprRange {
+ attrs: Vec::new(),
+ start: Some(start.into_expr()),
+ limits,
+ end: end.map(PatRangeBound::into_expr),
+ }))
+ } else {
+ Ok(start.into_pat())
+ }
+ }
+
+ // Patterns that can appear on either side of a range pattern.
+ enum PatRangeBound {
+ Const(ExprConst),
+ Lit(ExprLit),
+ Path(ExprPath),
+ }
+
+ impl PatRangeBound {
+ fn into_expr(self) -> Box<Expr> {
+ Box::new(match self {
+ PatRangeBound::Const(pat) => Expr::Const(pat),
+ PatRangeBound::Lit(pat) => Expr::Lit(pat),
+ PatRangeBound::Path(pat) => Expr::Path(pat),
+ })
+ }
+
+ fn into_pat(self) -> Pat {
+ match self {
+ PatRangeBound::Const(pat) => Pat::Const(pat),
+ PatRangeBound::Lit(pat) => Pat::Lit(pat),
+ PatRangeBound::Path(pat) => Pat::Path(pat),
+ }
+ }
+ }
+
+ fn pat_range_bound(input: ParseStream) -> Result<Option<PatRangeBound>> {
+ if input.is_empty()
+ || input.peek(Token![|])
+ || input.peek(Token![=])
+ || input.peek(Token![:]) && !input.peek(Token![::])
+ || input.peek(Token![,])
+ || input.peek(Token![;])
+ || input.peek(Token![if])
+ {
+ return Ok(None);
+ }
+
+ let lookahead = input.lookahead1();
+ let expr = if lookahead.peek(Lit) {
+ PatRangeBound::Lit(input.parse()?)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![Self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+ {
+ PatRangeBound::Path(input.parse()?)
+ } else if lookahead.peek(Token![const]) {
+ PatRangeBound::Const(input.parse()?)
+ } else {
+ return Err(lookahead.error());
+ };
+
+ Ok(Some(expr))
+ }
+
+ fn pat_slice(input: ParseStream) -> Result<PatSlice> {
+ let content;
+ let bracket_token = bracketed!(content in input);
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+ let value = Pat::parse_multi_with_leading_vert(&content)?;
+ match value {
+ Pat::Range(pat) if pat.start.is_none() || pat.end.is_none() => {
+ let (start, end) = match pat.limits {
+ RangeLimits::HalfOpen(dot_dot) => (dot_dot.spans[0], dot_dot.spans[1]),
+ RangeLimits::Closed(dot_dot_eq) => {
+ (dot_dot_eq.spans[0], dot_dot_eq.spans[2])
+ }
+ };
+ let msg = "range pattern is not allowed unparenthesized inside slice pattern";
+ return Err(error::new2(start, end, msg));
+ }
+ _ => {}
+ }
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+ }
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ }
+
+ Ok(PatSlice {
+ attrs: Vec::new(),
+ bracket_token,
+ elems,
+ })
+ }
+
+ fn pat_const(input: ParseStream) -> Result<TokenStream> {
+ let begin = input.fork();
+ input.parse::<Token![const]>()?;
+
+ let content;
+ braced!(content in input);
+ content.call(Attribute::parse_inner)?;
+ content.call(Block::parse_within)?;
+
+ Ok(verbatim::between(&begin, input))
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::attr::FilterAttrs;
+ use crate::pat::{
+ FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct,
+ PatTuple, PatTupleStruct, PatType, PatWild,
+ };
+ use crate::path;
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatIdent {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.by_ref.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ if let Some((at_token, subpat)) = &self.subpat {
+ at_token.to_tokens(tokens);
+ subpat.to_tokens(tokens);
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatOr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.leading_vert.to_tokens(tokens);
+ self.cases.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatParen {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.pat.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.and_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatRest {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.dot2_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ path::printing::print_path(tokens, &self.qself, &self.path);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+ // NOTE: We need a comma before the dot2 token if it is present.
+ if !self.fields.empty_or_trailing() && self.rest.is_some() {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ self.rest.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ // If there is only one element, a trailing comma is needed to
+ // distinguish PatTuple from PatParen, unless this is `(..)`
+ // which is a tuple pattern even without comma.
+ if self.elems.len() == 1
+ && !self.elems.trailing_punct()
+ && !matches!(self.elems[0], Pat::Rest { .. })
+ {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatTupleStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ path::printing::print_path(tokens, &self.qself, &self.path);
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.pat.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PatWild {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for FieldPat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ if let Some(colon_token) = &self.colon_token {
+ self.member.to_tokens(tokens);
+ colon_token.to_tokens(tokens);
+ }
+ self.pat.to_tokens(tokens);
+ }
+ }
+}
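
The pattern parsers in this file are reached through `Pat::parse_single` and
`Pat::parse_multi_with_leading_vert`, which do not implement `Parse` directly.
A minimal sketch of how a caller drives them through the `Parser` trait,
assuming the vendored syn is built with its "full" and "parsing" features and
that the quote crate is available:

    use quote::quote;
    use syn::parse::Parser;
    use syn::Pat;

    fn main() -> syn::Result<()> {
        // A single pattern, as in a `let` binding (no `|` alternatives).
        let pat = Pat::parse_single.parse2(quote!(Some((ref name, _))))?;
        assert!(matches!(pat, Pat::TupleStruct(_)));

        // An or-pattern, as in a `match` arm; a leading `|` is tolerated.
        let pat = Pat::parse_multi_with_leading_vert.parse2(quote!(1 | 2 | 3..=9))?;
        assert!(matches!(pat, Pat::Or(_)));
        Ok(())
    }
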
diff --git a/rust/hw/char/pl011/vendor/syn/src/path.rs b/rust/hw/char/pl011/vendor/syn/src/path.rs
new file mode 100644
index 0000000000..636d5d5e8f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/path.rs
@@ -0,0 +1,886 @@
+#[cfg(feature = "parsing")]
+use crate::error::Result;
+use crate::expr::Expr;
+use crate::generics::TypeParamBound;
+use crate::ident::Ident;
+use crate::lifetime::Lifetime;
+use crate::punctuated::Punctuated;
+use crate::token;
+use crate::ty::{ReturnType, Type};
+
+ast_struct! {
+ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Path {
+ pub leading_colon: Option<Token![::]>,
+ pub segments: Punctuated<PathSegment, Token![::]>,
+ }
+}
+
+impl<T> From<T> for Path
+where
+ T: Into<PathSegment>,
+{
+ fn from(segment: T) -> Self {
+ let mut path = Path {
+ leading_colon: None,
+ segments: Punctuated::new(),
+ };
+ path.segments.push_value(segment.into());
+ path
+ }
+}
+
+impl Path {
+ /// Determines whether this is a path of length 1 equal to the given
+ /// ident.
+ ///
+ /// For them to compare equal, it must be the case that:
+ ///
+ /// - the path has no leading colon,
+ /// - the number of path segments is 1,
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments, and
+ /// - the ident of the first path segment is equal to the given one.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use proc_macro2::TokenStream;
+ /// use syn::{Attribute, Error, Meta, Result};
+ ///
+ /// fn get_serde_meta_item(attr: &Attribute) -> Result<Option<&TokenStream>> {
+ /// if attr.path().is_ident("serde") {
+ /// match &attr.meta {
+ /// Meta::List(meta) => Ok(Some(&meta.tokens)),
+ /// bad => Err(Error::new_spanned(bad, "unrecognized attribute")),
+ /// }
+ /// } else {
+ /// Ok(None)
+ /// }
+ /// }
+ /// ```
+ pub fn is_ident<I>(&self, ident: &I) -> bool
+ where
+ I: ?Sized,
+ Ident: PartialEq<I>,
+ {
+ match self.get_ident() {
+ Some(id) => id == ident,
+ None => false,
+ }
+ }
+
+ /// If this path consists of a single ident, returns the ident.
+ ///
+ /// A path is considered an ident if:
+ ///
+ /// - the path has no leading colon,
+ /// - the number of path segments is 1, and
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments.
+ pub fn get_ident(&self) -> Option<&Ident> {
+ if self.leading_colon.is_none()
+ && self.segments.len() == 1
+ && self.segments[0].arguments.is_none()
+ {
+ Some(&self.segments[0].ident)
+ } else {
+ None
+ }
+ }
+
+ /// An error if this path is not a single ident, as defined in `get_ident`.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn require_ident(&self) -> Result<&Ident> {
+ self.get_ident().ok_or_else(|| {
+ crate::error::new2(
+ self.segments.first().unwrap().ident.span(),
+ self.segments.last().unwrap().ident.span(),
+ "expected this path to be an identifier",
+ )
+ })
+ }
+}
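
A quick illustration of the `is_ident`/`get_ident` contract documented above;
this is a sketch assuming the "parsing" and "printing" features, which
`parse_quote!` needs:

    use syn::{parse_quote, Path};

    fn main() {
        // Exactly one segment, no leading `::`, no generic arguments.
        let short: Path = parse_quote!(Serialize);
        assert!(short.is_ident("Serialize"));
        assert!(short.get_ident().is_some());

        // A longer path is not considered an ident.
        let long: Path = parse_quote!(serde::Serialize);
        assert!(!long.is_ident("Serialize"));
        assert!(long.get_ident().is_none());
    }
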
+
+ast_struct! {
+ /// A segment of a path together with any path arguments on that segment.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct PathSegment {
+ pub ident: Ident,
+ pub arguments: PathArguments,
+ }
+}
+
+impl<T> From<T> for PathSegment
+where
+ T: Into<Ident>,
+{
+ fn from(ident: T) -> Self {
+ PathSegment {
+ ident: ident.into(),
+ arguments: PathArguments::None,
+ }
+ }
+}
+
+ast_enum! {
+ /// Angle bracketed or parenthesized arguments of a path segment.
+ ///
+ /// ## Angle bracketed
+ ///
+ /// The `<'a, T>` in `std::slice::iter<'a, T>`.
+ ///
+ /// ## Parenthesized
+ ///
+ /// The `(A, B) -> C` in `Fn(A, B) -> C`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum PathArguments {
+ None,
+ /// The `<'a, T>` in `std::slice::iter<'a, T>`.
+ AngleBracketed(AngleBracketedGenericArguments),
+ /// The `(A, B) -> C` in `Fn(A, B) -> C`.
+ Parenthesized(ParenthesizedGenericArguments),
+ }
+}
+
+impl Default for PathArguments {
+ fn default() -> Self {
+ PathArguments::None
+ }
+}
+
+impl PathArguments {
+ pub fn is_empty(&self) -> bool {
+ match self {
+ PathArguments::None => true,
+ PathArguments::AngleBracketed(bracketed) => bracketed.args.is_empty(),
+ PathArguments::Parenthesized(_) => false,
+ }
+ }
+
+ pub fn is_none(&self) -> bool {
+ match self {
+ PathArguments::None => true,
+ PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
+ }
+ }
+}
+
+ast_enum! {
+ /// An individual generic argument, like `'a`, `T`, or `Item = T`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum GenericArgument {
+ /// A lifetime argument.
+ Lifetime(Lifetime),
+ /// A type argument.
+ Type(Type),
+ /// A const expression. Must be inside of a block.
+ ///
+ /// NOTE: Identity expressions are represented as Type arguments, as
+ /// they are indistinguishable syntactically.
+ Const(Expr),
+ /// A binding (equality constraint) on an associated type: the `Item =
+ /// u8` in `Iterator<Item = u8>`.
+ AssocType(AssocType),
+ /// An equality constraint on an associated constant: the `PANIC =
+ /// false` in `Trait<PANIC = false>`.
+ AssocConst(AssocConst),
+ /// An associated type bound: `Iterator<Item: Display>`.
+ Constraint(Constraint),
+ }
+}
+
+ast_struct! {
+ /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
+ /// V>`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct AngleBracketedGenericArguments {
+ pub colon2_token: Option<Token![::]>,
+ pub lt_token: Token![<],
+ pub args: Punctuated<GenericArgument, Token![,]>,
+ pub gt_token: Token![>],
+ }
+}
+
+ast_struct! {
+ /// A binding (equality constraint) on an associated type: the `Item = u8`
+ /// in `Iterator<Item = u8>`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct AssocType {
+ pub ident: Ident,
+ pub generics: Option<AngleBracketedGenericArguments>,
+ pub eq_token: Token![=],
+ pub ty: Type,
+ }
+}
+
+ast_struct! {
+ /// An equality constraint on an associated constant: the `PANIC = false` in
+ /// `Trait<PANIC = false>`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct AssocConst {
+ pub ident: Ident,
+ pub generics: Option<AngleBracketedGenericArguments>,
+ pub eq_token: Token![=],
+ pub value: Expr,
+ }
+}
+
+ast_struct! {
+ /// An associated type bound: `Iterator<Item: Display>`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Constraint {
+ pub ident: Ident,
+ pub generics: Option<AngleBracketedGenericArguments>,
+ pub colon_token: Token![:],
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+}
+
+ast_struct! {
+ /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
+ /// C`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct ParenthesizedGenericArguments {
+ pub paren_token: token::Paren,
+ /// `(A, B)`
+ pub inputs: Punctuated<Type, Token![,]>,
+ /// `C`
+ pub output: ReturnType,
+ }
+}
+
+ast_struct! {
+ /// The explicit Self type in a qualified path: the `T` in `<T as
+ /// Display>::fmt`.
+ ///
+ /// The actual path, including the trait and the associated item, is stored
+ /// separately. The `position` field represents the index of the associated
+ /// item qualified with this Self type.
+ ///
+ /// ```text
+ /// <Vec<T> as a::b::Trait>::AssociatedItem
+ /// ^~~~~~ ~~~~~~~~~~~~~~^
+ /// ty position = 3
+ ///
+ /// <Vec<T>>::AssociatedItem
+ /// ^~~~~~ ^
+ /// ty position = 0
+ /// ```
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct QSelf {
+ pub lt_token: Token![<],
+ pub ty: Box<Type>,
+ pub position: usize,
+ pub as_token: Option<Token![as]>,
+ pub gt_token: Token![>],
+ }
+}
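
To make the `position` bookkeeping above concrete, a small check of how a
qualified path comes out of the parser (a sketch, assuming the "full",
"parsing" and "printing" features):

    use syn::{parse_quote, Type, TypePath};

    fn main() {
        // `<Vec<T> as IntoIterator>::Item`: the stored path is
        // `IntoIterator::Item`, and `position` is 1 because one leading
        // segment (the trait) is qualified by the Self type `Vec<T>`.
        let ty: Type = parse_quote!(<Vec<T> as IntoIterator>::Item);
        if let Type::Path(TypePath { qself: Some(q), path }) = ty {
            assert_eq!(q.position, 1);
            assert_eq!(path.segments.len(), 2);
            assert!(q.as_token.is_some());
        } else {
            panic!("expected a qualified type path");
        }
    }
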
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::error::Result;
+ #[cfg(feature = "full")]
+ use crate::expr::ExprBlock;
+ use crate::expr::{Expr, ExprPath};
+ use crate::ext::IdentExt as _;
+ #[cfg(feature = "full")]
+ use crate::generics::TypeParamBound;
+ use crate::ident::Ident;
+ use crate::lifetime::Lifetime;
+ use crate::lit::Lit;
+ use crate::parse::{Parse, ParseStream};
+ #[cfg(feature = "full")]
+ use crate::path::Constraint;
+ use crate::path::{
+ AngleBracketedGenericArguments, AssocConst, AssocType, GenericArgument,
+ ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
+ };
+ use crate::punctuated::Punctuated;
+ use crate::token;
+ use crate::ty::{ReturnType, Type};
+ #[cfg(not(feature = "full"))]
+ use crate::verbatim;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Path {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Self::parse_helper(input, false)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for GenericArgument {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Lifetime) && !input.peek2(Token![+]) {
+ return Ok(GenericArgument::Lifetime(input.parse()?));
+ }
+
+ if input.peek(Lit) || input.peek(token::Brace) {
+ return const_argument(input).map(GenericArgument::Const);
+ }
+
+ let mut argument: Type = input.parse()?;
+
+ match argument {
+ Type::Path(mut ty)
+ if ty.qself.is_none()
+ && ty.path.leading_colon.is_none()
+ && ty.path.segments.len() == 1
+ && match &ty.path.segments[0].arguments {
+ PathArguments::None | PathArguments::AngleBracketed(_) => true,
+ PathArguments::Parenthesized(_) => false,
+ } =>
+ {
+ if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
+ let segment = ty.path.segments.pop().unwrap().into_value();
+ let ident = segment.ident;
+ let generics = match segment.arguments {
+ PathArguments::None => None,
+ PathArguments::AngleBracketed(arguments) => Some(arguments),
+ PathArguments::Parenthesized(_) => unreachable!(),
+ };
+ return if input.peek(Lit) || input.peek(token::Brace) {
+ Ok(GenericArgument::AssocConst(AssocConst {
+ ident,
+ generics,
+ eq_token,
+ value: const_argument(input)?,
+ }))
+ } else {
+ Ok(GenericArgument::AssocType(AssocType {
+ ident,
+ generics,
+ eq_token,
+ ty: input.parse()?,
+ }))
+ };
+ }
+
+ #[cfg(feature = "full")]
+ if let Some(colon_token) = input.parse::<Option<Token![:]>>()? {
+ let segment = ty.path.segments.pop().unwrap().into_value();
+ return Ok(GenericArgument::Constraint(Constraint {
+ ident: segment.ident,
+ generics: match segment.arguments {
+ PathArguments::None => None,
+ PathArguments::AngleBracketed(arguments) => Some(arguments),
+ PathArguments::Parenthesized(_) => unreachable!(),
+ },
+ colon_token,
+ bounds: {
+ let mut bounds = Punctuated::new();
+ loop {
+ if input.peek(Token![,]) || input.peek(Token![>]) {
+ break;
+ }
+ let value: TypeParamBound = input.parse()?;
+ bounds.push_value(value);
+ if !input.peek(Token![+]) {
+ break;
+ }
+ let punct: Token![+] = input.parse()?;
+ bounds.push_punct(punct);
+ }
+ bounds
+ },
+ }));
+ }
+
+ argument = Type::Path(ty);
+ }
+ _ => {}
+ }
+
+ Ok(GenericArgument::Type(argument))
+ }
+ }
+
+ pub(crate) fn const_argument(input: ParseStream) -> Result<Expr> {
+ let lookahead = input.lookahead1();
+
+ if input.peek(Lit) {
+ let lit = input.parse()?;
+ return Ok(Expr::Lit(lit));
+ }
+
+ if input.peek(Ident) {
+ let ident: Ident = input.parse()?;
+ return Ok(Expr::Path(ExprPath {
+ attrs: Vec::new(),
+ qself: None,
+ path: Path::from(ident),
+ }));
+ }
+
+ if input.peek(token::Brace) {
+ #[cfg(feature = "full")]
+ {
+ let block: ExprBlock = input.parse()?;
+ return Ok(Expr::Block(block));
+ }
+
+ #[cfg(not(feature = "full"))]
+ {
+ let begin = input.fork();
+ let content;
+ braced!(content in input);
+ content.parse::<Expr>()?;
+ let verbatim = verbatim::between(&begin, input);
+ return Ok(Expr::Verbatim(verbatim));
+ }
+ }
+
+ Err(lookahead.error())
+ }
+
+ impl AngleBracketedGenericArguments {
+ /// Parse `::<…>` with mandatory leading `::`.
+ ///
+ /// The ordinary [`Parse`] impl for `AngleBracketedGenericArguments`
+ /// parses optional leading `::`.
+ #[cfg(feature = "full")]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "full"))))]
+ pub fn parse_turbofish(input: ParseStream) -> Result<Self> {
+ let colon2_token: Token![::] = input.parse()?;
+ Self::do_parse(Some(colon2_token), input)
+ }
+
+ pub(crate) fn do_parse(
+ colon2_token: Option<Token![::]>,
+ input: ParseStream,
+ ) -> Result<Self> {
+ Ok(AngleBracketedGenericArguments {
+ colon2_token,
+ lt_token: input.parse()?,
+ args: {
+ let mut args = Punctuated::new();
+ loop {
+ if input.peek(Token![>]) {
+ break;
+ }
+ let value: GenericArgument = input.parse()?;
+ args.push_value(value);
+ if input.peek(Token![>]) {
+ break;
+ }
+ let punct: Token![,] = input.parse()?;
+ args.push_punct(punct);
+ }
+ args
+ },
+ gt_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for AngleBracketedGenericArguments {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let colon2_token: Option<Token![::]> = input.parse()?;
+ Self::do_parse(colon2_token, input)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ParenthesizedGenericArguments {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ Ok(ParenthesizedGenericArguments {
+ paren_token: parenthesized!(content in input),
+ inputs: content.parse_terminated(Type::parse, Token![,])?,
+ output: input.call(ReturnType::without_plus)?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for PathSegment {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Self::parse_helper(input, false)
+ }
+ }
+
+ impl PathSegment {
+ fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+ if input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![crate])
+ || cfg!(feature = "full") && input.peek(Token![try])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ return Ok(PathSegment::from(ident));
+ }
+
+ let ident = if input.peek(Token![Self]) {
+ input.call(Ident::parse_any)?
+ } else {
+ input.parse()?
+ };
+
+ if !expr_style && input.peek(Token![<]) && !input.peek(Token![<=])
+ || input.peek(Token![::]) && input.peek3(Token![<])
+ {
+ Ok(PathSegment {
+ ident,
+ arguments: PathArguments::AngleBracketed(input.parse()?),
+ })
+ } else {
+ Ok(PathSegment::from(ident))
+ }
+ }
+ }
+
+ impl Path {
+ /// Parse a `Path` containing no path arguments on any of its segments.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{Path, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // A simplified single `use` statement like:
+ /// //
+ /// // use std::collections::HashMap;
+ /// //
+ /// // Note that generic parameters are not allowed in a `use` statement
+ /// // so the following must not be accepted.
+ /// //
+ /// // use a::<b>::c;
+ /// struct SingleUse {
+ /// use_token: Token![use],
+ /// path: Path,
+ /// }
+ ///
+ /// impl Parse for SingleUse {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// Ok(SingleUse {
+ /// use_token: input.parse()?,
+ /// path: input.call(Path::parse_mod_style)?,
+ /// })
+ /// }
+ /// }
+ /// ```
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_mod_style(input: ParseStream) -> Result<Self> {
+ Ok(Path {
+ leading_colon: input.parse()?,
+ segments: {
+ let mut segments = Punctuated::new();
+ loop {
+ if !input.peek(Ident)
+ && !input.peek(Token![super])
+ && !input.peek(Token![self])
+ && !input.peek(Token![Self])
+ && !input.peek(Token![crate])
+ {
+ break;
+ }
+ let ident = Ident::parse_any(input)?;
+ segments.push_value(PathSegment::from(ident));
+ if !input.peek(Token![::]) {
+ break;
+ }
+ let punct = input.parse()?;
+ segments.push_punct(punct);
+ }
+ if segments.is_empty() {
+ return Err(input.parse::<Ident>().unwrap_err());
+ } else if segments.trailing_punct() {
+ return Err(input.error("expected path segment after `::`"));
+ }
+ segments
+ },
+ })
+ }
+
+ pub(crate) fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+ let mut path = Path {
+ leading_colon: input.parse()?,
+ segments: {
+ let mut segments = Punctuated::new();
+ let value = PathSegment::parse_helper(input, expr_style)?;
+ segments.push_value(value);
+ segments
+ },
+ };
+ Path::parse_rest(input, &mut path, expr_style)?;
+ Ok(path)
+ }
+
+ pub(crate) fn parse_rest(
+ input: ParseStream,
+ path: &mut Self,
+ expr_style: bool,
+ ) -> Result<()> {
+ while input.peek(Token![::]) && !input.peek3(token::Paren) {
+ let punct: Token![::] = input.parse()?;
+ path.segments.push_punct(punct);
+ let value = PathSegment::parse_helper(input, expr_style)?;
+ path.segments.push_value(value);
+ }
+ Ok(())
+ }
+
+ pub(crate) fn is_mod_style(&self) -> bool {
+ self.segments
+ .iter()
+ .all(|segment| segment.arguments.is_none())
+ }
+ }
+
+ pub(crate) fn qpath(input: ParseStream, expr_style: bool) -> Result<(Option<QSelf>, Path)> {
+ if input.peek(Token![<]) {
+ let lt_token: Token![<] = input.parse()?;
+ let this: Type = input.parse()?;
+ let path = if input.peek(Token![as]) {
+ let as_token: Token![as] = input.parse()?;
+ let path: Path = input.parse()?;
+ Some((as_token, path))
+ } else {
+ None
+ };
+ let gt_token: Token![>] = input.parse()?;
+ let colon2_token: Token![::] = input.parse()?;
+ let mut rest = Punctuated::new();
+ loop {
+ let path = PathSegment::parse_helper(input, expr_style)?;
+ rest.push_value(path);
+ if !input.peek(Token![::]) {
+ break;
+ }
+ let punct: Token![::] = input.parse()?;
+ rest.push_punct(punct);
+ }
+ let (position, as_token, path) = match path {
+ Some((as_token, mut path)) => {
+ let pos = path.segments.len();
+ path.segments.push_punct(colon2_token);
+ path.segments.extend(rest.into_pairs());
+ (pos, Some(as_token), path)
+ }
+ None => {
+ let path = Path {
+ leading_colon: Some(colon2_token),
+ segments: rest,
+ };
+ (0, None, path)
+ }
+ };
+ let qself = QSelf {
+ lt_token,
+ ty: Box::new(this),
+ position,
+ as_token,
+ gt_token,
+ };
+ Ok((Some(qself), path))
+ } else {
+ let path = Path::parse_helper(input, expr_style)?;
+ Ok((None, path))
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+pub(crate) mod printing {
+ use crate::generics;
+ use crate::path::{
+ AngleBracketedGenericArguments, AssocConst, AssocType, Constraint, GenericArgument,
+ ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
+ };
+ use crate::print::TokensOrDefault;
+ #[cfg(feature = "parsing")]
+ use crate::spanned::Spanned;
+ #[cfg(feature = "parsing")]
+ use proc_macro2::Span;
+ use proc_macro2::TokenStream;
+ use quote::ToTokens;
+ use std::cmp;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Path {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.leading_colon.to_tokens(tokens);
+ self.segments.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PathSegment {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.ident.to_tokens(tokens);
+ self.arguments.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for PathArguments {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ PathArguments::None => {}
+ PathArguments::AngleBracketed(arguments) => {
+ arguments.to_tokens(tokens);
+ }
+ PathArguments::Parenthesized(arguments) => {
+ arguments.to_tokens(tokens);
+ }
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for GenericArgument {
+ #[allow(clippy::match_same_arms)]
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
+ GenericArgument::Type(ty) => ty.to_tokens(tokens),
+ GenericArgument::Const(expr) => {
+ generics::printing::print_const_argument(expr, tokens);
+ }
+ GenericArgument::AssocType(assoc) => assoc.to_tokens(tokens),
+ GenericArgument::AssocConst(assoc) => assoc.to_tokens(tokens),
+ GenericArgument::Constraint(constraint) => constraint.to_tokens(tokens),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for AngleBracketedGenericArguments {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.colon2_token.to_tokens(tokens);
+ self.lt_token.to_tokens(tokens);
+
+ // Print lifetimes before types/consts/bindings, regardless of their
+ // order in self.args.
+ let mut trailing_or_empty = true;
+ for param in self.args.pairs() {
+ match param.value() {
+ GenericArgument::Lifetime(_) => {
+ param.to_tokens(tokens);
+ trailing_or_empty = param.punct().is_some();
+ }
+ GenericArgument::Type(_)
+ | GenericArgument::Const(_)
+ | GenericArgument::AssocType(_)
+ | GenericArgument::AssocConst(_)
+ | GenericArgument::Constraint(_) => {}
+ }
+ }
+ for param in self.args.pairs() {
+ match param.value() {
+ GenericArgument::Type(_)
+ | GenericArgument::Const(_)
+ | GenericArgument::AssocType(_)
+ | GenericArgument::AssocConst(_)
+ | GenericArgument::Constraint(_) => {
+ if !trailing_or_empty {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ param.to_tokens(tokens);
+ trailing_or_empty = param.punct().is_some();
+ }
+ GenericArgument::Lifetime(_) => {}
+ }
+ }
+
+ self.gt_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for AssocType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for AssocConst {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.eq_token.to_tokens(tokens);
+ generics::printing::print_const_argument(&self.value, tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Constraint {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ParenthesizedGenericArguments {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.paren_token.surround(tokens, |tokens| {
+ self.inputs.to_tokens(tokens);
+ });
+ self.output.to_tokens(tokens);
+ }
+ }
+
+ pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) {
+ let qself = match qself {
+ Some(qself) => qself,
+ None => {
+ path.to_tokens(tokens);
+ return;
+ }
+ };
+ qself.lt_token.to_tokens(tokens);
+ qself.ty.to_tokens(tokens);
+
+ let pos = cmp::min(qself.position, path.segments.len());
+ let mut segments = path.segments.pairs();
+ if pos > 0 {
+ TokensOrDefault(&qself.as_token).to_tokens(tokens);
+ path.leading_colon.to_tokens(tokens);
+ for (i, segment) in segments.by_ref().take(pos).enumerate() {
+ if i + 1 == pos {
+ segment.value().to_tokens(tokens);
+ qself.gt_token.to_tokens(tokens);
+ segment.punct().to_tokens(tokens);
+ } else {
+ segment.to_tokens(tokens);
+ }
+ }
+ } else {
+ qself.gt_token.to_tokens(tokens);
+ path.leading_colon.to_tokens(tokens);
+ }
+ for segment in segments {
+ segment.to_tokens(tokens);
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))]
+ impl Spanned for QSelf {
+ fn span(&self) -> Span {
+ struct QSelfDelimiters<'a>(&'a QSelf);
+
+ impl<'a> ToTokens for QSelfDelimiters<'a> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.0.lt_token.to_tokens(tokens);
+ self.0.gt_token.to_tokens(tokens);
+ }
+ }
+
+ QSelfDelimiters(self).span()
+ }
+ }
+}
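
As a sketch of the turbofish distinction made in `parse_turbofish` above
(assumes the "full" and "parsing" features plus the quote crate): the
expression-position form requires the leading `::`, while the ordinary `Parse`
impl treats it as optional.

    use quote::quote;
    use syn::parse::Parser;
    use syn::AngleBracketedGenericArguments;

    fn main() -> syn::Result<()> {
        // `::<u8, 4>` parses: one type argument and one const argument.
        let args = AngleBracketedGenericArguments::parse_turbofish
            .parse2(quote!(::<u8, 4>))?;
        assert_eq!(args.args.len(), 2);

        // Without the leading `::`, parse_turbofish rejects the input.
        assert!(AngleBracketedGenericArguments::parse_turbofish
            .parse2(quote!(<u8>))
            .is_err());
        Ok(())
    }
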
diff --git a/rust/hw/char/pl011/vendor/syn/src/precedence.rs b/rust/hw/char/pl011/vendor/syn/src/precedence.rs
new file mode 100644
index 0000000000..450958ad34
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/precedence.rs
@@ -0,0 +1,163 @@
+#[cfg(feature = "printing")]
+use crate::expr::Expr;
+use crate::op::BinOp;
+#[cfg(all(feature = "printing", feature = "full"))]
+use crate::ty::ReturnType;
+use std::cmp::Ordering;
+
+// Reference: https://doc.rust-lang.org/reference/expressions.html#expression-precedence
+pub(crate) enum Precedence {
+ // return, break, closures
+ Any,
+ // = += -= *= /= %= &= |= ^= <<= >>=
+ Assign,
+ // .. ..=
+ Range,
+ // ||
+ Or,
+ // &&
+ And,
+ // == != < > <= >=
+ Compare,
+ // |
+ BitOr,
+ // ^
+ BitXor,
+ // &
+ BitAnd,
+ // << >>
+ Shift,
+ // + -
+ Arithmetic,
+ // * / %
+ Term,
+ // as
+ Cast,
+ // unary - * ! & &mut
+ #[cfg(feature = "printing")]
+ Prefix,
+ // function calls, array indexing, field expressions, method calls, ?
+ #[cfg(feature = "printing")]
+ Postfix,
+ // paths, loops
+ #[cfg(feature = "printing")]
+ Unambiguous,
+}
+
+impl Precedence {
+ pub(crate) fn of_binop(op: &BinOp) -> Self {
+ match op {
+ BinOp::Add(_) | BinOp::Sub(_) => Precedence::Arithmetic,
+ BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Term,
+ BinOp::And(_) => Precedence::And,
+ BinOp::Or(_) => Precedence::Or,
+ BinOp::BitXor(_) => Precedence::BitXor,
+ BinOp::BitAnd(_) => Precedence::BitAnd,
+ BinOp::BitOr(_) => Precedence::BitOr,
+ BinOp::Shl(_) | BinOp::Shr(_) => Precedence::Shift,
+
+ BinOp::Eq(_)
+ | BinOp::Lt(_)
+ | BinOp::Le(_)
+ | BinOp::Ne(_)
+ | BinOp::Ge(_)
+ | BinOp::Gt(_) => Precedence::Compare,
+
+ BinOp::AddAssign(_)
+ | BinOp::SubAssign(_)
+ | BinOp::MulAssign(_)
+ | BinOp::DivAssign(_)
+ | BinOp::RemAssign(_)
+ | BinOp::BitXorAssign(_)
+ | BinOp::BitAndAssign(_)
+ | BinOp::BitOrAssign(_)
+ | BinOp::ShlAssign(_)
+ | BinOp::ShrAssign(_) => Precedence::Assign,
+ }
+ }
+
+ #[cfg(feature = "printing")]
+ pub(crate) fn of(e: &Expr) -> Self {
+ match e {
+ #[cfg(feature = "full")]
+ Expr::Closure(e) => match e.output {
+ ReturnType::Default => Precedence::Any,
+ ReturnType::Type(..) => Precedence::Unambiguous,
+ },
+
+ Expr::Break(_) | Expr::Return(_) | Expr::Yield(_) => Precedence::Any,
+ Expr::Assign(_) => Precedence::Assign,
+ Expr::Range(_) => Precedence::Range,
+ Expr::Binary(e) => Precedence::of_binop(&e.op),
+ Expr::Cast(_) => Precedence::Cast,
+ Expr::Let(_) | Expr::Reference(_) | Expr::Unary(_) => Precedence::Prefix,
+
+ Expr::Await(_)
+ | Expr::Call(_)
+ | Expr::MethodCall(_)
+ | Expr::Field(_)
+ | Expr::Index(_)
+ | Expr::Try(_) => Precedence::Postfix,
+
+ Expr::Array(_)
+ | Expr::Async(_)
+ | Expr::Block(_)
+ | Expr::Const(_)
+ | Expr::Continue(_)
+ | Expr::ForLoop(_)
+ | Expr::Group(_)
+ | Expr::If(_)
+ | Expr::Infer(_)
+ | Expr::Lit(_)
+ | Expr::Loop(_)
+ | Expr::Macro(_)
+ | Expr::Match(_)
+ | Expr::Paren(_)
+ | Expr::Path(_)
+ | Expr::Repeat(_)
+ | Expr::Struct(_)
+ | Expr::TryBlock(_)
+ | Expr::Tuple(_)
+ | Expr::Unsafe(_)
+ | Expr::Verbatim(_)
+ | Expr::While(_) => Precedence::Unambiguous,
+
+ #[cfg(not(feature = "full"))]
+ Expr::Closure(_) => unreachable!(),
+ }
+ }
+
+ #[cfg(feature = "printing")]
+ pub(crate) fn of_rhs(e: &Expr) -> Self {
+ match e {
+ Expr::Break(_) | Expr::Closure(_) | Expr::Return(_) | Expr::Yield(_) => {
+ Precedence::Prefix
+ }
+ #[cfg(feature = "full")]
+ Expr::Range(e) if e.start.is_none() => Precedence::Prefix,
+ _ => Precedence::of(e),
+ }
+ }
+}
+
+impl Copy for Precedence {}
+
+impl Clone for Precedence {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl PartialEq for Precedence {
+ fn eq(&self, other: &Self) -> bool {
+ *self as u8 == *other as u8
+ }
+}
+
+impl PartialOrd for Precedence {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ let this = *self as u8;
+ let other = *other as u8;
+ Some(this.cmp(&other))
+ }
+}
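
The precedence table above is crate-internal, but its effect is visible in the
shape of the tree the parser builds; a small sketch (assuming the "full"
feature for expression parsing):

    use syn::{parse_quote, BinOp, Expr};

    fn main() {
        // Precedence is what makes `1 + 2 * 3` parse as `1 + (2 * 3)`:
        // the outermost node is the lower-precedence `+`, with the `*`
        // nested inside its right-hand operand.
        let e: Expr = parse_quote!(1 + 2 * 3);
        if let Expr::Binary(outer) = e {
            assert!(matches!(outer.op, BinOp::Add(_)));
            assert!(matches!(*outer.right, Expr::Binary(_)));
        } else {
            panic!("expected a binary expression");
        }
    }
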
diff --git a/rust/hw/char/pl011/vendor/syn/src/print.rs b/rust/hw/char/pl011/vendor/syn/src/print.rs
new file mode 100644
index 0000000000..0740993267
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/print.rs
@@ -0,0 +1,16 @@
+use proc_macro2::TokenStream;
+use quote::ToTokens;
+
+pub(crate) struct TokensOrDefault<'a, T: 'a>(pub &'a Option<T>);
+
+impl<'a, T> ToTokens for TokensOrDefault<'a, T>
+where
+ T: ToTokens + Default,
+{
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self.0 {
+ Some(t) => t.to_tokens(tokens),
+ None => T::default().to_tokens(tokens),
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/punctuated.rs b/rust/hw/char/pl011/vendor/syn/src/punctuated.rs
new file mode 100644
index 0000000000..29e8dce15f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/punctuated.rs
@@ -0,0 +1,1132 @@
+//! A punctuated sequence of syntax tree nodes separated by punctuation.
+//!
+//! Lots of things in Rust are punctuated sequences.
+//!
+//! - The fields of a struct are `Punctuated<Field, Token![,]>`.
+//! - The segments of a path are `Punctuated<PathSegment, Token![::]>`.
+//! - The bounds on a generic parameter are `Punctuated<TypeParamBound,
+//! Token![+]>`.
+//! - The arguments to a function call are `Punctuated<Expr, Token![,]>`.
+//!
+//! This module provides a common representation for these punctuated sequences
+//! in the form of the [`Punctuated<T, P>`] type. We store a vector of pairs of
+//! syntax tree node + punctuation, where every node in the sequence is followed
+//! by punctuation except for possibly the final one.
+//!
+//! [`Punctuated<T, P>`]: Punctuated
+//!
+//! ```text
+//! a_function_call(arg1, arg2, arg3);
+//! ~~~~^ ~~~~^ ~~~~
+//! ```
+
+use crate::drops::{NoDrop, TrivialDrop};
+#[cfg(feature = "parsing")]
+use crate::error::Result;
+#[cfg(feature = "parsing")]
+use crate::parse::{Parse, ParseStream};
+#[cfg(feature = "parsing")]
+use crate::token::Token;
+#[cfg(feature = "extra-traits")]
+use std::fmt::{self, Debug};
+#[cfg(feature = "extra-traits")]
+use std::hash::{Hash, Hasher};
+#[cfg(any(feature = "full", feature = "derive"))]
+use std::iter;
+use std::ops::{Index, IndexMut};
+use std::option;
+use std::slice;
+use std::vec;
+
+/// **A punctuated sequence of syntax tree nodes of type `T` separated by
+/// punctuation of type `P`.**
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub struct Punctuated<T, P> {
+ inner: Vec<(T, P)>,
+ last: Option<Box<T>>,
+}
+
+impl<T, P> Punctuated<T, P> {
+ /// Creates an empty punctuated sequence.
+ pub const fn new() -> Self {
+ Punctuated {
+ inner: Vec::new(),
+ last: None,
+ }
+ }
+
+ /// Determines whether this punctuated sequence is empty, meaning it
+ /// contains no syntax tree nodes or punctuation.
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0 && self.last.is_none()
+ }
+
+ /// Returns the number of syntax tree nodes in this punctuated sequence.
+ ///
+ /// This is the number of nodes of type `T`, not counting the punctuation of
+ /// type `P`.
+ pub fn len(&self) -> usize {
+ self.inner.len() + if self.last.is_some() { 1 } else { 0 }
+ }
+
+ /// Borrows the first element in this sequence.
+ pub fn first(&self) -> Option<&T> {
+ self.iter().next()
+ }
+
+ /// Mutably borrows the first element in this sequence.
+ pub fn first_mut(&mut self) -> Option<&mut T> {
+ self.iter_mut().next()
+ }
+
+ /// Borrows the last element in this sequence.
+ pub fn last(&self) -> Option<&T> {
+ self.iter().next_back()
+ }
+
+ /// Mutably borrows the last element in this sequence.
+ pub fn last_mut(&mut self) -> Option<&mut T> {
+ self.iter_mut().next_back()
+ }
+
+ /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
+ pub fn iter(&self) -> Iter<T> {
+ Iter {
+ inner: Box::new(NoDrop::new(PrivateIter {
+ inner: self.inner.iter(),
+ last: self.last.as_ref().map(Box::as_ref).into_iter(),
+ })),
+ }
+ }
+
+ /// Returns an iterator over mutably borrowed syntax tree nodes of type
+ /// `&mut T`.
+ pub fn iter_mut(&mut self) -> IterMut<T> {
+ IterMut {
+ inner: Box::new(NoDrop::new(PrivateIterMut {
+ inner: self.inner.iter_mut(),
+ last: self.last.as_mut().map(Box::as_mut).into_iter(),
+ })),
+ }
+ }
+
+ /// Returns an iterator over the contents of this sequence as borrowed
+ /// punctuated pairs.
+ pub fn pairs(&self) -> Pairs<T, P> {
+ Pairs {
+ inner: self.inner.iter(),
+ last: self.last.as_ref().map(Box::as_ref).into_iter(),
+ }
+ }
+
+ /// Returns an iterator over the contents of this sequence as mutably
+ /// borrowed punctuated pairs.
+ pub fn pairs_mut(&mut self) -> PairsMut<T, P> {
+ PairsMut {
+ inner: self.inner.iter_mut(),
+ last: self.last.as_mut().map(Box::as_mut).into_iter(),
+ }
+ }
+
+ /// Returns an iterator over the contents of this sequence as owned
+ /// punctuated pairs.
+ pub fn into_pairs(self) -> IntoPairs<T, P> {
+ IntoPairs {
+ inner: self.inner.into_iter(),
+ last: self.last.map(|t| *t).into_iter(),
+ }
+ }
+
+ /// Appends a syntax tree node onto the end of this punctuated sequence. The
+ /// sequence must already have a trailing punctuation, or be empty.
+ ///
+ /// Use [`push`] instead if the punctuated sequence may or may not already
+ /// have trailing punctuation.
+ ///
+ /// [`push`]: Punctuated::push
+ ///
+ /// # Panics
+ ///
+ /// Panics if the sequence is nonempty and does not already have a trailing
+ /// punctuation.
+ pub fn push_value(&mut self, value: T) {
+ assert!(
+ self.empty_or_trailing(),
+ "Punctuated::push_value: cannot push value if Punctuated is missing trailing punctuation",
+ );
+
+ self.last = Some(Box::new(value));
+ }
+
+ /// Appends a trailing punctuation onto the end of this punctuated sequence.
+ /// The sequence must be non-empty and must not already have trailing
+ /// punctuation.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the sequence is empty or already has a trailing punctuation.
+ pub fn push_punct(&mut self, punctuation: P) {
+ assert!(
+ self.last.is_some(),
+ "Punctuated::push_punct: cannot push punctuation if Punctuated is empty or already has trailing punctuation",
+ );
+
+ let last = self.last.take().unwrap();
+ self.inner.push((*last, punctuation));
+ }
+
+ /// Removes the last punctuated pair from this sequence, or `None` if the
+ /// sequence is empty.
+ pub fn pop(&mut self) -> Option<Pair<T, P>> {
+ if self.last.is_some() {
+ self.last.take().map(|t| Pair::End(*t))
+ } else {
+ self.inner.pop().map(|(t, p)| Pair::Punctuated(t, p))
+ }
+ }
+
+ /// Removes the trailing punctuation from this punctuated sequence, or
+ /// `None` if there isn't any.
+ pub fn pop_punct(&mut self) -> Option<P> {
+ if self.last.is_some() {
+ None
+ } else {
+ let (t, p) = self.inner.pop()?;
+ self.last = Some(Box::new(t));
+ Some(p)
+ }
+ }
+
+ /// Determines whether this punctuated sequence ends with a trailing
+ /// punctuation.
+ pub fn trailing_punct(&self) -> bool {
+ self.last.is_none() && !self.is_empty()
+ }
+
+ /// Returns true if either this `Punctuated` is empty, or it has a trailing
+ /// punctuation.
+ ///
+ /// Equivalent to `punctuated.is_empty() || punctuated.trailing_punct()`.
+ pub fn empty_or_trailing(&self) -> bool {
+ self.last.is_none()
+ }
+
+ /// Appends a syntax tree node onto the end of this punctuated sequence.
+ ///
+ /// If there is not a trailing punctuation in this sequence when this method
+ /// is called, the default value of punctuation type `P` is inserted before
+ /// the given value of type `T`.
+ pub fn push(&mut self, value: T)
+ where
+ P: Default,
+ {
+ if !self.empty_or_trailing() {
+ self.push_punct(Default::default());
+ }
+ self.push_value(value);
+ }
+
+ /// Inserts an element at position `index`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is greater than the number of elements previously in
+ /// this punctuated sequence.
+ pub fn insert(&mut self, index: usize, value: T)
+ where
+ P: Default,
+ {
+ assert!(
+ index <= self.len(),
+ "Punctuated::insert: index out of range",
+ );
+
+ if index == self.len() {
+ self.push(value);
+ } else {
+ self.inner.insert(index, (value, Default::default()));
+ }
+ }
+
+ /// Clears the sequence of all values and punctuation, making it empty.
+ pub fn clear(&mut self) {
+ self.inner.clear();
+ self.last = None;
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_terminated(input: ParseStream) -> Result<Self>
+ where
+ T: Parse,
+ P: Parse,
+ {
+ Self::parse_terminated_with(input, T::parse)
+ }
+
+ /// Parses zero or more occurrences of `T` using the given parse function,
+ /// separated by punctuation of type `P`, with optional trailing
+ /// punctuation.
+ ///
+ /// Like [`parse_terminated`], the entire content of this stream is expected
+ /// to be parsed.
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_terminated_with(
+ input: ParseStream,
+ parser: fn(ParseStream) -> Result<T>,
+ ) -> Result<Self>
+ where
+ P: Parse,
+ {
+ let mut punctuated = Punctuated::new();
+
+ loop {
+ if input.is_empty() {
+ break;
+ }
+ let value = parser(input)?;
+ punctuated.push_value(value);
+ if input.is_empty() {
+ break;
+ }
+ let punct = input.parse()?;
+ punctuated.push_punct(punct);
+ }
+
+ Ok(punctuated)
+ }
+
+ /// Parses one or more occurrences of `T` separated by punctuation of type
+ /// `P`, not accepting trailing punctuation.
+ ///
+ /// Parsing continues as long as punctuation `P` is present at the head of
+ /// the stream. This method returns upon parsing a `T` and observing that it
+ /// is not followed by a `P`, even if there are remaining tokens in the
+ /// stream.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
+ where
+ T: Parse,
+ P: Token + Parse,
+ {
+ Self::parse_separated_nonempty_with(input, T::parse)
+ }
+
+ /// Parses one or more occurrences of `T` using the given parse function,
+ /// separated by punctuation of type `P`, not accepting trailing
+ /// punctuation.
+ ///
+ /// Like [`parse_separated_nonempty`], may complete early without parsing
+ /// the entire content of this stream.
+ ///
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_separated_nonempty_with(
+ input: ParseStream,
+ parser: fn(ParseStream) -> Result<T>,
+ ) -> Result<Self>
+ where
+ P: Token + Parse,
+ {
+ let mut punctuated = Punctuated::new();
+
+ loop {
+ let value = parser(input)?;
+ punctuated.push_value(value);
+ if !P::peek(input.cursor()) {
+ break;
+ }
+ let punct = input.parse()?;
+ punctuated.push_punct(punct);
+ }
+
+ Ok(punctuated)
+ }
+}
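
A short sketch of the two ways the API above is typically used: driving
`parse_terminated` through the `Parser` trait, and building a sequence with
`push` (assumes the "parsing" feature and the quote crate):

    use quote::quote;
    use syn::parse::Parser;
    use syn::punctuated::Punctuated;
    use syn::{Ident, Token};

    fn main() -> syn::Result<()> {
        // Parse a comma-separated identifier list; a trailing comma is allowed.
        let idents = Punctuated::<Ident, Token![,]>::parse_terminated
            .parse2(quote!(a, b, c,))?;
        assert_eq!(idents.len(), 3);
        assert!(idents.trailing_punct());

        // Build a sequence by hand: `push` inserts the separator as needed.
        let mut built: Punctuated<Ident, Token![,]> = Punctuated::new();
        for id in idents {
            built.push(id);
        }
        assert_eq!(built.len(), 3);
        assert!(!built.trailing_punct());
        Ok(())
    }
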
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl<T, P> Clone for Punctuated<T, P>
+where
+ T: Clone,
+ P: Clone,
+{
+ fn clone(&self) -> Self {
+ Punctuated {
+ inner: self.inner.clone(),
+ last: self.last.clone(),
+ }
+ }
+
+ fn clone_from(&mut self, other: &Self) {
+ self.inner.clone_from(&other.inner);
+ self.last.clone_from(&other.last);
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl<T, P> Eq for Punctuated<T, P>
+where
+ T: Eq,
+ P: Eq,
+{
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl<T, P> PartialEq for Punctuated<T, P>
+where
+ T: PartialEq,
+ P: PartialEq,
+{
+ fn eq(&self, other: &Self) -> bool {
+ let Punctuated { inner, last } = self;
+ *inner == other.inner && *last == other.last
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl<T, P> Hash for Punctuated<T, P>
+where
+ T: Hash,
+ P: Hash,
+{
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ let Punctuated { inner, last } = self;
+ inner.hash(state);
+ last.hash(state);
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut list = f.debug_list();
+ for (t, p) in &self.inner {
+ list.entry(t);
+ list.entry(p);
+ }
+ if let Some(last) = &self.last {
+ list.entry(last);
+ }
+ list.finish()
+ }
+}
+
+impl<T, P> FromIterator<T> for Punctuated<T, P>
+where
+ P: Default,
+{
+ fn from_iter<I: IntoIterator<Item = T>>(i: I) -> Self {
+ let mut ret = Punctuated::new();
+ ret.extend(i);
+ ret
+ }
+}
+
+impl<T, P> Extend<T> for Punctuated<T, P>
+where
+ P: Default,
+{
+ fn extend<I: IntoIterator<Item = T>>(&mut self, i: I) {
+ for value in i {
+ self.push(value);
+ }
+ }
+}
+
+impl<T, P> FromIterator<Pair<T, P>> for Punctuated<T, P> {
+ fn from_iter<I: IntoIterator<Item = Pair<T, P>>>(i: I) -> Self {
+ let mut ret = Punctuated::new();
+ do_extend(&mut ret, i.into_iter());
+ ret
+ }
+}
+
+impl<T, P> Extend<Pair<T, P>> for Punctuated<T, P>
+where
+ P: Default,
+{
+ fn extend<I: IntoIterator<Item = Pair<T, P>>>(&mut self, i: I) {
+ if !self.empty_or_trailing() {
+ self.push_punct(P::default());
+ }
+ do_extend(self, i.into_iter());
+ }
+}
+
+fn do_extend<T, P, I>(punctuated: &mut Punctuated<T, P>, i: I)
+where
+ I: Iterator<Item = Pair<T, P>>,
+{
+ let mut nomore = false;
+ for pair in i {
+ if nomore {
+ panic!("punctuated extended with items after a Pair::End");
+ }
+ match pair {
+ Pair::Punctuated(a, b) => punctuated.inner.push((a, b)),
+ Pair::End(a) => {
+ punctuated.last = Some(Box::new(a));
+ nomore = true;
+ }
+ }
+ }
+}
+
+impl<T, P> IntoIterator for Punctuated<T, P> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ let mut elements = Vec::with_capacity(self.len());
+ elements.extend(self.inner.into_iter().map(|pair| pair.0));
+ elements.extend(self.last.map(|t| *t));
+
+ IntoIter {
+ inner: elements.into_iter(),
+ }
+ }
+}
+
+impl<'a, T, P> IntoIterator for &'a Punctuated<T, P> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ Punctuated::iter(self)
+ }
+}
+
+impl<'a, T, P> IntoIterator for &'a mut Punctuated<T, P> {
+ type Item = &'a mut T;
+ type IntoIter = IterMut<'a, T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ Punctuated::iter_mut(self)
+ }
+}
+
+impl<T, P> Default for Punctuated<T, P> {
+ fn default() -> Self {
+ Punctuated::new()
+ }
+}
+
+/// An iterator over borrowed pairs of type `Pair<&T, &P>`.
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub struct Pairs<'a, T: 'a, P: 'a> {
+ inner: slice::Iter<'a, (T, P)>,
+ last: option::IntoIter<&'a T>,
+}
+
+impl<'a, T, P> Iterator for Pairs<'a, T, P> {
+ type Item = Pair<&'a T, &'a P>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner
+ .next()
+ .map(|(t, p)| Pair::Punctuated(t, p))
+ .or_else(|| self.last.next().map(Pair::End))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len(), Some(self.len()))
+ }
+}
+
+impl<'a, T, P> DoubleEndedIterator for Pairs<'a, T, P> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.last
+ .next()
+ .map(Pair::End)
+ .or_else(|| self.inner.next_back().map(|(t, p)| Pair::Punctuated(t, p)))
+ }
+}
+
+impl<'a, T, P> ExactSizeIterator for Pairs<'a, T, P> {
+ fn len(&self) -> usize {
+ self.inner.len() + self.last.len()
+ }
+}
+
+// No Clone bound on T or P.
+impl<'a, T, P> Clone for Pairs<'a, T, P> {
+ fn clone(&self) -> Self {
+ Pairs {
+ inner: self.inner.clone(),
+ last: self.last.clone(),
+ }
+ }
+}
+
+/// An iterator over mutably borrowed pairs of type `Pair<&mut T, &mut P>`.
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub struct PairsMut<'a, T: 'a, P: 'a> {
+ inner: slice::IterMut<'a, (T, P)>,
+ last: option::IntoIter<&'a mut T>,
+}
+
+impl<'a, T, P> Iterator for PairsMut<'a, T, P> {
+ type Item = Pair<&'a mut T, &'a mut P>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner
+ .next()
+ .map(|(t, p)| Pair::Punctuated(t, p))
+ .or_else(|| self.last.next().map(Pair::End))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len(), Some(self.len()))
+ }
+}
+
+impl<'a, T, P> DoubleEndedIterator for PairsMut<'a, T, P> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.last
+ .next()
+ .map(Pair::End)
+ .or_else(|| self.inner.next_back().map(|(t, p)| Pair::Punctuated(t, p)))
+ }
+}
+
+impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
+ fn len(&self) -> usize {
+ self.inner.len() + self.last.len()
+ }
+}
+
+/// An iterator over owned pairs of type `Pair<T, P>`.
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub struct IntoPairs<T, P> {
+ inner: vec::IntoIter<(T, P)>,
+ last: option::IntoIter<T>,
+}
+
+impl<T, P> Iterator for IntoPairs<T, P> {
+ type Item = Pair<T, P>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner
+ .next()
+ .map(|(t, p)| Pair::Punctuated(t, p))
+ .or_else(|| self.last.next().map(Pair::End))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len(), Some(self.len()))
+ }
+}
+
+impl<T, P> DoubleEndedIterator for IntoPairs<T, P> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.last
+ .next()
+ .map(Pair::End)
+ .or_else(|| self.inner.next_back().map(|(t, p)| Pair::Punctuated(t, p)))
+ }
+}
+
+impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+ fn len(&self) -> usize {
+ self.inner.len() + self.last.len()
+ }
+}
+
+impl<T, P> Clone for IntoPairs<T, P>
+where
+ T: Clone,
+ P: Clone,
+{
+ fn clone(&self) -> Self {
+ IntoPairs {
+ inner: self.inner.clone(),
+ last: self.last.clone(),
+ }
+ }
+}
+
+/// An iterator over owned values of type `T`.
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub struct IntoIter<T> {
+ inner: vec::IntoIter<T>,
+}
+
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len(), Some(self.len()))
+ }
+}
+
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.inner.next_back()
+ }
+}
+
+impl<T> ExactSizeIterator for IntoIter<T> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<T> Clone for IntoIter<T>
+where
+ T: Clone,
+{
+ fn clone(&self) -> Self {
+ IntoIter {
+ inner: self.inner.clone(),
+ }
+ }
+}
+
+/// An iterator over borrowed values of type `&T`.
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub struct Iter<'a, T: 'a> {
+ inner: Box<NoDrop<dyn IterTrait<'a, T> + 'a>>,
+}
+
+trait IterTrait<'a, T: 'a>: Iterator<Item = &'a T> + DoubleEndedIterator + ExactSizeIterator {
+ fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T> + 'a>>;
+}
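+// `IterTrait` erases the concrete iterator type behind a boxed trait object;
+// `clone_box` re-boxes a clone of the erased iterator, which is what lets the
+// `Clone for Iter` impl below avoid any `Clone` bound on `T`.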
+
+struct PrivateIter<'a, T: 'a, P: 'a> {
+ inner: slice::Iter<'a, (T, P)>,
+ last: option::IntoIter<&'a T>,
+}
+
+impl<'a, T, P> TrivialDrop for PrivateIter<'a, T, P>
+where
+ slice::Iter<'a, (T, P)>: TrivialDrop,
+ option::IntoIter<&'a T>: TrivialDrop,
+{
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+pub(crate) fn empty_punctuated_iter<'a, T>() -> Iter<'a, T> {
+ Iter {
+ inner: Box::new(NoDrop::new(iter::empty())),
+ }
+}
+
+// No Clone bound on T.
+impl<'a, T> Clone for Iter<'a, T> {
+ fn clone(&self) -> Self {
+ Iter {
+ inner: self.inner.clone_box(),
+ }
+ }
+}
+
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len(), Some(self.len()))
+ }
+}
+
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.inner.next_back()
+ }
+}
+
+impl<'a, T> ExactSizeIterator for Iter<'a, T> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<'a, T, P> Iterator for PrivateIter<'a, T, P> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner
+ .next()
+ .map(|pair| &pair.0)
+ .or_else(|| self.last.next())
+ }
+}
+
+impl<'a, T, P> DoubleEndedIterator for PrivateIter<'a, T, P> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.last
+ .next()
+ .or_else(|| self.inner.next_back().map(|pair| &pair.0))
+ }
+}
+
+impl<'a, T, P> ExactSizeIterator for PrivateIter<'a, T, P> {
+ fn len(&self) -> usize {
+ self.inner.len() + self.last.len()
+ }
+}
+
+// No Clone bound on T or P.
+impl<'a, T, P> Clone for PrivateIter<'a, T, P> {
+ fn clone(&self) -> Self {
+ PrivateIter {
+ inner: self.inner.clone(),
+ last: self.last.clone(),
+ }
+ }
+}
+
+impl<'a, T, I> IterTrait<'a, T> for I
+where
+ T: 'a,
+ I: DoubleEndedIterator<Item = &'a T>
+ + ExactSizeIterator<Item = &'a T>
+ + Clone
+ + TrivialDrop
+ + 'a,
+{
+ fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T> + 'a>> {
+ Box::new(NoDrop::new(self.clone()))
+ }
+}
+
+/// An iterator over mutably borrowed values of type `&mut T`.
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub struct IterMut<'a, T: 'a> {
+ inner: Box<NoDrop<dyn IterMutTrait<'a, T, Item = &'a mut T> + 'a>>,
+}
+
+trait IterMutTrait<'a, T: 'a>:
+ DoubleEndedIterator<Item = &'a mut T> + ExactSizeIterator<Item = &'a mut T>
+{
+}
+
+struct PrivateIterMut<'a, T: 'a, P: 'a> {
+ inner: slice::IterMut<'a, (T, P)>,
+ last: option::IntoIter<&'a mut T>,
+}
+
+impl<'a, T, P> TrivialDrop for PrivateIterMut<'a, T, P>
+where
+ slice::IterMut<'a, (T, P)>: TrivialDrop,
+ option::IntoIter<&'a mut T>: TrivialDrop,
+{
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+pub(crate) fn empty_punctuated_iter_mut<'a, T>() -> IterMut<'a, T> {
+ IterMut {
+ inner: Box::new(NoDrop::new(iter::empty())),
+ }
+}
+
+impl<'a, T> Iterator for IterMut<'a, T> {
+ type Item = &'a mut T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len(), Some(self.len()))
+ }
+}
+
+impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.inner.next_back()
+ }
+}
+
+impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<'a, T, P> Iterator for PrivateIterMut<'a, T, P> {
+ type Item = &'a mut T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner
+ .next()
+ .map(|pair| &mut pair.0)
+ .or_else(|| self.last.next())
+ }
+}
+
+impl<'a, T, P> DoubleEndedIterator for PrivateIterMut<'a, T, P> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.last
+ .next()
+ .or_else(|| self.inner.next_back().map(|pair| &mut pair.0))
+ }
+}
+
+impl<'a, T, P> ExactSizeIterator for PrivateIterMut<'a, T, P> {
+ fn len(&self) -> usize {
+ self.inner.len() + self.last.len()
+ }
+}
+
+impl<'a, T, I> IterMutTrait<'a, T> for I
+where
+ T: 'a,
+ I: DoubleEndedIterator<Item = &'a mut T> + ExactSizeIterator<Item = &'a mut T> + 'a,
+{
+}
+
+/// A single syntax tree node of type `T` followed by its trailing punctuation
+/// of type `P` if any.
+///
+/// Refer to the [module documentation] for details about punctuated sequences.
+///
+/// [module documentation]: self
+pub enum Pair<T, P> {
+ Punctuated(T, P),
+ End(T),
+}
+
+impl<T, P> Pair<T, P> {
+ /// Extracts the syntax tree node from this punctuated pair, discarding the
+ /// following punctuation.
+ pub fn into_value(self) -> T {
+ match self {
+ Pair::Punctuated(t, _) | Pair::End(t) => t,
+ }
+ }
+
+ /// Borrows the syntax tree node from this punctuated pair.
+ pub fn value(&self) -> &T {
+ match self {
+ Pair::Punctuated(t, _) | Pair::End(t) => t,
+ }
+ }
+
+ /// Mutably borrows the syntax tree node from this punctuated pair.
+ pub fn value_mut(&mut self) -> &mut T {
+ match self {
+ Pair::Punctuated(t, _) | Pair::End(t) => t,
+ }
+ }
+
+ /// Borrows the punctuation from this punctuated pair, unless this pair is
+ /// the final one and there is no trailing punctuation.
+ pub fn punct(&self) -> Option<&P> {
+ match self {
+ Pair::Punctuated(_, p) => Some(p),
+ Pair::End(_) => None,
+ }
+ }
+
+ /// Mutably borrows the punctuation from this punctuated pair, unless the
+ /// pair is the final one and there is no trailing punctuation.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # use proc_macro2::Span;
+ /// # use syn::punctuated::Punctuated;
+ /// # use syn::{parse_quote, Token, TypeParamBound};
+ /// #
+ /// # let mut punctuated = Punctuated::<TypeParamBound, Token![+]>::new();
+ /// # let span = Span::call_site();
+ /// #
+ /// punctuated.insert(0, parse_quote!('lifetime));
+ /// if let Some(punct) = punctuated.pairs_mut().next().unwrap().punct_mut() {
+ /// punct.span = span;
+ /// }
+ /// ```
+ pub fn punct_mut(&mut self) -> Option<&mut P> {
+ match self {
+ Pair::Punctuated(_, p) => Some(p),
+ Pair::End(_) => None,
+ }
+ }
+
+ /// Creates a punctuated pair out of a syntax tree node and an optional
+ /// following punctuation.
+ pub fn new(t: T, p: Option<P>) -> Self {
+ match p {
+ Some(p) => Pair::Punctuated(t, p),
+ None => Pair::End(t),
+ }
+ }
+
+ /// Produces this punctuated pair as a tuple of syntax tree node and
+ /// optional following punctuation.
+ pub fn into_tuple(self) -> (T, Option<P>) {
+ match self {
+ Pair::Punctuated(t, p) => (t, Some(p)),
+ Pair::End(t) => (t, None),
+ }
+ }
+}
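+// A minimal usage sketch (not part of upstream syn): `Pair::into_tuple` and
+// `Pair::new` round-trip a node and its optional trailing punctuation, e.g.
+// to strip a trailing comma while rebuilding a sequence element by element.
+//
+//     use syn::punctuated::Pair;
+//     use syn::Token;
+//
+//     fn drop_comma(pair: Pair<syn::Ident, Token![,]>) -> Pair<syn::Ident, Token![,]> {
+//         let (ident, _comma) = pair.into_tuple();
+//         Pair::new(ident, None)
+//     }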
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl<T, P> Pair<&T, &P> {
+ pub fn cloned(self) -> Pair<T, P>
+ where
+ T: Clone,
+ P: Clone,
+ {
+ match self {
+ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
+ Pair::End(t) => Pair::End(t.clone()),
+ }
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl<T, P> Clone for Pair<T, P>
+where
+ T: Clone,
+ P: Clone,
+{
+ fn clone(&self) -> Self {
+ match self {
+ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
+ Pair::End(t) => Pair::End(t.clone()),
+ }
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl<T, P> Copy for Pair<T, P>
+where
+ T: Copy,
+ P: Copy,
+{
+}
+
+impl<T, P> Index<usize> for Punctuated<T, P> {
+ type Output = T;
+
+ fn index(&self, index: usize) -> &Self::Output {
+ if index == self.len() - 1 {
+ match &self.last {
+ Some(t) => t,
+ None => &self.inner[index].0,
+ }
+ } else {
+ &self.inner[index].0
+ }
+ }
+}
+
+impl<T, P> IndexMut<usize> for Punctuated<T, P> {
+ fn index_mut(&mut self, index: usize) -> &mut Self::Output {
+ if index == self.len() - 1 {
+ match &mut self.last {
+ Some(t) => t,
+ None => &mut self.inner[index].0,
+ }
+ } else {
+ &mut self.inner[index].0
+ }
+ }
+}
+
+#[cfg(all(feature = "fold", any(feature = "full", feature = "derive")))]
+pub(crate) fn fold<T, P, V, F>(
+ punctuated: Punctuated<T, P>,
+ fold: &mut V,
+ mut f: F,
+) -> Punctuated<T, P>
+where
+ V: ?Sized,
+ F: FnMut(&mut V, T) -> T,
+{
+ Punctuated {
+ inner: punctuated
+ .inner
+ .into_iter()
+ .map(|(t, p)| (f(fold, t), p))
+ .collect(),
+ last: match punctuated.last {
+ Some(t) => Some(Box::new(f(fold, *t))),
+ None => None,
+ },
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::punctuated::{Pair, Punctuated};
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl<T, P> ToTokens for Punctuated<T, P>
+ where
+ T: ToTokens,
+ P: ToTokens,
+ {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.pairs());
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl<T, P> ToTokens for Pair<T, P>
+ where
+ T: ToTokens,
+ P: ToTokens,
+ {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ Pair::Punctuated(a, b) => {
+ a.to_tokens(tokens);
+ b.to_tokens(tokens);
+ }
+ Pair::End(a) => a.to_tokens(tokens),
+ }
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/restriction.rs b/rust/hw/char/pl011/vendor/syn/src/restriction.rs
new file mode 100644
index 0000000000..8a4d4706a5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/restriction.rs
@@ -0,0 +1,176 @@
+use crate::path::Path;
+use crate::token;
+
+ast_enum! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum Visibility {
+ /// A public visibility level: `pub`.
+ Public(Token![pub]),
+
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ Restricted(VisRestricted),
+
+ /// An inherited visibility, which usually means private.
+ Inherited,
+ }
+}
+
+ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+ pub paren_token: token::Paren,
+ pub in_token: Option<Token![in]>,
+ pub path: Box<Path>,
+ }
+}
+
+ast_enum! {
+ /// Unused, but reserved for RFC 3323 restrictions.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum FieldMutability {
+ None,
+
+ // TODO: https://rust-lang.github.io/rfcs/3323-restrictions.html
+ //
+ // FieldMutability::Restricted(MutRestricted)
+ //
+ // pub struct MutRestricted {
+ // pub mut_token: Token![mut],
+ // pub paren_token: token::Paren,
+ // pub in_token: Option<Token![in]>,
+ // pub path: Box<Path>,
+ // }
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::error::Result;
+ use crate::ext::IdentExt as _;
+ use crate::ident::Ident;
+ use crate::parse::discouraged::Speculative as _;
+ use crate::parse::{Parse, ParseStream};
+ use crate::path::Path;
+ use crate::restriction::{VisRestricted, Visibility};
+ use crate::token;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
+ // Recognize an empty None-delimited group, as produced by a $:vis
+ // matcher that matched no tokens.
+ if input.peek(token::Group) {
+ let ahead = input.fork();
+ let group = crate::group::parse_group(&ahead)?;
+ if group.content.is_empty() {
+ input.advance_to(&ahead);
+ return Ok(Visibility::Inherited);
+ }
+ }
+
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else {
+ Ok(Visibility::Inherited)
+ }
+ }
+ }
+
+ impl Visibility {
+ fn parse_pub(input: ParseStream) -> Result<Self> {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+ let ahead = input.fork();
+
+ let content;
+ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
+ let path = content.call(Ident::parse_any)?;
+
+ // Ensure there are no additional tokens within `content`.
+ // Without explicitly checking, we may misinterpret a tuple
+ // field as a restricted visibility, causing a parse error.
+ // e.g. `pub (crate::A, crate::B)` (Issue #720).
+ if content.is_empty() {
+ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+ paren_token,
+ in_token: None,
+ path: Box::new(Path::from(path)),
+ }));
+ }
+ } else if content.peek(Token![in]) {
+ let in_token: Token![in] = content.parse()?;
+ let path = content.call(Path::parse_mod_style)?;
+
+ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+ paren_token,
+ in_token: Some(in_token),
+ path: Box::new(path),
+ }));
+ }
+ }
+
+ Ok(Visibility::Public(pub_token))
+ }
+
+ #[cfg(feature = "full")]
+ pub(crate) fn is_some(&self) -> bool {
+ match self {
+ Visibility::Inherited => false,
+ _ => true,
+ }
+ }
+ }
+}
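+// A minimal usage sketch (not part of upstream syn), assuming the "parsing"
+// feature: the Parse impl above can be driven through syn::parse_str, and
+// `pub(in path)` comes back as Visibility::Restricted with `in_token` set.
+//
+//     let vis: syn::Visibility = syn::parse_str("pub(in crate::io)").unwrap();
+//     match vis {
+//         syn::Visibility::Restricted(r) => assert!(r.in_token.is_some()),
+//         _ => panic!("expected a restricted visibility"),
+//     }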
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::restriction::{VisRestricted, Visibility};
+ use proc_macro2::TokenStream;
+ use quote::ToTokens;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Visibility {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ Visibility::Public(pub_token) => pub_token.to_tokens(tokens),
+ Visibility::Restricted(vis_restricted) => vis_restricted.to_tokens(tokens),
+ Visibility::Inherited => {}
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for VisRestricted {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.pub_token.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+ // TODO: If we have a path which is not "self" or "super" or
+ // "crate", automatically add the "in" token.
+ self.in_token.to_tokens(tokens);
+ self.path.to_tokens(tokens);
+ });
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/sealed.rs b/rust/hw/char/pl011/vendor/syn/src/sealed.rs
new file mode 100644
index 0000000000..dc804742d1
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/sealed.rs
@@ -0,0 +1,4 @@
+#[cfg(feature = "parsing")]
+pub(crate) mod lookahead {
+ pub trait Sealed: Copy {}
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/span.rs b/rust/hw/char/pl011/vendor/syn/src/span.rs
new file mode 100644
index 0000000000..eb2779479a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/span.rs
@@ -0,0 +1,63 @@
+use proc_macro2::extra::DelimSpan;
+use proc_macro2::{Delimiter, Group, Span, TokenStream};
+
+#[doc(hidden)]
+pub trait IntoSpans<S> {
+ fn into_spans(self) -> S;
+}
+
+impl IntoSpans<Span> for Span {
+ fn into_spans(self) -> Span {
+ self
+ }
+}
+
+impl IntoSpans<[Span; 1]> for Span {
+ fn into_spans(self) -> [Span; 1] {
+ [self]
+ }
+}
+
+impl IntoSpans<[Span; 2]> for Span {
+ fn into_spans(self) -> [Span; 2] {
+ [self, self]
+ }
+}
+
+impl IntoSpans<[Span; 3]> for Span {
+ fn into_spans(self) -> [Span; 3] {
+ [self, self, self]
+ }
+}
+
+impl IntoSpans<[Span; 1]> for [Span; 1] {
+ fn into_spans(self) -> [Span; 1] {
+ self
+ }
+}
+
+impl IntoSpans<[Span; 2]> for [Span; 2] {
+ fn into_spans(self) -> [Span; 2] {
+ self
+ }
+}
+
+impl IntoSpans<[Span; 3]> for [Span; 3] {
+ fn into_spans(self) -> [Span; 3] {
+ self
+ }
+}
+
+impl IntoSpans<DelimSpan> for Span {
+ fn into_spans(self) -> DelimSpan {
+ let mut group = Group::new(Delimiter::None, TokenStream::new());
+ group.set_span(self);
+ group.delim_span()
+ }
+}
+
+impl IntoSpans<DelimSpan> for DelimSpan {
+ fn into_spans(self) -> DelimSpan {
+ self
+ }
+}
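+// These impls are what allow the token constructors in token.rs to accept
+// either a single Span or an explicit array: for example `Token![=>](span)`
+// fills both spans of the fat arrow from one `Span` via `IntoSpans<[Span; 2]>`.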
diff --git a/rust/hw/char/pl011/vendor/syn/src/spanned.rs b/rust/hw/char/pl011/vendor/syn/src/spanned.rs
new file mode 100644
index 0000000000..17b69e9f5b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/spanned.rs
@@ -0,0 +1,118 @@
+//! A trait that can provide the `Span` of the complete contents of a syntax
+//! tree node.
+//!
+//! <br>
+//!
+//! # Example
+//!
+//! Suppose in a procedural macro we have a [`Type`] that we want to assert
+//! implements the [`Sync`] trait. Maybe this is the type of one of the fields
+//! of a struct for which we are deriving a trait implementation, and we need to
+//! be able to pass a reference to one of those fields across threads.
+//!
+//! [`Type`]: crate::Type
+//! [`Sync`]: std::marker::Sync
+//!
+//! If the field type does *not* implement `Sync` as required, we want the
+//! compiler to report an error pointing out exactly which type it was.
+//!
+//! The following macro code takes a variable `ty` of type `Type` and produces a
+//! static assertion that `Sync` is implemented for that type.
+//!
+//! ```
+//! # extern crate proc_macro;
+//! #
+//! use proc_macro::TokenStream;
+//! use proc_macro2::Span;
+//! use quote::quote_spanned;
+//! use syn::Type;
+//! use syn::spanned::Spanned;
+//!
+//! # const IGNORE_TOKENS: &str = stringify! {
+//! #[proc_macro_derive(MyMacro)]
+//! # };
+//! pub fn my_macro(input: TokenStream) -> TokenStream {
+//! # let ty = get_a_type();
+//! /* ... */
+//!
+//! let assert_sync = quote_spanned! {ty.span()=>
+//! struct _AssertSync where #ty: Sync;
+//! };
+//!
+//! /* ... */
+//! # input
+//! }
+//! #
+//! # fn get_a_type() -> Type {
+//! # unimplemented!()
+//! # }
+//! ```
+//!
+//! By inserting this `assert_sync` fragment into the output code generated by
+//! our macro, the user's code will fail to compile if `ty` does not implement
+//! `Sync`. The errors they would see look like the following.
+//!
+//! ```text
+//! error[E0277]: the trait bound `*const i32: std::marker::Sync` is not satisfied
+//! --> src/main.rs:10:21
+//! |
+//! 10 | bad_field: *const i32,
+//! | ^^^^^^^^^^ `*const i32` cannot be shared between threads safely
+//! ```
+//!
+//! In this technique, using the `Type`'s span for the error message makes the
+//! error appear in the correct place underlining the right type.
+//!
+//! <br>
+//!
+//! # Limitations
+//!
+//! The underlying [`proc_macro::Span::join`] method is nightly-only. When
+//! called from within a procedural macro in a nightly compiler, `Spanned` will
+//! use `join` to produce the intended span. When not using a nightly compiler,
+//! only the span of the *first token* of the syntax tree node is returned.
+//!
+//! In the common case of wanting to use the joined span as the span of a
+//! `syn::Error`, consider instead using [`syn::Error::new_spanned`] which is
+//! able to span the error correctly under the complete syntax tree node without
+//! needing the unstable `join`.
+//!
+//! [`syn::Error::new_spanned`]: crate::Error::new_spanned
+
+use proc_macro2::Span;
+use quote::spanned::Spanned as ToTokens;
+
+/// A trait that can provide the `Span` of the complete contents of a syntax
+/// tree node.
+///
+/// This trait is automatically implemented for all types that implement
+/// [`ToTokens`] from the `quote` crate, as well as for `Span` itself.
+///
+/// [`ToTokens`]: quote::ToTokens
+///
+/// See the [module documentation] for an example.
+///
+/// [module documentation]: self
+pub trait Spanned: private::Sealed {
+ /// Returns a `Span` covering the complete contents of this syntax tree
+ /// node, or [`Span::call_site()`] if this node is empty.
+ ///
+ /// [`Span::call_site()`]: proc_macro2::Span::call_site
+ fn span(&self) -> Span;
+}
+
+impl<T: ?Sized + ToTokens> Spanned for T {
+ fn span(&self) -> Span {
+ self.__span()
+ }
+}
+
+mod private {
+ use crate::spanned::ToTokens;
+
+ pub trait Sealed {}
+ impl<T: ?Sized + ToTokens> Sealed for T {}
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl Sealed for crate::QSelf {}
+}
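+// A minimal sketch (not part of upstream syn) of the alternative recommended
+// in the Limitations section above: spanning an error across a whole node with
+// Error::new_spanned instead of relying on the nightly-only `join`.
+//
+//     fn reject_references(ty: &syn::Type) -> syn::Result<()> {
+//         if let syn::Type::Reference(_) = ty {
+//             return Err(syn::Error::new_spanned(ty, "reference types are not supported here"));
+//         }
+//         Ok(())
+//     }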
diff --git a/rust/hw/char/pl011/vendor/syn/src/stmt.rs b/rust/hw/char/pl011/vendor/syn/src/stmt.rs
new file mode 100644
index 0000000000..51d000581a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/stmt.rs
@@ -0,0 +1,481 @@
+use crate::attr::Attribute;
+use crate::expr::Expr;
+use crate::item::Item;
+use crate::mac::Macro;
+use crate::pat::Pat;
+use crate::token;
+
+ast_struct! {
+ /// A braced block containing Rust statements.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct Block {
+ pub brace_token: token::Brace,
+ /// Statements in a block
+ pub stmts: Vec<Stmt>,
+ }
+}
+
+ast_enum! {
+ /// A statement, usually ending in a semicolon.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub enum Stmt {
+ /// A local (let) binding.
+ Local(Local),
+
+ /// An item definition.
+ Item(Item),
+
+ /// Expression, with or without trailing semicolon.
+ Expr(Expr, Option<Token![;]>),
+
+ /// A macro invocation in statement position.
+ ///
+ /// Syntactically it's ambiguous which other kind of statement this
+ /// macro would expand to. It can be any of local variable (`let`),
+ /// item, or expression.
+ Macro(StmtMacro),
+ }
+}
+
+ast_struct! {
+ /// A local `let` binding: `let x: u64 = s.parse()?`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct Local {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+ pub pat: Pat,
+ pub init: Option<LocalInit>,
+ pub semi_token: Token![;],
+ }
+}
+
+ast_struct! {
+ /// The expression assigned in a local `let` binding, including optional
+ /// diverging `else` block.
+ ///
+ /// `LocalInit` represents `= s.parse()?` in `let x: u64 = s.parse()?` and
+ /// `= r else { return }` in `let Ok(x) = r else { return }`.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct LocalInit {
+ pub eq_token: Token![=],
+ pub expr: Box<Expr>,
+ pub diverge: Option<(Token![else], Box<Expr>)>,
+ }
+}
+
+ast_struct! {
+ /// A macro invocation in statement position.
+ ///
+ /// Syntactically it's ambiguous which other kind of statement this macro
+ /// would expand to. It can be any of local variable (`let`), item, or
+ /// expression.
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+ pub struct StmtMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+}
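+// A minimal usage sketch (not part of upstream syn), assuming the "full" and
+// "parsing" features: parsing a braced block and checking how each statement
+// is classified by the enum above.
+//
+//     let block: syn::Block = syn::parse_str("{ let x = 1; x + 1 }").unwrap();
+//     assert_eq!(block.stmts.len(), 2);
+//     assert!(matches!(block.stmts[0], syn::Stmt::Local(_)));
+//     assert!(matches!(block.stmts[1], syn::Stmt::Expr(_, None)));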
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::Attribute;
+ use crate::classify;
+ use crate::error::Result;
+ use crate::expr::{Expr, ExprBlock, ExprMacro};
+ use crate::ident::Ident;
+ use crate::item;
+ use crate::mac::{self, Macro};
+ use crate::parse::discouraged::Speculative as _;
+ use crate::parse::{Parse, ParseStream};
+ use crate::pat::{Pat, PatType};
+ use crate::path::Path;
+ use crate::stmt::{Block, Local, LocalInit, Stmt, StmtMacro};
+ use crate::token;
+ use crate::ty::Type;
+ use proc_macro2::TokenStream;
+
+ struct AllowNoSemi(bool);
+
+ impl Block {
+ /// Parse the body of a block as zero or more statements, possibly
+ /// including one trailing expression.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// // Parse a function with no generics or parameter list.
+ /// //
+ /// // fn playground {
+ /// // let mut x = 1;
+ /// // x += 1;
+ /// // println!("{}", x);
+ /// // }
+ /// struct MiniFunction {
+ /// attrs: Vec<Attribute>,
+ /// fn_token: Token![fn],
+ /// name: Ident,
+ /// brace_token: token::Brace,
+ /// stmts: Vec<Stmt>,
+ /// }
+ ///
+ /// impl Parse for MiniFunction {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// let outer_attrs = input.call(Attribute::parse_outer)?;
+ /// let fn_token: Token![fn] = input.parse()?;
+ /// let name: Ident = input.parse()?;
+ ///
+ /// let content;
+ /// let brace_token = braced!(content in input);
+ /// let inner_attrs = content.call(Attribute::parse_inner)?;
+ /// let stmts = content.call(Block::parse_within)?;
+ ///
+ /// Ok(MiniFunction {
+ /// attrs: {
+ /// let mut attrs = outer_attrs;
+ /// attrs.extend(inner_attrs);
+ /// attrs
+ /// },
+ /// fn_token,
+ /// name,
+ /// brace_token,
+ /// stmts,
+ /// })
+ /// }
+ /// }
+ /// ```
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
+ let mut stmts = Vec::new();
+ loop {
+ while let semi @ Some(_) = input.parse()? {
+ stmts.push(Stmt::Expr(Expr::Verbatim(TokenStream::new()), semi));
+ }
+ if input.is_empty() {
+ break;
+ }
+ let stmt = parse_stmt(input, AllowNoSemi(true))?;
+ let requires_semicolon = match &stmt {
+ Stmt::Expr(stmt, None) => classify::requires_semi_to_be_stmt(stmt),
+ Stmt::Macro(stmt) => {
+ stmt.semi_token.is_none() && !stmt.mac.delimiter.is_brace()
+ }
+ Stmt::Local(_) | Stmt::Item(_) | Stmt::Expr(_, Some(_)) => false,
+ };
+ stmts.push(stmt);
+ if input.is_empty() {
+ break;
+ } else if requires_semicolon {
+ return Err(input.error("unexpected token, expected `;`"));
+ }
+ }
+ Ok(stmts)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Block {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ Ok(Block {
+ brace_token: braced!(content in input),
+ stmts: content.call(Block::parse_within)?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Stmt {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_nosemi = AllowNoSemi(false);
+ parse_stmt(input, allow_nosemi)
+ }
+ }
+
+ fn parse_stmt(input: ParseStream, allow_nosemi: AllowNoSemi) -> Result<Stmt> {
+ let begin = input.fork();
+ let attrs = input.call(Attribute::parse_outer)?;
+
+ // brace-style macros; paren and bracket macros get parsed as
+ // expression statements.
+ let ahead = input.fork();
+ let mut is_item_macro = false;
+ if let Ok(path) = ahead.call(Path::parse_mod_style) {
+ if ahead.peek(Token![!]) {
+ if ahead.peek2(Ident) || ahead.peek2(Token![try]) {
+ is_item_macro = true;
+ } else if ahead.peek2(token::Brace)
+ && !(ahead.peek3(Token![.]) || ahead.peek3(Token![?]))
+ {
+ input.advance_to(&ahead);
+ return stmt_mac(input, attrs, path).map(Stmt::Macro);
+ }
+ }
+ }
+
+ if input.peek(Token![let]) && !input.peek(token::Group) {
+ stmt_local(input, attrs).map(Stmt::Local)
+ } else if input.peek(Token![pub])
+ || input.peek(Token![crate]) && !input.peek2(Token![::])
+ || input.peek(Token![extern])
+ || input.peek(Token![use])
+ || input.peek(Token![static])
+ && (input.peek2(Token![mut])
+ || input.peek2(Ident)
+ && !(input.peek2(Token![async])
+ && (input.peek3(Token![move]) || input.peek3(Token![|]))))
+ || input.peek(Token![const])
+ && !(input.peek2(token::Brace)
+ || input.peek2(Token![static])
+ || input.peek2(Token![async])
+ && !(input.peek3(Token![unsafe])
+ || input.peek3(Token![extern])
+ || input.peek3(Token![fn]))
+ || input.peek2(Token![move])
+ || input.peek2(Token![|]))
+ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
+ || input.peek(Token![async])
+ && (input.peek2(Token![unsafe])
+ || input.peek2(Token![extern])
+ || input.peek2(Token![fn]))
+ || input.peek(Token![fn])
+ || input.peek(Token![mod])
+ || input.peek(Token![type])
+ || input.peek(Token![struct])
+ || input.peek(Token![enum])
+ || input.peek(Token![union]) && input.peek2(Ident)
+ || input.peek(Token![auto]) && input.peek2(Token![trait])
+ || input.peek(Token![trait])
+ || input.peek(Token![default])
+ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
+ || input.peek(Token![impl])
+ || input.peek(Token![macro])
+ || is_item_macro
+ {
+ let item = item::parsing::parse_rest_of_item(begin, attrs, input)?;
+ Ok(Stmt::Item(item))
+ } else {
+ stmt_expr(input, allow_nosemi, attrs)
+ }
+ }
+
+ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<StmtMacro> {
+ let bang_token: Token![!] = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+ let semi_token: Option<Token![;]> = input.parse()?;
+
+ Ok(StmtMacro {
+ attrs,
+ mac: Macro {
+ path,
+ bang_token,
+ delimiter,
+ tokens,
+ },
+ semi_token,
+ })
+ }
+
+ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
+ let let_token: Token![let] = input.parse()?;
+
+ let mut pat = Pat::parse_single(input)?;
+ if input.peek(Token![:]) {
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ pat = Pat::Type(PatType {
+ attrs: Vec::new(),
+ pat: Box::new(pat),
+ colon_token,
+ ty: Box::new(ty),
+ });
+ }
+
+ let init = if let Some(eq_token) = input.parse()? {
+ let eq_token: Token![=] = eq_token;
+ let expr: Expr = input.parse()?;
+
+ let diverge = if !classify::expr_trailing_brace(&expr) && input.peek(Token![else]) {
+ let else_token: Token![else] = input.parse()?;
+ let diverge = ExprBlock {
+ attrs: Vec::new(),
+ label: None,
+ block: input.parse()?,
+ };
+ Some((else_token, Box::new(Expr::Block(diverge))))
+ } else {
+ None
+ };
+
+ Some(LocalInit {
+ eq_token,
+ expr: Box::new(expr),
+ diverge,
+ })
+ } else {
+ None
+ };
+
+ let semi_token: Token![;] = input.parse()?;
+
+ Ok(Local {
+ attrs,
+ let_token,
+ pat,
+ init,
+ semi_token,
+ })
+ }
+
+ fn stmt_expr(
+ input: ParseStream,
+ allow_nosemi: AllowNoSemi,
+ mut attrs: Vec<Attribute>,
+ ) -> Result<Stmt> {
+ let mut e = Expr::parse_with_earlier_boundary_rule(input)?;
+
+ let mut attr_target = &mut e;
+ loop {
+ attr_target = match attr_target {
+ Expr::Assign(e) => &mut e.left,
+ Expr::Binary(e) => &mut e.left,
+ Expr::Cast(e) => &mut e.expr,
+ Expr::Array(_)
+ | Expr::Async(_)
+ | Expr::Await(_)
+ | Expr::Block(_)
+ | Expr::Break(_)
+ | Expr::Call(_)
+ | Expr::Closure(_)
+ | Expr::Const(_)
+ | Expr::Continue(_)
+ | Expr::Field(_)
+ | Expr::ForLoop(_)
+ | Expr::Group(_)
+ | Expr::If(_)
+ | Expr::Index(_)
+ | Expr::Infer(_)
+ | Expr::Let(_)
+ | Expr::Lit(_)
+ | Expr::Loop(_)
+ | Expr::Macro(_)
+ | Expr::Match(_)
+ | Expr::MethodCall(_)
+ | Expr::Paren(_)
+ | Expr::Path(_)
+ | Expr::Range(_)
+ | Expr::Reference(_)
+ | Expr::Repeat(_)
+ | Expr::Return(_)
+ | Expr::Struct(_)
+ | Expr::Try(_)
+ | Expr::TryBlock(_)
+ | Expr::Tuple(_)
+ | Expr::Unary(_)
+ | Expr::Unsafe(_)
+ | Expr::While(_)
+ | Expr::Yield(_)
+ | Expr::Verbatim(_) => break,
+ };
+ }
+ attrs.extend(attr_target.replace_attrs(Vec::new()));
+ attr_target.replace_attrs(attrs);
+
+ let semi_token: Option<Token![;]> = input.parse()?;
+
+ match e {
+ Expr::Macro(ExprMacro { attrs, mac })
+ if semi_token.is_some() || mac.delimiter.is_brace() =>
+ {
+ return Ok(Stmt::Macro(StmtMacro {
+ attrs,
+ mac,
+ semi_token,
+ }));
+ }
+ _ => {}
+ }
+
+ if semi_token.is_some() {
+ Ok(Stmt::Expr(e, semi_token))
+ } else if allow_nosemi.0 || !classify::requires_semi_to_be_stmt(&e) {
+ Ok(Stmt::Expr(e, None))
+ } else {
+ Err(input.error("expected semicolon"))
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+pub(crate) mod printing {
+ use crate::classify;
+ use crate::expr::{self, Expr};
+ use crate::fixup::FixupContext;
+ use crate::stmt::{Block, Local, Stmt, StmtMacro};
+ use crate::token;
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Block {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(&self.stmts);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Stmt {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ Stmt::Local(local) => local.to_tokens(tokens),
+ Stmt::Item(item) => item.to_tokens(tokens),
+ Stmt::Expr(expr, semi) => {
+ expr::printing::print_expr(expr, tokens, FixupContext::new_stmt());
+ semi.to_tokens(tokens);
+ }
+ Stmt::Macro(mac) => mac.to_tokens(tokens),
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Local {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
+ self.let_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ if let Some(init) = &self.init {
+ init.eq_token.to_tokens(tokens);
+ if init.diverge.is_some() && classify::expr_trailing_brace(&init.expr) {
+ token::Paren::default().surround(tokens, |tokens| init.expr.to_tokens(tokens));
+ } else {
+ init.expr.to_tokens(tokens);
+ }
+ if let Some((else_token, diverge)) = &init.diverge {
+ else_token.to_tokens(tokens);
+ match &**diverge {
+ Expr::Block(diverge) => diverge.to_tokens(tokens),
+ _ => token::Brace::default().surround(tokens, |tokens| {
+ expr::printing::print_expr(diverge, tokens, FixupContext::new_stmt());
+ }),
+ }
+ }
+ }
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for StmtMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
+ self.mac.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/thread.rs b/rust/hw/char/pl011/vendor/syn/src/thread.rs
new file mode 100644
index 0000000000..b33d248afc
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/thread.rs
@@ -0,0 +1,60 @@
+use std::fmt::{self, Debug};
+use std::thread::{self, ThreadId};
+
+/// ThreadBound is a Sync-maker and Send-maker that allows accessing a value
+/// of type T only from the original thread on which the ThreadBound was
+/// constructed.
+pub(crate) struct ThreadBound<T> {
+ value: T,
+ thread_id: ThreadId,
+}
+
+unsafe impl<T> Sync for ThreadBound<T> {}
+
+// Send bound requires Copy, as otherwise Drop could run in the wrong place.
+//
+// Today Copy and Drop are mutually exclusive so `T: Copy` implies `T: !Drop`.
+// This impl needs to be revisited if that restriction is relaxed in the future.
+unsafe impl<T: Copy> Send for ThreadBound<T> {}
+
+impl<T> ThreadBound<T> {
+ pub(crate) fn new(value: T) -> Self {
+ ThreadBound {
+ value,
+ thread_id: thread::current().id(),
+ }
+ }
+
+ pub(crate) fn get(&self) -> Option<&T> {
+ if thread::current().id() == self.thread_id {
+ Some(&self.value)
+ } else {
+ None
+ }
+ }
+}
+
+impl<T: Debug> Debug for ThreadBound<T> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self.get() {
+ Some(value) => Debug::fmt(value, formatter),
+ None => formatter.write_str("unknown"),
+ }
+ }
+}
+
+// Copy the bytes of T, even if the currently running thread is the "wrong"
+// thread. This is fine as long as the original thread is not simultaneously
+// mutating this value via interior mutability, which would be a data race.
+//
+// Currently `T: Copy` is sufficient to guarantee that T contains no interior
+// mutability, because _all_ interior mutability in Rust is built on
+// std::cell::UnsafeCell, which has no Copy impl. This impl needs to be
+// revisited if that restriction is relaxed in the future.
+impl<T: Copy> Copy for ThreadBound<T> {}
+
+impl<T: Copy> Clone for ThreadBound<T> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
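+// A minimal behavior sketch (not part of upstream syn): a ThreadBound built on
+// one thread exposes its value there and yields None from any other thread.
+// Span is Copy, so this particular ThreadBound is Send and may cross threads.
+//
+//     let bound = ThreadBound::new(proc_macro2::Span::call_site());
+//     assert!(bound.get().is_some());
+//     std::thread::spawn(move || assert!(bound.get().is_none()))
+//         .join()
+//         .unwrap();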
diff --git a/rust/hw/char/pl011/vendor/syn/src/token.rs b/rust/hw/char/pl011/vendor/syn/src/token.rs
new file mode 100644
index 0000000000..e04f105747
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/token.rs
@@ -0,0 +1,1138 @@
+//! Tokens representing Rust punctuation, keywords, and delimiters.
+//!
+//! The type names in this module can be difficult to keep straight, so we
+//! prefer to use the [`Token!`] macro instead. This is a type-macro that
+//! expands to the token type of the given token.
+//!
+//! [`Token!`]: crate::Token
+//!
+//! # Example
+//!
+//! The [`ItemStatic`] syntax tree node is defined like this.
+//!
+//! [`ItemStatic`]: crate::ItemStatic
+//!
+//! ```
+//! # use syn::{Attribute, Expr, Ident, Token, Type, Visibility};
+//! #
+//! pub struct ItemStatic {
+//! pub attrs: Vec<Attribute>,
+//! pub vis: Visibility,
+//! pub static_token: Token![static],
+//! pub mutability: Option<Token![mut]>,
+//! pub ident: Ident,
+//! pub colon_token: Token![:],
+//! pub ty: Box<Type>,
+//! pub eq_token: Token![=],
+//! pub expr: Box<Expr>,
+//! pub semi_token: Token![;],
+//! }
+//! ```
+//!
+//! # Parsing
+//!
+//! Keywords and punctuation can be parsed through the [`ParseStream::parse`]
+//! method. Delimiter tokens are parsed using the [`parenthesized!`],
+//! [`bracketed!`] and [`braced!`] macros.
+//!
+//! [`ParseStream::parse`]: crate::parse::ParseBuffer::parse()
+//! [`parenthesized!`]: crate::parenthesized!
+//! [`bracketed!`]: crate::bracketed!
+//! [`braced!`]: crate::braced!
+//!
+//! ```
+//! use syn::{Attribute, Result};
+//! use syn::parse::{Parse, ParseStream};
+//! #
+//! # enum ItemStatic {}
+//!
+//! // Parse the ItemStatic struct shown above.
+//! impl Parse for ItemStatic {
+//! fn parse(input: ParseStream) -> Result<Self> {
+//! # use syn::ItemStatic;
+//! # fn parse(input: ParseStream) -> Result<ItemStatic> {
+//! Ok(ItemStatic {
+//! attrs: input.call(Attribute::parse_outer)?,
+//! vis: input.parse()?,
+//! static_token: input.parse()?,
+//! mutability: input.parse()?,
+//! ident: input.parse()?,
+//! colon_token: input.parse()?,
+//! ty: input.parse()?,
+//! eq_token: input.parse()?,
+//! expr: input.parse()?,
+//! semi_token: input.parse()?,
+//! })
+//! # }
+//! # unimplemented!()
+//! }
+//! }
+//! ```
+//!
+//! # Other operations
+//!
+//! Every keyword and punctuation token supports the following operations.
+//!
+//! - [Peeking] — `input.peek(Token![...])`
+//!
+//! - [Parsing] — `input.parse::<Token![...]>()?`
+//!
+//! - [Printing] — `quote!( ... #the_token ... )`
+//!
+//! - Construction from a [`Span`] — `let the_token = Token![...](sp)`
+//!
+//! - Field access to its span — `let sp = the_token.span`
+//!
+//! [Peeking]: crate::parse::ParseBuffer::peek()
+//! [Parsing]: crate::parse::ParseBuffer::parse()
+//! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+//! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+
+#[cfg(feature = "parsing")]
+pub(crate) use self::private::CustomToken;
+use self::private::WithSpan;
+#[cfg(feature = "parsing")]
+use crate::buffer::Cursor;
+#[cfg(feature = "parsing")]
+use crate::error::Result;
+#[cfg(feature = "parsing")]
+use crate::lifetime::Lifetime;
+#[cfg(feature = "parsing")]
+use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
+#[cfg(feature = "parsing")]
+use crate::lookahead;
+#[cfg(feature = "parsing")]
+use crate::parse::{Parse, ParseStream};
+use crate::span::IntoSpans;
+use proc_macro2::extra::DelimSpan;
+use proc_macro2::Span;
+#[cfg(feature = "printing")]
+use proc_macro2::TokenStream;
+#[cfg(any(feature = "parsing", feature = "printing"))]
+use proc_macro2::{Delimiter, Ident};
+#[cfg(feature = "parsing")]
+use proc_macro2::{Literal, Punct, TokenTree};
+#[cfg(feature = "printing")]
+use quote::{ToTokens, TokenStreamExt};
+#[cfg(feature = "extra-traits")]
+use std::cmp;
+#[cfg(feature = "extra-traits")]
+use std::fmt::{self, Debug};
+#[cfg(feature = "extra-traits")]
+use std::hash::{Hash, Hasher};
+use std::ops::{Deref, DerefMut};
+
+/// Marker trait for types that represent single tokens.
+///
+/// This trait is sealed and cannot be implemented for types outside of Syn.
+#[cfg(feature = "parsing")]
+pub trait Token: private::Sealed {
+ // Not public API.
+ #[doc(hidden)]
+ fn peek(cursor: Cursor) -> bool;
+
+ // Not public API.
+ #[doc(hidden)]
+ fn display() -> &'static str;
+}
+
+pub(crate) mod private {
+ #[cfg(feature = "parsing")]
+ use crate::buffer::Cursor;
+ use proc_macro2::Span;
+
+ #[cfg(feature = "parsing")]
+ pub trait Sealed {}
+
+ /// Support writing `token.span` rather than `token.spans[0]` on tokens that
+ /// hold a single span.
+ #[repr(transparent)]
+ #[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482
+ pub struct WithSpan {
+ pub span: Span,
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+ #[cfg(feature = "parsing")]
+ pub trait CustomToken {
+ fn peek(cursor: Cursor) -> bool;
+ fn display() -> &'static str;
+ }
+}
+
+#[cfg(feature = "parsing")]
+impl private::Sealed for Ident {}
+
+#[cfg(feature = "parsing")]
+fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
+ use crate::parse::Unexpected;
+ use std::cell::Cell;
+ use std::rc::Rc;
+
+ let scope = Span::call_site();
+ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
+ peek(&buffer)
+}
+
+macro_rules! impl_token {
+ ($display:literal $name:ty) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+ fn peek(input: ParseStream) -> bool {
+ <$name as Parse>::parse(input).is_ok()
+ }
+ peek_impl(cursor, peek)
+ }
+
+ fn display() -> &'static str {
+ $display
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for $name {}
+ };
+}
+
+impl_token!("lifetime" Lifetime);
+impl_token!("literal" Lit);
+impl_token!("string literal" LitStr);
+impl_token!("byte string literal" LitByteStr);
+impl_token!("byte literal" LitByte);
+impl_token!("character literal" LitChar);
+impl_token!("integer literal" LitInt);
+impl_token!("floating point literal" LitFloat);
+impl_token!("boolean literal" LitBool);
+impl_token!("group token" proc_macro2::Group);
+
+macro_rules! impl_low_level_token {
+ ($display:literal $ty:ident $get:ident) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $ty {
+ fn peek(cursor: Cursor) -> bool {
+ cursor.$get().is_some()
+ }
+
+ fn display() -> &'static str {
+ $display
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for $ty {}
+ };
+}
+
+impl_low_level_token!("punctuation token" Punct punct);
+impl_low_level_token!("literal" Literal literal);
+impl_low_level_token!("token" TokenTree token_tree);
+
+#[cfg(feature = "parsing")]
+impl<T: CustomToken> private::Sealed for T {}
+
+#[cfg(feature = "parsing")]
+impl<T: CustomToken> Token for T {
+ fn peek(cursor: Cursor) -> bool {
+ <Self as CustomToken>::peek(cursor)
+ }
+
+ fn display() -> &'static str {
+ <Self as CustomToken>::display()
+ }
+}
+
+macro_rules! define_keywords {
+ ($($token:literal pub struct $name:ident)*) => {
+ $(
+ #[doc = concat!('`', $token, '`')]
+ ///
+ /// Don't try to remember the name of this type — use the
+ /// [`Token!`] macro instead.
+ ///
+ /// [`Token!`]: crate::token
+ pub struct $name {
+ pub span: Span,
+ }
+
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn $name<S: IntoSpans<Span>>(span: S) -> $name {
+ $name {
+ span: span.into_spans(),
+ }
+ }
+
+ impl std::default::Default for $name {
+ fn default() -> Self {
+ $name {
+ span: Span::call_site(),
+ }
+ }
+ }
+
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl Copy for $name {}
+
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl Clone for $name {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(stringify!($name))
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl cmp::Eq for $name {}
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl PartialEq for $name {
+ fn eq(&self, _other: &$name) -> bool {
+ true
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Hash for $name {
+ fn hash<H: Hasher>(&self, _state: &mut H) {}
+ }
+
+ #[cfg(feature = "printing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for $name {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ printing::keyword($token, self.span, tokens);
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for $name {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok($name {
+ span: parsing::keyword(input, $token)?,
+ })
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+ parsing::peek_keyword(cursor, $token)
+ }
+
+ fn display() -> &'static str {
+ concat!("`", $token, "`")
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for $name {}
+ )*
+ };
+}
+
+macro_rules! impl_deref_if_len_is_1 {
+ ($name:ident/1) => {
+ impl Deref for $name {
+ type Target = WithSpan;
+
+ fn deref(&self) -> &Self::Target {
+ unsafe { &*(self as *const Self).cast::<WithSpan>() }
+ }
+ }
+
+ impl DerefMut for $name {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ unsafe { &mut *(self as *mut Self).cast::<WithSpan>() }
+ }
+ }
+ };
+
+ ($name:ident/$len:literal) => {};
+}
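+// The Deref into WithSpan above is what lets one-character punctuation tokens
+// expose `.span` even though they store `spans: [Span; 1]`; a minimal sketch
+// (not part of upstream syn):
+//
+//     let semi: syn::Token![;] = Default::default();
+//     let _sp: proc_macro2::Span = semi.span;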
+
+macro_rules! define_punctuation_structs {
+ ($($token:literal pub struct $name:ident/$len:tt #[doc = $usage:literal])*) => {
+ $(
+ #[cfg_attr(not(doc), repr(transparent))]
+ #[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482
+ #[doc = concat!('`', $token, '`')]
+ ///
+ /// Usage:
+ #[doc = concat!($usage, '.')]
+ ///
+ /// Don't try to remember the name of this type — use the
+ /// [`Token!`] macro instead.
+ ///
+ /// [`Token!`]: crate::token
+ pub struct $name {
+ pub spans: [Span; $len],
+ }
+
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn $name<S: IntoSpans<[Span; $len]>>(spans: S) -> $name {
+ $name {
+ spans: spans.into_spans(),
+ }
+ }
+
+ impl std::default::Default for $name {
+ fn default() -> Self {
+ $name {
+ spans: [Span::call_site(); $len],
+ }
+ }
+ }
+
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl Copy for $name {}
+
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl Clone for $name {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(stringify!($name))
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl cmp::Eq for $name {}
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl PartialEq for $name {
+ fn eq(&self, _other: &$name) -> bool {
+ true
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Hash for $name {
+ fn hash<H: Hasher>(&self, _state: &mut H) {}
+ }
+
+ impl_deref_if_len_is_1!($name/$len);
+ )*
+ };
+}
+
+macro_rules! define_punctuation {
+ ($($token:literal pub struct $name:ident/$len:tt #[doc = $usage:literal])*) => {
+ $(
+ define_punctuation_structs! {
+ $token pub struct $name/$len #[doc = $usage]
+ }
+
+ #[cfg(feature = "printing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for $name {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ printing::punct($token, &self.spans, tokens);
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for $name {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok($name {
+ spans: parsing::punct(input, $token)?,
+ })
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+ parsing::peek_punct(cursor, $token)
+ }
+
+ fn display() -> &'static str {
+ concat!("`", $token, "`")
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for $name {}
+ )*
+ };
+}
+
+macro_rules! define_delimiters {
+ ($($delim:ident pub struct $name:ident #[$doc:meta])*) => {
+ $(
+ #[$doc]
+ pub struct $name {
+ pub span: DelimSpan,
+ }
+
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn $name<S: IntoSpans<DelimSpan>>(span: S) -> $name {
+ $name {
+ span: span.into_spans(),
+ }
+ }
+
+ impl std::default::Default for $name {
+ fn default() -> Self {
+ $name(Span::call_site())
+ }
+ }
+
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl Copy for $name {}
+
+ #[cfg(feature = "clone-impls")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+ impl Clone for $name {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(stringify!($name))
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl cmp::Eq for $name {}
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl PartialEq for $name {
+ fn eq(&self, _other: &$name) -> bool {
+ true
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+ impl Hash for $name {
+ fn hash<H: Hasher>(&self, _state: &mut H) {}
+ }
+
+ impl $name {
+ #[cfg(feature = "printing")]
+ pub fn surround<F>(&self, tokens: &mut TokenStream, f: F)
+ where
+ F: FnOnce(&mut TokenStream),
+ {
+ let mut inner = TokenStream::new();
+ f(&mut inner);
+ printing::delim(Delimiter::$delim, self.span.join(), tokens, inner);
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for $name {}
+ )*
+ };
+}
+
+define_punctuation_structs! {
+ "_" pub struct Underscore/1 /// wildcard patterns, inferred types, unnamed
items in constants, extern crates, use declarations, and destructuring
assignment
+}
+
+#[cfg(feature = "printing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+impl ToTokens for Underscore {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append(Ident::new("_", self.span));
+ }
+}
+
+#[cfg(feature = "parsing")]
+#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+impl Parse for Underscore {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| {
+ if let Some((ident, rest)) = cursor.ident() {
+ if ident == "_" {
+ return Ok((Underscore(ident.span()), rest));
+ }
+ }
+ if let Some((punct, rest)) = cursor.punct() {
+ if punct.as_char() == '_' {
+ return Ok((Underscore(punct.span()), rest));
+ }
+ }
+ Err(cursor.error("expected `_`"))
+ })
+ }
+}
+
+#[cfg(feature = "parsing")]
+impl Token for Underscore {
+ fn peek(cursor: Cursor) -> bool {
+ if let Some((ident, _rest)) = cursor.ident() {
+ return ident == "_";
+ }
+ if let Some((punct, _rest)) = cursor.punct() {
+ return punct.as_char() == '_';
+ }
+ false
+ }
+
+ fn display() -> &'static str {
+ "`_`"
+ }
+}
+
+#[cfg(feature = "parsing")]
+impl private::Sealed for Underscore {}
+
+/// None-delimited group
+pub struct Group {
+ pub span: Span,
+}
+
+#[doc(hidden)]
+#[allow(non_snake_case)]
+pub fn Group<S: IntoSpans<Span>>(span: S) -> Group {
+ Group {
+ span: span.into_spans(),
+ }
+}
+
+impl std::default::Default for Group {
+ fn default() -> Self {
+ Group {
+ span: Span::call_site(),
+ }
+ }
+}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Copy for Group {}
+
+#[cfg(feature = "clone-impls")]
+#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))]
+impl Clone for Group {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("Group")
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl cmp::Eq for Group {}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl PartialEq for Group {
+ fn eq(&self, _other: &Group) -> bool {
+ true
+ }
+}
+
+#[cfg(feature = "extra-traits")]
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))]
+impl Hash for Group {
+ fn hash<H: Hasher>(&self, _state: &mut H) {}
+}
+
+impl Group {
+ #[cfg(feature = "printing")]
+ pub fn surround<F>(&self, tokens: &mut TokenStream, f: F)
+ where
+ F: FnOnce(&mut TokenStream),
+ {
+ let mut inner = TokenStream::new();
+ f(&mut inner);
+ printing::delim(Delimiter::None, self.span, tokens, inner);
+ }
+}
+
+#[cfg(feature = "parsing")]
+impl private::Sealed for Group {}
+
+#[cfg(feature = "parsing")]
+impl Token for Paren {
+ fn peek(cursor: Cursor) -> bool {
+ lookahead::is_delimiter(cursor, Delimiter::Parenthesis)
+ }
+
+ fn display() -> &'static str {
+ "parentheses"
+ }
+}
+
+#[cfg(feature = "parsing")]
+impl Token for Brace {
+ fn peek(cursor: Cursor) -> bool {
+ lookahead::is_delimiter(cursor, Delimiter::Brace)
+ }
+
+ fn display() -> &'static str {
+ "curly braces"
+ }
+}
+
+#[cfg(feature = "parsing")]
+impl Token for Bracket {
+ fn peek(cursor: Cursor) -> bool {
+ lookahead::is_delimiter(cursor, Delimiter::Bracket)
+ }
+
+ fn display() -> &'static str {
+ "square brackets"
+ }
+}
+
+#[cfg(feature = "parsing")]
+impl Token for Group {
+ fn peek(cursor: Cursor) -> bool {
+ lookahead::is_delimiter(cursor, Delimiter::None)
+ }
+
+ fn display() -> &'static str {
+ "invisible group"
+ }
+}
+
+define_keywords! {
+ "abstract" pub struct Abstract
+ "as" pub struct As
+ "async" pub struct Async
+ "auto" pub struct Auto
+ "await" pub struct Await
+ "become" pub struct Become
+ "box" pub struct Box
+ "break" pub struct Break
+ "const" pub struct Const
+ "continue" pub struct Continue
+ "crate" pub struct Crate
+ "default" pub struct Default
+ "do" pub struct Do
+ "dyn" pub struct Dyn
+ "else" pub struct Else
+ "enum" pub struct Enum
+ "extern" pub struct Extern
+ "final" pub struct Final
+ "fn" pub struct Fn
+ "for" pub struct For
+ "if" pub struct If
+ "impl" pub struct Impl
+ "in" pub struct In
+ "let" pub struct Let
+ "loop" pub struct Loop
+ "macro" pub struct Macro
+ "match" pub struct Match
+ "mod" pub struct Mod
+ "move" pub struct Move
+ "mut" pub struct Mut
+ "override" pub struct Override
+ "priv" pub struct Priv
+ "pub" pub struct Pub
+ "ref" pub struct Ref
+ "return" pub struct Return
+ "Self" pub struct SelfType
+ "self" pub struct SelfValue
+ "static" pub struct Static
+ "struct" pub struct Struct
+ "super" pub struct Super
+ "trait" pub struct Trait
+ "try" pub struct Try
+ "type" pub struct Type
+ "typeof" pub struct Typeof
+ "union" pub struct Union
+ "unsafe" pub struct Unsafe
+ "unsized" pub struct Unsized
+ "use" pub struct Use
+ "virtual" pub struct Virtual
+ "where" pub struct Where
+ "while" pub struct While
+ "yield" pub struct Yield
+}
+
+define_punctuation! {
+ "&" pub struct And/1 /// bitwise and logical AND, borrow,
references, reference patterns
+ "&&" pub struct AndAnd/2 /// lazy AND, borrow, references,
reference patterns
+ "&=" pub struct AndEq/2 /// bitwise AND assignment
+ "@" pub struct At/1 /// subpattern binding
+ "^" pub struct Caret/1 /// bitwise and logical XOR
+ "^=" pub struct CaretEq/2 /// bitwise XOR assignment
+ ":" pub struct Colon/1 /// various separators
+ "," pub struct Comma/1 /// various separators
+ "$" pub struct Dollar/1 /// macros
+ "." pub struct Dot/1 /// field access, tuple index
+ ".." pub struct DotDot/2 /// range, struct expressions,
patterns, range patterns
+ "..." pub struct DotDotDot/3 /// variadic functions, range
patterns
+ "..=" pub struct DotDotEq/3 /// inclusive range, range patterns
+ "=" pub struct Eq/1 /// assignment, attributes, various
type definitions
+ "==" pub struct EqEq/2 /// equal
+ "=>" pub struct FatArrow/2 /// match arms, macros
+ ">=" pub struct Ge/2 /// greater than or equal to,
generics
+ ">" pub struct Gt/1 /// greater than, generics, paths
+ "<-" pub struct LArrow/2 /// unused
+ "<=" pub struct Le/2 /// less than or equal to
+ "<" pub struct Lt/1 /// less than, generics, paths
+ "-" pub struct Minus/1 /// subtraction, negation
+ "-=" pub struct MinusEq/2 /// subtraction assignment
+ "!=" pub struct Ne/2 /// not equal
+ "!" pub struct Not/1 /// bitwise and logical NOT, macro
calls, inner attributes, never type, negative impls
+ "|" pub struct Or/1 /// bitwise and logical OR,
closures, patterns in match, if let, and while let
+ "|=" pub struct OrEq/2 /// bitwise OR assignment
+ "||" pub struct OrOr/2 /// lazy OR, closures
+ "::" pub struct PathSep/2 /// path separator
+ "%" pub struct Percent/1 /// remainder
+ "%=" pub struct PercentEq/2 /// remainder assignment
+ "+" pub struct Plus/1 /// addition, trait bounds, macro
Kleene matcher
+ "+=" pub struct PlusEq/2 /// addition assignment
+ "#" pub struct Pound/1 /// attributes
+ "?" pub struct Question/1 /// question mark operator,
questionably sized, macro Kleene matcher
+ "->" pub struct RArrow/2 /// function return type, closure
return type, function pointer type
+ ";" pub struct Semi/1 /// terminator for various items and
statements, array types
+ "<<" pub struct Shl/2 /// shift left, nested generics
+ "<<=" pub struct ShlEq/3 /// shift left assignment
+ ">>" pub struct Shr/2 /// shift right, nested generics
+ ">>=" pub struct ShrEq/3 /// shift right assignment, nested
generics
+ "/" pub struct Slash/1 /// division
+ "/=" pub struct SlashEq/2 /// division assignment
+ "*" pub struct Star/1 /// multiplication, dereference, raw
pointers, macro Kleene matcher, use wildcards
+ "*=" pub struct StarEq/2 /// multiplication assignment
+ "~" pub struct Tilde/1 /// unused since before Rust 1.0
+}
+
+define_delimiters! {
+ Brace pub struct Brace /// `{`…`}`
+ Bracket pub struct Bracket /// `[`…`]`
+ Parenthesis pub struct Paren /// `(`…`)`
+}
+
+/// A type-macro that expands to the name of the Rust type representation of a
+/// given token.
+///
+/// As a type, `Token!` is commonly used in the type of struct fields, the type
+/// of a `let` statement, or in turbofish for a `parse` function.
+///
+/// ```
+/// use syn::{Ident, Token};
+/// use syn::parse::{Parse, ParseStream, Result};
+///
+/// // `struct Foo;`
+/// pub struct UnitStruct {
+/// struct_token: Token![struct],
+/// ident: Ident,
+/// semi_token: Token![;],
+/// }
+///
+/// impl Parse for UnitStruct {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let struct_token: Token![struct] = input.parse()?;
+/// let ident: Ident = input.parse()?;
+/// let semi_token = input.parse::<Token![;]>()?;
+/// Ok(UnitStruct { struct_token, ident, semi_token })
+/// }
+/// }
+/// ```
+///
+/// As an expression, `Token!` is used for peeking tokens or instantiating
+/// tokens from a span.
+///
+/// ```
+/// # use syn::{Ident, Token};
+/// # use syn::parse::{Parse, ParseStream, Result};
+/// #
+/// # struct UnitStruct {
+/// # struct_token: Token![struct],
+/// # ident: Ident,
+/// # semi_token: Token![;],
+/// # }
+/// #
+/// # impl Parse for UnitStruct {
+/// # fn parse(input: ParseStream) -> Result<Self> {
+/// # unimplemented!()
+/// # }
+/// # }
+/// #
+/// fn make_unit_struct(name: Ident) -> UnitStruct {
+/// let span = name.span();
+/// UnitStruct {
+/// struct_token: Token![struct](span),
+/// ident: name,
+/// semi_token: Token![;](span),
+/// }
+/// }
+///
+/// # fn parse(input: ParseStream) -> Result<()> {
+/// if input.peek(Token![struct]) {
+/// let unit_struct: UnitStruct = input.parse()?;
+/// /* ... */
+/// }
+/// # Ok(())
+/// # }
+/// ```
+///
+/// See the [token module] documentation for details and examples.
+///
+/// [token module]: crate::token
+#[macro_export]
+macro_rules! Token {
+ [abstract] => { $crate::token::Abstract };
+ [as] => { $crate::token::As };
+ [async] => { $crate::token::Async };
+ [auto] => { $crate::token::Auto };
+ [await] => { $crate::token::Await };
+ [become] => { $crate::token::Become };
+ [box] => { $crate::token::Box };
+ [break] => { $crate::token::Break };
+ [const] => { $crate::token::Const };
+ [continue] => { $crate::token::Continue };
+ [crate] => { $crate::token::Crate };
+ [default] => { $crate::token::Default };
+ [do] => { $crate::token::Do };
+ [dyn] => { $crate::token::Dyn };
+ [else] => { $crate::token::Else };
+ [enum] => { $crate::token::Enum };
+ [extern] => { $crate::token::Extern };
+ [final] => { $crate::token::Final };
+ [fn] => { $crate::token::Fn };
+ [for] => { $crate::token::For };
+ [if] => { $crate::token::If };
+ [impl] => { $crate::token::Impl };
+ [in] => { $crate::token::In };
+ [let] => { $crate::token::Let };
+ [loop] => { $crate::token::Loop };
+ [macro] => { $crate::token::Macro };
+ [match] => { $crate::token::Match };
+ [mod] => { $crate::token::Mod };
+ [move] => { $crate::token::Move };
+ [mut] => { $crate::token::Mut };
+ [override] => { $crate::token::Override };
+ [priv] => { $crate::token::Priv };
+ [pub] => { $crate::token::Pub };
+ [ref] => { $crate::token::Ref };
+ [return] => { $crate::token::Return };
+ [Self] => { $crate::token::SelfType };
+ [self] => { $crate::token::SelfValue };
+ [static] => { $crate::token::Static };
+ [struct] => { $crate::token::Struct };
+ [super] => { $crate::token::Super };
+ [trait] => { $crate::token::Trait };
+ [try] => { $crate::token::Try };
+ [type] => { $crate::token::Type };
+ [typeof] => { $crate::token::Typeof };
+ [union] => { $crate::token::Union };
+ [unsafe] => { $crate::token::Unsafe };
+ [unsized] => { $crate::token::Unsized };
+ [use] => { $crate::token::Use };
+ [virtual] => { $crate::token::Virtual };
+ [where] => { $crate::token::Where };
+ [while] => { $crate::token::While };
+ [yield] => { $crate::token::Yield };
+ [&] => { $crate::token::And };
+ [&&] => { $crate::token::AndAnd };
+ [&=] => { $crate::token::AndEq };
+ [@] => { $crate::token::At };
+ [^] => { $crate::token::Caret };
+ [^=] => { $crate::token::CaretEq };
+ [:] => { $crate::token::Colon };
+ [,] => { $crate::token::Comma };
+ [$] => { $crate::token::Dollar };
+ [.] => { $crate::token::Dot };
+ [..] => { $crate::token::DotDot };
+ [...] => { $crate::token::DotDotDot };
+ [..=] => { $crate::token::DotDotEq };
+ [=] => { $crate::token::Eq };
+ [==] => { $crate::token::EqEq };
+ [=>] => { $crate::token::FatArrow };
+ [>=] => { $crate::token::Ge };
+ [>] => { $crate::token::Gt };
+ [<-] => { $crate::token::LArrow };
+ [<=] => { $crate::token::Le };
+ [<] => { $crate::token::Lt };
+ [-] => { $crate::token::Minus };
+ [-=] => { $crate::token::MinusEq };
+ [!=] => { $crate::token::Ne };
+ [!] => { $crate::token::Not };
+ [|] => { $crate::token::Or };
+ [|=] => { $crate::token::OrEq };
+ [||] => { $crate::token::OrOr };
+ [::] => { $crate::token::PathSep };
+ [%] => { $crate::token::Percent };
+ [%=] => { $crate::token::PercentEq };
+ [+] => { $crate::token::Plus };
+ [+=] => { $crate::token::PlusEq };
+ [#] => { $crate::token::Pound };
+ [?] => { $crate::token::Question };
+ [->] => { $crate::token::RArrow };
+ [;] => { $crate::token::Semi };
+ [<<] => { $crate::token::Shl };
+ [<<=] => { $crate::token::ShlEq };
+ [>>] => { $crate::token::Shr };
+ [>>=] => { $crate::token::ShrEq };
+ [/] => { $crate::token::Slash };
+ [/=] => { $crate::token::SlashEq };
+ [*] => { $crate::token::Star };
+ [*=] => { $crate::token::StarEq };
+ [~] => { $crate::token::Tilde };
+ [_] => { $crate::token::Underscore };
+}
+
+// Not public API.
+#[doc(hidden)]
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::buffer::Cursor;
+ use crate::error::{Error, Result};
+ use crate::parse::ParseStream;
+ use proc_macro2::{Spacing, Span};
+
+ pub(crate) fn keyword(input: ParseStream, token: &str) -> Result<Span> {
+ input.step(|cursor| {
+ if let Some((ident, rest)) = cursor.ident() {
+ if ident == token {
+ return Ok((ident.span(), rest));
+ }
+ }
+ Err(cursor.error(format!("expected `{}`", token)))
+ })
+ }
+
+ pub(crate) fn peek_keyword(cursor: Cursor, token: &str) -> bool {
+ if let Some((ident, _rest)) = cursor.ident() {
+ ident == token
+ } else {
+ false
+ }
+ }
+
+ #[doc(hidden)]
+ pub fn punct<const N: usize>(input: ParseStream, token: &str) -> Result<[Span; N]> {
+ let mut spans = [input.span(); N];
+ punct_helper(input, token, &mut spans)?;
+ Ok(spans)
+ }
+
+ fn punct_helper(input: ParseStream, token: &str, spans: &mut [Span]) -> Result<()> {
+ input.step(|cursor| {
+ let mut cursor = *cursor;
+ assert_eq!(token.len(), spans.len());
+
+ for (i, ch) in token.chars().enumerate() {
+ match cursor.punct() {
+ Some((punct, rest)) => {
+ spans[i] = punct.span();
+ if punct.as_char() != ch {
+ break;
+ } else if i == token.len() - 1 {
+ return Ok(((), rest));
+ } else if punct.spacing() != Spacing::Joint {
+ break;
+ }
+ cursor = rest;
+ }
+ None => break,
+ }
+ }
+
+ Err(Error::new(spans[0], format!("expected `{}`", token)))
+ })
+ }
+
+ #[doc(hidden)]
+ pub fn peek_punct(mut cursor: Cursor, token: &str) -> bool {
+ for (i, ch) in token.chars().enumerate() {
+ match cursor.punct() {
+ Some((punct, rest)) => {
+ if punct.as_char() != ch {
+ break;
+ } else if i == token.len() - 1 {
+ return true;
+ } else if punct.spacing() != Spacing::Joint {
+ break;
+ }
+ cursor = rest;
+ }
+ None => break,
+ }
+ }
+ false
+ }
+}
+
+// Not public API.
+#[doc(hidden)]
+#[cfg(feature = "printing")]
+pub(crate) mod printing {
+ use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream};
+ use quote::TokenStreamExt;
+
+ #[doc(hidden)]
+ pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) {
+ assert_eq!(s.len(), spans.len());
+
+ let mut chars = s.chars();
+ let mut spans = spans.iter();
+ let ch = chars.next_back().unwrap();
+ let span = spans.next_back().unwrap();
+ for (ch, span) in chars.zip(spans) {
+ let mut op = Punct::new(ch, Spacing::Joint);
+ op.set_span(*span);
+ tokens.append(op);
+ }
+
+ let mut op = Punct::new(ch, Spacing::Alone);
+ op.set_span(*span);
+ tokens.append(op);
+ }
+
+ pub(crate) fn keyword(s: &str, span: Span, tokens: &mut TokenStream) {
+ tokens.append(Ident::new(s, span));
+ }
+
+ pub(crate) fn delim(
+ delim: Delimiter,
+ span: Span,
+ tokens: &mut TokenStream,
+ inner: TokenStream,
+ ) {
+ let mut g = Group::new(delim, inner);
+ g.set_span(span);
+ tokens.append(g);
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/tt.rs b/rust/hw/char/pl011/vendor/syn/src/tt.rs
new file mode 100644
index 0000000000..7d5d6a1ac3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/tt.rs
@@ -0,0 +1,107 @@
+use proc_macro2::{Delimiter, TokenStream, TokenTree};
+use std::hash::{Hash, Hasher};
+
+pub(crate) struct TokenTreeHelper<'a>(pub &'a TokenTree);
+
+impl<'a> PartialEq for TokenTreeHelper<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ use proc_macro2::Spacing;
+
+ match (self.0, other.0) {
+ (TokenTree::Group(g1), TokenTree::Group(g2)) => {
+ match (g1.delimiter(), g2.delimiter()) {
+ (Delimiter::Parenthesis, Delimiter::Parenthesis)
+ | (Delimiter::Brace, Delimiter::Brace)
+ | (Delimiter::Bracket, Delimiter::Bracket)
+ | (Delimiter::None, Delimiter::None) => {}
+ _ => return false,
+ }
+
+ let s1 = g1.stream().into_iter();
+ let mut s2 = g2.stream().into_iter();
+
+ for item1 in s1 {
+ let item2 = match s2.next() {
+ Some(item) => item,
+ None => return false,
+ };
+ if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
+ return false;
+ }
+ }
+ s2.next().is_none()
+ }
+ (TokenTree::Punct(o1), TokenTree::Punct(o2)) => {
+ o1.as_char() == o2.as_char()
+ && match (o1.spacing(), o2.spacing()) {
+ (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
+ _ => false,
+ }
+ }
+ (TokenTree::Literal(l1), TokenTree::Literal(l2)) => l1.to_string() == l2.to_string(),
+ (TokenTree::Ident(s1), TokenTree::Ident(s2)) => s1 == s2,
+ _ => false,
+ }
+ }
+}
+
+impl<'a> Hash for TokenTreeHelper<'a> {
+ fn hash<H: Hasher>(&self, h: &mut H) {
+ use proc_macro2::Spacing;
+
+ match self.0 {
+ TokenTree::Group(g) => {
+ 0u8.hash(h);
+ match g.delimiter() {
+ Delimiter::Parenthesis => 0u8.hash(h),
+ Delimiter::Brace => 1u8.hash(h),
+ Delimiter::Bracket => 2u8.hash(h),
+ Delimiter::None => 3u8.hash(h),
+ }
+
+ for item in g.stream() {
+ TokenTreeHelper(&item).hash(h);
+ }
+ 0xFFu8.hash(h); // terminator w/ a variant we don't normally hash
+ }
+ TokenTree::Punct(op) => {
+ 1u8.hash(h);
+ op.as_char().hash(h);
+ match op.spacing() {
+ Spacing::Alone => 0u8.hash(h),
+ Spacing::Joint => 1u8.hash(h),
+ }
+ }
+ TokenTree::Literal(lit) => (2u8, lit.to_string()).hash(h),
+ TokenTree::Ident(word) => (3u8, word).hash(h),
+ }
+ }
+}
+
+pub(crate) struct TokenStreamHelper<'a>(pub &'a TokenStream);
+
+impl<'a> PartialEq for TokenStreamHelper<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ let left = self.0.clone().into_iter().collect::<Vec<_>>();
+ let right = other.0.clone().into_iter().collect::<Vec<_>>();
+ if left.len() != right.len() {
+ return false;
+ }
+ for (a, b) in left.into_iter().zip(right) {
+ if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
+ return false;
+ }
+ }
+ true
+ }
+}
+
+impl<'a> Hash for TokenStreamHelper<'a> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ let tts = self.0.clone().into_iter().collect::<Vec<_>>();
+ tts.len().hash(state);
+ for tt in tts {
+ TokenTreeHelper(&tt).hash(state);
+ }
+ }
+}
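
The two helpers in tt.rs give syn span-insensitive equality and hashing for token
streams. A minimal standalone sketch of the same idea, using only the public
proc-macro2 API (tokens_eq is a hypothetical name and not part of the vendored code):

    use proc_macro2::{TokenStream, TokenTree};

    // Structural comparison of token streams that ignores span information,
    // mirroring what TokenTreeHelper/TokenStreamHelper do internally.
    fn tokens_eq(a: &TokenStream, b: &TokenStream) -> bool {
        let a: Vec<TokenTree> = a.clone().into_iter().collect();
        let b: Vec<TokenTree> = b.clone().into_iter().collect();
        a.len() == b.len()
            && a.iter().zip(&b).all(|(x, y)| match (x, y) {
                (TokenTree::Group(g1), TokenTree::Group(g2)) => {
                    g1.delimiter() == g2.delimiter() && tokens_eq(&g1.stream(), &g2.stream())
                }
                (TokenTree::Punct(p1), TokenTree::Punct(p2)) => {
                    p1.as_char() == p2.as_char() && p1.spacing() == p2.spacing()
                }
                (TokenTree::Literal(l1), TokenTree::Literal(l2)) => {
                    l1.to_string() == l2.to_string()
                }
                (TokenTree::Ident(i1), TokenTree::Ident(i2)) => i1 == i2,
                _ => false,
            })
    }
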
diff --git a/rust/hw/char/pl011/vendor/syn/src/ty.rs b/rust/hw/char/pl011/vendor/syn/src/ty.rs
new file mode 100644
index 0000000000..fa9870e5a2
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/ty.rs
@@ -0,0 +1,1216 @@
+use crate::attr::Attribute;
+use crate::expr::Expr;
+use crate::generics::{BoundLifetimes, TypeParamBound};
+use crate::ident::Ident;
+use crate::lifetime::Lifetime;
+use crate::lit::LitStr;
+use crate::mac::Macro;
+use crate::path::{Path, QSelf};
+use crate::punctuated::Punctuated;
+use crate::token;
+use proc_macro2::TokenStream;
+
+ast_enum_of_structs! {
+ /// The possible types that a Rust value could have.
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ #[non_exhaustive]
+ pub enum Type {
+ /// A fixed size array type: `[T; n]`.
+ Array(TypeArray),
+
+ /// A bare function type: `fn(usize) -> bool`.
+ BareFn(TypeBareFn),
+
+ /// A type contained within invisible delimiters.
+ Group(TypeGroup),
+
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ ImplTrait(TypeImplTrait),
+
+ /// Indication that a type should be inferred by the compiler: `_`.
+ Infer(TypeInfer),
+
+ /// A macro in the type position.
+ Macro(TypeMacro),
+
+ /// The never type: `!`.
+ Never(TypeNever),
+
+ /// A parenthesized type equivalent to the inner type.
+ Paren(TypeParen),
+
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ Path(TypePath),
+
+ /// A raw pointer type: `*const T` or `*mut T`.
+ Ptr(TypePtr),
+
+ /// A reference type: `&'a T` or `&'a mut T`.
+ Reference(TypeReference),
+
+ /// A dynamically sized slice type: `[T]`.
+ Slice(TypeSlice),
+
+ /// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ TraitObject(TypeTraitObject),
+
+ /// A tuple type: `(A, B, C, String)`.
+ Tuple(TypeTuple),
+
+ /// Tokens in type position not interpreted by Syn.
+ Verbatim(TokenStream),
+
+ // For testing exhaustiveness in downstream code, use the following idiom:
+ //
+ // match ty {
+ // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
+ //
+ // Type::Array(ty) => {...}
+ // Type::BareFn(ty) => {...}
+ // ...
+ // Type::Verbatim(ty) => {...}
+ //
+ // _ => { /* some sane fallback */ }
+ // }
+ //
+ // This way we fail your tests but don't break your library when adding
+ // a variant. You will be notified by a test failure when a variant is
+ // added, so that you can add code to handle it, but your library will
+ // continue to compile and work for downstream users in the interim.
+ }
+}
+
+ast_struct! {
+ /// A fixed size array type: `[T; n]`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeArray {
+ pub bracket_token: token::Bracket,
+ pub elem: Box<Type>,
+ pub semi_token: Token![;],
+ pub len: Expr,
+ }
+}
+
+ast_struct! {
+ /// A bare function type: `fn(usize) -> bool`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeBareFn {
+ pub lifetimes: Option<BoundLifetimes>,
+ pub unsafety: Option<Token![unsafe]>,
+ pub abi: Option<Abi>,
+ pub fn_token: Token![fn],
+ pub paren_token: token::Paren,
+ pub inputs: Punctuated<BareFnArg, Token![,]>,
+ pub variadic: Option<BareVariadic>,
+ pub output: ReturnType,
+ }
+}
+
+ast_struct! {
+ /// A type contained within invisible delimiters.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeGroup {
+ pub group_token: token::Group,
+ pub elem: Box<Type>,
+ }
+}
+
+ast_struct! {
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeImplTrait {
+ pub impl_token: Token![impl],
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+}
+
+ast_struct! {
+ /// Indication that a type should be inferred by the compiler: `_`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeInfer {
+ pub underscore_token: Token![_],
+ }
+}
+
+ast_struct! {
+ /// A macro in the type position.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeMacro {
+ pub mac: Macro,
+ }
+}
+
+ast_struct! {
+ /// The never type: `!`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeNever {
+ pub bang_token: Token![!],
+ }
+}
+
+ast_struct! {
+ /// A parenthesized type equivalent to the inner type.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeParen {
+ pub paren_token: token::Paren,
+ pub elem: Box<Type>,
+ }
+}
+
+ast_struct! {
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypePath {
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ }
+}
+
+ast_struct! {
+ /// A raw pointer type: `*const T` or `*mut T`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypePtr {
+ pub star_token: Token![*],
+ pub const_token: Option<Token![const]>,
+ pub mutability: Option<Token![mut]>,
+ pub elem: Box<Type>,
+ }
+}
+
+ast_struct! {
+ /// A reference type: `&'a T` or `&'a mut T`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeReference {
+ pub and_token: Token![&],
+ pub lifetime: Option<Lifetime>,
+ pub mutability: Option<Token![mut]>,
+ pub elem: Box<Type>,
+ }
+}
+
+ast_struct! {
+ /// A dynamically sized slice type: `[T]`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeSlice {
+ pub bracket_token: token::Bracket,
+ pub elem: Box<Type>,
+ }
+}
+
+ast_struct! {
+ /// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeTraitObject {
+ pub dyn_token: Option<Token![dyn]>,
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+}
+
+ast_struct! {
+ /// A tuple type: `(A, B, C, String)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct TypeTuple {
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Type, Token![,]>,
+ }
+}
+
+ast_struct! {
+ /// The binary interface of a function: `extern "C"`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct Abi {
+ pub extern_token: Token![extern],
+ pub name: Option<LitStr>,
+ }
+}
+
+ast_struct! {
+ /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct BareFnArg {
+ pub attrs: Vec<Attribute>,
+ pub name: Option<(Ident, Token![:])>,
+ pub ty: Type,
+ }
+}
+
+ast_struct! {
+ /// The variadic argument of a function pointer like `fn(usize, ...)`.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub struct BareVariadic {
+ pub attrs: Vec<Attribute>,
+ pub name: Option<(Ident, Token![:])>,
+ pub dots: Token![...],
+ pub comma: Option<Token![,]>,
+ }
+}
+
+ast_enum! {
+ /// Return type of a function signature.
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
+ pub enum ReturnType {
+ /// Return type is not specified.
+ ///
+ /// Functions default to `()` and closures default to type inference.
+ Default,
+ /// A particular type is returned.
+ Type(Token![->], Box<Type>),
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub(crate) mod parsing {
+ use crate::attr::Attribute;
+ use crate::error::{self, Result};
+ use crate::ext::IdentExt as _;
+ use crate::generics::{BoundLifetimes, TraitBound, TraitBoundModifier, TypeParamBound};
+ use crate::ident::Ident;
+ use crate::lifetime::Lifetime;
+ use crate::mac::{self, Macro};
+ use crate::parse::{Parse, ParseStream};
+ use crate::path;
+ use crate::path::{Path, PathArguments, QSelf};
+ use crate::punctuated::Punctuated;
+ use crate::token;
+ use crate::ty::{
+ Abi, BareFnArg, BareVariadic, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup,
+ TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr,
+ TypeReference, TypeSlice, TypeTraitObject, TypeTuple,
+ };
+ use crate::verbatim;
+ use proc_macro2::Span;
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Type {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_plus = true;
+ let allow_group_generic = true;
+ ambig_ty(input, allow_plus, allow_group_generic)
+ }
+ }
+
+ impl Type {
+ /// In some positions, types may not contain the `+` character, to
+ /// disambiguate them. For example in the expression `1 as T`, T may not
+ /// contain a `+` character.
+ ///
+ /// This parser does not allow a `+`, while the default parser does.
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+ let allow_plus = false;
+ let allow_group_generic = true;
+ ambig_ty(input, allow_plus, allow_group_generic)
+ }
+ }
+
+ pub(crate) fn ambig_ty(
+ input: ParseStream,
+ allow_plus: bool,
+ allow_group_generic: bool,
+ ) -> Result<Type> {
+ let begin = input.fork();
+
+ if input.peek(token::Group) {
+ let mut group: TypeGroup = input.parse()?;
+ if input.peek(Token![::]) && input.peek3(Ident::peek_any) {
+ if let Type::Path(mut ty) = *group.elem {
+ Path::parse_rest(input, &mut ty.path, false)?;
+ return Ok(Type::Path(ty));
+ } else {
+ return Ok(Type::Path(TypePath {
+ qself: Some(QSelf {
+ lt_token: Token![<](group.group_token.span),
+ position: 0,
+ as_token: None,
+ gt_token: Token![>](group.group_token.span),
+ ty: group.elem,
+ }),
+ path: Path::parse_helper(input, false)?,
+ }));
+ }
+ } else if input.peek(Token![<]) && allow_group_generic
+ || input.peek(Token![::]) && input.peek3(Token![<])
+ {
+ if let Type::Path(mut ty) = *group.elem {
+ let arguments = &mut ty.path.segments.last_mut().unwrap().arguments;
+ if arguments.is_none() {
+ *arguments = PathArguments::AngleBracketed(input.parse()?);
+ Path::parse_rest(input, &mut ty.path, false)?;
+ return Ok(Type::Path(ty));
+ } else {
+ group.elem = Box::new(Type::Path(ty));
+ }
+ }
+ }
+ return Ok(Type::Group(group));
+ }
+
+ let mut lifetimes = None::<BoundLifetimes>;
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(Token![for]) {
+ lifetimes = input.parse()?;
+ lookahead = input.lookahead1();
+ if !lookahead.peek(Ident)
+ && !lookahead.peek(Token![fn])
+ && !lookahead.peek(Token![unsafe])
+ && !lookahead.peek(Token![extern])
+ && !lookahead.peek(Token![super])
+ && !lookahead.peek(Token![self])
+ && !lookahead.peek(Token![Self])
+ && !lookahead.peek(Token![crate])
+ || input.peek(Token![dyn])
+ {
+ return Err(lookahead.error());
+ }
+ }
+
+ if lookahead.peek(token::Paren) {
+ let content;
+ let paren_token = parenthesized!(content in input);
+ if content.is_empty() {
+ return Ok(Type::Tuple(TypeTuple {
+ paren_token,
+ elems: Punctuated::new(),
+ }));
+ }
+ if content.peek(Lifetime) {
+ return Ok(Type::Paren(TypeParen {
+ paren_token,
+ elem: Box::new(Type::TraitObject(content.parse()?)),
+ }));
+ }
+ if content.peek(Token![?]) {
+ return Ok(Type::TraitObject(TypeTraitObject {
+ dyn_token: None,
+ bounds: {
+ let mut bounds = Punctuated::new();
+ bounds.push_value(TypeParamBound::Trait(TraitBound {
+ paren_token: Some(paren_token),
+ ..content.parse()?
+ }));
+ while let Some(plus) = input.parse()? {
+ bounds.push_punct(plus);
+ bounds.push_value(input.parse()?);
+ }
+ bounds
+ },
+ }));
+ }
+ let mut first: Type = content.parse()?;
+ if content.peek(Token![,]) {
+ return Ok(Type::Tuple(TypeTuple {
+ paren_token,
+ elems: {
+ let mut elems = Punctuated::new();
+ elems.push_value(first);
+ elems.push_punct(content.parse()?);
+ while !content.is_empty() {
+ elems.push_value(content.parse()?);
+ if content.is_empty() {
+ break;
+ }
+ elems.push_punct(content.parse()?);
+ }
+ elems
+ },
+ }));
+ }
+ if allow_plus && input.peek(Token![+]) {
+ loop {
+ let first = match first {
+ Type::Path(TypePath { qself: None, path }) => {
+ TypeParamBound::Trait(TraitBound {
+ paren_token: Some(paren_token),
+ modifier: TraitBoundModifier::None,
+ lifetimes: None,
+ path,
+ })
+ }
+ Type::TraitObject(TypeTraitObject {
+ dyn_token: None,
+ bounds,
+ }) => {
+ if bounds.len() > 1 || bounds.trailing_punct() {
+ first = Type::TraitObject(TypeTraitObject {
+ dyn_token: None,
+ bounds,
+ });
+ break;
+ }
+ match bounds.into_iter().next().unwrap() {
+ TypeParamBound::Trait(trait_bound) => {
+ TypeParamBound::Trait(TraitBound {
+ paren_token: Some(paren_token),
+ ..trait_bound
+ })
+ }
+ other @ (TypeParamBound::Lifetime(_)
+ | TypeParamBound::Verbatim(_)) => other,
+ }
+ }
+ _ => break,
+ };
+ return Ok(Type::TraitObject(TypeTraitObject {
+ dyn_token: None,
+ bounds: {
+ let mut bounds = Punctuated::new();
+ bounds.push_value(first);
+ while let Some(plus) = input.parse()? {
+ bounds.push_punct(plus);
+ bounds.push_value(input.parse()?);
+ }
+ bounds
+ },
+ }));
+ }
+ }
+ Ok(Type::Paren(TypeParen {
+ paren_token,
+ elem: Box::new(first),
+ }))
+ } else if lookahead.peek(Token![fn])
+ || lookahead.peek(Token![unsafe])
+ || lookahead.peek(Token![extern])
+ {
+ let mut bare_fn: TypeBareFn = input.parse()?;
+ bare_fn.lifetimes = lifetimes;
+ Ok(Type::BareFn(bare_fn))
+ } else if lookahead.peek(Ident)
+ || input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![crate])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ {
+ let ty: TypePath = input.parse()?;
+ if ty.qself.is_some() {
+ return Ok(Type::Path(ty));
+ }
+
+ if input.peek(Token![!]) && !input.peek(Token![!=]) && ty.path.is_mod_style() {
+ let bang_token: Token![!] = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+ return Ok(Type::Macro(TypeMacro {
+ mac: Macro {
+ path: ty.path,
+ bang_token,
+ delimiter,
+ tokens,
+ },
+ }));
+ }
+
+ if lifetimes.is_some() || allow_plus && input.peek(Token![+]) {
+ let mut bounds = Punctuated::new();
+ bounds.push_value(TypeParamBound::Trait(TraitBound {
+ paren_token: None,
+ modifier: TraitBoundModifier::None,
+ lifetimes,
+ path: ty.path,
+ }));
+ if allow_plus {
+ while input.peek(Token![+]) {
+ bounds.push_punct(input.parse()?);
+ if !(input.peek(Ident::peek_any)
+ || input.peek(Token![::])
+ || input.peek(Token![?])
+ || input.peek(Lifetime)
+ || input.peek(token::Paren))
+ {
+ break;
+ }
+ bounds.push_value(input.parse()?);
+ }
+ }
+ return Ok(Type::TraitObject(TypeTraitObject {
+ dyn_token: None,
+ bounds,
+ }));
+ }
+
+ Ok(Type::Path(ty))
+ } else if lookahead.peek(Token![dyn]) {
+ let dyn_token: Token![dyn] = input.parse()?;
+ let dyn_span = dyn_token.span;
+ let star_token: Option<Token![*]> = input.parse()?;
+ let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?;
+ return Ok(if star_token.is_some() {
+ Type::Verbatim(verbatim::between(&begin, input))
+ } else {
+ Type::TraitObject(TypeTraitObject {
+ dyn_token: Some(dyn_token),
+ bounds,
+ })
+ });
+ } else if lookahead.peek(token::Bracket) {
+ let content;
+ let bracket_token = bracketed!(content in input);
+ let elem: Type = content.parse()?;
+ if content.peek(Token![;]) {
+ Ok(Type::Array(TypeArray {
+ bracket_token,
+ elem: Box::new(elem),
+ semi_token: content.parse()?,
+ len: content.parse()?,
+ }))
+ } else {
+ Ok(Type::Slice(TypeSlice {
+ bracket_token,
+ elem: Box::new(elem),
+ }))
+ }
+ } else if lookahead.peek(Token![*]) {
+ input.parse().map(Type::Ptr)
+ } else if lookahead.peek(Token![&]) {
+ input.parse().map(Type::Reference)
+ } else if lookahead.peek(Token![!]) && !input.peek(Token![=]) {
+ input.parse().map(Type::Never)
+ } else if lookahead.peek(Token![impl]) {
+ TypeImplTrait::parse(input, allow_plus).map(Type::ImplTrait)
+ } else if lookahead.peek(Token![_]) {
+ input.parse().map(Type::Infer)
+ } else if lookahead.peek(Lifetime) {
+ input.parse().map(Type::TraitObject)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeSlice {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ Ok(TypeSlice {
+ bracket_token: bracketed!(content in input),
+ elem: content.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeArray {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ Ok(TypeArray {
+ bracket_token: bracketed!(content in input),
+ elem: content.parse()?,
+ semi_token: content.parse()?,
+ len: content.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypePtr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let star_token: Token![*] = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let (const_token, mutability) = if lookahead.peek(Token![const]) {
+ (Some(input.parse()?), None)
+ } else if lookahead.peek(Token![mut]) {
+ (None, Some(input.parse()?))
+ } else {
+ return Err(lookahead.error());
+ };
+
+ Ok(TypePtr {
+ star_token,
+ const_token,
+ mutability,
+ elem: Box::new(input.call(Type::without_plus)?),
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeReference {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(TypeReference {
+ and_token: input.parse()?,
+ lifetime: input.parse()?,
+ mutability: input.parse()?,
+ // & binds tighter than +, so we don't allow + here.
+ elem: Box::new(input.call(Type::without_plus)?),
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeBareFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let args;
+ let mut variadic = None;
+
+ Ok(TypeBareFn {
+ lifetimes: input.parse()?,
+ unsafety: input.parse()?,
+ abi: input.parse()?,
+ fn_token: input.parse()?,
+ paren_token: parenthesized!(args in input),
+ inputs: {
+ let mut inputs = Punctuated::new();
+
+ while !args.is_empty() {
+ let attrs = args.call(Attribute::parse_outer)?;
+
+ if inputs.empty_or_trailing()
+ && (args.peek(Token![...])
+ || args.peek(Ident)
+ && args.peek2(Token![:])
+ && args.peek3(Token![...]))
+ {
+ variadic = Some(parse_bare_variadic(&args, attrs)?);
+ break;
+ }
+
+ let allow_self = inputs.is_empty();
+ let arg = parse_bare_fn_arg(&args, allow_self)?;
+ inputs.push_value(BareFnArg { attrs, ..arg });
+ if args.is_empty() {
+ break;
+ }
+
+ let comma = args.parse()?;
+ inputs.push_punct(comma);
+ }
+
+ inputs
+ },
+ variadic,
+ output: input.call(ReturnType::without_plus)?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeNever {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(TypeNever {
+ bang_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeInfer {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(TypeInfer {
+ underscore_token: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeTuple {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
+ let paren_token = parenthesized!(content in input);
+
+ if content.is_empty() {
+ return Ok(TypeTuple {
+ paren_token,
+ elems: Punctuated::new(),
+ });
+ }
+
+ let first: Type = content.parse()?;
+ Ok(TypeTuple {
+ paren_token,
+ elems: {
+ let mut elems = Punctuated::new();
+ elems.push_value(first);
+ elems.push_punct(content.parse()?);
+ while !content.is_empty() {
+ elems.push_value(content.parse()?);
+ if content.is_empty() {
+ break;
+ }
+ elems.push_punct(content.parse()?);
+ }
+ elems
+ },
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(TypeMacro {
+ mac: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypePath {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let expr_style = false;
+ let (qself, path) = path::parsing::qpath(input, expr_style)?;
+ Ok(TypePath { qself, path })
+ }
+ }
+
+ impl ReturnType {
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+ let allow_plus = false;
+ Self::parse(input, allow_plus)
+ }
+
+ pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ if input.peek(Token![->]) {
+ let arrow = input.parse()?;
+ let allow_group_generic = true;
+ let ty = ambig_ty(input, allow_plus, allow_group_generic)?;
+ Ok(ReturnType::Type(arrow, Box::new(ty)))
+ } else {
+ Ok(ReturnType::Default)
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for ReturnType {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_plus = true;
+ Self::parse(input, allow_plus)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeTraitObject {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_plus = true;
+ Self::parse(input, allow_plus)
+ }
+ }
+
+ impl TypeTraitObject {
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+ let allow_plus = false;
+ Self::parse(input, allow_plus)
+ }
+
+ // Only allow multiple trait references if allow_plus is true.
+ pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ let dyn_token: Option<Token![dyn]> = input.parse()?;
+ let dyn_span = match &dyn_token {
+ Some(token) => token.span,
+ None => input.span(),
+ };
+ let bounds = Self::parse_bounds(dyn_span, input, allow_plus)?;
+ Ok(TypeTraitObject { dyn_token, bounds })
+ }
+
+ fn parse_bounds(
+ dyn_span: Span,
+ input: ParseStream,
+ allow_plus: bool,
+ ) -> Result<Punctuated<TypeParamBound, Token![+]>> {
+ let bounds = TypeParamBound::parse_multiple(input, allow_plus)?;
+ let mut last_lifetime_span = None;
+ let mut at_least_one_trait = false;
+ for bound in &bounds {
+ match bound {
+ TypeParamBound::Trait(_) | TypeParamBound::Verbatim(_) => {
+ at_least_one_trait = true;
+ break;
+ }
+ TypeParamBound::Lifetime(lifetime) => {
+ last_lifetime_span = Some(lifetime.ident.span());
+ }
+ }
+ }
+ // Just lifetimes like `'a + 'b` is not a TraitObject.
+ if !at_least_one_trait {
+ let msg = "at least one trait is required for an object type";
+ return Err(error::new2(dyn_span, last_lifetime_span.unwrap(), msg));
+ }
+ Ok(bounds)
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeImplTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_plus = true;
+ Self::parse(input, allow_plus)
+ }
+ }
+
+ impl TypeImplTrait {
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+ let allow_plus = false;
+ Self::parse(input, allow_plus)
+ }
+
+ pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ let impl_token: Token![impl] = input.parse()?;
+ let bounds = TypeParamBound::parse_multiple(input, allow_plus)?;
+ let mut last_lifetime_span = None;
+ let mut at_least_one_trait = false;
+ for bound in &bounds {
+ match bound {
+ TypeParamBound::Trait(_) | TypeParamBound::Verbatim(_) => {
+ at_least_one_trait = true;
+ break;
+ }
+ TypeParamBound::Lifetime(lifetime) => {
+ last_lifetime_span = Some(lifetime.ident.span());
+ }
+ }
+ }
+ if !at_least_one_trait {
+ let msg = "at least one trait must be specified";
+ return Err(error::new2(
+ impl_token.span,
+ last_lifetime_span.unwrap(),
+ msg,
+ ));
+ }
+ Ok(TypeImplTrait { impl_token, bounds })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeGroup {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let group = crate::group::parse_group(input)?;
+ Ok(TypeGroup {
+ group_token: group.token,
+ elem: group.content.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for TypeParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_plus = false;
+ Self::parse(input, allow_plus)
+ }
+ }
+
+ impl TypeParen {
+ fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ let content;
+ Ok(TypeParen {
+ paren_token: parenthesized!(content in input),
+ elem: Box::new({
+ let allow_group_generic = true;
+ ambig_ty(&content, allow_plus, allow_group_generic)?
+ }),
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for BareFnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let allow_self = false;
+ parse_bare_fn_arg(input, allow_self)
+ }
+ }
+
+ fn parse_bare_fn_arg(input: ParseStream, allow_self: bool) -> Result<BareFnArg> {
+ let attrs = input.call(Attribute::parse_outer)?;
+
+ let begin = input.fork();
+
+ let has_mut_self = allow_self && input.peek(Token![mut]) && input.peek2(Token![self]);
+ if has_mut_self {
+ input.parse::<Token![mut]>()?;
+ }
+
+ let mut has_self = false;
+ let mut name = if (input.peek(Ident) || input.peek(Token![_]) || {
+ has_self = allow_self && input.peek(Token![self]);
+ has_self
+ }) && input.peek2(Token![:])
+ && !input.peek2(Token![::])
+ {
+ let name = input.call(Ident::parse_any)?;
+ let colon: Token![:] = input.parse()?;
+ Some((name, colon))
+ } else {
+ has_self = false;
+ None
+ };
+
+ let ty = if allow_self && !has_self && input.peek(Token![mut]) && input.peek2(Token![self])
+ {
+ input.parse::<Token![mut]>()?;
+ input.parse::<Token![self]>()?;
+ None
+ } else if has_mut_self && name.is_none() {
+ input.parse::<Token![self]>()?;
+ None
+ } else {
+ Some(input.parse()?)
+ };
+
+ let ty = match ty {
+ Some(ty) if !has_mut_self => ty,
+ _ => {
+ name = None;
+ Type::Verbatim(verbatim::between(&begin, input))
+ }
+ };
+
+ Ok(BareFnArg { attrs, name, ty })
+ }
+
+ fn parse_bare_variadic(input: ParseStream, attrs: Vec<Attribute>) -> Result<BareVariadic> {
+ Ok(BareVariadic {
+ attrs,
+ name: if input.peek(Ident) || input.peek(Token![_]) {
+ let name = input.call(Ident::parse_any)?;
+ let colon: Token![:] = input.parse()?;
+ Some((name, colon))
+ } else {
+ None
+ },
+ dots: input.parse()?,
+ comma: input.parse()?,
+ })
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Abi {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(Abi {
+ extern_token: input.parse()?,
+ name: input.parse()?,
+ })
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+ impl Parse for Option<Abi> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Token![extern]) {
+ input.parse().map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use crate::attr::FilterAttrs;
+ use crate::path;
+ use crate::print::TokensOrDefault;
+ use crate::ty::{
+ Abi, BareFnArg, BareVariadic, ReturnType, TypeArray, TypeBareFn, TypeGroup, TypeImplTrait,
+ TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference, TypeSlice,
+ TypeTraitObject, TypeTuple,
+ };
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elem.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeArray {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elem.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ self.len.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypePtr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.star_token.to_tokens(tokens);
+ match &self.mutability {
+ Some(tok) => tok.to_tokens(tokens),
+ None => {
+ TokensOrDefault(&self.const_token).to_tokens(tokens);
+ }
+ }
+ self.elem.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.and_token.to_tokens(tokens);
+ self.lifetime.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.elem.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeBareFn {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.lifetimes.to_tokens(tokens);
+ self.unsafety.to_tokens(tokens);
+ self.abi.to_tokens(tokens);
+ self.fn_token.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+ self.inputs.to_tokens(tokens);
+ if let Some(variadic) = &self.variadic {
+ if !self.inputs.empty_or_trailing() {
+ let span = variadic.dots.spans[0];
+ Token![,](span).to_tokens(tokens);
+ }
+ variadic.to_tokens(tokens);
+ }
+ });
+ self.output.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeNever {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.bang_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ // If we only have one argument, we need a trailing comma to
+ // distinguish TypeTuple from TypeParen.
+ if self.elems.len() == 1 && !self.elems.trailing_punct() {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypePath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ path::printing::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeTraitObject {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.dyn_token.to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeImplTrait {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.impl_token.to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeGroup {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.group_token.surround(tokens, |tokens| {
+ self.elem.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeParen {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.paren_token.surround(tokens, |tokens| {
+ self.elem.to_tokens(tokens);
+ });
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeInfer {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for TypeMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for ReturnType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ ReturnType::Default => {}
+ ReturnType::Type(arrow, ty) => {
+ arrow.to_tokens(tokens);
+ ty.to_tokens(tokens);
+ }
+ }
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for BareFnArg {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ if let Some((name, colon)) = &self.name {
+ name.to_tokens(tokens);
+ colon.to_tokens(tokens);
+ }
+ self.ty.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for BareVariadic {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ if let Some((name, colon)) = &self.name {
+ name.to_tokens(tokens);
+ colon.to_tokens(tokens);
+ }
+ self.dots.to_tokens(tokens);
+ self.comma.to_tokens(tokens);
+ }
+ }
+
+ #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
+ impl ToTokens for Abi {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.extern_token.to_tokens(tokens);
+ self.name.to_tokens(tokens);
+ }
+ }
+}
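
Since Type and each of its variants above implement Parse, the usual entry point is
syn::parse_str (or parse_macro_input! inside a proc macro). A small illustrative
example of exercising these parsers, assuming syn's default features:

    use syn::Type;

    fn main() -> syn::Result<()> {
        // Each string below is routed through the Parse impls defined in ty.rs.
        let reference: Type = syn::parse_str("&'a mut [u8]")?;
        let trait_object: Type = syn::parse_str("dyn Iterator<Item = u8> + Send")?;
        let bare_fn: Type = syn::parse_str("unsafe extern \"C\" fn(usize, ...) -> bool")?;
        assert!(matches!(reference, Type::Reference(_)));
        assert!(matches!(trait_object, Type::TraitObject(_)));
        assert!(matches!(bare_fn, Type::BareFn(_)));
        Ok(())
    }
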
diff --git a/rust/hw/char/pl011/vendor/syn/src/verbatim.rs b/rust/hw/char/pl011/vendor/syn/src/verbatim.rs
new file mode 100644
index 0000000000..54dc1cfa0d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/verbatim.rs
@@ -0,0 +1,33 @@
+use crate::parse::ParseStream;
+use proc_macro2::{Delimiter, TokenStream};
+use std::cmp::Ordering;
+use std::iter;
+
+pub(crate) fn between<'a>(begin: ParseStream<'a>, end: ParseStream<'a>) -> TokenStream {
+ let end = end.cursor();
+ let mut cursor = begin.cursor();
+ assert!(crate::buffer::same_buffer(end, cursor));
+
+ let mut tokens = TokenStream::new();
+ while cursor != end {
+ let (tt, next) = cursor.token_tree().unwrap();
+
+ if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less {
+ // A syntax node can cross the boundary of a None-delimited group
+ // due to such groups being transparent to the parser in most cases.
+ // Any time this occurs the group is known to be semantically
+ // irrelevant. https://github.com/dtolnay/syn/issues/1235
+ if let Some((inside, _span, after)) = cursor.group(Delimiter::None) {
+ assert!(next == after);
+ cursor = inside;
+ continue;
+ } else {
+ panic!("verbatim end must not be inside a delimited group");
+ }
+ }
+
+ tokens.extend(iter::once(tt));
+ cursor = next;
+ }
+ tokens
+}
diff --git a/rust/hw/char/pl011/vendor/syn/src/whitespace.rs b/rust/hw/char/pl011/vendor/syn/src/whitespace.rs
new file mode 100644
index 0000000000..a50b5069a6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/src/whitespace.rs
@@ -0,0 +1,65 @@
+pub(crate) fn skip(mut s: &str) -> &str {
+ 'skip: while !s.is_empty() {
+ let byte = s.as_bytes()[0];
+ if byte == b'/' {
+ if s.starts_with("//")
+ && (!s.starts_with("///") || s.starts_with("////"))
+ && !s.starts_with("//!")
+ {
+ if let Some(i) = s.find('\n') {
+ s = &s[i + 1..];
+ continue;
+ } else {
+ return "";
+ }
+ } else if s.starts_with("/**/") {
+ s = &s[4..];
+ continue;
+ } else if s.starts_with("/*")
+ && (!s.starts_with("/**") || s.starts_with("/***"))
+ && !s.starts_with("/*!")
+ {
+ let mut depth = 0;
+ let bytes = s.as_bytes();
+ let mut i = 0;
+ let upper = bytes.len() - 1;
+ while i < upper {
+ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+ depth += 1;
+ i += 1; // eat '*'
+ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+ depth -= 1;
+ if depth == 0 {
+ s = &s[i + 2..];
+ continue 'skip;
+ }
+ i += 1; // eat '/'
+ }
+ i += 1;
+ }
+ return s;
+ }
+ }
+ match byte {
+ b' ' | 0x09..=0x0D => {
+ s = &s[1..];
+ continue;
+ }
+ b if b <= 0x7F => {}
+ _ => {
+ let ch = s.chars().next().unwrap();
+ if is_whitespace(ch) {
+ s = &s[ch.len_utf8()..];
+ continue;
+ }
+ }
+ }
+ return s;
+ }
+ s
+}
+
+fn is_whitespace(ch: char) -> bool {
+ // Rust treats left-to-right mark and right-to-left mark as whitespace
+ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+}
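
whitespace::skip is crate-private and is used elsewhere in syn to strip leading
whitespace and ordinary comments while leaving doc comments in place. A hypothetical
set of expectations (not compiled here) that follows from the code above:

    // Plain comments and whitespace are consumed:
    assert_eq!(skip("  // note\n\tfn main() {}"), "fn main() {}");
    // Doc comments are real tokens, so scanning stops in front of them:
    assert_eq!(skip("/// docs\nfn main() {}"), "/// docs\nfn main() {}");
    // Four or more slashes count as an ordinary comment again:
    assert_eq!(skip("//// banner\nfn main() {}"), "fn main() {}");
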
diff --git a/rust/hw/char/pl011/vendor/syn/tests/common/eq.rs b/rust/hw/char/pl011/vendor/syn/tests/common/eq.rs
new file mode 100644
index 0000000000..b44ea3660d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/common/eq.rs
@@ -0,0 +1,900 @@
+#![allow(unused_macro_rules)]
+
+extern crate rustc_ast;
+extern crate rustc_data_structures;
+extern crate rustc_driver;
+extern crate rustc_span;
+extern crate thin_vec;
+
+use rustc_ast::ast::AngleBracketedArg;
+use rustc_ast::ast::AngleBracketedArgs;
+use rustc_ast::ast::AnonConst;
+use rustc_ast::ast::Arm;
+use rustc_ast::ast::AssocConstraint;
+use rustc_ast::ast::AssocConstraintKind;
+use rustc_ast::ast::AssocItemKind;
+use rustc_ast::ast::AttrArgs;
+use rustc_ast::ast::AttrArgsEq;
+use rustc_ast::ast::AttrId;
+use rustc_ast::ast::AttrItem;
+use rustc_ast::ast::AttrKind;
+use rustc_ast::ast::AttrStyle;
+use rustc_ast::ast::Attribute;
+use rustc_ast::ast::BareFnTy;
+use rustc_ast::ast::BinOpKind;
+use rustc_ast::ast::BindingMode;
+use rustc_ast::ast::Block;
+use rustc_ast::ast::BlockCheckMode;
+use rustc_ast::ast::BorrowKind;
+use rustc_ast::ast::BoundAsyncness;
+use rustc_ast::ast::BoundConstness;
+use rustc_ast::ast::BoundPolarity;
+use rustc_ast::ast::ByRef;
+use rustc_ast::ast::CaptureBy;
+use rustc_ast::ast::Closure;
+use rustc_ast::ast::ClosureBinder;
+use rustc_ast::ast::Const;
+use rustc_ast::ast::ConstItem;
+use rustc_ast::ast::CoroutineKind;
+use rustc_ast::ast::Crate;
+use rustc_ast::ast::Defaultness;
+use rustc_ast::ast::Delegation;
+use rustc_ast::ast::DelegationMac;
+use rustc_ast::ast::DelimArgs;
+use rustc_ast::ast::EnumDef;
+use rustc_ast::ast::Expr;
+use rustc_ast::ast::ExprField;
+use rustc_ast::ast::ExprKind;
+use rustc_ast::ast::Extern;
+use rustc_ast::ast::FieldDef;
+use rustc_ast::ast::FloatTy;
+use rustc_ast::ast::Fn;
+use rustc_ast::ast::FnDecl;
+use rustc_ast::ast::FnHeader;
+use rustc_ast::ast::FnRetTy;
+use rustc_ast::ast::FnSig;
+use rustc_ast::ast::ForLoopKind;
+use rustc_ast::ast::ForeignItemKind;
+use rustc_ast::ast::ForeignMod;
+use rustc_ast::ast::FormatAlignment;
+use rustc_ast::ast::FormatArgPosition;
+use rustc_ast::ast::FormatArgPositionKind;
+use rustc_ast::ast::FormatArgs;
+use rustc_ast::ast::FormatArgsPiece;
+use rustc_ast::ast::FormatArgument;
+use rustc_ast::ast::FormatArgumentKind;
+use rustc_ast::ast::FormatArguments;
+use rustc_ast::ast::FormatCount;
+use rustc_ast::ast::FormatDebugHex;
+use rustc_ast::ast::FormatOptions;
+use rustc_ast::ast::FormatPlaceholder;
+use rustc_ast::ast::FormatSign;
+use rustc_ast::ast::FormatTrait;
+use rustc_ast::ast::GenBlockKind;
+use rustc_ast::ast::GenericArg;
+use rustc_ast::ast::GenericArgs;
+use rustc_ast::ast::GenericBound;
+use rustc_ast::ast::GenericParam;
+use rustc_ast::ast::GenericParamKind;
+use rustc_ast::ast::Generics;
+use rustc_ast::ast::Impl;
+use rustc_ast::ast::ImplPolarity;
+use rustc_ast::ast::Inline;
+use rustc_ast::ast::InlineAsm;
+use rustc_ast::ast::InlineAsmOperand;
+use rustc_ast::ast::InlineAsmOptions;
+use rustc_ast::ast::InlineAsmRegOrRegClass;
+use rustc_ast::ast::InlineAsmSym;
+use rustc_ast::ast::InlineAsmTemplatePiece;
+use rustc_ast::ast::IntTy;
+use rustc_ast::ast::IsAuto;
+use rustc_ast::ast::Item;
+use rustc_ast::ast::ItemKind;
+use rustc_ast::ast::Label;
+use rustc_ast::ast::Lifetime;
+use rustc_ast::ast::LitFloatType;
+use rustc_ast::ast::LitIntType;
+use rustc_ast::ast::LitKind;
+use rustc_ast::ast::Local;
+use rustc_ast::ast::LocalKind;
+use rustc_ast::ast::MacCall;
+use rustc_ast::ast::MacCallStmt;
+use rustc_ast::ast::MacStmtStyle;
+use rustc_ast::ast::MacroDef;
+use rustc_ast::ast::MatchKind;
+use rustc_ast::ast::MetaItemLit;
+use rustc_ast::ast::MethodCall;
+use rustc_ast::ast::ModKind;
+use rustc_ast::ast::ModSpans;
+use rustc_ast::ast::Movability;
+use rustc_ast::ast::MutTy;
+use rustc_ast::ast::Mutability;
+use rustc_ast::ast::NodeId;
+use rustc_ast::ast::NormalAttr;
+use rustc_ast::ast::Param;
+use rustc_ast::ast::ParenthesizedArgs;
+use rustc_ast::ast::Pat;
+use rustc_ast::ast::PatField;
+use rustc_ast::ast::PatFieldsRest;
+use rustc_ast::ast::PatKind;
+use rustc_ast::ast::Path;
+use rustc_ast::ast::PathSegment;
+use rustc_ast::ast::PolyTraitRef;
+use rustc_ast::ast::PreciseCapturingArg;
+use rustc_ast::ast::QSelf;
+use rustc_ast::ast::RangeEnd;
+use rustc_ast::ast::RangeLimits;
+use rustc_ast::ast::RangeSyntax;
+use rustc_ast::ast::Recovered;
+use rustc_ast::ast::Safety;
+use rustc_ast::ast::StaticForeignItem;
+use rustc_ast::ast::StaticItem;
+use rustc_ast::ast::Stmt;
+use rustc_ast::ast::StmtKind;
+use rustc_ast::ast::StrLit;
+use rustc_ast::ast::StrStyle;
+use rustc_ast::ast::StructExpr;
+use rustc_ast::ast::StructRest;
+use rustc_ast::ast::Term;
+use rustc_ast::ast::Trait;
+use rustc_ast::ast::TraitBoundModifiers;
+use rustc_ast::ast::TraitObjectSyntax;
+use rustc_ast::ast::TraitRef;
+use rustc_ast::ast::Ty;
+use rustc_ast::ast::TyAlias;
+use rustc_ast::ast::TyAliasWhereClause;
+use rustc_ast::ast::TyAliasWhereClauses;
+use rustc_ast::ast::TyKind;
+use rustc_ast::ast::UintTy;
+use rustc_ast::ast::UnOp;
+use rustc_ast::ast::UnsafeSource;
+use rustc_ast::ast::UseTree;
+use rustc_ast::ast::UseTreeKind;
+use rustc_ast::ast::Variant;
+use rustc_ast::ast::VariantData;
+use rustc_ast::ast::Visibility;
+use rustc_ast::ast::VisibilityKind;
+use rustc_ast::ast::WhereBoundPredicate;
+use rustc_ast::ast::WhereClause;
+use rustc_ast::ast::WhereEqPredicate;
+use rustc_ast::ast::WherePredicate;
+use rustc_ast::ast::WhereRegionPredicate;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{
+ self, CommentKind, Delimiter, IdentIsRaw, Lit, Nonterminal, Token, TokenKind,
+};
+use rustc_ast::tokenstream::{
+ AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing, DelimSpan, LazyAttrTokenStream,
+ Spacing, TokenStream, TokenTree,
+};
+use rustc_data_structures::packed::Pu128;
+use rustc_data_structures::sync::Lrc;
+use rustc_span::source_map::Spanned;
+use rustc_span::symbol::{sym, Ident};
+use rustc_span::{ErrorGuaranteed, Span, Symbol, SyntaxContext, DUMMY_SP};
+use std::collections::HashMap;
+use std::hash::{BuildHasher, Hash};
+use thin_vec::ThinVec;
+
+pub trait SpanlessEq {
+ fn eq(&self, other: &Self) -> bool;
+}
+
+impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&**self, &**other)
+ }
+}
+
+impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&**self, &**other)
+ }
+}
+
+impl<T: ?Sized + SpanlessEq> SpanlessEq for Lrc<T> {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&**self, &**other)
+ }
+}
+
+impl<T: SpanlessEq> SpanlessEq for Option<T> {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (None, None) => true,
+ (Some(this), Some(other)) => SpanlessEq::eq(this, other),
+ _ => false,
+ }
+ }
+}
+
+impl<T: SpanlessEq, E: SpanlessEq> SpanlessEq for Result<T, E> {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Ok(this), Ok(other)) => SpanlessEq::eq(this, other),
+ (Err(this), Err(other)) => SpanlessEq::eq(this, other),
+ _ => false,
+ }
+ }
+}
+
+impl<T: SpanlessEq> SpanlessEq for [T] {
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
+ }
+}
+
+impl<T: SpanlessEq> SpanlessEq for Vec<T> {
+ fn eq(&self, other: &Self) -> bool {
+ <[T] as SpanlessEq>::eq(self, other)
+ }
+}
+
+impl<T: SpanlessEq> SpanlessEq for ThinVec<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len()
+ && self
+ .iter()
+ .zip(other.iter())
+ .all(|(a, b)| SpanlessEq::eq(a, b))
+ }
+}
+
+impl<K: Eq + Hash, V: SpanlessEq, S: BuildHasher> SpanlessEq for HashMap<K, V, S> {
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len()
+ && self.iter().all(|(key, this_v)| {
+ other
+ .get(key)
+ .map_or(false, |other_v| SpanlessEq::eq(this_v, other_v))
+ })
+ }
+}
+
+impl<T: SpanlessEq> SpanlessEq for Spanned<T> {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&self.node, &other.node)
+ }
+}
+
+impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1)
+ }
+}
+
+impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&self.0, &other.0)
+ && SpanlessEq::eq(&self.1, &other.1)
+ && SpanlessEq::eq(&self.2, &other.2)
+ }
+}
+
+macro_rules! spanless_eq_true {
+ ($name:ty) => {
+ impl SpanlessEq for $name {
+ fn eq(&self, _other: &Self) -> bool {
+ true
+ }
+ }
+ };
+}
+
+spanless_eq_true!(Span);
+spanless_eq_true!(DelimSpan);
+spanless_eq_true!(AttrId);
+spanless_eq_true!(NodeId);
+spanless_eq_true!(SyntaxContext);
+spanless_eq_true!(Spacing);
+
+macro_rules! spanless_eq_partial_eq {
+ ($name:ty) => {
+ impl SpanlessEq for $name {
+ fn eq(&self, other: &Self) -> bool {
+ PartialEq::eq(self, other)
+ }
+ }
+ };
+}
+
+spanless_eq_partial_eq!(bool);
+spanless_eq_partial_eq!(u8);
+spanless_eq_partial_eq!(u16);
+spanless_eq_partial_eq!(u32);
+spanless_eq_partial_eq!(u128);
+spanless_eq_partial_eq!(usize);
+spanless_eq_partial_eq!(char);
+spanless_eq_partial_eq!(String);
+spanless_eq_partial_eq!(Pu128);
+spanless_eq_partial_eq!(Symbol);
+spanless_eq_partial_eq!(CommentKind);
+spanless_eq_partial_eq!(Delimiter);
+spanless_eq_partial_eq!(InlineAsmOptions);
+spanless_eq_partial_eq!(token::LitKind);
+spanless_eq_partial_eq!(ErrorGuaranteed);
+
+macro_rules! spanless_eq_struct {
+ {
+ $($name:ident)::+ $(<$param:ident>)?
+ $([$field:tt $this:ident $other:ident])*
+ $(![$ignore:tt])*;
+ } => {
+ impl $(<$param: SpanlessEq>)* SpanlessEq for $($name)::+ $(<$param>)* {
+ fn eq(&self, other: &Self) -> bool {
+ let $($name)::+ { $($field: $this,)* $($ignore: _,)* } = self;
+ let $($name)::+ { $($field: $other,)* $($ignore: _,)* } = other;
+ true $(&& SpanlessEq::eq($this, $other))*
+ }
+ }
+ };
+
+ {
+ $($name:ident)::+ $(<$param:ident>)?
+ $([$field:tt $this:ident $other:ident])*
+ $(![$ignore:tt])*;
+ !$next:tt
+ $($rest:tt)*
+ } => {
+ spanless_eq_struct! {
+ $($name)::+ $(<$param>)*
+ $([$field $this $other])*
+ $(![$ignore])*
+ ![$next];
+ $($rest)*
+ }
+ };
+
+ {
+ $($name:ident)::+ $(<$param:ident>)?
+ $([$field:tt $this:ident $other:ident])*
+ $(![$ignore:tt])*;
+ $next:tt
+ $($rest:tt)*
+ } => {
+ spanless_eq_struct! {
+ $($name)::+ $(<$param>)*
+ $([$field $this $other])*
+ [$next this other]
+ $(![$ignore])*;
+ $($rest)*
+ }
+ };
+}
+
+macro_rules! spanless_eq_enum {
+ {
+ $($name:ident)::+;
+ $([$($variant:ident)::+; $([$field:tt $this:ident $other:ident])* $(![$ignore:tt])*])*
+ } => {
+ impl SpanlessEq for $($name)::+ {
+ fn eq(&self, other: &Self) -> bool {
+ match self {
+ $(
+ $($variant)::+ { .. } => {}
+ )*
+ }
+ #[allow(unreachable_patterns)]
+ match (self, other) {
+ $(
+ (
+ $($variant)::+ { $($field: $this,)* $($ignore: _,)* },
+ $($variant)::+ { $($field: $other,)* $($ignore: _,)* },
+ ) => {
+ true $(&& SpanlessEq::eq($this, $other))*
+ }
+ )*
+ _ => false,
+ }
+ }
+ }
+ };
+
+ {
+ $($name:ident)::+;
+ $([$($variant:ident)::+; $($fields:tt)*])*
+ $next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] (!$i:tt $($field:tt)*)
+ $($rest:tt)*
+ } => {
+ spanless_eq_enum! {
+ $($name)::+;
+ $([$($variant)::+; $($fields)*])*
+ $next [$([$($named)*])* $(![$ignore])* ![$i]] ($($field)*)
+ $($rest)*
+ }
+ };
+
+ {
+ $($name:ident)::+;
+ $([$($variant:ident)::+; $($fields:tt)*])*
+ $next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] ($i:tt $($field:tt)*)
+ $($rest:tt)*
+ } => {
+ spanless_eq_enum! {
+ $($name)::+;
+ $([$($variant)::+; $($fields)*])*
+ $next [$([$($named)*])* [$i this other] $(![$ignore])*] ($($field)*)
+ $($rest)*
+ }
+ };
+
+ {
+ $($name:ident)::+;
+ $([$($variant:ident)::+; $($fields:tt)*])*
+ $next:ident [$($named:tt)*] ()
+ $($rest:tt)*
+ } => {
+ spanless_eq_enum! {
+ $($name)::+;
+ $([$($variant)::+; $($fields)*])*
+ [$($name)::+::$next; $($named)*]
+ $($rest)*
+ }
+ };
+
+ {
+ $($name:ident)::+;
+ $([$($variant:ident)::+; $($fields:tt)*])*
+ $next:ident ($($field:tt)*)
+ $($rest:tt)*
+ } => {
+ spanless_eq_enum! {
+ $($name)::+;
+ $([$($variant)::+; $($fields)*])*
+ $next [] ($($field)*)
+ $($rest)*
+ }
+ };
+
+ {
+ $($name:ident)::+;
+ $([$($variant:ident)::+; $($fields:tt)*])*
+ $next:ident
+ $($rest:tt)*
+ } => {
+ spanless_eq_enum! {
+ $($name)::+;
+ $([$($variant)::+; $($fields)*])*
+ [$($name)::+::$next;]
+ $($rest)*
+ }
+ };
+}
+
+spanless_eq_struct!(AngleBracketedArgs; span args);
+spanless_eq_struct!(AnonConst; id value);
+spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
+spanless_eq_struct!(AssocConstraint; id ident gen_args kind span);
+spanless_eq_struct!(AttrItem; path args tokens);
+spanless_eq_struct!(AttrTokenStream; 0);
+spanless_eq_struct!(Attribute; kind id style span);
+spanless_eq_struct!(AttributesData; attrs tokens);
+spanless_eq_struct!(BareFnTy; safety ext generic_params decl decl_span);
+spanless_eq_struct!(BindingMode; 0 1);
+spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
+spanless_eq_struct!(Closure; binder capture_clause constness coroutine_kind movability fn_decl body !fn_decl_span !fn_arg_span);
+spanless_eq_struct!(ConstItem; defaultness generics ty expr);
+spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
+spanless_eq_struct!(Delegation; id qself path rename body);
+spanless_eq_struct!(DelegationMac; qself prefix suffixes body);
+spanless_eq_struct!(DelimArgs; dspan delim tokens);
+spanless_eq_struct!(DelimSpacing; open close);
+spanless_eq_struct!(EnumDef; variants);
+spanless_eq_struct!(Expr; id kind span attrs !tokens);
+spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
+spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder);
+spanless_eq_struct!(Fn; defaultness generics sig body);
+spanless_eq_struct!(FnDecl; inputs output);
+spanless_eq_struct!(FnHeader; constness coroutine_kind safety ext);
+spanless_eq_struct!(FnSig; header decl span);
+spanless_eq_struct!(ForeignMod; safety abi items);
+spanless_eq_struct!(FormatArgPosition; index kind span);
+spanless_eq_struct!(FormatArgs; span template arguments);
+spanless_eq_struct!(FormatArgument; kind expr);
+spanless_eq_struct!(FormatOptions; width precision alignment fill sign alternate zero_pad debug_hex);
+spanless_eq_struct!(FormatPlaceholder; argument span format_trait format_options);
+spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span);
+spanless_eq_struct!(Generics; params where_clause span);
+spanless_eq_struct!(Impl; defaultness safety generics constness polarity of_trait self_ty items);
+spanless_eq_struct!(InlineAsm; template template_strs operands clobber_abis options line_spans);
+spanless_eq_struct!(InlineAsmSym; id qself path);
+spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
+spanless_eq_struct!(Label; ident);
+spanless_eq_struct!(Lifetime; id ident);
+spanless_eq_struct!(Lit; kind symbol suffix);
+spanless_eq_struct!(Local; id pat ty kind span colon_sp attrs !tokens);
+spanless_eq_struct!(MacCall; path args);
+spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
+spanless_eq_struct!(MacroDef; body macro_rules);
+spanless_eq_struct!(MetaItemLit; symbol suffix kind span);
+spanless_eq_struct!(MethodCall; seg receiver args !span);
+spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
+spanless_eq_struct!(MutTy; ty mutbl);
+spanless_eq_struct!(NormalAttr; item tokens);
+spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
+spanless_eq_struct!(Pat; id kind span tokens);
+spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
+spanless_eq_struct!(Path; span segments tokens);
+spanless_eq_struct!(PathSegment; ident id args);
+spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
+spanless_eq_struct!(QSelf; ty path_span position);
+spanless_eq_struct!(StaticForeignItem; ty mutability expr);
+spanless_eq_struct!(StaticItem; ty mutability expr);
+spanless_eq_struct!(Stmt; id kind span);
+spanless_eq_struct!(StrLit; symbol suffix symbol_unescaped style span);
+spanless_eq_struct!(StructExpr; qself path fields rest);
+spanless_eq_struct!(Token; kind span);
+spanless_eq_struct!(Trait; safety is_auto generics bounds items);
+spanless_eq_struct!(TraitBoundModifiers; constness asyncness polarity);
+spanless_eq_struct!(TraitRef; path ref_id);
+spanless_eq_struct!(Ty; id kind span tokens);
+spanless_eq_struct!(TyAlias; defaultness generics where_clauses bounds ty);
+spanless_eq_struct!(TyAliasWhereClause; !has_where_token span);
+spanless_eq_struct!(TyAliasWhereClauses; before after !split);
+spanless_eq_struct!(UseTree; prefix kind span);
+spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder);
+spanless_eq_struct!(Visibility; kind span tokens);
+spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
+spanless_eq_struct!(WhereClause; has_where_token predicates span);
+spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
+spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
+spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
+spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
+spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0) Delegation(0) DelegationMac(0));
+spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1));
+spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0));
+spanless_eq_enum!(AttrStyle; Outer Inner);
+spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) Attributes(0));
+spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
+spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
+spanless_eq_enum!(BorrowKind; Ref Raw);
+spanless_eq_enum!(BoundAsyncness; Normal Async(0));
+spanless_eq_enum!(BoundConstness; Never Always(0) Maybe(0));
+spanless_eq_enum!(BoundPolarity; Positive Negative(0) Maybe(0));
+spanless_eq_enum!(ByRef; Yes(0) No);
+spanless_eq_enum!(CaptureBy; Value(move_kw) Ref);
+spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
+spanless_eq_enum!(Const; Yes(0) No);
+spanless_eq_enum!(Defaultness; Default(0) Final);
+spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
+spanless_eq_enum!(FloatTy; F16 F32 F64 F128);
+spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
+spanless_eq_enum!(ForLoopKind; For ForAwait);
+spanless_eq_enum!(ForeignItemKind; Static(0) Fn(0) TyAlias(0) MacCall(0));
+spanless_eq_enum!(FormatAlignment; Left Right Center);
+spanless_eq_enum!(FormatArgPositionKind; Implicit Number Named);
+spanless_eq_enum!(FormatArgsPiece; Literal(0) Placeholder(0));
+spanless_eq_enum!(FormatArgumentKind; Normal Named(0) Captured(0));
+spanless_eq_enum!(FormatCount; Literal(0) Argument(0));
+spanless_eq_enum!(FormatDebugHex; Lower Upper);
+spanless_eq_enum!(FormatSign; Plus Minus);
+spanless_eq_enum!(FormatTrait; Display Debug LowerExp UpperExp Octal Pointer Binary LowerHex UpperHex);
+spanless_eq_enum!(GenBlockKind; Async Gen AsyncGen);
+spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
+spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
+spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default));
+spanless_eq_enum!(ImplPolarity; Positive Negative(0));
+spanless_eq_enum!(Inline; Yes No);
+spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
+spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
+spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
+spanless_eq_enum!(IsAuto; Yes No);
+spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
+spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
+spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
+spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
+spanless_eq_enum!(MatchKind; Prefix Postfix);
+spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
+spanless_eq_enum!(Movability; Static Movable);
+spanless_eq_enum!(Mutability; Mut Not);
+spanless_eq_enum!(PatFieldsRest; Rest None);
+spanless_eq_enum!(PreciseCapturingArg; Lifetime(0) Arg(0 1));
+spanless_eq_enum!(RangeEnd; Included(0) Excluded);
+spanless_eq_enum!(RangeLimits; HalfOpen Closed);
+spanless_eq_enum!(Recovered; No Yes(0));
+spanless_eq_enum!(Safety; Unsafe(0) Default);
+spanless_eq_enum!(StmtKind; Let(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
+spanless_eq_enum!(StrStyle; Cooked Raw(0));
+spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
+spanless_eq_enum!(Term; Ty(0) Const(0));
+spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2 3));
+spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None);
+spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
+spanless_eq_enum!(UnOp; Deref Not Neg);
+spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
+spanless_eq_enum!(UseTreeKind; Simple(0) Nested(items span) Glob);
+spanless_eq_enum!(VariantData; Struct(fields recovered) Tuple(0 1) Unit(0));
+spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
+spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id)
+ Gen(span closure_id return_impl_trait_id)
+ AsyncGen(span closure_id return_impl_trait_id));
+spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0)
+ Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3)
+ If(0 1 2) While(0 1 2) ForLoop(pat iter body label kind) Loop(0 1 2)
+ Match(0 1 2) Closure(0) Block(0 1) Gen(0 1 2) Await(0 1) TryBlock(0)
+ Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore
+ Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
+ InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0)
+ Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0) Err(0) Dummy);
+spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
+ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
+ Sym(sym) Label(block));
+spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0) Const(0) Fn(0)
+ Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
+ Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0)
+ Delegation(0) DelegationMac(0));
+spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0 1) CStr(0 1) Byte(0) Char(0)
+ Int(0 1) Float(0 1) Bool(0) Err(0));
+spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
+ Or(0) Path(0 1) Tuple(0) Box(0) Deref(0) Ref(0 1) Lit(0) Range(0 1 2)
+ Slice(0) Rest Never Paren(0) MacCall(0) Err(0));
+spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) BareFn(0) Never
+ Tup(0) AnonStruct(0 1) AnonUnion(0 1) Path(0 1) TraitObject(0 1)
+ ImplTrait(0 1 2) Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) CVarArgs
+ Pat(0 1) Dummy Err(0));
+
+impl SpanlessEq for Ident {
+ fn eq(&self, other: &Self) -> bool {
+ self.as_str() == other.as_str()
+ }
+}
+
+impl SpanlessEq for RangeSyntax {
+ fn eq(&self, _other: &Self) -> bool {
+ match self {
+ RangeSyntax::DotDotDot | RangeSyntax::DotDotEq => true,
+ }
+ }
+}
+
+impl SpanlessEq for Param {
+ fn eq(&self, other: &Self) -> bool {
+ let Param {
+ attrs,
+ ty,
+ pat,
+ id,
+ span: _,
+ is_placeholder,
+ } = self;
+ let Param {
+ attrs: attrs2,
+ ty: ty2,
+ pat: pat2,
+ id: id2,
+ span: _,
+ is_placeholder: is_placeholder2,
+ } = other;
+ SpanlessEq::eq(id, id2)
+ && SpanlessEq::eq(is_placeholder, is_placeholder2)
+ && (matches!(ty.kind, TyKind::Err(_))
+ || matches!(ty2.kind, TyKind::Err(_))
+ || SpanlessEq::eq(attrs, attrs2)
+ && SpanlessEq::eq(ty, ty2)
+ && SpanlessEq::eq(pat, pat2))
+ }
+}
+
+impl SpanlessEq for TokenKind {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (TokenKind::Literal(this), TokenKind::Literal(other)) => SpanlessEq::eq(this, other),
+ (TokenKind::DotDotEq | TokenKind::DotDotDot, _) => match other {
+ TokenKind::DotDotEq | TokenKind::DotDotDot => true,
+ _ => false,
+ },
+ (TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => {
+ let this = this.as_ref();
+ let other = other.as_ref();
+ match (this, other) {
+ (Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
+ SpanlessEq::eq(this, other)
+ }
+ _ => this == other,
+ }
+ }
+ _ => self == other,
+ }
+ }
+}
+
+impl SpanlessEq for TokenStream {
+ fn eq(&self, other: &Self) -> bool {
+ let mut this_trees = self.trees();
+ let mut other_trees = other.trees();
+ loop {
+ let this = match this_trees.next() {
+ None => return other_trees.next().is_none(),
+ Some(tree) => tree,
+ };
+ let other = match other_trees.next() {
+ None => return false,
+ Some(tree) => tree,
+ };
+ if SpanlessEq::eq(this, other) {
+ continue;
+ }
+ if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
+ if match (&this.kind, &other.kind) {
+ (TokenKind::Literal(this), TokenKind::Literal(other)) => {
+ SpanlessEq::eq(this, other)
+ }
+ (TokenKind::DocComment(_kind, style, symbol), TokenKind::Pound) => {
+ doc_comment(*style, *symbol, &mut other_trees)
+ }
+ (TokenKind::Pound, TokenKind::DocComment(_kind, style, symbol)) => {
+ doc_comment(*style, *symbol, &mut this_trees)
+ }
+ _ => false,
+ } {
+ continue;
+ }
+ }
+ return false;
+ }
+ }
+}
+
+fn doc_comment<'a>(
+ style: AttrStyle,
+ unescaped: Symbol,
+ trees: &mut impl Iterator<Item = &'a TokenTree>,
+) -> bool {
+ if match style {
+ AttrStyle::Outer => false,
+ AttrStyle::Inner => true,
+ } {
+ match trees.next() {
+ Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Not,
+ span: _,
+ },
+ _spacing,
+ )) => {}
+ _ => return false,
+ }
+ }
+ let stream = match trees.next() {
+ Some(TokenTree::Delimited(_span, _spacing, Delimiter::Bracket, stream)) => stream,
+ _ => return false,
+ };
+ let mut trees = stream.trees();
+ match trees.next() {
+ Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Ident(symbol, IdentIsRaw::No),
+ span: _,
+ },
+ _spacing,
+ )) if *symbol == sym::doc => {}
+ _ => return false,
+ }
+ match trees.next() {
+ Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Eq,
+ span: _,
+ },
+ _spacing,
+ )) => {}
+ _ => return false,
+ }
+ match trees.next() {
+ Some(TokenTree::Token(token, _spacing)) => {
+ is_escaped_literal_token(token, unescaped) && trees.next().is_none()
+ }
+ _ => false,
+ }
+}
+
+fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
+ match token {
+ Token {
+ kind: TokenKind::Literal(lit),
+ span: _,
+ } => match MetaItemLit::from_token_lit(*lit, DUMMY_SP) {
+ Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped),
+ Err(_) => false,
+ },
+ Token {
+ kind: TokenKind::Interpolated(nonterminal),
+ span: _,
+ } => match nonterminal.as_ref() {
+ Nonterminal::NtExpr(expr) => match &expr.kind {
+ ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
+ _ => false,
+ },
+ _ => false,
+ },
+ _ => false,
+ }
+}
+
+fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool {
+ match value {
+ AttrArgsEq::Ast(expr) => match &expr.kind {
+ ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
+ _ => false,
+ },
+ AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped),
+ }
+}
+
+fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool {
+ match lit {
+ MetaItemLit {
+ symbol: _,
+ suffix: None,
+ kind,
+ span: _,
+ } => is_escaped_lit_kind(kind, unescaped),
+ _ => false,
+ }
+}
+
+fn is_escaped_lit(lit: &Lit, unescaped: Symbol) -> bool {
+ match lit {
+ Lit {
+ kind: token::LitKind::Str,
+ symbol: _,
+ suffix: None,
+ } => match LitKind::from_token_lit(*lit) {
+ Ok(lit_kind) => is_escaped_lit_kind(&lit_kind, unescaped),
+ _ => false,
+ },
+ _ => false,
+ }
+}
+
+fn is_escaped_lit_kind(kind: &LitKind, unescaped: Symbol) -> bool {
+ match kind {
+ LitKind::Str(symbol, StrStyle::Cooked) => {
+ symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', "")
+ }
+ _ => false,
+ }
+}
+
+impl SpanlessEq for LazyAttrTokenStream {
+ fn eq(&self, other: &Self) -> bool {
+ let this = self.to_attr_token_stream();
+ let other = other.to_attr_token_stream();
+ SpanlessEq::eq(&this, &other)
+ }
+}
+
+impl SpanlessEq for AttrKind {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (AttrKind::Normal(normal), AttrKind::Normal(normal2)) => {
+ SpanlessEq::eq(normal, normal2)
+ }
+ (AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
+ SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
+ }
+ (AttrKind::DocComment(kind, unescaped), AttrKind::Normal(normal2)) => {
+ match kind {
+ CommentKind::Line | CommentKind::Block => {}
+ }
+ let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
+ SpanlessEq::eq(&path, &normal2.item.path)
+ && match &normal2.item.args {
+ AttrArgs::Empty | AttrArgs::Delimited(_) => false,
+ AttrArgs::Eq(_span, value) => {
+ is_escaped_literal_attr_args(value, *unescaped)
+ }
+ }
+ }
+ (AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
+ }
+ }
+}
+
+impl SpanlessEq for FormatArguments {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(self.all_args(), other.all_args())
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/common/mod.rs b/rust/hw/char/pl011/vendor/syn/tests/common/mod.rs
new file mode 100644
index 0000000000..f29d800c72
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/common/mod.rs
@@ -0,0 +1,28 @@
+#![allow(dead_code)]
+#![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)]
+
+use rayon::ThreadPoolBuilder;
+use std::env;
+
+pub mod eq;
+pub mod parse;
+
+/// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it.
+pub fn abort_after() -> usize {
+ match env::var("ABORT_AFTER_FAILURE") {
+ Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
+ Err(_) => usize::MAX,
+ }
+}
+
+/// Configure Rayon threadpool.
+pub fn rayon_init() {
+ let stack_size = match env::var("RUST_MIN_STACK") {
+ Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
+ Err(_) => 1024 * 1024 * if cfg!(debug_assertions) { 40 } else { 20 },
+ };
+ ThreadPoolBuilder::new()
+ .stack_size(stack_size)
+ .build_global()
+ .unwrap();
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/common/parse.rs b/rust/hw/char/pl011/vendor/syn/tests/common/parse.rs
new file mode 100644
index 0000000000..2d00a62f46
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/common/parse.rs
@@ -0,0 +1,49 @@
+extern crate rustc_ast;
+extern crate rustc_driver;
+extern crate rustc_expand;
+extern crate rustc_parse as parse;
+extern crate rustc_session;
+extern crate rustc_span;
+
+use rustc_ast::ast;
+use rustc_ast::ptr::P;
+use rustc_session::parse::ParseSess;
+use rustc_span::FileName;
+use std::panic;
+
+pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
+ match panic::catch_unwind(|| {
+ let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
+ let sess = ParseSess::new(locale_resources);
+ let e = parse::new_parser_from_source_str(
+ &sess,
+ FileName::Custom("test_precedence".to_string()),
+ input.to_string(),
+ )
+ .parse_expr();
+ match e {
+ Ok(expr) => Some(expr),
+ Err(diagnostic) => {
+ diagnostic.emit();
+ None
+ }
+ }
+ }) {
+ Ok(Some(e)) => Some(e),
+ Ok(None) => None,
+ Err(_) => {
+ errorf!("librustc panicked\n");
+ None
+ }
+ }
+}
+
+pub fn syn_expr(input: &str) -> Option<syn::Expr> {
+ match syn::parse_str(input) {
+ Ok(e) => Some(e),
+ Err(msg) => {
+ errorf!("syn failed to parse\n{:?}\n", msg);
+ None
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/debug/gen.rs b/rust/hw/char/pl011/vendor/syn/tests/debug/gen.rs
new file mode 100644
index 0000000000..9f726683a2
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/debug/gen.rs
@@ -0,0 +1,5163 @@
+// This file is @generated by syn-internal-codegen.
+// It is not intended for manual editing.
+
+#![allow(repr_transparent_external_private_fields)]
+#![allow(clippy::match_wildcard_for_single_variants)]
+use super::{Lite, Present};
+use ref_cast::RefCast;
+use std::fmt::{self, Debug, Display};
+impl Debug for Lite<syn::Abi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Abi");
+ if let Some(val) = &self.value.name {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::LitStr);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("name", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::AngleBracketedGenericArguments> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
+ if self.value.colon2_token.is_some() {
+ formatter.field("colon2_token", &Present);
+ }
+ if !self.value.args.is_empty() {
+ formatter.field("args", Lite(&self.value.args));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Arm> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Arm");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ if let Some(val) = &self.value.guard {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::If, Box<syn::Expr>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("guard", Print::ref_cast(val));
+ }
+ formatter.field("body", Lite(&self.value.body));
+ if self.value.comma.is_some() {
+ formatter.field("comma", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::AssocConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("AssocConst");
+ formatter.field("ident", Lite(&self.value.ident));
+ if let Some(val) = &self.value.generics {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::AngleBracketedGenericArguments);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("generics", Print::ref_cast(val));
+ }
+ formatter.field("value", Lite(&self.value.value));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::AssocType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("AssocType");
+ formatter.field("ident", Lite(&self.value.ident));
+ if let Some(val) = &self.value.generics {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::AngleBracketedGenericArguments);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("generics", Print::ref_cast(val));
+ }
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::AttrStyle> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::AttrStyle::Outer => formatter.write_str("AttrStyle::Outer"),
+ syn::AttrStyle::Inner(_val) => {
+ formatter.write_str("AttrStyle::Inner")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::Attribute> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Attribute");
+ formatter.field("style", Lite(&self.value.style));
+ formatter.field("meta", Lite(&self.value.meta));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::BareFnArg> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("BareFnArg");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.name {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((proc_macro2::Ident, syn::token::Colon));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("name", Print::ref_cast(val));
+ }
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::BareVariadic> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("BareVariadic");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.name {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((proc_macro2::Ident, syn::token::Colon));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("name", Print::ref_cast(val));
+ }
+ if self.value.comma.is_some() {
+ formatter.field("comma", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::BinOp> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::BinOp::Add(_val) => {
+ formatter.write_str("BinOp::Add")?;
+ Ok(())
+ }
+ syn::BinOp::Sub(_val) => {
+ formatter.write_str("BinOp::Sub")?;
+ Ok(())
+ }
+ syn::BinOp::Mul(_val) => {
+ formatter.write_str("BinOp::Mul")?;
+ Ok(())
+ }
+ syn::BinOp::Div(_val) => {
+ formatter.write_str("BinOp::Div")?;
+ Ok(())
+ }
+ syn::BinOp::Rem(_val) => {
+ formatter.write_str("BinOp::Rem")?;
+ Ok(())
+ }
+ syn::BinOp::And(_val) => {
+ formatter.write_str("BinOp::And")?;
+ Ok(())
+ }
+ syn::BinOp::Or(_val) => {
+ formatter.write_str("BinOp::Or")?;
+ Ok(())
+ }
+ syn::BinOp::BitXor(_val) => {
+ formatter.write_str("BinOp::BitXor")?;
+ Ok(())
+ }
+ syn::BinOp::BitAnd(_val) => {
+ formatter.write_str("BinOp::BitAnd")?;
+ Ok(())
+ }
+ syn::BinOp::BitOr(_val) => {
+ formatter.write_str("BinOp::BitOr")?;
+ Ok(())
+ }
+ syn::BinOp::Shl(_val) => {
+ formatter.write_str("BinOp::Shl")?;
+ Ok(())
+ }
+ syn::BinOp::Shr(_val) => {
+ formatter.write_str("BinOp::Shr")?;
+ Ok(())
+ }
+ syn::BinOp::Eq(_val) => {
+ formatter.write_str("BinOp::Eq")?;
+ Ok(())
+ }
+ syn::BinOp::Lt(_val) => {
+ formatter.write_str("BinOp::Lt")?;
+ Ok(())
+ }
+ syn::BinOp::Le(_val) => {
+ formatter.write_str("BinOp::Le")?;
+ Ok(())
+ }
+ syn::BinOp::Ne(_val) => {
+ formatter.write_str("BinOp::Ne")?;
+ Ok(())
+ }
+ syn::BinOp::Ge(_val) => {
+ formatter.write_str("BinOp::Ge")?;
+ Ok(())
+ }
+ syn::BinOp::Gt(_val) => {
+ formatter.write_str("BinOp::Gt")?;
+ Ok(())
+ }
+ syn::BinOp::AddAssign(_val) => {
+ formatter.write_str("BinOp::AddAssign")?;
+ Ok(())
+ }
+ syn::BinOp::SubAssign(_val) => {
+ formatter.write_str("BinOp::SubAssign")?;
+ Ok(())
+ }
+ syn::BinOp::MulAssign(_val) => {
+ formatter.write_str("BinOp::MulAssign")?;
+ Ok(())
+ }
+ syn::BinOp::DivAssign(_val) => {
+ formatter.write_str("BinOp::DivAssign")?;
+ Ok(())
+ }
+ syn::BinOp::RemAssign(_val) => {
+ formatter.write_str("BinOp::RemAssign")?;
+ Ok(())
+ }
+ syn::BinOp::BitXorAssign(_val) => {
+ formatter.write_str("BinOp::BitXorAssign")?;
+ Ok(())
+ }
+ syn::BinOp::BitAndAssign(_val) => {
+ formatter.write_str("BinOp::BitAndAssign")?;
+ Ok(())
+ }
+ syn::BinOp::BitOrAssign(_val) => {
+ formatter.write_str("BinOp::BitOrAssign")?;
+ Ok(())
+ }
+ syn::BinOp::ShlAssign(_val) => {
+ formatter.write_str("BinOp::ShlAssign")?;
+ Ok(())
+ }
+ syn::BinOp::ShrAssign(_val) => {
+ formatter.write_str("BinOp::ShrAssign")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::Block> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Block");
+ formatter.field("stmts", Lite(&self.value.stmts));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::BoundLifetimes> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("BoundLifetimes");
+ if !self.value.lifetimes.is_empty() {
+ formatter.field("lifetimes", Lite(&self.value.lifetimes));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ConstParam> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ConstParam");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("ty", Lite(&self.value.ty));
+ if self.value.eq_token.is_some() {
+ formatter.field("eq_token", &Present);
+ }
+ if let Some(val) = &self.value.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Expr);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Constraint> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Constraint");
+ formatter.field("ident", Lite(&self.value.ident));
+ if let Some(val) = &self.value.generics {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::AngleBracketedGenericArguments);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("generics", Print::ref_cast(val));
+ }
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Data> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Data::Struct(_val) => {
+ let mut formatter = formatter.debug_struct("Data::Struct");
+ formatter.field("fields", Lite(&_val.fields));
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ syn::Data::Enum(_val) => {
+ let mut formatter = formatter.debug_struct("Data::Enum");
+ if !_val.variants.is_empty() {
+ formatter.field("variants", Lite(&_val.variants));
+ }
+ formatter.finish()
+ }
+ syn::Data::Union(_val) => {
+ let mut formatter = formatter.debug_struct("Data::Union");
+ formatter.field("fields", Lite(&_val.fields));
+ formatter.finish()
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::DataEnum> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("DataEnum");
+ if !self.value.variants.is_empty() {
+ formatter.field("variants", Lite(&self.value.variants));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::DataStruct> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("DataStruct");
+ formatter.field("fields", Lite(&self.value.fields));
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::DataUnion> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("DataUnion");
+ formatter.field("fields", Lite(&self.value.fields));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::DeriveInput> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("DeriveInput");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("data", Lite(&self.value.data));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Expr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Expr::Array(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Array");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if !_val.elems.is_empty() {
+ formatter.field("elems", Lite(&_val.elems));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Assign(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Assign");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("left", Lite(&_val.left));
+ formatter.field("right", Lite(&_val.right));
+ formatter.finish()
+ }
+ syn::Expr::Async(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Async");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if _val.capture.is_some() {
+ formatter.field("capture", &Present);
+ }
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::Expr::Await(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Await");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("base", Lite(&_val.base));
+ formatter.finish()
+ }
+ syn::Expr::Binary(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Binary");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("left", Lite(&_val.left));
+ formatter.field("op", Lite(&_val.op));
+ formatter.field("right", Lite(&_val.right));
+ formatter.finish()
+ }
+ syn::Expr::Block(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Block");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::Expr::Break(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Break");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Lifetime);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ if let Some(val) = &_val.expr {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("expr", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Call(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Call");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("func", Lite(&_val.func));
+ if !_val.args.is_empty() {
+ formatter.field("args", Lite(&_val.args));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Cast(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Cast");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.finish()
+ }
+ syn::Expr::Closure(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Closure");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.lifetimes {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BoundLifetimes);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetimes", Print::ref_cast(val));
+ }
+ if _val.constness.is_some() {
+ formatter.field("constness", &Present);
+ }
+ if _val.movability.is_some() {
+ formatter.field("movability", &Present);
+ }
+ if _val.asyncness.is_some() {
+ formatter.field("asyncness", &Present);
+ }
+ if _val.capture.is_some() {
+ formatter.field("capture", &Present);
+ }
+ if !_val.inputs.is_empty() {
+ formatter.field("inputs", Lite(&_val.inputs));
+ }
+ formatter.field("output", Lite(&_val.output));
+ formatter.field("body", Lite(&_val.body));
+ formatter.finish()
+ }
+ syn::Expr::Const(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Const");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::Expr::Continue(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Continue");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Lifetime);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Field(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Field");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("base", Lite(&_val.base));
+ formatter.field("member", Lite(&_val.member));
+ formatter.finish()
+ }
+ syn::Expr::ForLoop(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::ForLoop");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("pat", Lite(&_val.pat));
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.field("body", Lite(&_val.body));
+ formatter.finish()
+ }
+ syn::Expr::Group(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Group");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Expr::If(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::If");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("cond", Lite(&_val.cond));
+ formatter.field("then_branch", Lite(&_val.then_branch));
+ if let Some(val) = &_val.else_branch {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Else, Box<syn::Expr>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("else_branch", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Index(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Index");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.field("index", Lite(&_val.index));
+ formatter.finish()
+ }
+ syn::Expr::Infer(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Infer");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Let(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Let");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("pat", Lite(&_val.pat));
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Expr::Lit(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Lit");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("lit", Lite(&_val.lit));
+ formatter.finish()
+ }
+ syn::Expr::Loop(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Loop");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("body", Lite(&_val.body));
+ formatter.finish()
+ }
+ syn::Expr::Macro(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Macro");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("mac", Lite(&_val.mac));
+ formatter.finish()
+ }
+ syn::Expr::Match(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Match");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ if !_val.arms.is_empty() {
+ formatter.field("arms", Lite(&_val.arms));
+ }
+ formatter.finish()
+ }
+ syn::Expr::MethodCall(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::MethodCall");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("receiver", Lite(&_val.receiver));
+ formatter.field("method", Lite(&_val.method));
+ if let Some(val) = &_val.turbofish {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::AngleBracketedGenericArguments);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("turbofish", Print::ref_cast(val));
+ }
+ if !_val.args.is_empty() {
+ formatter.field("args", Lite(&_val.args));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Paren(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Paren");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Expr::Path(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Path");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&_val.path));
+ formatter.finish()
+ }
+ syn::Expr::Range(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Range");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.start {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("start", Print::ref_cast(val));
+ }
+ formatter.field("limits", Lite(&_val.limits));
+ if let Some(val) = &_val.end {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("end", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Reference(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Reference");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if _val.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Expr::Repeat(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Repeat");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.field("len", Lite(&_val.len));
+ formatter.finish()
+ }
+ syn::Expr::Return(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Return");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.expr {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("expr", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Struct(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Struct");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&_val.path));
+ if !_val.fields.is_empty() {
+ formatter.field("fields", Lite(&_val.fields));
+ }
+ if _val.dot2_token.is_some() {
+ formatter.field("dot2_token", &Present);
+ }
+ if let Some(val) = &_val.rest {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("rest", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Try(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Try");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Expr::TryBlock(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::TryBlock");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::Expr::Tuple(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Tuple");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if !_val.elems.is_empty() {
+ formatter.field("elems", Lite(&_val.elems));
+ }
+ formatter.finish()
+ }
+ syn::Expr::Unary(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Unary");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("op", Lite(&_val.op));
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Expr::Unsafe(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Unsafe");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::Expr::Verbatim(_val) => {
+ formatter.write_str("Expr::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Expr::While(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::While");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("cond", Lite(&_val.cond));
+ formatter.field("body", Lite(&_val.body));
+ formatter.finish()
+ }
+ syn::Expr::Yield(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::Yield");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.expr {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("expr", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::ExprArray> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprArray");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if !self.value.elems.is_empty() {
+ formatter.field("elems", Lite(&self.value.elems));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprAssign> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprAssign");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("left", Lite(&self.value.left));
+ formatter.field("right", Lite(&self.value.right));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprAsync> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprAsync");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if self.value.capture.is_some() {
+ formatter.field("capture", &Present);
+ }
+ formatter.field("block", Lite(&self.value.block));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprAwait> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprAwait");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("base", Lite(&self.value.base));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprBinary> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprBinary");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("left", Lite(&self.value.left));
+ formatter.field("op", Lite(&self.value.op));
+ formatter.field("right", Lite(&self.value.right));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprBlock> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprBlock");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("block", Lite(&self.value.block));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprBreak> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprBreak");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Lifetime);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ if let Some(val) = &self.value.expr {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("expr", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprCall> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprCall");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("func", Lite(&self.value.func));
+ if !self.value.args.is_empty() {
+ formatter.field("args", Lite(&self.value.args));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprCast> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprCast");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprClosure> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprClosure");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.lifetimes {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BoundLifetimes);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetimes", Print::ref_cast(val));
+ }
+ if self.value.constness.is_some() {
+ formatter.field("constness", &Present);
+ }
+ if self.value.movability.is_some() {
+ formatter.field("movability", &Present);
+ }
+ if self.value.asyncness.is_some() {
+ formatter.field("asyncness", &Present);
+ }
+ if self.value.capture.is_some() {
+ formatter.field("capture", &Present);
+ }
+ if !self.value.inputs.is_empty() {
+ formatter.field("inputs", Lite(&self.value.inputs));
+ }
+ formatter.field("output", Lite(&self.value.output));
+ formatter.field("body", Lite(&self.value.body));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprConst");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("block", Lite(&self.value.block));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprContinue> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprContinue");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Lifetime);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprField> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprField");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("base", Lite(&self.value.base));
+ formatter.field("member", Lite(&self.value.member));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprForLoop> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprForLoop");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.field("body", Lite(&self.value.body));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprGroup> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprGroup");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprIf> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprIf");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("cond", Lite(&self.value.cond));
+ formatter.field("then_branch", Lite(&self.value.then_branch));
+ if let Some(val) = &self.value.else_branch {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Else, Box<syn::Expr>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("else_branch", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprIndex> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprIndex");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.field("index", Lite(&self.value.index));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprInfer> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprInfer");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprLet> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprLet");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprLit> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprLit");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("lit", Lite(&self.value.lit));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprLoop> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprLoop");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("body", Lite(&self.value.body));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprMacro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprMacro");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("mac", Lite(&self.value.mac));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprMatch> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprMatch");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ if !self.value.arms.is_empty() {
+ formatter.field("arms", Lite(&self.value.arms));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprMethodCall> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprMethodCall");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("receiver", Lite(&self.value.receiver));
+ formatter.field("method", Lite(&self.value.method));
+ if let Some(val) = &self.value.turbofish {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::AngleBracketedGenericArguments);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("turbofish", Print::ref_cast(val));
+ }
+ if !self.value.args.is_empty() {
+ formatter.field("args", Lite(&self.value.args));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprParen> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprParen");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprPath> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprPath");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&self.value.path));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprRange> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprRange");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.start {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("start", Print::ref_cast(val));
+ }
+ formatter.field("limits", Lite(&self.value.limits));
+ if let Some(val) = &self.value.end {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("end", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprReference> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprReference");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if self.value.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprRepeat> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprRepeat");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.field("len", Lite(&self.value.len));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprReturn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprReturn");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.expr {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("expr", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprStruct> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprStruct");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&self.value.path));
+ if !self.value.fields.is_empty() {
+ formatter.field("fields", Lite(&self.value.fields));
+ }
+ if self.value.dot2_token.is_some() {
+ formatter.field("dot2_token", &Present);
+ }
+ if let Some(val) = &self.value.rest {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("rest", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprTry> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprTry");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprTryBlock> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprTryBlock");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("block", Lite(&self.value.block));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprTuple> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprTuple");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if !self.value.elems.is_empty() {
+ formatter.field("elems", Lite(&self.value.elems));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprUnary> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprUnary");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("op", Lite(&self.value.op));
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprUnsafe> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprUnsafe");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("block", Lite(&self.value.block));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprWhile> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprWhile");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.label {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Label);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("label", Print::ref_cast(val));
+ }
+ formatter.field("cond", Lite(&self.value.cond));
+ formatter.field("body", Lite(&self.value.body));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ExprYield> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ExprYield");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.expr {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Box<syn::Expr>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("expr", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Field> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Field");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ match self.value.mutability {
+ syn::FieldMutability::None => {}
+ _ => {
+ formatter.field("mutability", Lite(&self.value.mutability));
+ }
+ }
+ if let Some(val) = &self.value.ident {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(proc_macro2::Ident);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("ident", Print::ref_cast(val));
+ }
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::FieldMutability> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::FieldMutability::None => formatter.write_str("FieldMutability::None"),
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::FieldPat> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("FieldPat");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("member", Lite(&self.value.member));
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::FieldValue> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("FieldValue");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("member", Lite(&self.value.member));
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Fields> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Fields::Named(_val) => {
+ let mut formatter = formatter.debug_struct("Fields::Named");
+ if !_val.named.is_empty() {
+ formatter.field("named", Lite(&_val.named));
+ }
+ formatter.finish()
+ }
+ syn::Fields::Unnamed(_val) => {
+ let mut formatter = formatter.debug_struct("Fields::Unnamed");
+ if !_val.unnamed.is_empty() {
+ formatter.field("unnamed", Lite(&_val.unnamed));
+ }
+ formatter.finish()
+ }
+ syn::Fields::Unit => formatter.write_str("Fields::Unit"),
+ }
+ }
+}
+impl Debug for Lite<syn::FieldsNamed> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("FieldsNamed");
+ if !self.value.named.is_empty() {
+ formatter.field("named", Lite(&self.value.named));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::FieldsUnnamed> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("FieldsUnnamed");
+ if !self.value.unnamed.is_empty() {
+ formatter.field("unnamed", Lite(&self.value.unnamed));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::File> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("File");
+ if let Some(val) = &self.value.shebang {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(String);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("shebang", Print::ref_cast(val));
+ }
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if !self.value.items.is_empty() {
+ formatter.field("items", Lite(&self.value.items));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::FnArg> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::FnArg::Receiver(_val) => {
+ formatter.write_str("FnArg::Receiver")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::FnArg::Typed(_val) => {
+ formatter.write_str("FnArg::Typed")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::ForeignItem> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::ForeignItem::Fn(_val) => {
+ let mut formatter = formatter.debug_struct("ForeignItem::Fn");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("sig", Lite(&_val.sig));
+ formatter.finish()
+ }
+ syn::ForeignItem::Static(_val) => {
+ let mut formatter = formatter.debug_struct("ForeignItem::Static");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ match _val.mutability {
+ syn::StaticMutability::None => {}
+ _ => {
+ formatter.field("mutability", Lite(&_val.mutability));
+ }
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.finish()
+ }
+ syn::ForeignItem::Type(_val) => {
+ let mut formatter = formatter.debug_struct("ForeignItem::Type");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.finish()
+ }
+ syn::ForeignItem::Macro(_val) => {
+ let mut formatter = formatter.debug_struct("ForeignItem::Macro");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("mac", Lite(&_val.mac));
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ syn::ForeignItem::Verbatim(_val) => {
+ formatter.write_str("ForeignItem::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::ForeignItemFn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ForeignItemFn");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("sig", Lite(&self.value.sig));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ForeignItemMacro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ForeignItemMacro");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("mac", Lite(&self.value.mac));
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ForeignItemStatic> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ForeignItemStatic");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ match self.value.mutability {
+ syn::StaticMutability::None => {}
+ _ => {
+ formatter.field("mutability", Lite(&self.value.mutability));
+ }
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ForeignItemType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ForeignItemType");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::GenericArgument> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::GenericArgument::Lifetime(_val) => {
+ formatter.write_str("GenericArgument::Lifetime")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::GenericArgument::Type(_val) => {
+ formatter.write_str("GenericArgument::Type")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::GenericArgument::Const(_val) => {
+ formatter.write_str("GenericArgument::Const")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::GenericArgument::AssocType(_val) => {
+ formatter.write_str("GenericArgument::AssocType")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::GenericArgument::AssocConst(_val) => {
+ formatter.write_str("GenericArgument::AssocConst")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::GenericArgument::Constraint(_val) => {
+ formatter.write_str("GenericArgument::Constraint")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::GenericParam> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::GenericParam::Lifetime(_val) => {
+ formatter.write_str("GenericParam::Lifetime")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::GenericParam::Type(_val) => {
+ formatter.write_str("GenericParam::Type")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::GenericParam::Const(_val) => {
+ formatter.write_str("GenericParam::Const")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::Generics> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Generics");
+ if self.value.lt_token.is_some() {
+ formatter.field("lt_token", &Present);
+ }
+ if !self.value.params.is_empty() {
+ formatter.field("params", Lite(&self.value.params));
+ }
+ if self.value.gt_token.is_some() {
+ formatter.field("gt_token", &Present);
+ }
+ if let Some(val) = &self.value.where_clause {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::WhereClause);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("where_clause", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ImplItem> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::ImplItem::Const(_val) => {
+ let mut formatter = formatter.debug_struct("ImplItem::Const");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ if _val.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::ImplItem::Fn(_val) => {
+ let mut formatter = formatter.debug_struct("ImplItem::Fn");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ if _val.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ formatter.field("sig", Lite(&_val.sig));
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::ImplItem::Type(_val) => {
+ let mut formatter = formatter.debug_struct("ImplItem::Type");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ if _val.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.finish()
+ }
+ syn::ImplItem::Macro(_val) => {
+ let mut formatter = formatter.debug_struct("ImplItem::Macro");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("mac", Lite(&_val.mac));
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ syn::ImplItem::Verbatim(_val) => {
+ formatter.write_str("ImplItem::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::ImplItemConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ImplItemConst");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ if self.value.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ImplItemFn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ImplItemFn");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ if self.value.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ formatter.field("sig", Lite(&self.value.sig));
+ formatter.field("block", Lite(&self.value.block));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ImplItemMacro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ImplItemMacro");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("mac", Lite(&self.value.mac));
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ImplItemType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ImplItemType");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ if self.value.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ImplRestriction> {
+ fn fmt(&self, _formatter: &mut fmt::Formatter) -> fmt::Result {
+ unreachable!()
+ }
+}
+impl Debug for Lite<syn::Index> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Index");
+ formatter.field("index", Lite(&self.value.index));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Item> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Item::Const(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Const");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Item::Enum(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Enum");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ if !_val.variants.is_empty() {
+ formatter.field("variants", Lite(&_val.variants));
+ }
+ formatter.finish()
+ }
+ syn::Item::ExternCrate(_val) => {
+ let mut formatter = formatter.debug_struct("Item::ExternCrate");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ if let Some(val) = &_val.rename {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::As, proc_macro2::Ident));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("rename", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Item::Fn(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Fn");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("sig", Lite(&_val.sig));
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::Item::ForeignMod(_val) => {
+ let mut formatter = formatter.debug_struct("Item::ForeignMod");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if _val.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ formatter.field("abi", Lite(&_val.abi));
+ if !_val.items.is_empty() {
+ formatter.field("items", Lite(&_val.items));
+ }
+ formatter.finish()
+ }
+ syn::Item::Impl(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Impl");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if _val.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ if _val.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ formatter.field("generics", Lite(&_val.generics));
+ if let Some(val) = &_val.trait_ {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((Option<syn::token::Not>, syn::Path, syn::token::For));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(
+ &(
+ &super::Option {
+ present: self.0.0.is_some(),
+ },
+ Lite(&self.0.1),
+ ),
+ formatter,
+ )?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("trait_", Print::ref_cast(val));
+ }
+ formatter.field("self_ty", Lite(&_val.self_ty));
+ if !_val.items.is_empty() {
+ formatter.field("items", Lite(&_val.items));
+ }
+ formatter.finish()
+ }
+ syn::Item::Macro(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Macro");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.ident {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(proc_macro2::Ident);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("ident", Print::ref_cast(val));
+ }
+ formatter.field("mac", Lite(&_val.mac));
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ syn::Item::Mod(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Mod");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ if _val.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ if let Some(val) = &_val.content {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Brace, Vec<syn::Item>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("content", Print::ref_cast(val));
+ }
+ if _val.semi.is_some() {
+ formatter.field("semi", &Present);
+ }
+ formatter.finish()
+ }
+ syn::Item::Static(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Static");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ match _val.mutability {
+ syn::StaticMutability::None => {}
+ _ => {
+ formatter.field("mutability", Lite(&_val.mutability));
+ }
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.field("expr", Lite(&_val.expr));
+ formatter.finish()
+ }
+ syn::Item::Struct(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Struct");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.field("fields", Lite(&_val.fields));
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ syn::Item::Trait(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Trait");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ if _val.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ if _val.auto_token.is_some() {
+ formatter.field("auto_token", &Present);
+ }
+ if let Some(val) = &_val.restriction {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::ImplRestriction);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("restriction", Print::ref_cast(val));
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ if _val.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ if !_val.supertraits.is_empty() {
+ formatter.field("supertraits", Lite(&_val.supertraits));
+ }
+ if !_val.items.is_empty() {
+ formatter.field("items", Lite(&_val.items));
+ }
+ formatter.finish()
+ }
+ syn::Item::TraitAlias(_val) => {
+ let mut formatter = formatter.debug_struct("Item::TraitAlias");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ if !_val.bounds.is_empty() {
+ formatter.field("bounds", Lite(&_val.bounds));
+ }
+ formatter.finish()
+ }
+ syn::Item::Type(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Type");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.finish()
+ }
+ syn::Item::Union(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Union");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.field("fields", Lite(&_val.fields));
+ formatter.finish()
+ }
+ syn::Item::Use(_val) => {
+ let mut formatter = formatter.debug_struct("Item::Use");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("vis", Lite(&_val.vis));
+ if _val.leading_colon.is_some() {
+ formatter.field("leading_colon", &Present);
+ }
+ formatter.field("tree", Lite(&_val.tree));
+ formatter.finish()
+ }
+ syn::Item::Verbatim(_val) => {
+ formatter.write_str("Item::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::ItemConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemConst");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemEnum> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemEnum");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ if !self.value.variants.is_empty() {
+ formatter.field("variants", Lite(&self.value.variants));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemExternCrate> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemExternCrate");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ if let Some(val) = &self.value.rename {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::As, proc_macro2::Ident));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("rename", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemFn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemFn");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("sig", Lite(&self.value.sig));
+ formatter.field("block", Lite(&self.value.block));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemForeignMod> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemForeignMod");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if self.value.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ formatter.field("abi", Lite(&self.value.abi));
+ if !self.value.items.is_empty() {
+ formatter.field("items", Lite(&self.value.items));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemImpl> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemImpl");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if self.value.defaultness.is_some() {
+ formatter.field("defaultness", &Present);
+ }
+ if self.value.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ formatter.field("generics", Lite(&self.value.generics));
+ if let Some(val) = &self.value.trait_ {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((Option<syn::token::Not>, syn::Path, syn::token::For));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(
+ &(
+ &super::Option {
+ present: self.0.0.is_some(),
+ },
+ Lite(&self.0.1),
+ ),
+ formatter,
+ )?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("trait_", Print::ref_cast(val));
+ }
+ formatter.field("self_ty", Lite(&self.value.self_ty));
+ if !self.value.items.is_empty() {
+ formatter.field("items", Lite(&self.value.items));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemMacro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemMacro");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.ident {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(proc_macro2::Ident);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("ident", Print::ref_cast(val));
+ }
+ formatter.field("mac", Lite(&self.value.mac));
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemMod> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemMod");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ if self.value.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ if let Some(val) = &self.value.content {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Brace, Vec<syn::Item>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("content", Print::ref_cast(val));
+ }
+ if self.value.semi.is_some() {
+ formatter.field("semi", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemStatic> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemStatic");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ match self.value.mutability {
+ syn::StaticMutability::None => {}
+ _ => {
+ formatter.field("mutability", Lite(&self.value.mutability));
+ }
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.field("expr", Lite(&self.value.expr));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemStruct> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemStruct");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("fields", Lite(&self.value.fields));
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemTrait> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemTrait");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ if self.value.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ if self.value.auto_token.is_some() {
+ formatter.field("auto_token", &Present);
+ }
+ if let Some(val) = &self.value.restriction {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::ImplRestriction);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("restriction", Print::ref_cast(val));
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ if !self.value.supertraits.is_empty() {
+ formatter.field("supertraits", Lite(&self.value.supertraits));
+ }
+ if !self.value.items.is_empty() {
+ formatter.field("items", Lite(&self.value.items));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemTraitAlias> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemTraitAlias");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemType");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemUnion> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemUnion");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("fields", Lite(&self.value.fields));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ItemUse> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ItemUse");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("vis", Lite(&self.value.vis));
+ if self.value.leading_colon.is_some() {
+ formatter.field("leading_colon", &Present);
+ }
+ formatter.field("tree", Lite(&self.value.tree));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Label> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Label");
+ formatter.field("name", Lite(&self.value.name));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Lifetime> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Lifetime");
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::LifetimeParam> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("LifetimeParam");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("lifetime", Lite(&self.value.lifetime));
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Lit> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Lit::Str(_val) => write!(formatter, "{:?}", _val.value()),
+ syn::Lit::ByteStr(_val) => write!(formatter, "{:?}", _val.value()),
+ syn::Lit::CStr(_val) => write!(formatter, "{:?}", _val.value()),
+ syn::Lit::Byte(_val) => write!(formatter, "{:?}", _val.value()),
+ syn::Lit::Char(_val) => write!(formatter, "{:?}", _val.value()),
+ syn::Lit::Int(_val) => write!(formatter, "{}", _val),
+ syn::Lit::Float(_val) => write!(formatter, "{}", _val),
+ syn::Lit::Bool(_val) => {
+ let mut formatter = formatter.debug_struct("Lit::Bool");
+ formatter.field("value", Lite(&_val.value));
+ formatter.finish()
+ }
+ syn::Lit::Verbatim(_val) => {
+ formatter.write_str("Lit::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::LitBool> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("LitBool");
+ formatter.field("value", Lite(&self.value.value));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::LitByte> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{:?}", self.value.value())
+ }
+}
+impl Debug for Lite<syn::LitByteStr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{:?}", self.value.value())
+ }
+}
+impl Debug for Lite<syn::LitCStr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{:?}", self.value.value())
+ }
+}
+impl Debug for Lite<syn::LitChar> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{:?}", self.value.value())
+ }
+}
+impl Debug for Lite<syn::LitFloat> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{}", & self.value)
+ }
+}
+impl Debug for Lite<syn::LitInt> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{}", & self.value)
+ }
+}
+impl Debug for Lite<syn::LitStr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{:?}", self.value.value())
+ }
+}
+impl Debug for Lite<syn::Local> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Local");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ if let Some(val) = &self.value.init {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::LocalInit);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("init", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::LocalInit> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("LocalInit");
+ formatter.field("expr", Lite(&self.value.expr));
+ if let Some(val) = &self.value.diverge {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Else, Box<syn::Expr>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("diverge", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Macro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Macro");
+ formatter.field("path", Lite(&self.value.path));
+ formatter.field("delimiter", Lite(&self.value.delimiter));
+ formatter.field("tokens", Lite(&self.value.tokens));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::MacroDelimiter> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::MacroDelimiter::Paren(_val) => {
+ formatter.write_str("MacroDelimiter::Paren")?;
+ Ok(())
+ }
+ syn::MacroDelimiter::Brace(_val) => {
+ formatter.write_str("MacroDelimiter::Brace")?;
+ Ok(())
+ }
+ syn::MacroDelimiter::Bracket(_val) => {
+ formatter.write_str("MacroDelimiter::Bracket")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::Member> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Member::Named(_val) => {
+ formatter.write_str("Member::Named")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::Member::Unnamed(_val) => {
+ formatter.write_str("Member::Unnamed")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::Meta> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Meta::Path(_val) => {
+ let mut formatter = formatter.debug_struct("Meta::Path");
+ if _val.leading_colon.is_some() {
+ formatter.field("leading_colon", &Present);
+ }
+ if !_val.segments.is_empty() {
+ formatter.field("segments", Lite(&_val.segments));
+ }
+ formatter.finish()
+ }
+ syn::Meta::List(_val) => {
+ let mut formatter = formatter.debug_struct("Meta::List");
+ formatter.field("path", Lite(&_val.path));
+ formatter.field("delimiter", Lite(&_val.delimiter));
+ formatter.field("tokens", Lite(&_val.tokens));
+ formatter.finish()
+ }
+ syn::Meta::NameValue(_val) => {
+ let mut formatter = formatter.debug_struct("Meta::NameValue");
+ formatter.field("path", Lite(&_val.path));
+ formatter.field("value", Lite(&_val.value));
+ formatter.finish()
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::MetaList> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("MetaList");
+ formatter.field("path", Lite(&self.value.path));
+ formatter.field("delimiter", Lite(&self.value.delimiter));
+ formatter.field("tokens", Lite(&self.value.tokens));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::MetaNameValue> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("MetaNameValue");
+ formatter.field("path", Lite(&self.value.path));
+ formatter.field("value", Lite(&self.value.value));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ParenthesizedGenericArguments> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
+ if !self.value.inputs.is_empty() {
+ formatter.field("inputs", Lite(&self.value.inputs));
+ }
+ formatter.field("output", Lite(&self.value.output));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Pat> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Pat::Const(_val) => {
+ formatter.write_str("Pat::Const")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::Pat::Ident(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Ident");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if _val.by_ref.is_some() {
+ formatter.field("by_ref", &Present);
+ }
+ if _val.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ if let Some(val) = &_val.subpat {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::At, Box<syn::Pat>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("subpat", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Pat::Lit(_val) => {
+ formatter.write_str("Pat::Lit")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::Pat::Macro(_val) => {
+ formatter.write_str("Pat::Macro")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::Pat::Or(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Or");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if _val.leading_vert.is_some() {
+ formatter.field("leading_vert", &Present);
+ }
+ if !_val.cases.is_empty() {
+ formatter.field("cases", Lite(&_val.cases));
+ }
+ formatter.finish()
+ }
+ syn::Pat::Paren(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Paren");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("pat", Lite(&_val.pat));
+ formatter.finish()
+ }
+ syn::Pat::Path(_val) => {
+ formatter.write_str("Pat::Path")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::Pat::Range(_val) => {
+ formatter.write_str("Pat::Range")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::Pat::Reference(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Reference");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if _val.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("pat", Lite(&_val.pat));
+ formatter.finish()
+ }
+ syn::Pat::Rest(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Rest");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.finish()
+ }
+ syn::Pat::Slice(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Slice");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if !_val.elems.is_empty() {
+ formatter.field("elems", Lite(&_val.elems));
+ }
+ formatter.finish()
+ }
+ syn::Pat::Struct(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Struct");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&_val.path));
+ if !_val.fields.is_empty() {
+ formatter.field("fields", Lite(&_val.fields));
+ }
+ if let Some(val) = &_val.rest {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::PatRest);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("rest", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Pat::Tuple(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Tuple");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if !_val.elems.is_empty() {
+ formatter.field("elems", Lite(&_val.elems));
+ }
+ formatter.finish()
+ }
+ syn::Pat::TupleStruct(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::TupleStruct");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&_val.path));
+ if !_val.elems.is_empty() {
+ formatter.field("elems", Lite(&_val.elems));
+ }
+ formatter.finish()
+ }
+ syn::Pat::Type(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Type");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("pat", Lite(&_val.pat));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.finish()
+ }
+ syn::Pat::Verbatim(_val) => {
+ formatter.write_str("Pat::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Pat::Wild(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Wild");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.finish()
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::PatIdent> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatIdent");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if self.value.by_ref.is_some() {
+ formatter.field("by_ref", &Present);
+ }
+ if self.value.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ if let Some(val) = &self.value.subpat {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::At, Box<syn::Pat>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("subpat", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatOr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatOr");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if self.value.leading_vert.is_some() {
+ formatter.field("leading_vert", &Present);
+ }
+ if !self.value.cases.is_empty() {
+ formatter.field("cases", Lite(&self.value.cases));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatParen> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatParen");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatReference> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatReference");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if self.value.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatRest> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatRest");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatSlice> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatSlice");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if !self.value.elems.is_empty() {
+ formatter.field("elems", Lite(&self.value.elems));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatStruct> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatStruct");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&self.value.path));
+ if !self.value.fields.is_empty() {
+ formatter.field("fields", Lite(&self.value.fields));
+ }
+ if let Some(val) = &self.value.rest {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::PatRest);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("rest", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatTuple> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatTuple");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if !self.value.elems.is_empty() {
+ formatter.field("elems", Lite(&self.value.elems));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatTupleStruct> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatTupleStruct");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&self.value.path));
+ if !self.value.elems.is_empty() {
+ formatter.field("elems", Lite(&self.value.elems));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatType");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("pat", Lite(&self.value.pat));
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PatWild> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PatWild");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Path> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Path");
+ if self.value.leading_colon.is_some() {
+ formatter.field("leading_colon", &Present);
+ }
+ if !self.value.segments.is_empty() {
+ formatter.field("segments", Lite(&self.value.segments));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PathArguments> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::PathArguments::None => formatter.write_str("PathArguments::None"),
+ syn::PathArguments::AngleBracketed(_val) => {
+ let mut formatter = formatter
+ .debug_struct("PathArguments::AngleBracketed");
+ if _val.colon2_token.is_some() {
+ formatter.field("colon2_token", &Present);
+ }
+ if !_val.args.is_empty() {
+ formatter.field("args", Lite(&_val.args));
+ }
+ formatter.finish()
+ }
+ syn::PathArguments::Parenthesized(_val) => {
+ let mut formatter = formatter
+ .debug_struct("PathArguments::Parenthesized");
+ if !_val.inputs.is_empty() {
+ formatter.field("inputs", Lite(&_val.inputs));
+ }
+ formatter.field("output", Lite(&_val.output));
+ formatter.finish()
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::PathSegment> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PathSegment");
+ formatter.field("ident", Lite(&self.value.ident));
+ match self.value.arguments {
+ syn::PathArguments::None => {}
+ _ => {
+ formatter.field("arguments", Lite(&self.value.arguments));
+ }
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PredicateLifetime> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PredicateLifetime");
+ formatter.field("lifetime", Lite(&self.value.lifetime));
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::PredicateType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("PredicateType");
+ if let Some(val) = &self.value.lifetimes {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BoundLifetimes);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetimes", Print::ref_cast(val));
+ }
+ formatter.field("bounded_ty", Lite(&self.value.bounded_ty));
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::QSelf> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("QSelf");
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.field("position", Lite(&self.value.position));
+ if self.value.as_token.is_some() {
+ formatter.field("as_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::RangeLimits> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::RangeLimits::HalfOpen(_val) => {
+ formatter.write_str("RangeLimits::HalfOpen")?;
+ Ok(())
+ }
+ syn::RangeLimits::Closed(_val) => {
+ formatter.write_str("RangeLimits::Closed")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::Receiver> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Receiver");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.reference {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::And, Option<syn::Lifetime>));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(
+ {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Option<syn::Lifetime>);
+ impl Debug for Print {
+ fn fmt(
+ &self,
+ formatter: &mut fmt::Formatter,
+ ) -> fmt::Result {
+ match &self.0 {
+ Some(_val) => {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ None => formatter.write_str("None"),
+ }
+ }
+ }
+ Print::ref_cast(&self.0.1)
+ },
+ formatter,
+ )?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("reference", Print::ref_cast(val));
+ }
+ if self.value.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ formatter.field("ty", Lite(&self.value.ty));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::ReturnType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::ReturnType::Default => formatter.write_str("ReturnType::Default"),
+ syn::ReturnType::Type(_v0, _v1) => {
+ let mut formatter = formatter.debug_tuple("ReturnType::Type");
+ formatter.field(Lite(_v1));
+ formatter.finish()
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::Signature> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Signature");
+ if self.value.constness.is_some() {
+ formatter.field("constness", &Present);
+ }
+ if self.value.asyncness.is_some() {
+ formatter.field("asyncness", &Present);
+ }
+ if self.value.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ if let Some(val) = &self.value.abi {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Abi);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("abi", Print::ref_cast(val));
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ if !self.value.inputs.is_empty() {
+ formatter.field("inputs", Lite(&self.value.inputs));
+ }
+ if let Some(val) = &self.value.variadic {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Variadic);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("variadic", Print::ref_cast(val));
+ }
+ formatter.field("output", Lite(&self.value.output));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::StaticMutability> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::StaticMutability::Mut(_val) => {
+ formatter.write_str("StaticMutability::Mut")?;
+ Ok(())
+ }
+ syn::StaticMutability::None => formatter.write_str("StaticMutability::None"),
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::Stmt> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Stmt::Local(_val) => {
+ let mut formatter = formatter.debug_struct("Stmt::Local");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("pat", Lite(&_val.pat));
+ if let Some(val) = &_val.init {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::LocalInit);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("init", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::Stmt::Item(_val) => {
+ formatter.write_str("Stmt::Item")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::Stmt::Expr(_v0, _v1) => {
+ let mut formatter = formatter.debug_tuple("Stmt::Expr");
+ formatter.field(Lite(_v0));
+ formatter
+ .field(
+ &super::Option {
+ present: _v1.is_some(),
+ },
+ );
+ formatter.finish()
+ }
+ syn::Stmt::Macro(_val) => {
+ let mut formatter = formatter.debug_struct("Stmt::Macro");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("mac", Lite(&_val.mac));
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::StmtMacro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("StmtMacro");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("mac", Lite(&self.value.mac));
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TraitBound> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TraitBound");
+ if self.value.paren_token.is_some() {
+ formatter.field("paren_token", &Present);
+ }
+ match self.value.modifier {
+ syn::TraitBoundModifier::None => {}
+ _ => {
+ formatter.field("modifier", Lite(&self.value.modifier));
+ }
+ }
+ if let Some(val) = &self.value.lifetimes {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BoundLifetimes);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetimes", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&self.value.path));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TraitBoundModifier> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::TraitBoundModifier::None => {
+ formatter.write_str("TraitBoundModifier::None")
+ }
+ syn::TraitBoundModifier::Maybe(_val) => {
+ formatter.write_str("TraitBoundModifier::Maybe")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::TraitItem> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::TraitItem::Const(_val) => {
+ let mut formatter = formatter.debug_struct("TraitItem::Const");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ formatter.field("ty", Lite(&_val.ty));
+ if let Some(val) = &_val.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Eq, syn::Expr));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::TraitItem::Fn(_val) => {
+ let mut formatter = formatter.debug_struct("TraitItem::Fn");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("sig", Lite(&_val.sig));
+ if let Some(val) = &_val.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Block);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ syn::TraitItem::Type(_val) => {
+ let mut formatter = formatter.debug_struct("TraitItem::Type");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.field("generics", Lite(&_val.generics));
+ if _val.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ if !_val.bounds.is_empty() {
+ formatter.field("bounds", Lite(&_val.bounds));
+ }
+ if let Some(val) = &_val.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Eq, syn::Type));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::TraitItem::Macro(_val) => {
+ let mut formatter = formatter.debug_struct("TraitItem::Macro");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("mac", Lite(&_val.mac));
+ if _val.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+ syn::TraitItem::Verbatim(_val) => {
+ formatter.write_str("TraitItem::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::TraitItemConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TraitItemConst");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ formatter.field("ty", Lite(&self.value.ty));
+ if let Some(val) = &self.value.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Eq, syn::Expr));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TraitItemFn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TraitItemFn");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("sig", Lite(&self.value.sig));
+ if let Some(val) = &self.value.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Block);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TraitItemMacro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TraitItemMacro");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("mac", Lite(&self.value.mac));
+ if self.value.semi_token.is_some() {
+ formatter.field("semi_token", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TraitItemType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TraitItemType");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("generics", Lite(&self.value.generics));
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ if let Some(val) = &self.value.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Eq, syn::Type));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Type> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Type::Array(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Array");
+ formatter.field("elem", Lite(&_val.elem));
+ formatter.field("len", Lite(&_val.len));
+ formatter.finish()
+ }
+ syn::Type::BareFn(_val) => {
+ let mut formatter = formatter.debug_struct("Type::BareFn");
+ if let Some(val) = &_val.lifetimes {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BoundLifetimes);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetimes", Print::ref_cast(val));
+ }
+ if _val.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ if let Some(val) = &_val.abi {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Abi);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("abi", Print::ref_cast(val));
+ }
+ if !_val.inputs.is_empty() {
+ formatter.field("inputs", Lite(&_val.inputs));
+ }
+ if let Some(val) = &_val.variadic {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BareVariadic);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("variadic", Print::ref_cast(val));
+ }
+ formatter.field("output", Lite(&_val.output));
+ formatter.finish()
+ }
+ syn::Type::Group(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Group");
+ formatter.field("elem", Lite(&_val.elem));
+ formatter.finish()
+ }
+ syn::Type::ImplTrait(_val) => {
+ let mut formatter = formatter.debug_struct("Type::ImplTrait");
+ if !_val.bounds.is_empty() {
+ formatter.field("bounds", Lite(&_val.bounds));
+ }
+ formatter.finish()
+ }
+ syn::Type::Infer(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Infer");
+ formatter.finish()
+ }
+ syn::Type::Macro(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Macro");
+ formatter.field("mac", Lite(&_val.mac));
+ formatter.finish()
+ }
+ syn::Type::Never(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Never");
+ formatter.finish()
+ }
+ syn::Type::Paren(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Paren");
+ formatter.field("elem", Lite(&_val.elem));
+ formatter.finish()
+ }
+ syn::Type::Path(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Path");
+ if let Some(val) = &_val.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&_val.path));
+ formatter.finish()
+ }
+ syn::Type::Ptr(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Ptr");
+ if _val.const_token.is_some() {
+ formatter.field("const_token", &Present);
+ }
+ if _val.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("elem", Lite(&_val.elem));
+ formatter.finish()
+ }
+ syn::Type::Reference(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Reference");
+ if let Some(val) = &_val.lifetime {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Lifetime);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetime", Print::ref_cast(val));
+ }
+ if _val.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("elem", Lite(&_val.elem));
+ formatter.finish()
+ }
+ syn::Type::Slice(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Slice");
+ formatter.field("elem", Lite(&_val.elem));
+ formatter.finish()
+ }
+ syn::Type::TraitObject(_val) => {
+ let mut formatter = formatter.debug_struct("Type::TraitObject");
+ if _val.dyn_token.is_some() {
+ formatter.field("dyn_token", &Present);
+ }
+ if !_val.bounds.is_empty() {
+ formatter.field("bounds", Lite(&_val.bounds));
+ }
+ formatter.finish()
+ }
+ syn::Type::Tuple(_val) => {
+ let mut formatter = formatter.debug_struct("Type::Tuple");
+ if !_val.elems.is_empty() {
+ formatter.field("elems", Lite(&_val.elems));
+ }
+ formatter.finish()
+ }
+ syn::Type::Verbatim(_val) => {
+ formatter.write_str("Type::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::TypeArray> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeArray");
+ formatter.field("elem", Lite(&self.value.elem));
+ formatter.field("len", Lite(&self.value.len));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeBareFn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeBareFn");
+ if let Some(val) = &self.value.lifetimes {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BoundLifetimes);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetimes", Print::ref_cast(val));
+ }
+ if self.value.unsafety.is_some() {
+ formatter.field("unsafety", &Present);
+ }
+ if let Some(val) = &self.value.abi {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Abi);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("abi", Print::ref_cast(val));
+ }
+ if !self.value.inputs.is_empty() {
+ formatter.field("inputs", Lite(&self.value.inputs));
+ }
+ if let Some(val) = &self.value.variadic {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::BareVariadic);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("variadic", Print::ref_cast(val));
+ }
+ formatter.field("output", Lite(&self.value.output));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeGroup> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeGroup");
+ formatter.field("elem", Lite(&self.value.elem));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeImplTrait> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeImplTrait");
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeInfer> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeInfer");
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeMacro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeMacro");
+ formatter.field("mac", Lite(&self.value.mac));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeNever> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeNever");
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeParam> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeParam");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ if self.value.colon_token.is_some() {
+ formatter.field("colon_token", &Present);
+ }
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ if self.value.eq_token.is_some() {
+ formatter.field("eq_token", &Present);
+ }
+ if let Some(val) = &self.value.default {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Type);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("default", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeParamBound> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::TypeParamBound::Trait(_val) => {
+ formatter.write_str("TypeParamBound::Trait")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::TypeParamBound::Lifetime(_val) => {
+ let mut formatter = formatter.debug_struct("TypeParamBound::Lifetime");
+ formatter.field("ident", Lite(&_val.ident));
+ formatter.finish()
+ }
+ syn::TypeParamBound::Verbatim(_val) => {
+ formatter.write_str("TypeParamBound::Verbatim")?;
+ formatter.write_str("(`")?;
+ Display::fmt(_val, formatter)?;
+ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::TypeParen> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeParen");
+ formatter.field("elem", Lite(&self.value.elem));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypePath> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypePath");
+ if let Some(val) = &self.value.qself {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::QSelf);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("qself", Print::ref_cast(val));
+ }
+ formatter.field("path", Lite(&self.value.path));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypePtr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypePtr");
+ if self.value.const_token.is_some() {
+ formatter.field("const_token", &Present);
+ }
+ if self.value.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("elem", Lite(&self.value.elem));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeReference> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeReference");
+ if let Some(val) = &self.value.lifetime {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::Lifetime);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("lifetime", Print::ref_cast(val));
+ }
+ if self.value.mutability.is_some() {
+ formatter.field("mutability", &Present);
+ }
+ formatter.field("elem", Lite(&self.value.elem));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeSlice> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeSlice");
+ formatter.field("elem", Lite(&self.value.elem));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeTraitObject> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeTraitObject");
+ if self.value.dyn_token.is_some() {
+ formatter.field("dyn_token", &Present);
+ }
+ if !self.value.bounds.is_empty() {
+ formatter.field("bounds", Lite(&self.value.bounds));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::TypeTuple> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("TypeTuple");
+ if !self.value.elems.is_empty() {
+ formatter.field("elems", Lite(&self.value.elems));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::UnOp> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::UnOp::Deref(_val) => {
+ formatter.write_str("UnOp::Deref")?;
+ Ok(())
+ }
+ syn::UnOp::Not(_val) => {
+ formatter.write_str("UnOp::Not")?;
+ Ok(())
+ }
+ syn::UnOp::Neg(_val) => {
+ formatter.write_str("UnOp::Neg")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::UseGlob> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseGlob");
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::UseGroup> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseGroup");
+ if !self.value.items.is_empty() {
+ formatter.field("items", Lite(&self.value.items));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::UseName> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseName");
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::UsePath> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UsePath");
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("tree", Lite(&self.value.tree));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::UseRename> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("UseRename");
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("rename", Lite(&self.value.rename));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::UseTree> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::UseTree::Path(_val) => {
+ formatter.write_str("UseTree::Path")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::UseTree::Name(_val) => {
+ formatter.write_str("UseTree::Name")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::UseTree::Rename(_val) => {
+ formatter.write_str("UseTree::Rename")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::UseTree::Glob(_val) => {
+ formatter.write_str("UseTree::Glob")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::UseTree::Group(_val) => {
+ formatter.write_str("UseTree::Group")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ }
+}
+impl Debug for Lite<syn::Variadic> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Variadic");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ if let Some(val) = &self.value.pat {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((Box<syn::Pat>, syn::token::Colon));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.0), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("pat", Print::ref_cast(val));
+ }
+ if self.value.comma.is_some() {
+ formatter.field("comma", &Present);
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Variant> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("Variant");
+ if !self.value.attrs.is_empty() {
+ formatter.field("attrs", Lite(&self.value.attrs));
+ }
+ formatter.field("ident", Lite(&self.value.ident));
+ formatter.field("fields", Lite(&self.value.fields));
+ if let Some(val) = &self.value.discriminant {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print((syn::token::Eq, syn::Expr));
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some(")?;
+ Debug::fmt(Lite(&self.0.1), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ }
+ formatter.field("discriminant", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::VisRestricted> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("VisRestricted");
+ if self.value.in_token.is_some() {
+ formatter.field("in_token", &Present);
+ }
+ formatter.field("path", Lite(&self.value.path));
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::Visibility> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::Visibility::Public(_val) => {
+ formatter.write_str("Visibility::Public")?;
+ Ok(())
+ }
+ syn::Visibility::Restricted(_val) => {
+ let mut formatter = formatter.debug_struct("Visibility::Restricted");
+ if _val.in_token.is_some() {
+ formatter.field("in_token", &Present);
+ }
+ formatter.field("path", Lite(&_val.path));
+ formatter.finish()
+ }
+ syn::Visibility::Inherited => formatter.write_str("Visibility::Inherited"),
+ }
+ }
+}
+impl Debug for Lite<syn::WhereClause> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut formatter = formatter.debug_struct("WhereClause");
+ if !self.value.predicates.is_empty() {
+ formatter.field("predicates", Lite(&self.value.predicates));
+ }
+ formatter.finish()
+ }
+}
+impl Debug for Lite<syn::WherePredicate> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.value {
+ syn::WherePredicate::Lifetime(_val) => {
+ formatter.write_str("WherePredicate::Lifetime")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ syn::WherePredicate::Type(_val) => {
+ formatter.write_str("WherePredicate::Type")?;
+ formatter.write_str("(")?;
+ Debug::fmt(Lite(_val), formatter)?;
+ formatter.write_str(")")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+impl Debug for Lite<syn::token::Abstract> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![abstract]")
+ }
+}
+impl Debug for Lite<syn::token::And> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![&]")
+ }
+}
+impl Debug for Lite<syn::token::AndAnd> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![&&]")
+ }
+}
+impl Debug for Lite<syn::token::AndEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![&=]")
+ }
+}
+impl Debug for Lite<syn::token::As> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![as]")
+ }
+}
+impl Debug for Lite<syn::token::Async> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![async]")
+ }
+}
+impl Debug for Lite<syn::token::At> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![@]")
+ }
+}
+impl Debug for Lite<syn::token::Auto> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![auto]")
+ }
+}
+impl Debug for Lite<syn::token::Await> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![await]")
+ }
+}
+impl Debug for Lite<syn::token::Become> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![become]")
+ }
+}
+impl Debug for Lite<syn::token::Box> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![box]")
+ }
+}
+impl Debug for Lite<syn::token::Break> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![break]")
+ }
+}
+impl Debug for Lite<syn::token::Caret> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![^]")
+ }
+}
+impl Debug for Lite<syn::token::CaretEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![^=]")
+ }
+}
+impl Debug for Lite<syn::token::Colon> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![:]")
+ }
+}
+impl Debug for Lite<syn::token::Comma> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![,]")
+ }
+}
+impl Debug for Lite<syn::token::Const> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![const]")
+ }
+}
+impl Debug for Lite<syn::token::Continue> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![continue]")
+ }
+}
+impl Debug for Lite<syn::token::Crate> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![crate]")
+ }
+}
+impl Debug for Lite<syn::token::Default> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![default]")
+ }
+}
+impl Debug for Lite<syn::token::Do> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![do]")
+ }
+}
+impl Debug for Lite<syn::token::Dollar> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![$]")
+ }
+}
+impl Debug for Lite<syn::token::Dot> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![.]")
+ }
+}
+impl Debug for Lite<syn::token::DotDot> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![..]")
+ }
+}
+impl Debug for Lite<syn::token::DotDotDot> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![...]")
+ }
+}
+impl Debug for Lite<syn::token::DotDotEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![..=]")
+ }
+}
+impl Debug for Lite<syn::token::Dyn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![dyn]")
+ }
+}
+impl Debug for Lite<syn::token::Else> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![else]")
+ }
+}
+impl Debug for Lite<syn::token::Enum> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![enum]")
+ }
+}
+impl Debug for Lite<syn::token::Eq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![=]")
+ }
+}
+impl Debug for Lite<syn::token::EqEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![==]")
+ }
+}
+impl Debug for Lite<syn::token::Extern> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![extern]")
+ }
+}
+impl Debug for Lite<syn::token::FatArrow> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![=>]")
+ }
+}
+impl Debug for Lite<syn::token::Final> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![final]")
+ }
+}
+impl Debug for Lite<syn::token::Fn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![fn]")
+ }
+}
+impl Debug for Lite<syn::token::For> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![for]")
+ }
+}
+impl Debug for Lite<syn::token::Ge> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![>=]")
+ }
+}
+impl Debug for Lite<syn::token::Gt> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![>]")
+ }
+}
+impl Debug for Lite<syn::token::If> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![if]")
+ }
+}
+impl Debug for Lite<syn::token::Impl> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![impl]")
+ }
+}
+impl Debug for Lite<syn::token::In> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![in]")
+ }
+}
+impl Debug for Lite<syn::token::LArrow> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![<-]")
+ }
+}
+impl Debug for Lite<syn::token::Le> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![<=]")
+ }
+}
+impl Debug for Lite<syn::token::Let> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![let]")
+ }
+}
+impl Debug for Lite<syn::token::Loop> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![loop]")
+ }
+}
+impl Debug for Lite<syn::token::Lt> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![<]")
+ }
+}
+impl Debug for Lite<syn::token::Macro> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![macro]")
+ }
+}
+impl Debug for Lite<syn::token::Match> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![match]")
+ }
+}
+impl Debug for Lite<syn::token::Minus> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![-]")
+ }
+}
+impl Debug for Lite<syn::token::MinusEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![-=]")
+ }
+}
+impl Debug for Lite<syn::token::Mod> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![mod]")
+ }
+}
+impl Debug for Lite<syn::token::Move> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![move]")
+ }
+}
+impl Debug for Lite<syn::token::Mut> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![mut]")
+ }
+}
+impl Debug for Lite<syn::token::Ne> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![!=]")
+ }
+}
+impl Debug for Lite<syn::token::Not> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![!]")
+ }
+}
+impl Debug for Lite<syn::token::Or> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![|]")
+ }
+}
+impl Debug for Lite<syn::token::OrEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![|=]")
+ }
+}
+impl Debug for Lite<syn::token::OrOr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![||]")
+ }
+}
+impl Debug for Lite<syn::token::Override> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![override]")
+ }
+}
+impl Debug for Lite<syn::token::PathSep> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![::]")
+ }
+}
+impl Debug for Lite<syn::token::Percent> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![%]")
+ }
+}
+impl Debug for Lite<syn::token::PercentEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![%=]")
+ }
+}
+impl Debug for Lite<syn::token::Plus> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![+]")
+ }
+}
+impl Debug for Lite<syn::token::PlusEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![+=]")
+ }
+}
+impl Debug for Lite<syn::token::Pound> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![#]")
+ }
+}
+impl Debug for Lite<syn::token::Priv> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![priv]")
+ }
+}
+impl Debug for Lite<syn::token::Pub> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![pub]")
+ }
+}
+impl Debug for Lite<syn::token::Question> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![?]")
+ }
+}
+impl Debug for Lite<syn::token::RArrow> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![->]")
+ }
+}
+impl Debug for Lite<syn::token::Ref> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![ref]")
+ }
+}
+impl Debug for Lite<syn::token::Return> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![return]")
+ }
+}
+impl Debug for Lite<syn::token::SelfType> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![Self]")
+ }
+}
+impl Debug for Lite<syn::token::SelfValue> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![self]")
+ }
+}
+impl Debug for Lite<syn::token::Semi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![;]")
+ }
+}
+impl Debug for Lite<syn::token::Shl> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![<<]")
+ }
+}
+impl Debug for Lite<syn::token::ShlEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![<<=]")
+ }
+}
+impl Debug for Lite<syn::token::Shr> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![>>]")
+ }
+}
+impl Debug for Lite<syn::token::ShrEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![>>=]")
+ }
+}
+impl Debug for Lite<syn::token::Slash> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![/]")
+ }
+}
+impl Debug for Lite<syn::token::SlashEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![/=]")
+ }
+}
+impl Debug for Lite<syn::token::Star> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![*]")
+ }
+}
+impl Debug for Lite<syn::token::StarEq> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![*=]")
+ }
+}
+impl Debug for Lite<syn::token::Static> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![static]")
+ }
+}
+impl Debug for Lite<syn::token::Struct> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![struct]")
+ }
+}
+impl Debug for Lite<syn::token::Super> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![super]")
+ }
+}
+impl Debug for Lite<syn::token::Tilde> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![~]")
+ }
+}
+impl Debug for Lite<syn::token::Trait> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![trait]")
+ }
+}
+impl Debug for Lite<syn::token::Try> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![try]")
+ }
+}
+impl Debug for Lite<syn::token::Type> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![type]")
+ }
+}
+impl Debug for Lite<syn::token::Typeof> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![typeof]")
+ }
+}
+impl Debug for Lite<syn::token::Underscore> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![_]")
+ }
+}
+impl Debug for Lite<syn::token::Union> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![union]")
+ }
+}
+impl Debug for Lite<syn::token::Unsafe> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![unsafe]")
+ }
+}
+impl Debug for Lite<syn::token::Unsized> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![unsized]")
+ }
+}
+impl Debug for Lite<syn::token::Use> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![use]")
+ }
+}
+impl Debug for Lite<syn::token::Virtual> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![virtual]")
+ }
+}
+impl Debug for Lite<syn::token::Where> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![where]")
+ }
+}
+impl Debug for Lite<syn::token::While> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![while]")
+ }
+}
+impl Debug for Lite<syn::token::Yield> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Token![yield]")
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/debug/mod.rs b/rust/hw/char/pl011/vendor/syn/tests/debug/mod.rs
new file mode 100644
index 0000000000..c9925a6d5c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/debug/mod.rs
@@ -0,0 +1,147 @@
+#![allow(
+ clippy::no_effect_underscore_binding,
+ clippy::too_many_lines,
+ clippy::used_underscore_binding
+)]
+
+#[rustfmt::skip]
+mod gen;
+
+use proc_macro2::{Ident, Literal, TokenStream};
+use ref_cast::RefCast;
+use std::fmt::{self, Debug};
+use std::ops::Deref;
+use syn::punctuated::Punctuated;
+
+#[derive(RefCast)]
+#[repr(transparent)]
+pub struct Lite<T: ?Sized> {
+ value: T,
+}
+
+#[allow(non_snake_case)]
+pub fn Lite<T: ?Sized>(value: &T) -> &Lite<T> {
+ Lite::ref_cast(value)
+}
+
+impl<T: ?Sized> Deref for Lite<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.value
+ }
+}
+
+impl Debug for Lite<bool> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{}", self.value)
+ }
+}
+
+impl Debug for Lite<u32> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{}", self.value)
+ }
+}
+
+impl Debug for Lite<usize> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{}", self.value)
+ }
+}
+
+impl Debug for Lite<String> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{:?}", self.value)
+ }
+}
+
+impl Debug for Lite<Ident> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{:?}", self.value.to_string())
+ }
+}
+
+impl Debug for Lite<Literal> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{}", self.value)
+ }
+}
+
+impl Debug for Lite<TokenStream> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let string = self.value.to_string();
+ if string.len() <= 80 {
+ write!(formatter, "TokenStream(`{}`)", self.value)
+ } else {
+ formatter
+ .debug_tuple("TokenStream")
+ .field(&format_args!("`{}`", string))
+ .finish()
+ }
+ }
+}
+
+impl<'a, T> Debug for Lite<&'a T>
+where
+ Lite<T>: Debug,
+{
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(Lite(self.value), formatter)
+ }
+}
+
+impl<T> Debug for Lite<Box<T>>
+where
+ Lite<T>: Debug,
+{
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(Lite(&*self.value), formatter)
+ }
+}
+
+impl<T> Debug for Lite<Vec<T>>
+where
+ Lite<T>: Debug,
+{
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_list()
+ .entries(self.value.iter().map(Lite))
+ .finish()
+ }
+}
+
+impl<T, P> Debug for Lite<Punctuated<T, P>>
+where
+ Lite<T>: Debug,
+ Lite<P>: Debug,
+{
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let mut list = formatter.debug_list();
+ for pair in self.pairs() {
+ let (node, punct) = pair.into_tuple();
+ list.entry(Lite(node));
+ list.entries(punct.map(Lite));
+ }
+ list.finish()
+ }
+}
+
+struct Present;
+
+impl Debug for Present {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some")
+ }
+}
+
+struct Option {
+ present: bool,
+}
+
+impl Debug for Option {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str(if self.present { "Some" } else { "None" })
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/macros/mod.rs b/rust/hw/char/pl011/vendor/syn/tests/macros/mod.rs
new file mode 100644
index 0000000000..1c1bacf459
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/macros/mod.rs
@@ -0,0 +1,93 @@
+#![allow(unused_macros, unused_macro_rules)]
+
+#[path = "../debug/mod.rs"]
+pub mod debug;
+
+use std::str::FromStr;
+use syn::parse::Result;
+
+macro_rules! errorf {
+ ($($tt:tt)*) => {{
+ use ::std::io::Write;
+ let stderr = ::std::io::stderr();
+ write!(stderr.lock(), $($tt)*).unwrap();
+ }};
+}
+
+macro_rules! punctuated {
+ ($($e:expr,)+) => {{
+ let mut seq = ::syn::punctuated::Punctuated::new();
+ $(
+ seq.push($e);
+ )+
+ seq
+ }};
+
+ ($($e:expr),+) => {
+ punctuated!($($e,)+)
+ };
+}
+
+macro_rules! snapshot {
+ ($($args:tt)*) => {
+ snapshot_impl!(() $($args)*)
+ };
+}
+
+macro_rules! snapshot_impl {
+ (($expr:ident) as $t:ty, @$snapshot:literal) => {
+ let tokens = crate::macros::TryIntoTokens::try_into_tokens($expr).unwrap();
+ let $expr: $t = syn::parse_quote!(#tokens);
+ let debug = crate::macros::debug::Lite(&$expr);
+ if !cfg!(miri) {
+ #[allow(clippy::needless_raw_string_hashes)] // https://github.com/mitsuhiko/insta/issues/389
+ {
+ insta::assert_debug_snapshot!(debug, @$snapshot);
+ }
+ }
+ };
+ (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
+ let tokens = crate::macros::TryIntoTokens::try_into_tokens($($expr)*).unwrap();
+ let syntax_tree: $t = syn::parse_quote!(#tokens);
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+ if !cfg!(miri) {
+ #[allow(clippy::needless_raw_string_hashes)]
+ {
+ insta::assert_debug_snapshot!(debug, @$snapshot);
+ }
+ }
+ syntax_tree
+ }};
+ (($($expr:tt)*) , @$snapshot:literal) => {{
+ let syntax_tree = $($expr)*;
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+ if !cfg!(miri) {
+ #[allow(clippy::needless_raw_string_hashes)]
+ {
+ insta::assert_debug_snapshot!(debug, @$snapshot);
+ }
+ }
+ syntax_tree
+ }};
+ (($($expr:tt)*) $next:tt $($rest:tt)*) => {
+ snapshot_impl!(($($expr)* $next) $($rest)*)
+ };
+}
+
+pub trait TryIntoTokens {
+ #[allow(dead_code)]
+ fn try_into_tokens(self) -> Result<proc_macro2::TokenStream>;
+}
+
+impl<'a> TryIntoTokens for &'a str {
+ fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
+ let tokens = proc_macro2::TokenStream::from_str(self)?;
+ Ok(tokens)
+ }
+}
+
+impl TryIntoTokens for proc_macro2::TokenStream {
+ fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
+ Ok(self)
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/regression.rs b/rust/hw/char/pl011/vendor/syn/tests/regression.rs
new file mode 100644
index 0000000000..5c7fcddc8d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/regression.rs
@@ -0,0 +1,5 @@
+#![allow(clippy::let_underscore_untyped, clippy::uninlined_format_args)]
+
+mod regression {
+ automod::dir!("tests/regression");
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/regression/issue1108.rs b/rust/hw/char/pl011/vendor/syn/tests/regression/issue1108.rs
new file mode 100644
index 0000000000..11a82adaad
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/regression/issue1108.rs
@@ -0,0 +1,5 @@
+#[test]
+fn issue1108() {
+ let data = "impl<x<>>::x for";
+ let _ = syn::parse_file(data);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/regression/issue1235.rs b/rust/hw/char/pl011/vendor/syn/tests/regression/issue1235.rs
new file mode 100644
index 0000000000..8836030664
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/regression/issue1235.rs
@@ -0,0 +1,32 @@
+use proc_macro2::{Delimiter, Group};
+use quote::quote;
+
+#[test]
+fn main() {
+ // Okay. Rustc allows top-level `static` with no value syntactically, but
+ // not semantically. Syn parses as Item::Verbatim.
+ let tokens = quote! {
+ pub static FOO: usize;
+ pub static BAR: usize;
+ };
+ let file = syn::parse2::<syn::File>(tokens).unwrap();
+ println!("{:#?}", file);
+
+ // Okay.
+ let inner = Group::new(
+ Delimiter::None,
+ quote!(static FOO: usize = 0; pub static BAR: usize = 0),
+ );
+ let tokens = quote!(pub #inner;);
+ let file = syn::parse2::<syn::File>(tokens).unwrap();
+ println!("{:#?}", file);
+
+ // Formerly parser crash.
+ let inner = Group::new(
+ Delimiter::None,
+ quote!(static FOO: usize; pub static BAR: usize),
+ );
+ let tokens = quote!(pub #inner;);
+ let file = syn::parse2::<syn::File>(tokens).unwrap();
+ println!("{:#?}", file);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/repo/mod.rs b/rust/hw/char/pl011/vendor/syn/tests/repo/mod.rs
new file mode 100644
index 0000000000..c8400288d9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/repo/mod.rs
@@ -0,0 +1,461 @@
+#![allow(clippy::manual_assert)]
+
+mod progress;
+
+use self::progress::Progress;
+use anyhow::Result;
+use flate2::read::GzDecoder;
+use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
+use std::collections::BTreeSet;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Path, PathBuf};
+use tar::Archive;
+use walkdir::{DirEntry, WalkDir};
+
+const REVISION: &str = "becebb3158149a115cad8a402612e25436a7e37b";
+
+#[rustfmt::skip]
+static EXCLUDE_FILES: &[&str] = &[
+ // TODO: explicit tail calls: `become _g()`
+ // https://github.com/dtolnay/syn/issues/1501
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs",
+ "tests/ui/explicit-tail-calls/return-lifetime-sub.rs",
+
+ // TODO: non-lifetime binders: `where for<'a, T> &'a Struct<T>: Trait`
+ // https://github.com/dtolnay/syn/issues/1435
+ "src/tools/rustfmt/tests/source/issue_5721.rs",
+ "src/tools/rustfmt/tests/source/non-lifetime-binders.rs",
+ "src/tools/rustfmt/tests/target/issue_5721.rs",
+ "src/tools/rustfmt/tests/target/non-lifetime-binders.rs",
+ "tests/rustdoc-json/non_lifetime_binders.rs",
+ "tests/rustdoc/inline_cross/auxiliary/non_lifetime_binders.rs",
+ "tests/rustdoc/non_lifetime_binders.rs",
+
+ // TODO: return type notation: `where T: Trait<method(): Send>`
+ // https://github.com/dtolnay/syn/issues/1434
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs",
+ "tests/ui/associated-type-bounds/return-type-notation/basic.rs",
+ "tests/ui/associated-type-bounds/return-type-notation/unpretty-parenthesized.rs",
+ "tests/ui/feature-gates/feature-gate-return_type_notation.rs",
+
+ // TODO: lazy type alias syntax with where-clause in trailing position
+ // https://github.com/dtolnay/syn/issues/1525
+ "tests/rustdoc/typedef-inner-variants-lazy_type_alias.rs",
+
+ // TODO: gen blocks and functions
+ // https://github.com/dtolnay/syn/issues/1526
+ "compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs",
+ "tests/ui/coroutine/async-gen-deduce-yield.rs",
+ "tests/ui/coroutine/async-gen-yield-ty-is-unit.rs",
+ "tests/ui/coroutine/async_gen_fn_iter.rs",
+ "tests/ui/coroutine/gen_block_is_fused_iter.rs",
+ "tests/ui/coroutine/gen_block_is_iter.rs",
+ "tests/ui/coroutine/gen_block_iterate.rs",
+ "tests/ui/coroutine/gen_fn_iter.rs",
+ "tests/ui/coroutine/gen_fn_lifetime_capture.rs",
+ "tests/ui/coroutine/return-types-diverge.rs",
+ "tests/ui/higher-ranked/builtin-closure-like-bounds.rs",
+ "tests/ui/sanitizer/cfi-coroutine.rs",
+
+ // TODO: `!` as a pattern
+ // https://github.com/dtolnay/syn/issues/1546
+ "tests/ui/rfcs/rfc-0000-never_patterns/diverges.rs",
+
+ // TODO: async trait bounds: `impl async Fn()`
+ // https://github.com/dtolnay/syn/issues/1628
+ "src/tools/miri/tests/pass/async-closure-captures.rs",
+ "src/tools/miri/tests/pass/async-closure-drop.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs",
+ "src/tools/rustfmt/tests/target/asyncness.rs",
+ "tests/codegen/async-closure-debug.rs",
+ "tests/ui/async-await/async-closures/async-fn-mut-for-async-fn.rs",
+ "tests/ui/async-await/async-closures/async-fn-once-for-async-fn.rs",
+ "tests/ui/async-await/async-closures/auxiliary/foreign.rs",
+ "tests/ui/async-await/async-closures/brand.rs",
+ "tests/ui/async-await/async-closures/captures.rs",
+ "tests/ui/async-await/async-closures/constrained-but-no-upvars-yet.rs",
+ "tests/ui/async-await/async-closures/drop.rs",
+ "tests/ui/async-await/async-closures/mangle.rs",
+ "tests/ui/async-await/async-closures/moro-example.rs",
+ "tests/ui/async-await/async-closures/move-is-async-fn.rs",
+ "tests/ui/async-await/async-closures/mut-ref-reborrow.rs",
+ "tests/ui/async-await/async-closures/no-borrow-from-env.rs",
+ "tests/ui/async-await/async-closures/overlapping-projs.rs",
+ "tests/ui/async-await/async-closures/precise-captures.rs",
+ "tests/ui/async-await/async-closures/refd.rs",
+ "tests/ui/async-await/async-closures/signature-deduction.rs",
+ "tests/ui/async-await/async-fn/edition-2015-not-async-bound.rs",
+ "tests/ui/async-await/async-fn/higher-ranked-async-fn.rs",
+ "tests/ui/async-await/async-fn/impl-trait.rs",
+ "tests/ui/async-await/async-fn/project.rs",
+ "tests/ui/async-await/async-fn/sugar.rs",
+
+ // TODO: mutable by-reference bindings (mut ref)
+ // https://github.com/dtolnay/syn/issues/1629
+ "src/tools/rustfmt/tests/source/mut_ref.rs",
+ "src/tools/rustfmt/tests/target/mut_ref.rs",
+ "tests/ui/mut/mut-ref.rs",
+
+ // TODO: postfix match
+ // https://github.com/dtolnay/syn/issues/1630
+ "src/tools/rustfmt/tests/source/postfix-match/pf-match.rs",
+ "src/tools/rustfmt/tests/target/postfix-match/pf-match.rs",
+ "tests/pretty/postfix-match.rs",
+ "tests/ui/match/postfix-match/no-unused-parens.rs",
+ "tests/ui/match/postfix-match/pf-match-chain.rs",
+ "tests/ui/match/postfix-match/postfix-match.rs",
+
+ // TODO: delegation
+ // https://github.com/dtolnay/syn/issues/1580
+ "tests/pretty/delegation.rs",
+ "tests/ui/delegation/explicit-paths-in-traits-pass.rs",
+ "tests/ui/delegation/explicit-paths-pass.rs",
+ "tests/ui/delegation/explicit-paths-signature-pass.rs",
+ "tests/ui/delegation/parse.rs",
+
+ // TODO: for await
+ // https://github.com/dtolnay/syn/issues/1631
+ "tests/ui/async-await/for-await-2015.rs",
+ "tests/ui/async-await/for-await-passthrough.rs",
+ "tests/ui/async-await/for-await.rs",
+
+ // TODO: const trait bound: `T: const Trait`
+ // https://github.com/dtolnay/syn/issues/1632
+ "tests/ui/generic-const-items/const-trait-impl.rs",
+ "tests/ui/rfcs/rfc-2632-const-trait-impl/const-fns-are-early-bound.rs",
+ "tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds.rs",
+ "tests/ui/rfcs/rfc-2632-const-trait-impl/effects/minicore.rs",
+
+ // TODO: `|| .. .method()`
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_closure_range_method_call.rs",
+ "src/tools/rustfmt/tests/source/issue-4808.rs",
+
+ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
+ "tests/ui/const-generics/early/closing-args-token.rs",
+ "tests/ui/const-generics/early/const-expression-parameter.rs",
+
+ // Compile-fail variadics in not the last position of a function parameter list
+ "tests/ui/parser/variadic-ffi-syntactic-pass.rs",
+
+ // Need at least one trait in impl Trait, no such type as impl 'static
+ "tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs",
+
+ // Negative polarity trait bound: `where T: !Copy`
+ "src/tools/rustfmt/tests/target/negative-bounds.rs",
+ "tests/ui/traits/negative-bounds/supertrait.rs",
+
+ // Lifetime bound inside for<>: `T: ~const ?for<'a: 'b> Trait<'a>`
+ "tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-syntax.rs",
+
+ // Const impl that is not a trait impl: `impl ~const T {}`
+ "tests/ui/rfcs/rfc-2632-const-trait-impl/syntax.rs",
+
+ // Lifetimes and types out of order in angle bracketed path arguments
+ "tests/ui/parser/constraints-before-generic-args-syntactic-pass.rs",
+
+ // Deprecated anonymous parameter syntax in traits
+ "src/tools/rustfmt/tests/source/trait.rs",
+ "src/tools/rustfmt/tests/target/trait.rs",
+ "tests/ui/issues/issue-13105.rs",
+ "tests/ui/issues/issue-13775.rs",
+ "tests/ui/issues/issue-34074.rs",
+ "tests/ui/proc-macro/trait-fn-args-2015.rs",
+
+ // Deprecated where-clause location
+ "src/tools/rustfmt/tests/source/issue_4257.rs",
+ "src/tools/rustfmt/tests/source/issue_4911.rs",
+ "src/tools/rustfmt/tests/target/issue_4257.rs",
+ "src/tools/rustfmt/tests/target/issue_4911.rs",
+ "tests/pretty/gat-bounds.rs",
+ "tests/rustdoc/generic-associated-types/gats.rs",
+
+ // Deprecated trait object syntax with parenthesized generic arguments and no dyn keyword
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rs",
+ "src/tools/rustfmt/tests/source/attrib.rs",
+ "src/tools/rustfmt/tests/source/closure.rs",
+ "src/tools/rustfmt/tests/source/existential_type.rs",
+ "src/tools/rustfmt/tests/source/fn-simple.rs",
+ "src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs",
+ "src/tools/rustfmt/tests/source/issue-4689/one.rs",
+ "src/tools/rustfmt/tests/source/issue-4689/two.rs",
+ "src/tools/rustfmt/tests/source/paths.rs",
+ "src/tools/rustfmt/tests/source/structs.rs",
+ "src/tools/rustfmt/tests/target/attrib.rs",
+ "src/tools/rustfmt/tests/target/closure.rs",
+ "src/tools/rustfmt/tests/target/existential_type.rs",
+ "src/tools/rustfmt/tests/target/fn-simple.rs",
+ "src/tools/rustfmt/tests/target/fn.rs",
+ "src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs",
+ "src/tools/rustfmt/tests/target/issue-4689/one.rs",
+ "src/tools/rustfmt/tests/target/issue-4689/two.rs",
+ "src/tools/rustfmt/tests/target/paths.rs",
+ "src/tools/rustfmt/tests/target/structs.rs",
+ "tests/codegen-units/item-collection/non-generic-closures.rs",
+ "tests/debuginfo/recursive-enum.rs",
+ "tests/pretty/closure-reform-pretty.rs",
+ "tests/run-make/reproducible-build-2/reproducible-build.rs",
+ "tests/run-make/reproducible-build/reproducible-build.rs",
+ "tests/ui/auxiliary/typeid-intrinsic-aux1.rs",
+ "tests/ui/auxiliary/typeid-intrinsic-aux2.rs",
+ "tests/ui/impl-trait/generic-with-implicit-hrtb-without-dyn.rs",
+ "tests/ui/lifetimes/auxiliary/lifetime_bound_will_change_warning_lib.rs",
+ "tests/ui/lifetimes/bare-trait-object-borrowck.rs",
+ "tests/ui/lifetimes/bare-trait-object.rs",
+ "tests/ui/parser/bounds-obj-parens.rs",
+
+ // Invalid unparenthesized range pattern inside slice pattern: `[1..]`
+ "tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs",
+
+ // Various extensions to Rust syntax made up by rust-analyzer
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs",
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs",
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs",
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs",
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs",
+
+ // Placeholder syntax for "throw expressions"
+ "compiler/rustc_errors/src/translation.rs",
+ "compiler/rustc_expand/src/module.rs",
+ "compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs",
+ "src/tools/clippy/tests/ui/needless_return.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs",
+ "tests/pretty/yeet-expr.rs",
+ "tests/ui/try-trait/yeet-for-option.rs",
+ "tests/ui/try-trait/yeet-for-result.rs",
+
+ // Edition 2015 code using identifiers that are now keywords
+ // TODO: some of these we should probably parse
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs",
+ "src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs",
+ "src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs",
+ "src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs",
+ "src/tools/rustfmt/tests/source/issue_1306.rs",
+ "src/tools/rustfmt/tests/source/try-conversion.rs",
+ "src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs",
+ "src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs",
+ "src/tools/rustfmt/tests/target/issue-1681.rs",
+ "src/tools/rustfmt/tests/target/issue_1306.rs",
+ "tests/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs",
+ "tests/ui/editions/edition-keywords-2015-2015.rs",
+ "tests/ui/editions/edition-keywords-2015-2018.rs",
+ "tests/ui/lint/lint_pre_expansion_extern_module_aux.rs",
+ "tests/ui/macros/macro-comma-support-rpass.rs",
+ "tests/ui/macros/try-macro.rs",
+ "tests/ui/parser/extern-crate-async.rs",
+ "tests/ui/try-block/try-is-identifier-edition2015.rs",
+
+ // Excessive nesting
+ "tests/ui/issues/issue-74564-if-expr-stack-overflow.rs",
+
+ // Testing tools on invalid syntax
+ "src/tools/rustfmt/tests/coverage/target/comments.rs",
+ "src/tools/rustfmt/tests/parser/issue-4126/invalid.rs",
+ "src/tools/rustfmt/tests/parser/issue_4418.rs",
+ "src/tools/rustfmt/tests/parser/stashed-diag.rs",
+ "src/tools/rustfmt/tests/parser/stashed-diag2.rs",
+ "src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs",
+ "src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs",
+ "src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs",
+ "src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs",
+ "src/tools/rustfmt/tests/source/type.rs",
+ "src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs",
+ "src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs",
+ "src/tools/rustfmt/tests/target/type.rs",
+ "tests/run-make/translation/test.rs",
+ "tests/ui/generics/issue-94432-garbage-ice.rs",
+
+ // Generated file containing a top-level expression, used with `include!`
+ "compiler/rustc_codegen_gcc/src/intrinsic/archs.rs",
+
+ // Clippy lint lists represented as expressions
+ "src/tools/clippy/clippy_lints/src/lib.deprecated.rs",
+
+ // Not actually test cases
+ "tests/ui/lint/expansion-time-include.rs",
+ "tests/ui/macros/auxiliary/macro-comma-support.rs",
+ "tests/ui/macros/auxiliary/macro-include-items-expr.rs",
+ "tests/ui/macros/include-single-expr-helper.rs",
+ "tests/ui/macros/include-single-expr-helper-1.rs",
+ "tests/ui/parser/issues/auxiliary/issue-21146-inc.rs",
+];
+
+#[rustfmt::skip]
+static EXCLUDE_DIRS: &[&str] = &[
+ // Inputs that intentionally do not parse
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/err",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err",
+
+ // Inputs that lex but do not necessarily parse
+ "src/tools/rust-analyzer/crates/parser/test_data/lexer",
+
+ // Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures",
+ "src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures",
+
+ // Inputs that crash rustc, making no claim about whether they are valid Rust
+ "tests/crashes",
+];
+
+// Directories in which a .stderr implies the corresponding .rs is not expected
+// to work.
+static UI_TEST_DIRS: &[&str] = &["tests/ui", "tests/rustdoc-ui"];
+
+pub fn for_each_rust_file(for_each: impl Fn(&Path) + Sync + Send) {
+ let mut rs_files = BTreeSet::new();
+
+ let repo_dir = Path::new("tests/rust");
+ for entry in WalkDir::new(repo_dir)
+ .into_iter()
+ .filter_entry(base_dir_filter)
+ {
+ let entry = entry.unwrap();
+ if !entry.file_type().is_dir() {
+ rs_files.insert(entry.into_path());
+ }
+ }
+
+ for ui_test_dir in UI_TEST_DIRS {
+ for entry in WalkDir::new(repo_dir.join(ui_test_dir)) {
+ let mut path = entry.unwrap().into_path();
+ if path.extension() == Some(OsStr::new("stderr")) {
+ loop {
+ rs_files.remove(&path.with_extension("rs"));
+ path = path.with_extension("");
+ if path.extension().is_none() {
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ rs_files.par_iter().map(PathBuf::as_path).for_each(for_each);
+}
+
+pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ let path = entry.path();
+
+ let mut path_string = path.to_string_lossy();
+ if cfg!(windows) {
+ path_string = path_string.replace('\\', "/").into();
+ }
+ let path_string = if path_string == "tests/rust" {
+ return true;
+ } else if let Some(path) = path_string.strip_prefix("tests/rust/") {
+ path
+ } else {
+ panic!("unexpected path in Rust dist: {}", path_string);
+ };
+
+ if path.is_dir() {
+ return !EXCLUDE_DIRS.contains(&path_string);
+ }
+
+ if path.extension() != Some(OsStr::new("rs")) {
+ return false;
+ }
+
+ !EXCLUDE_FILES.contains(&path_string)
+}
+
+#[allow(dead_code)]
+pub fn edition(path: &Path) -> &'static str {
+ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
+ "2015"
+ } else {
+ "2021"
+ }
+}
+
+pub fn clone_rust() {
+ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
+ Err(_) => true,
+ Ok(contents) => contents.trim() != REVISION,
+ };
+ if needs_clone {
+ download_and_unpack().unwrap();
+ }
+
+ let mut missing = String::new();
+ let test_src = Path::new("tests/rust");
+
+ let mut exclude_files_set = BTreeSet::new();
+ for exclude in EXCLUDE_FILES {
+ if !exclude_files_set.insert(exclude) {
+ panic!("duplicate path in EXCLUDE_FILES: {}", exclude);
+ }
+ for dir in EXCLUDE_DIRS {
+ if Path::new(exclude).starts_with(dir) {
+ panic!("excluded file {} is inside an excluded dir", exclude);
+ }
+ }
+ if !test_src.join(exclude).is_file() {
+ missing += "\ntests/rust/";
+ missing += exclude;
+ }
+ }
+
+ let mut exclude_dirs_set = BTreeSet::new();
+ for exclude in EXCLUDE_DIRS {
+ if !exclude_dirs_set.insert(exclude) {
+ panic!("duplicate path in EXCLUDE_DIRS: {}", exclude);
+ }
+ if !test_src.join(exclude).is_dir() {
+ missing += "\ntests/rust/";
+ missing += exclude;
+ missing += "/";
+ }
+ }
+
+ if !missing.is_empty() {
+ panic!("excluded test file does not exist:{}\n", missing);
+ }
+}
+
+fn download_and_unpack() -> Result<()> {
+ let url = format!(
+ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
+ REVISION
+ );
+ let response = reqwest::blocking::get(url)?.error_for_status()?;
+ let progress = Progress::new(response);
+ let decoder = GzDecoder::new(progress);
+ let mut archive = Archive::new(decoder);
+ let prefix = format!("rust-{}", REVISION);
+
+ let tests_rust = Path::new("tests/rust");
+ if tests_rust.exists() {
+ fs::remove_dir_all(tests_rust)?;
+ }
+
+ for entry in archive.entries()? {
+ let mut entry = entry?;
+ let path = entry.path()?;
+ if path == Path::new("pax_global_header") {
+ continue;
+ }
+ let relative = path.strip_prefix(&prefix)?;
+ let out = tests_rust.join(relative);
+ entry.unpack(&out)?;
+ }
+
+ fs::write("tests/rust/COMMIT", REVISION)?;
+ Ok(())
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/repo/progress.rs b/rust/hw/char/pl011/vendor/syn/tests/repo/progress.rs
new file mode 100644
index 0000000000..28c8a44b12
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/repo/progress.rs
@@ -0,0 +1,37 @@
+use std::io::{Read, Result};
+use std::time::{Duration, Instant};
+
+pub struct Progress<R> {
+ bytes: usize,
+ tick: Instant,
+ stream: R,
+}
+
+impl<R> Progress<R> {
+ pub fn new(stream: R) -> Self {
+ Progress {
+ bytes: 0,
+ tick: Instant::now() + Duration::from_millis(2000),
+ stream,
+ }
+ }
+}
+
+impl<R: Read> Read for Progress<R> {
+ fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
+ let num = self.stream.read(buf)?;
+ self.bytes += num;
+ let now = Instant::now();
+ if now > self.tick {
+ self.tick = now + Duration::from_millis(500);
+ errorf!("downloading... {} bytes\n", self.bytes);
+ }
+ Ok(num)
+ }
+}
+
+impl<R> Drop for Progress<R> {
+ fn drop(&mut self) {
+ errorf!("done ({} bytes)\n", self.bytes);
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_asyncness.rs b/rust/hw/char/pl011/vendor/syn/tests/test_asyncness.rs
new file mode 100644
index 0000000000..9968934490
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_asyncness.rs
@@ -0,0 +1,43 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use syn::{Expr, Item};
+
+#[test]
+fn test_async_fn() {
+ let input = "async fn process() {}";
+
+ snapshot!(input as Item, @r###"
+ Item::Fn {
+ vis: Visibility::Inherited,
+ sig: Signature {
+ asyncness: Some,
+ ident: "process",
+ generics: Generics,
+ output: ReturnType::Default,
+ },
+ block: Block {
+ stmts: [],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_async_closure() {
+ let input = "async || {}";
+
+ snapshot!(input as Expr, @r###"
+ Expr::Closure {
+ asyncness: Some,
+ output: ReturnType::Default,
+ body: Expr::Block {
+ block: Block {
+ stmts: [],
+ },
+ },
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_attribute.rs b/rust/hw/char/pl011/vendor/syn/tests/test_attribute.rs
new file mode 100644
index 0000000000..597ae3adc8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_attribute.rs
@@ -0,0 +1,225 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use syn::parse::Parser;
+use syn::{Attribute, Meta};
+
+#[test]
+fn test_meta_item_word() {
+ let meta = test("#[foo]");
+
+ snapshot!(meta, @r###"
+ Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_meta_item_name_value() {
+ let meta = test("#[foo = 5]");
+
+ snapshot!(meta, @r###"
+ Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ value: Expr::Lit {
+ lit: 5,
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_meta_item_bool_value() {
+ let meta = test("#[foo = true]");
+
+ snapshot!(meta, @r###"
+ Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ value: Expr::Lit {
+ lit: Lit::Bool {
+ value: true,
+ },
+ },
+ }
+ "###);
+
+ let meta = test("#[foo = false]");
+
+ snapshot!(meta, @r###"
+ Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ value: Expr::Lit {
+ lit: Lit::Bool {
+ value: false,
+ },
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_meta_item_list_lit() {
+ let meta = test("#[foo(5)]");
+
+ snapshot!(meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`5`),
+ }
+ "###);
+}
+
+#[test]
+fn test_meta_item_list_word() {
+ let meta = test("#[foo(bar)]");
+
+ snapshot!(meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`bar`),
+ }
+ "###);
+}
+
+#[test]
+fn test_meta_item_list_name_value() {
+ let meta = test("#[foo(bar = 5)]");
+
+ snapshot!(meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`bar = 5`),
+ }
+ "###);
+}
+
+#[test]
+fn test_meta_item_list_bool_value() {
+ let meta = test("#[foo(bar = true)]");
+
+ snapshot!(meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`bar = true`),
+ }
+ "###);
+}
+
+#[test]
+fn test_meta_item_multiple() {
+ let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
+
+ snapshot!(meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
+ }
+ "###);
+}
+
+#[test]
+fn test_bool_lit() {
+ let meta = test("#[foo(true)]");
+
+ snapshot!(meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`true`),
+ }
+ "###);
+}
+
+#[test]
+fn test_negative_lit() {
+ let meta = test("#[form(min = -1, max = 200)]");
+
+ snapshot!(meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "form",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`min = - 1 , max = 200`),
+ }
+ "###);
+}
+
+fn test(input: &str) -> Meta {
+ let attrs = Attribute::parse_outer.parse_str(input).unwrap();
+
+ assert_eq!(attrs.len(), 1);
+ let attr = attrs.into_iter().next().unwrap();
+
+ attr.meta
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_derive_input.rs b/rust/hw/char/pl011/vendor/syn/tests/test_derive_input.rs
new file mode 100644
index 0000000000..c3d31eb0e9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_derive_input.rs
@@ -0,0 +1,781 @@
+#![allow(
+ clippy::assertions_on_result_states,
+ clippy::manual_let_else,
+ clippy::too_many_lines,
+ clippy::uninlined_format_args
+)]
+
+#[macro_use]
+mod macros;
+
+use quote::quote;
+use syn::{Data, DeriveInput};
+
+#[test]
+fn test_unit() {
+ let input = quote! {
+ struct Unit;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "Unit",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unit,
+ semi_token: Some,
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_struct() {
+ let input = quote! {
+ #[derive(Debug, Clone)]
+ pub struct Item {
+ pub ident: Ident,
+ pub attrs: Vec<Attribute>
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "derive",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`Debug , Clone`),
+ },
+ },
+ ],
+ vis: Visibility::Public,
+ ident: "Item",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Named {
+ named: [
+ Field {
+ vis: Visibility::Public,
+ ident: Some("ident"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Ident",
+ },
+ ],
+ },
+ },
+ },
+ Token![,],
+ Field {
+ vis: Visibility::Public,
+ ident: Some("attrs"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Vec",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Attribute",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ },
+ }
+ "###);
+
+ snapshot!(&input.attrs[0].meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "derive",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`Debug , Clone`),
+ }
+ "###);
+}
+
+#[test]
+fn test_union() {
+ let input = quote! {
+ union MaybeUninit<T> {
+ uninit: (),
+ value: T
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "MaybeUninit",
+ generics: Generics {
+ lt_token: Some,
+ params: [
+ GenericParam::Type(TypeParam {
+ ident: "T",
+ }),
+ ],
+ gt_token: Some,
+ },
+ data: Data::Union {
+ fields: FieldsNamed {
+ named: [
+ Field {
+ vis: Visibility::Inherited,
+ ident: Some("uninit"),
+ colon_token: Some,
+ ty: Type::Tuple,
+ },
+ Token![,],
+ Field {
+ vis: Visibility::Inherited,
+ ident: Some("value"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ },
+ }
+ "###);
+}
+
+#[test]
+#[cfg(feature = "full")]
+fn test_enum() {
+ let input = quote! {
+ /// See the std::result module documentation for details.
+ #[must_use]
+ pub enum Result<T, E> {
+ Ok(T),
+ Err(E),
+ Surprise = 0isize,
+
+ // Smuggling data into a proc_macro_derive,
+ // in the style of https://github.com/dtolnay/proc-macro-hack
+ ProcMacroHack = (0, "data").0
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "doc",
+ },
+ ],
+ },
+ value: Expr::Lit {
+ lit: " See the std::result module documentation for
details.",
+ },
+ },
+ },
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "must_use",
+ },
+ ],
+ },
+ },
+ ],
+ vis: Visibility::Public,
+ ident: "Result",
+ generics: Generics {
+ lt_token: Some,
+ params: [
+ GenericParam::Type(TypeParam {
+ ident: "T",
+ }),
+ Token![,],
+ GenericParam::Type(TypeParam {
+ ident: "E",
+ }),
+ ],
+ gt_token: Some,
+ },
+ data: Data::Enum {
+ variants: [
+ Variant {
+ ident: "Ok",
+ fields: Fields::Unnamed {
+ unnamed: [
+ Field {
+ vis: Visibility::Inherited,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ },
+ Token![,],
+ Variant {
+ ident: "Err",
+ fields: Fields::Unnamed {
+ unnamed: [
+ Field {
+ vis: Visibility::Inherited,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "E",
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ },
+ Token![,],
+ Variant {
+ ident: "Surprise",
+ fields: Fields::Unit,
+ discriminant: Some(Expr::Lit {
+ lit: 0isize,
+ }),
+ },
+ Token![,],
+ Variant {
+ ident: "ProcMacroHack",
+ fields: Fields::Unit,
+ discriminant: Some(Expr::Field {
+ base: Expr::Tuple {
+ elems: [
+ Expr::Lit {
+ lit: 0,
+ },
+ Token![,],
+ Expr::Lit {
+ lit: "data",
+ },
+ ],
+ },
+ member: Member::Unnamed(Index {
+ index: 0,
+ }),
+ }),
+ },
+ ],
+ },
+ }
+ "###);
+
+ let meta_items: Vec<_> = input.attrs.into_iter().map(|attr| attr.meta).collect();
+
+ snapshot!(meta_items, @r###"
+ [
+ Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "doc",
+ },
+ ],
+ },
+ value: Expr::Lit {
+ lit: " See the std::result module documentation for details.",
+ },
+ },
+ Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "must_use",
+ },
+ ],
+ },
+ ]
+ "###);
+}
+
+#[test]
+fn test_attr_with_non_mod_style_path() {
+ let input = quote! {
+ #[inert <T>]
+ struct S;
+ };
+
+ syn::parse2::<DeriveInput>(input).unwrap_err();
+}
+
+#[test]
+fn test_attr_with_mod_style_path_with_self() {
+ let input = quote! {
+ #[foo::self]
+ struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ Token![::],
+ PathSegment {
+ ident: "self",
+ },
+ ],
+ },
+ },
+ ],
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unit,
+ semi_token: Some,
+ },
+ }
+ "###);
+
+ snapshot!(&input.attrs[0].meta, @r###"
+ Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ Token![::],
+ PathSegment {
+ ident: "self",
+ },
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_pub_restricted() {
+ // Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
+ let input = quote! {
+ pub(in m) struct Z(pub(in m::n) u8);
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Restricted {
+ in_token: Some,
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "m",
+ },
+ ],
+ },
+ },
+ ident: "Z",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unnamed {
+ unnamed: [
+ Field {
+ vis: Visibility::Restricted {
+ in_token: Some,
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "m",
+ },
+ Token![::],
+ PathSegment {
+ ident: "n",
+ },
+ ],
+ },
+ },
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "u8",
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ semi_token: Some,
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_pub_restricted_crate() {
+ let input = quote! {
+ pub(crate) struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Restricted {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "crate",
+ },
+ ],
+ },
+ },
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unit,
+ semi_token: Some,
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_pub_restricted_super() {
+ let input = quote! {
+ pub(super) struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Restricted {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "super",
+ },
+ ],
+ },
+ },
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unit,
+ semi_token: Some,
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_pub_restricted_in_super() {
+ let input = quote! {
+ pub(in super) struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Restricted {
+ in_token: Some,
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "super",
+ },
+ ],
+ },
+ },
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unit,
+ semi_token: Some,
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_fields_on_unit_struct() {
+ let input = quote! {
+ struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unit,
+ semi_token: Some,
+ },
+ }
+ "###);
+
+ let data = match input.data {
+ Data::Struct(data) => data,
+ _ => panic!("expected a struct"),
+ };
+
+ assert_eq!(0, data.fields.iter().count());
+}
+
+#[test]
+fn test_fields_on_named_struct() {
+ let input = quote! {
+ struct S {
+ foo: i32,
+ pub bar: String,
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Named {
+ named: [
+ Field {
+ vis: Visibility::Inherited,
+ ident: Some("foo"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "i32",
+ },
+ ],
+ },
+ },
+ },
+ Token![,],
+ Field {
+ vis: Visibility::Public,
+ ident: Some("bar"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "String",
+ },
+ ],
+ },
+ },
+ },
+ Token![,],
+ ],
+ },
+ },
+ }
+ "###);
+
+ let data = match input.data {
+ Data::Struct(data) => data,
+ _ => panic!("expected a struct"),
+ };
+
+ snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
+ [
+ Field {
+ vis: Visibility::Inherited,
+ ident: Some("foo"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "i32",
+ },
+ ],
+ },
+ },
+ },
+ Field {
+ vis: Visibility::Public,
+ ident: Some("bar"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "String",
+ },
+ ],
+ },
+ },
+ },
+ ]
+ "###);
+}
+
+#[test]
+fn test_fields_on_tuple_struct() {
+ let input = quote! {
+ struct S(i32, pub String);
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unnamed {
+ unnamed: [
+ Field {
+ vis: Visibility::Inherited,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "i32",
+ },
+ ],
+ },
+ },
+ },
+ Token![,],
+ Field {
+ vis: Visibility::Public,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "String",
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ semi_token: Some,
+ },
+ }
+ "###);
+
+ let data = match input.data {
+ Data::Struct(data) => data,
+ _ => panic!("expected a struct"),
+ };
+
+ snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
+ [
+ Field {
+ vis: Visibility::Inherited,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "i32",
+ },
+ ],
+ },
+ },
+ },
+ Field {
+ vis: Visibility::Public,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "String",
+ },
+ ],
+ },
+ },
+ },
+ ]
+ "###);
+}
+
+#[test]
+fn test_ambiguous_crate() {
+ let input = quote! {
+ // The field type is `(crate::X)` not `crate (::X)`.
+ struct S(crate::X);
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unnamed {
+ unnamed: [
+ Field {
+ vis: Visibility::Inherited,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "crate",
+ },
+ Token![::],
+ PathSegment {
+ ident: "X",
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ semi_token: Some,
+ },
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_expr.rs b/rust/hw/char/pl011/vendor/syn/tests/test_expr.rs
new file mode 100644
index 0000000000..961e3c4ea3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_expr.rs
@@ -0,0 +1,692 @@
+#![allow(clippy::single_element_loop, clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group};
+use quote::{quote, ToTokens as _};
+use std::mem;
+use syn::punctuated::Punctuated;
+use syn::visit_mut::{self, VisitMut};
+use syn::{parse_quote, token, Expr, ExprRange, ExprTuple, Stmt, Token};
+
+#[test]
+fn test_expr_parse() {
+ let tokens = quote!(..100u32);
+ snapshot!(tokens as Expr, @r###"
+ Expr::Range {
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Lit {
+ lit: 100u32,
+ }),
+ }
+ "###);
+
+ let tokens = quote!(..100u32);
+ snapshot!(tokens as ExprRange, @r###"
+ ExprRange {
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Lit {
+ lit: 100u32,
+ }),
+ }
+ "###);
+}
+
+#[test]
+fn test_await() {
+ // Must not parse as Expr::Field.
+ let tokens = quote!(fut.await);
+
+ snapshot!(tokens as Expr, @r###"
+ Expr::Await {
+ base: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "fut",
+ },
+ ],
+ },
+ },
+ }
+ "###);
+}
+
+#[rustfmt::skip]
+#[test]
+fn test_tuple_multi_index() {
+ let expected = snapshot!("tuple.0.0" as Expr, @r###"
+ Expr::Field {
+ base: Expr::Field {
+ base: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "tuple",
+ },
+ ],
+ },
+ },
+ member: Member::Unnamed(Index {
+ index: 0,
+ }),
+ },
+ member: Member::Unnamed(Index {
+ index: 0,
+ }),
+ }
+ "###);
+
+ for &input in &[
+ "tuple .0.0",
+ "tuple. 0.0",
+ "tuple.0 .0",
+ "tuple.0. 0",
+ "tuple . 0 . 0",
+ ] {
+ assert_eq!(expected, syn::parse_str(input).unwrap());
+ }
+
+ for tokens in [
+ quote!(tuple.0.0),
+ quote!(tuple .0.0),
+ quote!(tuple. 0.0),
+ quote!(tuple.0 .0),
+ quote!(tuple.0. 0),
+ quote!(tuple . 0 . 0),
+ ] {
+ assert_eq!(expected, syn::parse2(tokens).unwrap());
+ }
+}
+
+#[test]
+fn test_macro_variable_func() {
+ // mimics the token stream corresponding to `$fn()`
+ let path = Group::new(Delimiter::None, quote!(f));
+ let tokens = quote!(#path());
+
+ snapshot!(tokens as Expr, @r###"
+ Expr::Call {
+ func: Expr::Group {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "f",
+ },
+ ],
+ },
+ },
+ },
+ }
+ "###);
+
+ let path = Group::new(Delimiter::None, quote! { #[inside] f });
+ let tokens = quote!(#[outside] #path());
+
+ snapshot!(tokens as Expr, @r###"
+ Expr::Call {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "outside",
+ },
+ ],
+ },
+ },
+ ],
+ func: Expr::Group {
+ expr: Expr::Path {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "inside",
+ },
+ ],
+ },
+ },
+ ],
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "f",
+ },
+ ],
+ },
+ },
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_macro_variable_macro() {
+ // mimics the token stream corresponding to `$macro!()`
+ let mac = Group::new(Delimiter::None, quote!(m));
+ let tokens = quote!(#mac!());
+
+ snapshot!(tokens as Expr, @r###"
+ Expr::Macro {
+ mac: Macro {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "m",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(``),
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_macro_variable_struct() {
+ // mimics the token stream corresponding to `$struct {}`
+ let s = Group::new(Delimiter::None, quote! { S });
+ let tokens = quote!(#s {});
+
+ snapshot!(tokens as Expr, @r###"
+ Expr::Struct {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "S",
+ },
+ ],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_macro_variable_unary() {
+ // mimics the token stream corresponding to `$expr.method()` where expr is `&self`
+ let inner = Group::new(Delimiter::None, quote!(&self));
+ let tokens = quote!(#inner.method());
+ snapshot!(tokens as Expr, @r###"
+ Expr::MethodCall {
+ receiver: Expr::Group {
+ expr: Expr::Reference {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "self",
+ },
+ ],
+ },
+ },
+ },
+ },
+ method: "method",
+ }
+ "###);
+}
+
+#[test]
+fn test_macro_variable_match_arm() {
+ // mimics the token stream corresponding to `match v { _ => $expr }`
+ let expr = Group::new(Delimiter::None, quote! { #[a] () });
+ let tokens = quote!(match v { _ => #expr });
+ snapshot!(tokens as Expr, @r###"
+ Expr::Match {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "v",
+ },
+ ],
+ },
+ },
+ arms: [
+ Arm {
+ pat: Pat::Wild,
+ body: Expr::Group {
+ expr: Expr::Tuple {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "a",
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ },
+ ],
+ }
+ "###);
+
+ let expr = Group::new(Delimiter::None, quote!(loop {} + 1));
+ let tokens = quote!(match v { _ => #expr });
+ snapshot!(tokens as Expr, @r###"
+ Expr::Match {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "v",
+ },
+ ],
+ },
+ },
+ arms: [
+ Arm {
+ pat: Pat::Wild,
+ body: Expr::Group {
+ expr: Expr::Binary {
+ left: Expr::Loop {
+ body: Block {
+ stmts: [],
+ },
+ },
+ op: BinOp::Add,
+ right: Expr::Lit {
+ lit: 1,
+ },
+ },
+ },
+ },
+ ],
+ }
+ "###);
+}
+
+// https://github.com/dtolnay/syn/issues/1019
+#[test]
+fn test_closure_vs_rangefull() {
+ #[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/4808
+ let tokens = quote!(|| .. .method());
+ snapshot!(tokens as Expr, @r###"
+ Expr::MethodCall {
+ receiver: Expr::Closure {
+ output: ReturnType::Default,
+ body: Expr::Range {
+ limits: RangeLimits::HalfOpen,
+ },
+ },
+ method: "method",
+ }
+ "###);
+}
+
+#[test]
+fn test_postfix_operator_after_cast() {
+ syn::parse_str::<Expr>("|| &x as T[0]").unwrap_err();
+ syn::parse_str::<Expr>("|| () as ()()").unwrap_err();
+}
+
+#[test]
+fn test_range_kinds() {
+ syn::parse_str::<Expr>("..").unwrap();
+ syn::parse_str::<Expr>("..hi").unwrap();
+ syn::parse_str::<Expr>("lo..").unwrap();
+ syn::parse_str::<Expr>("lo..hi").unwrap();
+
+ syn::parse_str::<Expr>("..=").unwrap_err();
+ syn::parse_str::<Expr>("..=hi").unwrap();
+ syn::parse_str::<Expr>("lo..=").unwrap_err();
+ syn::parse_str::<Expr>("lo..=hi").unwrap();
+
+ syn::parse_str::<Expr>("...").unwrap_err();
+ syn::parse_str::<Expr>("...hi").unwrap_err();
+ syn::parse_str::<Expr>("lo...").unwrap_err();
+ syn::parse_str::<Expr>("lo...hi").unwrap_err();
+}
+
+#[test]
+fn test_range_precedence() {
+ snapshot!(".. .." as Expr, @r###"
+ Expr::Range {
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Range {
+ limits: RangeLimits::HalfOpen,
+ }),
+ }
+ "###);
+
+ snapshot!(".. .. ()" as Expr, @r###"
+ Expr::Range {
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Range {
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Tuple),
+ }),
+ }
+ "###);
+
+ snapshot!("() .. .." as Expr, @r###"
+ Expr::Range {
+ start: Some(Expr::Tuple),
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Range {
+ limits: RangeLimits::HalfOpen,
+ }),
+ }
+ "###);
+
+ // A range with a lower bound cannot be the upper bound of another range,
+ // and a range with an upper bound cannot be the lower bound of another
+ // range.
+ syn::parse_str::<Expr>(".. x ..").unwrap_err();
+ syn::parse_str::<Expr>("x .. x ..").unwrap_err();
+}
+
+#[test]
+fn test_ambiguous_label() {
+ for stmt in [
+ quote! {
+ return 'label: loop { break 'label 42; };
+ },
+ quote! {
+ break ('label: loop { break 'label 42; });
+ },
+ quote! {
+ break 1 + 'label: loop { break 'label 42; };
+ },
+ quote! {
+ break 'outer 'inner: loop { break 'inner 42; };
+ },
+ ] {
+ syn::parse2::<Stmt>(stmt).unwrap();
+ }
+
+ for stmt in [
+        // Parentheses required. See https://github.com/rust-lang/rust/pull/87026.
+ quote! {
+ break 'label: loop { break 'label 42; };
+ },
+ ] {
+ syn::parse2::<Stmt>(stmt).unwrap_err();
+ }
+}
+
+#[test]
+fn test_extended_interpolated_path() {
+ let path = Group::new(Delimiter::None, quote!(a::b));
+
+ let tokens = quote!(if #path {});
+ snapshot!(tokens as Expr, @r###"
+ Expr::If {
+ cond: Expr::Group {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "a",
+ },
+ Token![::],
+ PathSegment {
+ ident: "b",
+ },
+ ],
+ },
+ },
+ },
+ then_branch: Block {
+ stmts: [],
+ },
+ }
+ "###);
+
+ let tokens = quote!(#path {});
+ snapshot!(tokens as Expr, @r###"
+ Expr::Struct {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "a",
+ },
+ Token![::],
+ PathSegment {
+ ident: "b",
+ },
+ ],
+ },
+ }
+ "###);
+
+ let tokens = quote!(#path :: c);
+ snapshot!(tokens as Expr, @r###"
+ Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "a",
+ },
+ Token![::],
+ PathSegment {
+ ident: "b",
+ },
+ Token![::],
+ PathSegment {
+ ident: "c",
+ },
+ ],
+ },
+ }
+ "###);
+
+ let nested = Group::new(Delimiter::None, quote!(a::b || true));
+ let tokens = quote!(if #nested && false {});
+ snapshot!(tokens as Expr, @r###"
+ Expr::If {
+ cond: Expr::Binary {
+ left: Expr::Group {
+ expr: Expr::Binary {
+ left: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "a",
+ },
+ Token![::],
+ PathSegment {
+ ident: "b",
+ },
+ ],
+ },
+ },
+ op: BinOp::Or,
+ right: Expr::Lit {
+ lit: Lit::Bool {
+ value: true,
+ },
+ },
+ },
+ },
+ op: BinOp::And,
+ right: Expr::Lit {
+ lit: Lit::Bool {
+ value: false,
+ },
+ },
+ },
+ then_branch: Block {
+ stmts: [],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_tuple_comma() {
+ let mut expr = ExprTuple {
+ attrs: Vec::new(),
+ paren_token: token::Paren::default(),
+ elems: Punctuated::new(),
+ };
+ snapshot!(expr.to_token_stream() as Expr, @"Expr::Tuple");
+
+ expr.elems.push_value(parse_quote!(continue));
+ // Must not parse to Expr::Paren
+ snapshot!(expr.to_token_stream() as Expr, @r###"
+ Expr::Tuple {
+ elems: [
+ Expr::Continue,
+ Token![,],
+ ],
+ }
+ "###);
+
+ expr.elems.push_punct(<Token![,]>::default());
+ snapshot!(expr.to_token_stream() as Expr, @r###"
+ Expr::Tuple {
+ elems: [
+ Expr::Continue,
+ Token![,],
+ ],
+ }
+ "###);
+
+ expr.elems.push_value(parse_quote!(continue));
+ snapshot!(expr.to_token_stream() as Expr, @r###"
+ Expr::Tuple {
+ elems: [
+ Expr::Continue,
+ Token![,],
+ Expr::Continue,
+ ],
+ }
+ "###);
+
+ expr.elems.push_punct(<Token![,]>::default());
+ snapshot!(expr.to_token_stream() as Expr, @r###"
+ Expr::Tuple {
+ elems: [
+ Expr::Continue,
+ Token![,],
+ Expr::Continue,
+ Token![,],
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_binop_associativity() {
+ // Left to right.
+ snapshot!("() + () + ()" as Expr, @r###"
+ Expr::Binary {
+ left: Expr::Binary {
+ left: Expr::Tuple,
+ op: BinOp::Add,
+ right: Expr::Tuple,
+ },
+ op: BinOp::Add,
+ right: Expr::Tuple,
+ }
+ "###);
+
+ // Right to left.
+ snapshot!("() += () += ()" as Expr, @r###"
+ Expr::Binary {
+ left: Expr::Tuple,
+ op: BinOp::AddAssign,
+ right: Expr::Binary {
+ left: Expr::Tuple,
+ op: BinOp::AddAssign,
+ right: Expr::Tuple,
+ },
+ }
+ "###);
+
+ // Parenthesization is required.
+ syn::parse_str::<Expr>("() == () == ()").unwrap_err();
+}
+
+#[test]
+fn test_assign_range_precedence() {
+ // Range has higher precedence as the right-hand of an assignment, but
+ // ambiguous precedence as the left-hand of an assignment.
+ snapshot!("() = () .. ()" as Expr, @r###"
+ Expr::Assign {
+ left: Expr::Tuple,
+ right: Expr::Range {
+ start: Some(Expr::Tuple),
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Tuple),
+ },
+ }
+ "###);
+
+ snapshot!("() += () .. ()" as Expr, @r###"
+ Expr::Binary {
+ left: Expr::Tuple,
+ op: BinOp::AddAssign,
+ right: Expr::Range {
+ start: Some(Expr::Tuple),
+ limits: RangeLimits::HalfOpen,
+ end: Some(Expr::Tuple),
+ },
+ }
+ "###);
+
+ syn::parse_str::<Expr>("() .. () = ()").unwrap_err();
+ syn::parse_str::<Expr>("() .. () += ()").unwrap_err();
+}
+
+#[test]
+fn test_fixup() {
+ struct FlattenParens;
+
+ impl VisitMut for FlattenParens {
+ fn visit_expr_mut(&mut self, e: &mut Expr) {
+ while let Expr::Paren(paren) = e {
+ *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER);
+ }
+ visit_mut::visit_expr_mut(self, e);
+ }
+ }
+
+ for tokens in [
+ quote! { 2 * (1 + 1) },
+ quote! { 0 + (0 + 0) },
+ quote! { (a = b) = c },
+ quote! { (x as i32) < 0 },
+ quote! { (1 + x as i32) < 0 },
+ quote! { (1 + 1).abs() },
+ quote! { (lo..hi)[..] },
+ quote! { (a..b)..(c..d) },
+ quote! { (&mut fut).await },
+ quote! { &mut (x as i32) },
+ quote! { -(x as i32) },
+ quote! { if (S {} == 1) {} },
+ quote! { { (m! {}) - 1 } },
+ quote! { match m { _ => ({}) - 1 } },
+ quote! { if let _ = (a && b) && c {} },
+ quote! { if let _ = (S {}) {} },
+ ] {
+ let original: Expr = syn::parse2(tokens).unwrap();
+
+ let mut flat = original.clone();
+ FlattenParens.visit_expr_mut(&mut flat);
+ let reconstructed: Expr = match syn::parse2(flat.to_token_stream()) {
+ Ok(reconstructed) => reconstructed,
+            Err(err) => panic!("failed to parse `{}`: {}", flat.to_token_stream(), err),
+ };
+
+ assert!(
+ original == reconstructed,
+ "original: {}\nreconstructed: {}",
+ original.to_token_stream(),
+ reconstructed.to_token_stream(),
+ );
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_generics.rs b/rust/hw/char/pl011/vendor/syn/tests/test_generics.rs
new file mode 100644
index 0000000000..3faf0dba59
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_generics.rs
@@ -0,0 +1,282 @@
+#![allow(
+ clippy::manual_let_else,
+ clippy::too_many_lines,
+ clippy::uninlined_format_args
+)]
+
+#[macro_use]
+mod macros;
+
+use quote::quote;
+use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate};
+
+#[test]
+fn test_split_for_impl() {
+ let input = quote! {
+ struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics {
+ lt_token: Some,
+ params: [
+ GenericParam::Lifetime(LifetimeParam {
+ lifetime: Lifetime {
+ ident: "a",
+ },
+ }),
+ Token![,],
+ GenericParam::Lifetime(LifetimeParam {
+ lifetime: Lifetime {
+ ident: "b",
+ },
+ colon_token: Some,
+ bounds: [
+ Lifetime {
+ ident: "a",
+ },
+ ],
+ }),
+ Token![,],
+ GenericParam::Type(TypeParam {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "may_dangle",
+ },
+ ],
+ },
+ },
+ ],
+ ident: "T",
+ colon_token: Some,
+ bounds: [
+ TypeParamBound::Lifetime {
+ ident: "a",
+ },
+ ],
+ eq_token: Some,
+ default: Some(Type::Tuple),
+ }),
+ ],
+ gt_token: Some,
+ where_clause: Some(WhereClause {
+ predicates: [
+ WherePredicate::Type(PredicateType {
+ bounded_ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ },
+ ],
+ },
+ },
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Debug",
+ },
+ ],
+ },
+ }),
+ ],
+ }),
+ ],
+ }),
+ },
+ data: Data::Struct {
+ fields: Fields::Unit,
+ semi_token: Some,
+ },
+ }
+ "###);
+
+ let generics = input.generics;
+ let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+
+ let generated = quote! {
+ impl #impl_generics MyTrait for Test #ty_generics #where_clause {}
+ };
+ let expected = quote! {
+ impl<'a, 'b: 'a, #[may_dangle] T: 'a> MyTrait
+ for Test<'a, 'b, T>
+ where
+ T: Debug
+ {}
+ };
+ assert_eq!(generated.to_string(), expected.to_string());
+
+ let turbofish = ty_generics.as_turbofish();
+ let generated = quote! {
+ Test #turbofish
+ };
+ let expected = quote! {
+ Test::<'a, 'b, T>
+ };
+ assert_eq!(generated.to_string(), expected.to_string());
+}
+
+#[test]
+fn test_ty_param_bound() {
+ let tokens = quote!('a);
+ snapshot!(tokens as TypeParamBound, @r###"
+ TypeParamBound::Lifetime {
+ ident: "a",
+ }
+ "###);
+
+ let tokens = quote!('_);
+ snapshot!(tokens as TypeParamBound, @r###"
+ TypeParamBound::Lifetime {
+ ident: "_",
+ }
+ "###);
+
+ let tokens = quote!(Debug);
+ snapshot!(tokens as TypeParamBound, @r###"
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Debug",
+ },
+ ],
+ },
+ })
+ "###);
+
+ let tokens = quote!(?Sized);
+ snapshot!(tokens as TypeParamBound, @r###"
+ TypeParamBound::Trait(TraitBound {
+ modifier: TraitBoundModifier::Maybe,
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Sized",
+ },
+ ],
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_fn_precedence_in_where_clause() {
+    // This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not
+ // `FnOnce() -> (i32 + Send)`.
+ let input = quote! {
+ fn f<G>()
+ where
+ G: FnOnce() -> i32 + Send,
+ {
+ }
+ };
+
+ snapshot!(input as ItemFn, @r###"
+ ItemFn {
+ vis: Visibility::Inherited,
+ sig: Signature {
+ ident: "f",
+ generics: Generics {
+ lt_token: Some,
+ params: [
+ GenericParam::Type(TypeParam {
+ ident: "G",
+ }),
+ ],
+ gt_token: Some,
+ where_clause: Some(WhereClause {
+ predicates: [
+ WherePredicate::Type(PredicateType {
+ bounded_ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "G",
+ },
+ ],
+ },
+ },
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "FnOnce",
+                                                arguments: PathArguments::Parenthesized {
+ output: ReturnType::Type(
+ Type::Path {
+ path: Path {
+ segments: [
+                                                                    PathSegment {
+                                                                        ident: "i32",
+ },
+ ],
+ },
+ },
+ ),
+ },
+ },
+ ],
+ },
+ }),
+ Token![+],
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Send",
+ },
+ ],
+ },
+ }),
+ ],
+ }),
+ Token![,],
+ ],
+ }),
+ },
+ output: ReturnType::Default,
+ },
+ block: Block {
+ stmts: [],
+ },
+ }
+ "###);
+
+ let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
+ assert_eq!(where_clause.predicates.len(), 1);
+
+ let predicate = match &where_clause.predicates[0] {
+ WherePredicate::Type(pred) => pred,
+ _ => panic!("wrong predicate kind"),
+ };
+
+ assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
+
+ let first_bound = &predicate.bounds[0];
+ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
+
+ let second_bound = &predicate.bounds[1];
+ assert_eq!(quote!(#second_bound).to_string(), "Send");
+}
+
+#[test]
+fn test_where_clause_at_end_of_input() {
+ let input = quote! {
+ where
+ };
+
+ snapshot!(input as WhereClause, @"WhereClause");
+
+ assert_eq!(input.predicates.len(), 0);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_grouping.rs b/rust/hw/char/pl011/vendor/syn/tests/test_grouping.rs
new file mode 100644
index 0000000000..326909feb5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_grouping.rs
@@ -0,0 +1,53 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
+use syn::Expr;
+
+#[test]
+fn test_grouping() {
+ let tokens: TokenStream = TokenStream::from_iter([
+ TokenTree::Literal(Literal::i32_suffixed(1)),
+ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+ TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::from_iter([
+ TokenTree::Literal(Literal::i32_suffixed(2)),
+ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+ TokenTree::Literal(Literal::i32_suffixed(3)),
+ ]),
+ )),
+ TokenTree::Punct(Punct::new('*', Spacing::Alone)),
+ TokenTree::Literal(Literal::i32_suffixed(4)),
+ ]);
+
+ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
+
+ snapshot!(tokens as Expr, @r###"
+ Expr::Binary {
+ left: Expr::Lit {
+ lit: 1i32,
+ },
+ op: BinOp::Add,
+ right: Expr::Binary {
+ left: Expr::Group {
+ expr: Expr::Binary {
+ left: Expr::Lit {
+ lit: 2i32,
+ },
+ op: BinOp::Add,
+ right: Expr::Lit {
+ lit: 3i32,
+ },
+ },
+ },
+ op: BinOp::Mul,
+ right: Expr::Lit {
+ lit: 4i32,
+ },
+ },
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_ident.rs b/rust/hw/char/pl011/vendor/syn/tests/test_ident.rs
new file mode 100644
index 0000000000..10df0ad56c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_ident.rs
@@ -0,0 +1,87 @@
+use proc_macro2::{Ident, Span, TokenStream};
+use std::str::FromStr;
+use syn::Result;
+
+#[track_caller]
+fn parse(s: &str) -> Result<Ident> {
+ syn::parse2(TokenStream::from_str(s).unwrap())
+}
+
+#[track_caller]
+fn new(s: &str) -> Ident {
+ Ident::new(s, Span::call_site())
+}
+
+#[test]
+fn ident_parse() {
+ parse("String").unwrap();
+}
+
+#[test]
+fn ident_parse_keyword() {
+ parse("abstract").unwrap_err();
+}
+
+#[test]
+fn ident_parse_empty() {
+ parse("").unwrap_err();
+}
+
+#[test]
+fn ident_parse_lifetime() {
+ parse("'static").unwrap_err();
+}
+
+#[test]
+fn ident_parse_underscore() {
+ parse("_").unwrap_err();
+}
+
+#[test]
+fn ident_parse_number() {
+ parse("255").unwrap_err();
+}
+
+#[test]
+fn ident_parse_invalid() {
+ parse("a#").unwrap_err();
+}
+
+#[test]
+fn ident_new() {
+ new("String");
+}
+
+#[test]
+fn ident_new_keyword() {
+ new("abstract");
+}
+
+#[test]
+#[should_panic(expected = "use Option<Ident>")]
+fn ident_new_empty() {
+ new("");
+}
+
+#[test]
+#[should_panic(expected = "not a valid Ident")]
+fn ident_new_lifetime() {
+ new("'static");
+}
+
+#[test]
+fn ident_new_underscore() {
+ new("_");
+}
+
+#[test]
+#[should_panic(expected = "use Literal instead")]
+fn ident_new_number() {
+ new("255");
+}
+
+#[test]
+#[should_panic(expected = "\"a#\" is not a valid Ident")]
+fn ident_new_invalid() {
+ new("a#");
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_item.rs b/rust/hw/char/pl011/vendor/syn/tests/test_item.rs
new file mode 100644
index 0000000000..5c088bbe60
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_item.rs
@@ -0,0 +1,332 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+use quote::quote;
+use syn::{Item, ItemTrait};
+
+#[test]
+fn test_macro_variable_attr() {
+ // mimics the token stream corresponding to `$attr fn f() {}`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
+ TokenTree::Ident(Ident::new("fn", Span::call_site())),
+ TokenTree::Ident(Ident::new("f", Span::call_site())),
+        TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
+ ]);
+
+ snapshot!(tokens as Item, @r###"
+ Item::Fn {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "test",
+ },
+ ],
+ },
+ },
+ ],
+ vis: Visibility::Inherited,
+ sig: Signature {
+ ident: "f",
+ generics: Generics,
+ output: ReturnType::Default,
+ },
+ block: Block {
+ stmts: [],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_negative_impl() {
+ // Rustc parses all of the following.
+
+ #[cfg(any())]
+ impl ! {}
+ let tokens = quote! {
+ impl ! {}
+ };
+ snapshot!(tokens as Item, @r###"
+ Item::Impl {
+ generics: Generics,
+ self_ty: Type::Never,
+ }
+ "###);
+
+ #[cfg(any())]
+ #[rustfmt::skip]
+ impl !Trait {}
+ let tokens = quote! {
+ impl !Trait {}
+ };
+ snapshot!(tokens as Item, @r###"
+ Item::Impl {
+ generics: Generics,
+ self_ty: Type::Verbatim(`! Trait`),
+ }
+ "###);
+
+ #[cfg(any())]
+ impl !Trait for T {}
+ let tokens = quote! {
+ impl !Trait for T {}
+ };
+ snapshot!(tokens as Item, @r###"
+ Item::Impl {
+ generics: Generics,
+ trait_: Some((
+ Some,
+ Path {
+ segments: [
+ PathSegment {
+ ident: "Trait",
+ },
+ ],
+ },
+ )),
+ self_ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ },
+ ],
+ },
+ },
+ }
+ "###);
+
+ #[cfg(any())]
+ #[rustfmt::skip]
+ impl !! {}
+ let tokens = quote! {
+ impl !! {}
+ };
+ snapshot!(tokens as Item, @r###"
+ Item::Impl {
+ generics: Generics,
+ self_ty: Type::Verbatim(`! !`),
+ }
+ "###);
+}
+
+#[test]
+fn test_macro_variable_impl() {
+ // mimics the token stream corresponding to `impl $trait for $ty {}`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Ident(Ident::new("impl", Span::call_site())),
+ TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))),
+ TokenTree::Ident(Ident::new("for", Span::call_site())),
+ TokenTree::Group(Group::new(Delimiter::None, quote!(Type))),
+ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
+ ]);
+
+ snapshot!(tokens as Item, @r###"
+ Item::Impl {
+ generics: Generics,
+ trait_: Some((
+ None,
+ Path {
+ segments: [
+ PathSegment {
+ ident: "Trait",
+ },
+ ],
+ },
+ )),
+ self_ty: Type::Group {
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Type",
+ },
+ ],
+ },
+ },
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_supertraits() {
+ // Rustc parses all of the following.
+
+ #[rustfmt::skip]
+ let tokens = quote!(trait Trait where {});
+ snapshot!(tokens as ItemTrait, @r###"
+ ItemTrait {
+ vis: Visibility::Inherited,
+ ident: "Trait",
+ generics: Generics {
+ where_clause: Some(WhereClause),
+ },
+ }
+ "###);
+
+ #[rustfmt::skip]
+ let tokens = quote!(trait Trait: where {});
+ snapshot!(tokens as ItemTrait, @r###"
+ ItemTrait {
+ vis: Visibility::Inherited,
+ ident: "Trait",
+ generics: Generics {
+ where_clause: Some(WhereClause),
+ },
+ colon_token: Some,
+ }
+ "###);
+
+ #[rustfmt::skip]
+ let tokens = quote!(trait Trait: Sized where {});
+ snapshot!(tokens as ItemTrait, @r###"
+ ItemTrait {
+ vis: Visibility::Inherited,
+ ident: "Trait",
+ generics: Generics {
+ where_clause: Some(WhereClause),
+ },
+ colon_token: Some,
+ supertraits: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Sized",
+ },
+ ],
+ },
+ }),
+ ],
+ }
+ "###);
+
+ #[rustfmt::skip]
+ let tokens = quote!(trait Trait: Sized + where {});
+ snapshot!(tokens as ItemTrait, @r###"
+ ItemTrait {
+ vis: Visibility::Inherited,
+ ident: "Trait",
+ generics: Generics {
+ where_clause: Some(WhereClause),
+ },
+ colon_token: Some,
+ supertraits: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Sized",
+ },
+ ],
+ },
+ }),
+ Token![+],
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_type_empty_bounds() {
+ #[rustfmt::skip]
+ let tokens = quote! {
+ trait Foo {
+ type Bar: ;
+ }
+ };
+
+ snapshot!(tokens as ItemTrait, @r###"
+ ItemTrait {
+ vis: Visibility::Inherited,
+ ident: "Foo",
+ generics: Generics,
+ items: [
+ TraitItem::Type {
+ ident: "Bar",
+ generics: Generics,
+ colon_token: Some,
+ },
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_impl_visibility() {
+ let tokens = quote! {
+ pub default unsafe impl union {}
+ };
+
+    snapshot!(tokens as Item, @"Item::Verbatim(`pub default unsafe impl union { }`)");
+}
+
+#[test]
+fn test_impl_type_parameter_defaults() {
+ #[cfg(any())]
+ impl<T = ()> () {}
+ let tokens = quote! {
+ impl<T = ()> () {}
+ };
+ snapshot!(tokens as Item, @r###"
+ Item::Impl {
+ generics: Generics {
+ lt_token: Some,
+ params: [
+ GenericParam::Type(TypeParam {
+ ident: "T",
+ eq_token: Some,
+ default: Some(Type::Tuple),
+ }),
+ ],
+ gt_token: Some,
+ },
+ self_ty: Type::Tuple,
+ }
+ "###);
+}
+
+#[test]
+fn test_impl_trait_trailing_plus() {
+ let tokens = quote! {
+ fn f() -> impl Sized + {}
+ };
+
+ snapshot!(tokens as Item, @r###"
+ Item::Fn {
+ vis: Visibility::Inherited,
+ sig: Signature {
+ ident: "f",
+ generics: Generics,
+ output: ReturnType::Type(
+ Type::ImplTrait {
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Sized",
+ },
+ ],
+ },
+ }),
+ Token![+],
+ ],
+ },
+ ),
+ },
+ block: Block {
+ stmts: [],
+ },
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_iterators.rs b/rust/hw/char/pl011/vendor/syn/tests/test_iterators.rs
new file mode 100644
index 0000000000..5f0eff59e6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_iterators.rs
@@ -0,0 +1,70 @@
+#![allow(clippy::uninlined_format_args)]
+
+use syn::punctuated::{Pair, Punctuated};
+use syn::Token;
+
+#[macro_use]
+mod macros;
+
+macro_rules! check_exact_size_iterator {
+ ($iter:expr) => {{
+ let iter = $iter;
+ let size_hint = iter.size_hint();
+ let len = iter.len();
+ let count = iter.count();
+ assert_eq!(len, count);
+ assert_eq!(size_hint, (count, Some(count)));
+ }};
+}
+
+#[test]
+fn pairs() {
+ let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+
+ check_exact_size_iterator!(p.pairs());
+ check_exact_size_iterator!(p.pairs_mut());
+ check_exact_size_iterator!(p.into_pairs());
+
+ let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+
+ assert_eq!(p.pairs().next_back().map(Pair::into_value), Some(&4));
+ assert_eq!(
+ p.pairs_mut().next_back().map(Pair::into_value),
+ Some(&mut 4)
+ );
+ assert_eq!(p.into_pairs().next_back().map(Pair::into_value), Some(4));
+}
+
+#[test]
+fn iter() {
+ let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+
+ check_exact_size_iterator!(p.iter());
+ check_exact_size_iterator!(p.iter_mut());
+ check_exact_size_iterator!(p.into_iter());
+
+ let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+
+ assert_eq!(p.iter().next_back(), Some(&4));
+ assert_eq!(p.iter_mut().next_back(), Some(&mut 4));
+ assert_eq!(p.into_iter().next_back(), Some(4));
+}
+
+#[test]
+fn may_dangle() {
+ let p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+ for element in &p {
+ if *element == 2 {
+ drop(p);
+ break;
+ }
+ }
+
+ let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+ for element in &mut p {
+ if *element == 2 {
+ drop(p);
+ break;
+ }
+ }
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_lit.rs b/rust/hw/char/pl011/vendor/syn/tests/test_lit.rs
new file mode 100644
index 0000000000..0d5ecdcef5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_lit.rs
@@ -0,0 +1,331 @@
+#![allow(
+ clippy::float_cmp,
+ clippy::needless_raw_string_hashes,
+ clippy::non_ascii_literal,
+ clippy::single_match_else,
+ clippy::uninlined_format_args
+)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
+use quote::ToTokens;
+use std::ffi::CStr;
+use std::str::FromStr;
+use syn::{Lit, LitFloat, LitInt, LitStr};
+
+#[track_caller]
+fn lit(s: &str) -> Lit {
+ let mut tokens = TokenStream::from_str(s).unwrap().into_iter();
+ match tokens.next().unwrap() {
+ TokenTree::Literal(lit) => {
+ assert!(tokens.next().is_none());
+ Lit::new(lit)
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+}
+
+#[test]
+fn strings() {
+ #[track_caller]
+ fn test_string(s: &str, value: &str) {
+ let s = s.trim();
+ match lit(s) {
+ Lit::Str(lit) => {
+ assert_eq!(lit.value(), value);
+ let again = lit.into_token_stream().to_string();
+ if again != s {
+ test_string(&again, value);
+ }
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+ }
+
+ test_string(r#" "" "#, "");
+ test_string(r#" "a" "#, "a");
+ test_string(r#" "\n" "#, "\n");
+ test_string(r#" "\r" "#, "\r");
+ test_string(r#" "\t" "#, "\t");
+ test_string(r#" "🐕" "#, "🐕"); // NOTE: This is an emoji
+ test_string(r#" "\"" "#, "\"");
+ test_string(r#" "'" "#, "'");
+ test_string(r#" "\u{1F415}" "#, "\u{1F415}");
+ test_string(r#" "\u{1_2__3_}" "#, "\u{123}");
+ test_string(
+ "\"contains\nnewlines\\\nescaped newlines\"",
+ "contains\nnewlinesescaped newlines",
+ );
+ test_string(
+ "\"escaped newline\\\n \x0C unsupported whitespace\"",
+ "escaped newline\x0C unsupported whitespace",
+ );
+ test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
+ test_string("\"...\"q", "...");
+ test_string("r\"...\"q", "...");
+ test_string("r##\"...\"##q", "...");
+}
+
+#[test]
+fn byte_strings() {
+ #[track_caller]
+ fn test_byte_string(s: &str, value: &[u8]) {
+ let s = s.trim();
+ match lit(s) {
+ Lit::ByteStr(lit) => {
+ assert_eq!(lit.value(), value);
+ let again = lit.into_token_stream().to_string();
+ if again != s {
+ test_byte_string(&again, value);
+ }
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+ }
+
+ test_byte_string(r#" b"" "#, b"");
+ test_byte_string(r#" b"a" "#, b"a");
+ test_byte_string(r#" b"\n" "#, b"\n");
+ test_byte_string(r#" b"\r" "#, b"\r");
+ test_byte_string(r#" b"\t" "#, b"\t");
+ test_byte_string(r#" b"\"" "#, b"\"");
+ test_byte_string(r#" b"'" "#, b"'");
+ test_byte_string(
+ "b\"contains\nnewlines\\\nescaped newlines\"",
+ b"contains\nnewlinesescaped newlines",
+ );
+ test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
+ test_byte_string("b\"...\"q", b"...");
+ test_byte_string("br\"...\"q", b"...");
+ test_byte_string("br##\"...\"##q", b"...");
+}
+
+#[test]
+fn c_strings() {
+ #[track_caller]
+ fn test_c_string(s: &str, value: &CStr) {
+ let s = s.trim();
+ match lit(s) {
+ Lit::CStr(lit) => {
+ assert_eq!(*lit.value(), *value);
+ let again = lit.into_token_stream().to_string();
+ if again != s {
+ test_c_string(&again, value);
+ }
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+ }
+
+ test_c_string(r#" c"" "#, c"");
+ test_c_string(r#" c"a" "#, c"a");
+ test_c_string(r#" c"\n" "#, c"\n");
+ test_c_string(r#" c"\r" "#, c"\r");
+ test_c_string(r#" c"\t" "#, c"\t");
+ test_c_string(r#" c"\\" "#, c"\\");
+ test_c_string(r#" c"\'" "#, c"'");
+ test_c_string(r#" c"\"" "#, c"\"");
+ test_c_string(
+ "c\"contains\nnewlines\\\nescaped newlines\"",
+ c"contains\nnewlinesescaped newlines",
+ );
+ test_c_string("cr\"raw\nstring\\\nhere\"", c"raw\nstring\\\nhere");
+ test_c_string("c\"...\"q", c"...");
+ test_c_string("cr\"...\"", c"...");
+ test_c_string("cr##\"...\"##", c"...");
+ test_c_string(
+ r#" c"hello\x80我叫\u{1F980}" "#, // from the RFC
+ c"hello\x80我叫\u{1F980}",
+ );
+}
+
+#[test]
+fn bytes() {
+ #[track_caller]
+ fn test_byte(s: &str, value: u8) {
+ let s = s.trim();
+ match lit(s) {
+ Lit::Byte(lit) => {
+ assert_eq!(lit.value(), value);
+ let again = lit.into_token_stream().to_string();
+ assert_eq!(again, s);
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+ }
+
+ test_byte(r#" b'a' "#, b'a');
+ test_byte(r#" b'\n' "#, b'\n');
+ test_byte(r#" b'\r' "#, b'\r');
+ test_byte(r#" b'\t' "#, b'\t');
+ test_byte(r#" b'\'' "#, b'\'');
+ test_byte(r#" b'"' "#, b'"');
+ test_byte(r#" b'a'q "#, b'a');
+}
+
+#[test]
+fn chars() {
+ #[track_caller]
+ fn test_char(s: &str, value: char) {
+ let s = s.trim();
+ match lit(s) {
+ Lit::Char(lit) => {
+ assert_eq!(lit.value(), value);
+ let again = lit.into_token_stream().to_string();
+ if again != s {
+ test_char(&again, value);
+ }
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+ }
+
+ test_char(r#" 'a' "#, 'a');
+ test_char(r#" '\n' "#, '\n');
+ test_char(r#" '\r' "#, '\r');
+ test_char(r#" '\t' "#, '\t');
+ test_char(r#" '🐕' "#, '🐕'); // NOTE: This is an emoji
+ test_char(r#" '\'' "#, '\'');
+ test_char(r#" '"' "#, '"');
+ test_char(r#" '\u{1F415}' "#, '\u{1F415}');
+ test_char(r#" 'a'q "#, 'a');
+}
+
+#[test]
+fn ints() {
+ #[track_caller]
+ fn test_int(s: &str, value: u64, suffix: &str) {
+ match lit(s) {
+ Lit::Int(lit) => {
+ assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
+ assert_eq!(lit.suffix(), suffix);
+ let again = lit.into_token_stream().to_string();
+ if again != s {
+ test_int(&again, value, suffix);
+ }
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+ }
+
+ test_int("5", 5, "");
+ test_int("5u32", 5, "u32");
+ test_int("0E", 0, "E");
+ test_int("0ECMA", 0, "ECMA");
+ test_int("0o0A", 0, "A");
+ test_int("5_0", 50, "");
+ test_int("5_____0_____", 50, "");
+ test_int("0x7f", 127, "");
+ test_int("0x7F", 127, "");
+ test_int("0b1001", 9, "");
+ test_int("0o73", 59, "");
+ test_int("0x7Fu8", 127, "u8");
+ test_int("0b1001i8", 9, "i8");
+ test_int("0o73u32", 59, "u32");
+ test_int("0x__7___f_", 127, "");
+ test_int("0x__7___F_", 127, "");
+ test_int("0b_1_0__01", 9, "");
+ test_int("0o_7__3", 59, "");
+ test_int("0x_7F__u8", 127, "u8");
+ test_int("0b__10__0_1i8", 9, "i8");
+ test_int("0o__7__________________3u32", 59, "u32");
+ test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
+}
+
+#[test]
+fn floats() {
+ #[track_caller]
+ fn test_float(s: &str, value: f64, suffix: &str) {
+ match lit(s) {
+ Lit::Float(lit) => {
+ assert_eq!(lit.base10_digits().parse::<f64>().unwrap(), value);
+ assert_eq!(lit.suffix(), suffix);
+ let again = lit.into_token_stream().to_string();
+ if again != s {
+ test_float(&again, value, suffix);
+ }
+ }
+ wrong => panic!("{:?}", wrong),
+ }
+ }
+
+ test_float("5.5", 5.5, "");
+ test_float("5.5E12", 5.5e12, "");
+ test_float("5.5e12", 5.5e12, "");
+ test_float("1.0__3e-12", 1.03e-12, "");
+ test_float("1.03e+12", 1.03e12, "");
+ test_float("9e99e99", 9e99, "e99");
+ test_float("1e_0", 1.0, "");
+ test_float("0.0ECMA", 0.0, "ECMA");
+}
+
+#[test]
+fn negative() {
+ let span = Span::call_site();
+ assert_eq!("-1", LitInt::new("-1", span).to_string());
+ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
+ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
+ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
+ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
+ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
+ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
+ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
+}
+
+#[test]
+fn suffix() {
+ #[track_caller]
+ fn get_suffix(token: &str) -> String {
+ let lit = syn::parse_str::<Lit>(token).unwrap();
+ match lit {
+ Lit::Str(lit) => lit.suffix().to_owned(),
+ Lit::ByteStr(lit) => lit.suffix().to_owned(),
+ Lit::CStr(lit) => lit.suffix().to_owned(),
+ Lit::Byte(lit) => lit.suffix().to_owned(),
+ Lit::Char(lit) => lit.suffix().to_owned(),
+ Lit::Int(lit) => lit.suffix().to_owned(),
+ Lit::Float(lit) => lit.suffix().to_owned(),
+ _ => unimplemented!(),
+ }
+ }
+
+ assert_eq!(get_suffix("\"\"s"), "s");
+ assert_eq!(get_suffix("r\"\"r"), "r");
+ assert_eq!(get_suffix("r#\"\"#r"), "r");
+ assert_eq!(get_suffix("b\"\"b"), "b");
+ assert_eq!(get_suffix("br\"\"br"), "br");
+ assert_eq!(get_suffix("br#\"\"#br"), "br");
+ assert_eq!(get_suffix("c\"\"c"), "c");
+ assert_eq!(get_suffix("cr\"\"cr"), "cr");
+ assert_eq!(get_suffix("cr#\"\"#cr"), "cr");
+ assert_eq!(get_suffix("'c'c"), "c");
+ assert_eq!(get_suffix("b'b'b"), "b");
+ assert_eq!(get_suffix("1i32"), "i32");
+ assert_eq!(get_suffix("1_i32"), "i32");
+ assert_eq!(get_suffix("1.0f32"), "f32");
+ assert_eq!(get_suffix("1.0_f32"), "f32");
+}
+
+#[test]
+fn test_deep_group_empty() {
+ let tokens = TokenStream::from_iter([TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::from_iter([TokenTree::Group(Group::new(
+ Delimiter::None,
+            TokenStream::from_iter([TokenTree::Literal(Literal::string("hi"))]),
+ ))]),
+ ))]);
+
+ snapshot!(tokens as Lit, @r#""hi""# );
+}
+
+#[test]
+fn test_error() {
+ let err = syn::parse_str::<LitStr>("...").unwrap_err();
+ assert_eq!("expected string literal", err.to_string());
+
+ let err = syn::parse_str::<LitStr>("5").unwrap_err();
+ assert_eq!("expected string literal", err.to_string());
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_meta.rs b/rust/hw/char/pl011/vendor/syn/tests/test_meta.rs
new file mode 100644
index 0000000000..ea6093115a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_meta.rs
@@ -0,0 +1,154 @@
+#![allow(
+ clippy::shadow_unrelated,
+ clippy::too_many_lines,
+ clippy::uninlined_format_args
+)]
+
+#[macro_use]
+mod macros;
+
+use syn::{Meta, MetaList, MetaNameValue};
+
+#[test]
+fn test_parse_meta_item_word() {
+ let input = "hello";
+
+ snapshot!(input as Meta, @r###"
+ Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "hello",
+ },
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_parse_meta_name_value() {
+ let input = "foo = 5";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+ MetaNameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ value: Expr::Lit {
+ lit: 5,
+ },
+ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+ Meta::NameValue {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ value: Expr::Lit {
+ lit: 5,
+ },
+ }
+ "###);
+
+ assert_eq!(meta, Meta::NameValue(inner));
+}
+
+#[test]
+fn test_parse_meta_item_list_lit() {
+ let input = "foo(5)";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+ MetaList {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`5`),
+ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`5`),
+ }
+ "###);
+
+ assert_eq!(meta, Meta::List(inner));
+}
+
+#[test]
+fn test_parse_meta_item_multiple() {
+ let input = "foo(word, name = 5, list(name2 = 6), word2)";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+ MetaList {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
+ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+ Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "foo",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
+ }
+ "###);
+
+ assert_eq!(meta, Meta::List(inner));
+}
+
+#[test]
+fn test_parse_path() {
+ let input = "::serde::Serialize";
+ snapshot!(input as Meta, @r###"
+ Meta::Path {
+ leading_colon: Some,
+ segments: [
+ PathSegment {
+ ident: "serde",
+ },
+ Token![::],
+ PathSegment {
+ ident: "Serialize",
+ },
+ ],
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_parse_buffer.rs b/rust/hw/char/pl011/vendor/syn/tests/test_parse_buffer.rs
new file mode 100644
index 0000000000..62abc6d282
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_parse_buffer.rs
@@ -0,0 +1,103 @@
+#![allow(clippy::non_ascii_literal)]
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, TokenStream, TokenTree};
+use std::panic;
+use syn::parse::discouraged::Speculative as _;
+use syn::parse::{Parse, ParseStream, Parser, Result};
+use syn::{parenthesized, Token};
+
+#[test]
+#[should_panic(expected = "fork was not derived from the advancing parse stream")]
+fn smuggled_speculative_cursor_between_sources() {
+ struct BreakRules;
+ impl Parse for BreakRules {
+ fn parse(input1: ParseStream) -> Result<Self> {
+ let nested = |input2: ParseStream| {
+ input1.advance_to(input2);
+ Ok(Self)
+ };
+ nested.parse_str("")
+ }
+ }
+
+ syn::parse_str::<BreakRules>("").unwrap();
+}
+
+#[test]
+#[should_panic(expected = "fork was not derived from the advancing parse stream")]
+fn smuggled_speculative_cursor_between_brackets() {
+ struct BreakRules;
+ impl Parse for BreakRules {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let a;
+ let b;
+ parenthesized!(a in input);
+ parenthesized!(b in input);
+ a.advance_to(&b);
+ Ok(Self)
+ }
+ }
+
+ syn::parse_str::<BreakRules>("()()").unwrap();
+}
+
+#[test]
+#[should_panic(expected = "fork was not derived from the advancing parse stream")]
+fn smuggled_speculative_cursor_into_brackets() {
+ struct BreakRules;
+ impl Parse for BreakRules {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let a;
+ parenthesized!(a in input);
+ input.advance_to(&a);
+ Ok(Self)
+ }
+ }
+
+ syn::parse_str::<BreakRules>("()").unwrap();
+}
+
+#[test]
+fn trailing_empty_none_group() {
+ fn parse(input: ParseStream) -> Result<()> {
+ input.parse::<Token![+]>()?;
+
+ let content;
+ parenthesized!(content in input);
+ content.parse::<Token![+]>()?;
+
+ Ok(())
+ }
+
+ // `+ ( + «∅ ∅» ) «∅ «∅ ∅» ∅»`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+ TokenTree::Group(Group::new(
+ Delimiter::Parenthesis,
+ TokenStream::from_iter([
+ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+                TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
+ ]),
+ )),
+ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
+ TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::from_iter([TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::new(),
+ ))]),
+ )),
+ ]);
+
+ parse.parse2(tokens).unwrap();
+}
+
+#[test]
+fn test_unwind_safe() {
+ fn parse(input: ParseStream) -> Result<Ident> {
+ let thread_result = panic::catch_unwind(|| input.parse());
+ thread_result.unwrap()
+ }
+
+ parse.parse_str("throw").unwrap();
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_parse_quote.rs b/rust/hw/char/pl011/vendor/syn/tests/test_parse_quote.rs
new file mode 100644
index 0000000000..c0e753260a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_parse_quote.rs
@@ -0,0 +1,166 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use syn::punctuated::Punctuated;
+use syn::{parse_quote, Attribute, Field, Lit, Pat, Stmt, Token};
+
+#[test]
+fn test_attribute() {
+ let attr: Attribute = parse_quote!(#[test]);
+ snapshot!(attr, @r###"
+ Attribute {
+ style: AttrStyle::Outer,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "test",
+ },
+ ],
+ },
+ }
+ "###);
+
+ let attr: Attribute = parse_quote!(#![no_std]);
+ snapshot!(attr, @r###"
+ Attribute {
+ style: AttrStyle::Inner,
+ meta: Meta::Path {
+ segments: [
+ PathSegment {
+ ident: "no_std",
+ },
+ ],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_field() {
+ let field: Field = parse_quote!(pub enabled: bool);
+ snapshot!(field, @r###"
+ Field {
+ vis: Visibility::Public,
+ ident: Some("enabled"),
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "bool",
+ },
+ ],
+ },
+ },
+ }
+ "###);
+
+ let field: Field = parse_quote!(primitive::bool);
+ snapshot!(field, @r###"
+ Field {
+ vis: Visibility::Inherited,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "primitive",
+ },
+ Token![::],
+ PathSegment {
+ ident: "bool",
+ },
+ ],
+ },
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_pat() {
+ let pat: Pat = parse_quote!(Some(false) | None);
+ snapshot!(&pat, @r###"
+ Pat::Or {
+ cases: [
+ Pat::TupleStruct {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Some",
+ },
+ ],
+ },
+ elems: [
+ Pat::Lit(ExprLit {
+ lit: Lit::Bool {
+ value: false,
+ },
+ }),
+ ],
+ },
+ Token![|],
+ Pat::Ident {
+ ident: "None",
+ },
+ ],
+ }
+ "###);
+
+ let boxed_pat: Box<Pat> = parse_quote!(Some(false) | None);
+ assert_eq!(*boxed_pat, pat);
+}
+
+#[test]
+fn test_punctuated() {
+ let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true);
+ snapshot!(punctuated, @r###"
+ [
+ Lit::Bool {
+ value: true,
+ },
+ Token![|],
+ Lit::Bool {
+ value: true,
+ },
+ ]
+ "###);
+
+ let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true |);
+ snapshot!(punctuated, @r###"
+ [
+ Lit::Bool {
+ value: true,
+ },
+ Token![|],
+ Lit::Bool {
+ value: true,
+ },
+ Token![|],
+ ]
+ "###);
+}
+
+#[test]
+fn test_vec_stmt() {
+ let stmts: Vec<Stmt> = parse_quote! {
+ let _;
+ true
+ };
+ snapshot!(stmts, @r###"
+ [
+ Stmt::Local {
+ pat: Pat::Wild,
+ },
+ Stmt::Expr(
+ Expr::Lit {
+ lit: Lit::Bool {
+ value: true,
+ },
+ },
+ None,
+ ),
+ ]
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_parse_stream.rs b/rust/hw/char/pl011/vendor/syn/tests/test_parse_stream.rs
new file mode 100644
index 0000000000..a650fc8534
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_parse_stream.rs
@@ -0,0 +1,187 @@
+#![allow(clippy::items_after_statements, clippy::let_underscore_untyped)]
+
+use proc_macro2::{Delimiter, Group, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::quote;
+use syn::ext::IdentExt as _;
+use syn::parse::discouraged::AnyDelimiter;
+use syn::parse::{ParseStream, Parser as _, Result};
+use syn::{parenthesized, token, Ident, Lifetime, Token};
+
+#[test]
+fn test_peek_punct() {
+ let tokens = quote!(+= + =);
+
+ fn assert(input: ParseStream) -> Result<()> {
+ assert!(input.peek(Token![+]));
+ assert!(input.peek(Token![+=]));
+
+ let _: Token![+] = input.parse()?;
+
+ assert!(input.peek(Token![=]));
+ assert!(!input.peek(Token![==]));
+ assert!(!input.peek(Token![+]));
+
+ let _: Token![=] = input.parse()?;
+
+ assert!(input.peek(Token![+]));
+ assert!(!input.peek(Token![+=]));
+
+ let _: Token![+] = input.parse()?;
+ let _: Token![=] = input.parse()?;
+ Ok(())
+ }
+
+ assert.parse2(tokens).unwrap();
+}
+
+#[test]
+fn test_peek_lifetime() {
+ // 'static ;
+ let tokens = TokenStream::from_iter([
+ TokenTree::Punct(Punct::new('\'', Spacing::Joint)),
+ TokenTree::Ident(Ident::new("static", Span::call_site())),
+ TokenTree::Punct(Punct::new(';', Spacing::Alone)),
+ ]);
+
+ fn assert(input: ParseStream) -> Result<()> {
+ assert!(input.peek(Lifetime));
+ assert!(input.peek2(Token![;]));
+ assert!(!input.peek2(Token![static]));
+
+ let _: Lifetime = input.parse()?;
+
+ assert!(input.peek(Token![;]));
+
+ let _: Token![;] = input.parse()?;
+ Ok(())
+ }
+
+ assert.parse2(tokens).unwrap();
+}
+
+#[test]
+fn test_peek_not_lifetime() {
+ // ' static
+ let tokens = TokenStream::from_iter([
+ TokenTree::Punct(Punct::new('\'', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("static", Span::call_site())),
+ ]);
+
+ fn assert(input: ParseStream) -> Result<()> {
+ assert!(!input.peek(Lifetime));
+ assert!(input.parse::<Option<Punct>>()?.is_none());
+
+ let _: TokenTree = input.parse()?;
+
+ assert!(input.peek(Token![static]));
+
+ let _: Token![static] = input.parse()?;
+ Ok(())
+ }
+
+ assert.parse2(tokens).unwrap();
+}
+
+#[test]
+fn test_peek_ident() {
+ let tokens = quote!(static var);
+
+ fn assert(input: ParseStream) -> Result<()> {
+ assert!(!input.peek(Ident));
+ assert!(input.peek(Ident::peek_any));
+ assert!(input.peek(Token![static]));
+
+ let _: Token![static] = input.parse()?;
+
+ assert!(input.peek(Ident));
+ assert!(input.peek(Ident::peek_any));
+
+ let _: Ident = input.parse()?;
+ Ok(())
+ }
+
+ assert.parse2(tokens).unwrap();
+}
+
+#[test]
+fn test_peek_groups() {
+ // pub ( :: ) «∅ ! = ∅» static
+ let tokens = TokenStream::from_iter([
+ TokenTree::Ident(Ident::new("pub", Span::call_site())),
+ TokenTree::Group(Group::new(
+ Delimiter::Parenthesis,
+ TokenStream::from_iter([
+ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ ]),
+ )),
+ TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::from_iter([
+ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
+ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+ ]),
+ )),
+ TokenTree::Ident(Ident::new("static", Span::call_site())),
+ ]);
+
+ fn assert(input: ParseStream) -> Result<()> {
+ assert!(input.peek2(token::Paren));
+ assert!(input.peek3(token::Group));
+ assert!(input.peek3(Token![!]));
+
+ let _: Token![pub] = input.parse()?;
+
+ assert!(input.peek(token::Paren));
+ assert!(!input.peek(Token![::]));
+ assert!(!input.peek2(Token![::]));
+ assert!(input.peek2(Token![!]));
+ assert!(input.peek2(token::Group));
+ assert!(input.peek3(Token![=]));
+ assert!(!input.peek3(Token![static]));
+
+ let content;
+ parenthesized!(content in input);
+
+ assert!(content.peek(Token![::]));
+ assert!(content.peek2(Token![:]));
+ assert!(!content.peek3(token::Group));
+ assert!(!content.peek3(Token![!]));
+
+ assert!(input.peek(token::Group));
+ assert!(input.peek(Token![!]));
+
+ let _: Token![::] = content.parse()?;
+
+ assert!(input.peek(token::Group));
+ assert!(input.peek(Token![!]));
+ assert!(input.peek2(Token![=]));
+ assert!(input.peek3(Token![static]));
+ assert!(!input.peek2(Token![static]));
+
+ let implicit = input.fork();
+ let explicit = input.fork();
+
+ let _: Token![!] = implicit.parse()?;
+ assert!(implicit.peek(Token![=]));
+ assert!(implicit.peek2(Token![static]));
+ let _: Token![=] = implicit.parse()?;
+ assert!(implicit.peek(Token![static]));
+
+ let (delimiter, _span, grouped) = explicit.parse_any_delimiter()?;
+ assert_eq!(delimiter, Delimiter::None);
+ assert!(grouped.peek(Token![!]));
+ assert!(grouped.peek2(Token![=]));
+ assert!(!grouped.peek3(Token![static]));
+ let _: Token![!] = grouped.parse()?;
+ assert!(grouped.peek(Token![=]));
+ assert!(!grouped.peek2(Token![static]));
+ let _: Token![=] = grouped.parse()?;
+ assert!(!grouped.peek(Token![static]));
+
+ let _: TokenStream = input.parse()?;
+ Ok(())
+ }
+
+ assert.parse2(tokens).unwrap();
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_pat.rs b/rust/hw/char/pl011/vendor/syn/tests/test_pat.rs
new file mode 100644
index 0000000000..3d13385fd9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_pat.rs
@@ -0,0 +1,152 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
+use quote::{quote, ToTokens as _};
+use syn::parse::Parser;
+use syn::punctuated::Punctuated;
+use syn::{parse_quote, token, Item, Pat, PatTuple, Stmt, Token};
+
+#[test]
+fn test_pat_ident() {
+ match Pat::parse_single.parse2(quote!(self)).unwrap() {
+ Pat::Ident(_) => (),
+ value => panic!("expected PatIdent, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_pat_path() {
+ match Pat::parse_single.parse2(quote!(self::CONST)).unwrap() {
+ Pat::Path(_) => (),
+ value => panic!("expected PatPath, got {:?}", value),
+ }
+}
+
+#[test]
+fn test_leading_vert() {
+    // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
+
+ syn::parse_str::<Item>("fn f() {}").unwrap();
+ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
+ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
+
+ syn::parse_str::<Stmt>("let | () = ();").unwrap_err();
+ syn::parse_str::<Stmt>("let (| A): E;").unwrap();
+ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
+ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap();
+ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap();
+ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
+ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap();
+ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
+ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap();
+ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
+}
+
+#[test]
+fn test_group() {
+ let group = Group::new(Delimiter::None, quote!(Some(_)));
+ let tokens = TokenStream::from_iter([TokenTree::Group(group)]);
+ let pat = Pat::parse_single.parse2(tokens).unwrap();
+
+ snapshot!(pat, @r###"
+ Pat::TupleStruct {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Some",
+ },
+ ],
+ },
+ elems: [
+ Pat::Wild,
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_ranges() {
+ Pat::parse_single.parse_str("..").unwrap();
+ Pat::parse_single.parse_str("..hi").unwrap();
+ Pat::parse_single.parse_str("lo..").unwrap();
+ Pat::parse_single.parse_str("lo..hi").unwrap();
+
+ Pat::parse_single.parse_str("..=").unwrap_err();
+ Pat::parse_single.parse_str("..=hi").unwrap();
+ Pat::parse_single.parse_str("lo..=").unwrap_err();
+ Pat::parse_single.parse_str("lo..=hi").unwrap();
+
+ Pat::parse_single.parse_str("...").unwrap_err();
+ Pat::parse_single.parse_str("...hi").unwrap_err();
+ Pat::parse_single.parse_str("lo...").unwrap_err();
+ Pat::parse_single.parse_str("lo...hi").unwrap();
+
+ Pat::parse_single.parse_str("[lo..]").unwrap_err();
+ Pat::parse_single.parse_str("[..=hi]").unwrap_err();
+ Pat::parse_single.parse_str("[(lo..)]").unwrap();
+ Pat::parse_single.parse_str("[(..=hi)]").unwrap();
+ Pat::parse_single.parse_str("[lo..=hi]").unwrap();
+
+ Pat::parse_single.parse_str("[_, lo.., _]").unwrap_err();
+ Pat::parse_single.parse_str("[_, ..=hi, _]").unwrap_err();
+ Pat::parse_single.parse_str("[_, (lo..), _]").unwrap();
+ Pat::parse_single.parse_str("[_, (..=hi), _]").unwrap();
+ Pat::parse_single.parse_str("[_, lo..=hi, _]").unwrap();
+}
+
+#[test]
+fn test_tuple_comma() {
+ let mut expr = PatTuple {
+ attrs: Vec::new(),
+ paren_token: token::Paren::default(),
+ elems: Punctuated::new(),
+ };
+ snapshot!(expr.to_token_stream() as Pat, @"Pat::Tuple");
+
+ expr.elems.push_value(parse_quote!(_));
+ // Must not parse to Pat::Paren
+ snapshot!(expr.to_token_stream() as Pat, @r###"
+ Pat::Tuple {
+ elems: [
+ Pat::Wild,
+ Token![,],
+ ],
+ }
+ "###);
+
+ expr.elems.push_punct(<Token![,]>::default());
+ snapshot!(expr.to_token_stream() as Pat, @r###"
+ Pat::Tuple {
+ elems: [
+ Pat::Wild,
+ Token![,],
+ ],
+ }
+ "###);
+
+ expr.elems.push_value(parse_quote!(_));
+ snapshot!(expr.to_token_stream() as Pat, @r###"
+ Pat::Tuple {
+ elems: [
+ Pat::Wild,
+ Token![,],
+ Pat::Wild,
+ ],
+ }
+ "###);
+
+ expr.elems.push_punct(<Token![,]>::default());
+ snapshot!(expr.to_token_stream() as Pat, @r###"
+ Pat::Tuple {
+ elems: [
+ Pat::Wild,
+ Token![,],
+ Pat::Wild,
+ Token![,],
+ ],
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_path.rs b/rust/hw/char/pl011/vendor/syn/tests/test_path.rs
new file mode 100644
index 0000000000..628b9f853a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_path.rs
@@ -0,0 +1,130 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::{quote, ToTokens};
+use syn::{parse_quote, Expr, Type, TypePath};
+
+#[test]
+fn parse_interpolated_leading_component() {
+ // mimics the token stream corresponding to `$mod::rest`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
+ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("rest", Span::call_site())),
+ ]);
+
+ snapshot!(tokens.clone() as Expr, @r###"
+ Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "first",
+ },
+ Token![::],
+ PathSegment {
+ ident: "rest",
+ },
+ ],
+ },
+ }
+ "###);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "first",
+ },
+ Token![::],
+ PathSegment {
+ ident: "rest",
+ },
+ ],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn print_incomplete_qpath() {
+ // qpath with `as` token
+ let mut ty: TypePath = parse_quote!(<Self as A>::Q);
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`< Self as A > :: Q`)
+ "###);
+ assert!(ty.path.segments.pop().is_some());
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`< Self as A > ::`)
+ "###);
+ assert!(ty.path.segments.pop().is_some());
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`< Self >`)
+ "###);
+ assert!(ty.path.segments.pop().is_none());
+
+ // qpath without `as` token
+ let mut ty: TypePath = parse_quote!(<Self>::A::B);
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`< Self > :: A :: B`)
+ "###);
+ assert!(ty.path.segments.pop().is_some());
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`< Self > :: A ::`)
+ "###);
+ assert!(ty.path.segments.pop().is_some());
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`< Self > ::`)
+ "###);
+ assert!(ty.path.segments.pop().is_none());
+
+ // normal path
+ let mut ty: TypePath = parse_quote!(Self::A::B);
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`Self :: A :: B`)
+ "###);
+ assert!(ty.path.segments.pop().is_some());
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`Self :: A ::`)
+ "###);
+ assert!(ty.path.segments.pop().is_some());
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(`Self ::`)
+ "###);
+ assert!(ty.path.segments.pop().is_some());
+ snapshot!(ty.to_token_stream(), @r###"
+ TokenStream(``)
+ "###);
+ assert!(ty.path.segments.pop().is_none());
+}
+
+#[test]
+fn parse_parenthesized_path_arguments_with_disambiguator() {
+ #[rustfmt::skip]
+ let tokens = quote!(dyn FnOnce::() -> !);
+ snapshot!(tokens as Type, @r###"
+ Type::TraitObject {
+ dyn_token: Some,
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "FnOnce",
+ arguments: PathArguments::Parenthesized {
+ output: ReturnType::Type(
+ Type::Never,
+ ),
+ },
+ },
+ ],
+ },
+ }),
+ ],
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_precedence.rs b/rust/hw/char/pl011/vendor/syn/tests/test_precedence.rs
new file mode 100644
index 0000000000..02b87f37d8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_precedence.rs
@@ -0,0 +1,537 @@
+// This test does the following for every file in the rust-lang/rust repo:
+//
+// 1. Parse the file using syn into a syn::File.
+// 2. Extract every syn::Expr from the file.
+// 3. Print each expr to a string of source code.
+// 4. Parse the source code using librustc_parse into a rustc_ast::Expr.
+// 5. For both the syn::Expr and rustc_ast::Expr, crawl the syntax tree to
+// insert parentheses surrounding every subexpression.
+// 6. Serialize the fully parenthesized syn::Expr to a string of source code.
+// 7. Parse the fully parenthesized source code using librustc_parse.
+// 8. Compare the rustc_ast::Expr resulting from parenthesizing using rustc data
+// structures vs syn data structures, ignoring spans. If they agree, rustc's
+// parser and syn's parser have identical handling of expression precedence.
+
+#![cfg(not(syn_disable_nightly_tests))]
+#![cfg(not(miri))]
+#![recursion_limit = "1024"]
+#![feature(rustc_private)]
+#![allow(
+ clippy::blocks_in_conditions,
+ clippy::doc_markdown,
+ clippy::explicit_deref_methods,
+ clippy::let_underscore_untyped,
+ clippy::manual_assert,
+ clippy::manual_let_else,
+ clippy::match_like_matches_macro,
+ clippy::match_wildcard_for_single_variants,
+ clippy::too_many_lines,
+ clippy::uninlined_format_args
+)]
+
+extern crate rustc_ast;
+extern crate rustc_ast_pretty;
+extern crate rustc_data_structures;
+extern crate rustc_driver;
+extern crate rustc_span;
+extern crate smallvec;
+extern crate thin_vec;
+
+use crate::common::eq::SpanlessEq;
+use crate::common::parse;
+use quote::ToTokens;
+use rustc_ast::ast;
+use rustc_ast::ptr::P;
+use rustc_ast_pretty::pprust;
+use rustc_span::edition::Edition;
+use std::fs;
+use std::path::Path;
+use std::process;
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+#[macro_use]
+mod macros;
+
+mod common;
+mod repo;
+
+#[test]
+fn test_rustc_precedence() {
+ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+ panic!("skipping all precedence tests");
+ }
+
+ let passed = AtomicUsize::new(0);
+ let failed = AtomicUsize::new(0);
+
+ repo::for_each_rust_file(|path| {
+ let content = fs::read_to_string(path).unwrap();
+
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
+ let edition = repo::edition(path).parse().unwrap();
+ let exprs = collect_exprs(file);
+                let (l_passed, l_failed) = test_expressions(path, edition, exprs);
+ errorf!(
+ "=== {}: {} passed | {} failed\n",
+ path.display(),
+ l_passed,
+ l_failed,
+ );
+ (l_passed, l_failed)
+ }
+ Err(msg) => {
+                errorf!("\nFAIL {} - syn failed to parse: {}\n", path.display(), msg);
+ (0, 1)
+ }
+ };
+
+ passed.fetch_add(l_passed, Ordering::Relaxed);
+ let prev_failed = failed.fetch_add(l_failed, Ordering::Relaxed);
+
+ if prev_failed + l_failed >= abort_after {
+ process::exit(1);
+ }
+ });
+
+ let passed = passed.load(Ordering::Relaxed);
+ let failed = failed.load(Ordering::Relaxed);
+
+ errorf!("\n===== Precedence Test Results =====\n");
+ errorf!("{} passed | {} failed\n", passed, failed);
+
+ if failed > 0 {
+ panic!("{} failures", failed);
+ }
+}
+
+fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
+ let mut passed = 0;
+ let mut failed = 0;
+
+ rustc_span::create_session_if_not_set_then(edition, |_| {
+ for expr in exprs {
+ let source_code = expr.to_token_stream().to_string();
+ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
+ e
+ } else {
+ failed += 1;
+ errorf!(
+ "\nFAIL {} - librustc failed to parse original\n",
+ path.display(),
+ );
+ continue;
+ };
+
+ let syn_parenthesized_code =
+ syn_parenthesize(expr.clone()).to_token_stream().to_string();
+ let syn_ast = if let Some(e) = parse::librustc_expr(&syn_parenthesized_code) {
+ e
+ } else {
+ failed += 1;
+ errorf!(
+ "\nFAIL {} - librustc failed to parse parenthesized\n",
+ path.display(),
+ );
+ continue;
+ };
+
+ if !SpanlessEq::eq(&syn_ast, &librustc_ast) {
+ failed += 1;
+ let syn_pretty = pprust::expr_to_string(&syn_ast);
+ let librustc_pretty = pprust::expr_to_string(&librustc_ast);
+ errorf!(
+ "\nFAIL {}\n{}\nsyn != rustc\n{}\n",
+ path.display(),
+ syn_pretty,
+ librustc_pretty,
+ );
+ continue;
+ }
+
+ let expr_invisible = make_parens_invisible(expr);
+ let Ok(reparsed_expr_invisible) = syn::parse2(expr_invisible.to_token_stream()) else {
+ failed += 1;
+ errorf!(
+ "\nFAIL {} - syn failed to parse invisible
delimiters\n{}\n",
+ path.display(),
+ source_code,
+ );
+ continue;
+ };
+ if expr_invisible != reparsed_expr_invisible {
+ failed += 1;
+ errorf!(
+ "\nFAIL {} - mismatch after parsing invisible
delimiters\n{}\n",
+ path.display(),
+ source_code,
+ );
+ continue;
+ }
+
+ passed += 1;
+ }
+ });
+
+ (passed, failed)
+}
+
+fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+ parse::librustc_expr(input).map(librustc_parenthesize)
+}
+
+fn librustc_parenthesize(mut librustc_expr: P<ast::Expr>) -> P<ast::Expr> {
+ use rustc_ast::ast::{
+ AssocItem, AssocItemKind, Attribute, BinOpKind, Block, BorrowKind, BoundConstness, Expr,
+ ExprField, ExprKind, GenericArg, GenericBound, Local, LocalKind, Pat, Stmt, StmtKind,
+ StructExpr, StructRest, TraitBoundModifiers, Ty,
+ };
+ use rustc_ast::mut_visit::{noop_flat_map_item, MutVisitor};
+ use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
+ use rustc_span::DUMMY_SP;
+ use smallvec::SmallVec;
+ use std::mem;
+ use std::ops::DerefMut;
+ use thin_vec::ThinVec;
+
+ struct FullyParenthesize;
+
+ fn contains_let_chain(expr: &Expr) -> bool {
+ match &expr.kind {
+ ExprKind::Let(..) => true,
+ ExprKind::Binary(binop, left, right) => {
+ binop.node == BinOpKind::And
+ && (contains_let_chain(left) || contains_let_chain(right))
+ }
+ _ => false,
+ }
+ }
+
+ fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> {
+ if f.is_shorthand {
+ noop_visit_expr(&mut f.expr, vis);
+ } else {
+ vis.visit_expr(&mut f.expr);
+ }
+ vec![f]
+ }
+
+ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
+ let kind = match stmt.kind {
+ // Don't wrap toplevel expressions in statements.
+ StmtKind::Expr(mut e) => {
+ noop_visit_expr(&mut e, vis);
+ StmtKind::Expr(e)
+ }
+ StmtKind::Semi(mut e) => {
+ noop_visit_expr(&mut e, vis);
+ StmtKind::Semi(e)
+ }
+ s => s,
+ };
+
+ vec![Stmt { kind, ..stmt }]
+ }
+
+ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
+ match &mut e.kind {
+ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
+ ExprKind::Struct(expr) => {
+ let StructExpr {
+ qself,
+ path,
+ fields,
+ rest,
+ } = expr.deref_mut();
+ vis.visit_qself(qself);
+ vis.visit_path(path);
+ fields.flat_map_in_place(|field| flat_map_field(field, vis));
+ if let StructRest::Base(rest) = rest {
+ vis.visit_expr(rest);
+ }
+ }
+ _ => rustc_ast::mut_visit::noop_visit_expr(e, vis),
+ }
+ }
+
+ impl MutVisitor for FullyParenthesize {
+ fn visit_expr(&mut self, e: &mut P<Expr>) {
+ noop_visit_expr(e, self);
+ match e.kind {
+ ExprKind::Block(..) | ExprKind::If(..) | ExprKind::Let(..) => {}
+ ExprKind::Binary(..) if contains_let_chain(e) => {}
+ _ => {
+ let inner = mem::replace(
+ e,
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+ kind: ExprKind::Dummy,
+ span: DUMMY_SP,
+ attrs: ThinVec::new(),
+ tokens: None,
+ }),
+ );
+ e.kind = ExprKind::Paren(inner);
+ }
+ }
+ }
+
+ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
+ match arg {
+ GenericArg::Lifetime(_lifetime) => {}
+ GenericArg::Type(arg) => self.visit_ty(arg),
+ // Don't wrap unbraced const generic arg as that's invalid syntax.
+ GenericArg::Const(anon_const) => {
+ if let ExprKind::Block(..) = &mut anon_const.value.kind {
+ noop_visit_expr(&mut anon_const.value, self);
+ }
+ }
+ }
+ }
+
+ fn visit_param_bound(&mut self, bound: &mut GenericBound) {
+ match bound {
+ GenericBound::Trait(
+ _,
+ TraitBoundModifiers {
+ constness: BoundConstness::Maybe(_),
+ ..
+ },
+ ) => {}
+ GenericBound::Trait(ty, _modifier) => self.visit_poly_trait_ref(ty),
+ GenericBound::Outlives(_lifetime) => {}
+ }
+ }
+
+ fn visit_block(&mut self, block: &mut P<Block>) {
+ self.visit_id(&mut block.id);
+ block
+ .stmts
+ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
+ self.visit_span(&mut block.span);
+ }
+
+ fn visit_local(&mut self, local: &mut P<Local>) {
+ match &mut local.kind {
+ LocalKind::Decl => {}
+ LocalKind::Init(init) => {
+ self.visit_expr(init);
+ }
+ LocalKind::InitElse(init, els) => {
+ self.visit_expr(init);
+ self.visit_block(els);
+ }
+ }
+ }
+
+ fn flat_map_trait_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
+ match &item.kind {
+ AssocItemKind::Const(const_item)
+ if !const_item.generics.params.is_empty()
+ || !const_item.generics.where_clause.predicates.is_empty() =>
+ {
+ SmallVec::from([item])
+ }
+ _ => noop_flat_map_item(item, self),
+ }
+ }
+
+ fn flat_map_impl_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
+ match &item.kind {
+ AssocItemKind::Const(const_item)
+ if !const_item.generics.params.is_empty()
+ || !const_item.generics.where_clause.predicates.is_empty() =>
+ {
+ SmallVec::from([item])
+ }
+ _ => noop_flat_map_item(item, self),
+ }
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+ // types yet. We'll look into comparing those in the future. For now
+ // focus on expressions appearing in other places.
+ fn visit_pat(&mut self, pat: &mut P<Pat>) {
+ let _ = pat;
+ }
+
+ fn visit_ty(&mut self, ty: &mut P<Ty>) {
+ let _ = ty;
+ }
+
+ fn visit_attribute(&mut self, attr: &mut Attribute) {
+ let _ = attr;
+ }
+ }
+
+ let mut folder = FullyParenthesize;
+ folder.visit_expr(&mut librustc_expr);
+ librustc_expr
+}
+
+fn syn_parenthesize(syn_expr: syn::Expr) -> syn::Expr {
+ use syn::fold::{fold_expr, fold_generic_argument, Fold};
+ use syn::{token, BinOp, Expr, ExprParen, GenericArgument, MetaNameValue, Pat, Stmt, Type};
+
+ struct FullyParenthesize;
+
+ fn parenthesize(expr: Expr) -> Expr {
+ Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+ expr: Box::new(expr),
+ paren_token: token::Paren::default(),
+ })
+ }
+
+ fn needs_paren(expr: &Expr) -> bool {
+ match expr {
+ Expr::Group(_) => unreachable!(),
+ Expr::If(_) | Expr::Unsafe(_) | Expr::Block(_) | Expr::Let(_) => false,
+ Expr::Binary(_) => !contains_let_chain(expr),
+ _ => true,
+ }
+ }
+
+ fn contains_let_chain(expr: &Expr) -> bool {
+ match expr {
+ Expr::Let(_) => true,
+ Expr::Binary(expr) => {
+ matches!(expr.op, BinOp::And(_))
+ && (contains_let_chain(&expr.left) || contains_let_chain(&expr.right))
+ }
+ _ => false,
+ }
+ }
+
+ impl Fold for FullyParenthesize {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+ let needs_paren = needs_paren(&expr);
+ let folded = fold_expr(self, expr);
+ if needs_paren {
+ parenthesize(folded)
+ } else {
+ folded
+ }
+ }
+
+ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
+ match arg {
+ GenericArgument::Const(arg) => GenericArgument::Const(match arg {
+ Expr::Block(_) => fold_expr(self, arg),
+ // Don't wrap unbraced const generic arg as that's invalid syntax.
+ _ => arg,
+ }),
+ _ => fold_generic_argument(self, arg),
+ }
+ }
+
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ match stmt {
+ // Don't wrap toplevel expressions in statements.
+ Stmt::Expr(Expr::Verbatim(_), Some(_)) => stmt,
+ Stmt::Expr(e, semi) => Stmt::Expr(fold_expr(self, e), semi),
+ s => s,
+ }
+ }
+
+ fn fold_meta_name_value(&mut self, meta: MetaNameValue) -> MetaNameValue {
+ // Don't turn #[p = "..."] into #[p = ("...")].
+ meta
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+ // types yet. We'll look into comparing those in the future. For now
+ // focus on expressions appearing in other places.
+ fn fold_pat(&mut self, pat: Pat) -> Pat {
+ pat
+ }
+
+ fn fold_type(&mut self, ty: Type) -> Type {
+ ty
+ }
+ }
+
+ let mut folder = FullyParenthesize;
+ folder.fold_expr(syn_expr)
+}
+
+fn make_parens_invisible(expr: syn::Expr) -> syn::Expr {
+ use syn::fold::{fold_expr, fold_stmt, Fold};
+ use syn::{token, Expr, ExprGroup, ExprParen, Stmt};
+
+ struct MakeParensInvisible;
+
+ impl Fold for MakeParensInvisible {
+ fn fold_expr(&mut self, mut expr: Expr) -> Expr {
+ if let Expr::Paren(paren) = expr {
+ expr = Expr::Group(ExprGroup {
+ attrs: paren.attrs,
+ group_token: token::Group(paren.paren_token.span.join()),
+ expr: paren.expr,
+ });
+ }
+ fold_expr(self, expr)
+ }
+
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ if let Stmt::Expr(expr @ (Expr::Binary(_) | Expr::Cast(_)), None) = stmt {
+ Stmt::Expr(
+ Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+ paren_token: token::Paren::default(),
+ expr: Box::new(fold_expr(self, expr)),
+ }),
+ None,
+ )
+ } else {
+ fold_stmt(self, stmt)
+ }
+ }
+ }
+
+ let mut folder = MakeParensInvisible;
+ folder.fold_expr(expr)
+}
+
+/// Walk through a crate collecting all expressions we can find in it.
+fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
+ use syn::fold::Fold;
+ use syn::punctuated::Punctuated;
+ use syn::{token, ConstParam, Expr, ExprTuple, Pat, Path};
+
+ struct CollectExprs(Vec<Expr>);
+ impl Fold for CollectExprs {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+ match expr {
+ Expr::Verbatim(_) => {}
+ _ => self.0.push(expr),
+ }
+
+ Expr::Tuple(ExprTuple {
+ attrs: vec![],
+ elems: Punctuated::new(),
+ paren_token: token::Paren::default(),
+ })
+ }
+
+ fn fold_pat(&mut self, pat: Pat) -> Pat {
+ pat
+ }
+
+ fn fold_path(&mut self, path: Path) -> Path {
+ // Skip traversing into const generic path arguments
+ path
+ }
+
+ fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam {
+ const_param
+ }
+ }
+
+ let mut folder = CollectExprs(vec![]);
+ folder.fold_file(file);
+ folder.0
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_receiver.rs b/rust/hw/char/pl011/vendor/syn/tests/test_receiver.rs
new file mode 100644
index 0000000000..8decb555c5
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_receiver.rs
@@ -0,0 +1,321 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use syn::{parse_quote, TraitItemFn};
+
+#[test]
+fn test_by_value() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn by_value(self: Self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_by_mut_value() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn by_mut(mut self: Self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ mutability: Some,
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_by_ref() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn by_ref(self: &Self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ colon_token: Some,
+ ty: Type::Reference {
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_by_box() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn by_box(self: Box<Self>);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Box",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_by_pin() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn by_pin(self: Pin<Self>);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Pin",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_explicit_type() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn explicit_type(self: Pin<MyType>);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ colon_token: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Pin",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "MyType",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_value_shorthand() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn value_shorthand(self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_mut_value_shorthand() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn mut_value_shorthand(mut self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ mutability: Some,
+ ty: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_ref_shorthand() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn ref_shorthand(&self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ reference: Some(None),
+ ty: Type::Reference {
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_ref_shorthand_with_lifetime() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn ref_shorthand(&'a self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ reference: Some(Some(Lifetime {
+ ident: "a",
+ })),
+ ty: Type::Reference {
+ lifetime: Some(Lifetime {
+ ident: "a",
+ }),
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_ref_mut_shorthand() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn ref_mut_shorthand(&mut self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ reference: Some(None),
+ mutability: Some,
+ ty: Type::Reference {
+ mutability: Some,
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_ref_mut_shorthand_with_lifetime() {
+ let TraitItemFn { sig, .. } = parse_quote! {
+ fn ref_mut_shorthand(&'a mut self);
+ };
+ snapshot!(&sig.inputs[0], @r###"
+ FnArg::Receiver(Receiver {
+ reference: Some(Some(Lifetime {
+ ident: "a",
+ })),
+ mutability: Some,
+ ty: Type::Reference {
+ lifetime: Some(Lifetime {
+ ident: "a",
+ }),
+ mutability: Some,
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Self",
+ },
+ ],
+ },
+ },
+ },
+ })
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_round_trip.rs b/rust/hw/char/pl011/vendor/syn/tests/test_round_trip.rs
new file mode 100644
index 0000000000..9b089036d0
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_round_trip.rs
@@ -0,0 +1,253 @@
+#![cfg(not(syn_disable_nightly_tests))]
+#![cfg(not(miri))]
+#![recursion_limit = "1024"]
+#![feature(rustc_private)]
+#![allow(
+ clippy::blocks_in_conditions,
+ clippy::manual_assert,
+ clippy::manual_let_else,
+ clippy::match_like_matches_macro,
+ clippy::uninlined_format_args
+)]
+
+extern crate rustc_ast;
+extern crate rustc_ast_pretty;
+extern crate rustc_data_structures;
+extern crate rustc_driver;
+extern crate rustc_error_messages;
+extern crate rustc_errors;
+extern crate rustc_expand;
+extern crate rustc_parse as parse;
+extern crate rustc_session;
+extern crate rustc_span;
+
+use crate::common::eq::SpanlessEq;
+use quote::quote;
+use rustc_ast::ast::{
+ AngleBracketedArg, AngleBracketedArgs, Crate, GenericArg, GenericParamKind, Generics,
+ WhereClause,
+};
+use rustc_ast::mut_visit::MutVisitor;
+use rustc_ast_pretty::pprust;
+use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
+use rustc_error_messages::{DiagMessage, LazyFallbackBundle};
+use rustc_errors::{translation, Diag, PResult};
+use rustc_session::parse::ParseSess;
+use rustc_span::FileName;
+use std::borrow::Cow;
+use std::fs;
+use std::panic;
+use std::path::Path;
+use std::process;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::time::Instant;
+
+#[macro_use]
+mod macros;
+
+mod common;
+mod repo;
+
+#[test]
+fn test_round_trip() {
+ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+ panic!("skipping all round_trip tests");
+ }
+
+ let failed = AtomicUsize::new(0);
+
+ repo::for_each_rust_file(|path| test(path, &failed, abort_after));
+
+ let failed = failed.load(Ordering::Relaxed);
+ if failed > 0 {
+ panic!("{} failures", failed);
+ }
+}
+
+fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) {
+ let failed = || {
+ let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
+ if prev_failed + 1 >= abort_after {
+ process::exit(1);
+ }
+ };
+
+ let content = fs::read_to_string(path).unwrap();
+
+ let (back, elapsed) = match panic::catch_unwind(|| {
+ let start = Instant::now();
+ let result = syn::parse_file(&content);
+ let elapsed = start.elapsed();
+ result.map(|krate| (quote!(#krate).to_string(), elapsed))
+ }) {
+ Err(_) => {
+ errorf!("=== {}: syn panic\n", path.display());
+ failed();
+ return;
+ }
+ Ok(Err(msg)) => {
+ errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(),
msg);
+ failed();
+ return;
+ }
+ Ok(Ok(result)) => result,
+ };
+
+ let edition = repo::edition(path).parse().unwrap();
+
+ rustc_span::create_session_if_not_set_then(edition, |_| {
+ let equal = match panic::catch_unwind(|| {
+ let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
+ let sess = ParseSess::new(locale_resources);
+ let before = match librustc_parse(content, &sess) {
+ Ok(before) => before,
+ Err(diagnostic) => {
+ errorf!(
+ "=== {}: ignore - librustc failed to parse original
content: {}\n",
+ path.display(),
+ translate_message(&diagnostic),
+ );
+ diagnostic.cancel();
+ return Err(true);
+ }
+ };
+ let after = match librustc_parse(back, &sess) {
+ Ok(after) => after,
+ Err(diagnostic) => {
+ errorf!("=== {}: librustc failed to parse",
path.display());
+ diagnostic.emit();
+ return Err(false);
+ }
+ };
+ Ok((before, after))
+ }) {
+ Err(_) => {
+ errorf!("=== {}: ignoring librustc panic\n", path.display());
+ true
+ }
+ Ok(Err(equal)) => equal,
+ Ok(Ok((mut before, mut after))) => {
+ normalize(&mut before);
+ normalize(&mut after);
+ if SpanlessEq::eq(&before, &after) {
+ errorf!(
+ "=== {}: pass in {}ms\n",
+ path.display(),
+ elapsed.as_secs() * 1000 + u64::from(elapsed.subsec_nanos()) / 1_000_000
+ );
+ true
+ } else {
+ errorf!(
+ "=== {}: FAIL\n{}\n!=\n{}\n",
+ path.display(),
+ pprust::crate_to_string_for_macros(&before),
+ pprust::crate_to_string_for_macros(&after),
+ );
+ false
+ }
+ }
+ };
+ if !equal {
+ failed();
+ }
+ });
+}
+
+fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> {
+ static COUNTER: AtomicUsize = AtomicUsize::new(0);
+ let counter = COUNTER.fetch_add(1, Ordering::Relaxed);
+ let name = FileName::Custom(format!("test_round_trip{}", counter));
+ parse::parse_crate_from_source_str(name, content, sess)
+}
+
+fn translate_message(diagnostic: &Diag) -> Cow<'static, str> {
+ thread_local! {
+ static FLUENT_BUNDLE: LazyFallbackBundle = {
+ let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
+ let with_directionality_markers = false;
+ rustc_error_messages::fallback_fluent_bundle(locale_resources, with_directionality_markers)
+ };
+ }
+
+ let message = &diagnostic.messages[0].0;
+ let args = translation::to_fluent_args(diagnostic.args.iter());
+
+ let (identifier, attr) = match message {
+ DiagMessage::Str(msg) | DiagMessage::Translated(msg) => return msg.clone(),
+ DiagMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
+ };
+
+ FLUENT_BUNDLE.with(|fluent_bundle| {
+ let message = fluent_bundle
+ .get_message(identifier)
+ .expect("missing diagnostic in fluent bundle");
+ let value = match attr {
+ Some(attr) => message
+ .get_attribute(attr)
+ .expect("missing attribute in fluent message")
+ .value(),
+ None => message.value().expect("missing value in fluent message"),
+ };
+
+ let mut err = Vec::new();
+ let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err);
+ assert!(err.is_empty());
+ Cow::Owned(translated.into_owned())
+ })
+}
+
+fn normalize(krate: &mut Crate) {
+ struct NormalizeVisitor;
+
+ impl MutVisitor for NormalizeVisitor {
+ fn visit_angle_bracketed_parameter_data(&mut self, e: &mut AngleBracketedArgs) {
+ #[derive(Ord, PartialOrd, Eq, PartialEq)]
+ enum Group {
+ Lifetimes,
+ TypesAndConsts,
+ Constraints,
+ }
+ e.args.sort_by_key(|arg| match arg {
+ AngleBracketedArg::Arg(arg) => match arg {
+ GenericArg::Lifetime(_) => Group::Lifetimes,
+ GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts,
+ },
+ AngleBracketedArg::Constraint(_) => Group::Constraints,
+ });
+ for arg in &mut e.args {
+ match arg {
+ AngleBracketedArg::Arg(arg) => self.visit_generic_arg(arg),
+ AngleBracketedArg::Constraint(constraint) => self.visit_constraint(constraint),
+ }
+ }
+ }
+
+ fn visit_generics(&mut self, e: &mut Generics) {
+ #[derive(Ord, PartialOrd, Eq, PartialEq)]
+ enum Group {
+ Lifetimes,
+ TypesAndConsts,
+ }
+ e.params.sort_by_key(|param| match param.kind {
+ GenericParamKind::Lifetime => Group::Lifetimes,
+ GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
+ Group::TypesAndConsts
+ }
+ });
+ e.params
+ .flat_map_in_place(|param| self.flat_map_generic_param(param));
+ self.visit_where_clause(&mut e.where_clause);
+ }
+
+ fn visit_where_clause(&mut self, e: &mut WhereClause) {
+ if e.predicates.is_empty() {
+ e.has_where_token = false;
+ }
+ }
+ }
+
+ NormalizeVisitor.visit_crate(krate);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_shebang.rs b/rust/hw/char/pl011/vendor/syn/tests/test_shebang.rs
new file mode 100644
index 0000000000..843916180f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_shebang.rs
@@ -0,0 +1,67 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+#[test]
+fn test_basic() {
+ let content = "#!/usr/bin/env rustx\nfn main() {}";
+ let file = syn::parse_file(content).unwrap();
+ snapshot!(file, @r###"
+ File {
+ shebang: Some("#!/usr/bin/env rustx"),
+ items: [
+ Item::Fn {
+ vis: Visibility::Inherited,
+ sig: Signature {
+ ident: "main",
+ generics: Generics,
+ output: ReturnType::Default,
+ },
+ block: Block {
+ stmts: [],
+ },
+ },
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_comment() {
+ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
+ let file = syn::parse_file(content).unwrap();
+ snapshot!(file, @r###"
+ File {
+ attrs: [
+ Attribute {
+ style: AttrStyle::Inner,
+ meta: Meta::List {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "allow",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`dead_code`),
+ },
+ },
+ ],
+ items: [
+ Item::Fn {
+ vis: Visibility::Inherited,
+ sig: Signature {
+ ident: "main",
+ generics: Generics,
+ output: ReturnType::Default,
+ },
+ block: Block {
+ stmts: [],
+ },
+ },
+ ],
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_size.rs b/rust/hw/char/pl011/vendor/syn/tests/test_size.rs
new file mode 100644
index 0000000000..32e6119c98
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_size.rs
@@ -0,0 +1,36 @@
+// Assumes proc-macro2's "span-locations" feature is off.
+
+#![cfg(target_pointer_width = "64")]
+
+use std::mem;
+use syn::{Expr, Item, Lit, Pat, Type};
+
+#[rustversion::attr(before(2022-11-24), ignore)]
+#[test]
+fn test_expr_size() {
+ assert_eq!(mem::size_of::<Expr>(), 176);
+}
+
+#[rustversion::attr(before(2022-09-09), ignore)]
+#[test]
+fn test_item_size() {
+ assert_eq!(mem::size_of::<Item>(), 352);
+}
+
+#[rustversion::attr(before(2023-04-29), ignore)]
+#[test]
+fn test_type_size() {
+ assert_eq!(mem::size_of::<Type>(), 224);
+}
+
+#[rustversion::attr(before(2023-04-29), ignore)]
+#[test]
+fn test_pat_size() {
+ assert_eq!(mem::size_of::<Pat>(), 184);
+}
+
+#[rustversion::attr(before(2023-12-20), ignore)]
+#[test]
+fn test_lit_size() {
+ assert_eq!(mem::size_of::<Lit>(), 24);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_stmt.rs b/rust/hw/char/pl011/vendor/syn/tests/test_stmt.rs
new file mode 100644
index 0000000000..318e18b069
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_stmt.rs
@@ -0,0 +1,322 @@
+#![allow(
+ clippy::assertions_on_result_states,
+ clippy::non_ascii_literal,
+ clippy::uninlined_format_args
+)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+use quote::{quote, ToTokens as _};
+use syn::parse::Parser as _;
+use syn::{Block, Stmt};
+
+#[test]
+fn test_raw_operator() {
+ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
+
+ snapshot!(stmt, @r###"
+ Stmt::Local {
+ pat: Pat::Wild,
+ init: Some(LocalInit {
+ expr: Expr::Verbatim(`& raw const x`),
+ }),
+ }
+ "###);
+}
+
+#[test]
+fn test_raw_variable() {
+ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
+
+ snapshot!(stmt, @r###"
+ Stmt::Local {
+ pat: Pat::Wild,
+ init: Some(LocalInit {
+ expr: Expr::Reference {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "raw",
+ },
+ ],
+ },
+ },
+ },
+ }),
+ }
+ "###);
+}
+
+#[test]
+fn test_raw_invalid() {
+ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
+}
+
+#[test]
+fn test_none_group() {
+ // «∅ async fn f() {} ∅»
+ let tokens = TokenStream::from_iter([TokenTree::Group(Group::new(
+ Delimiter::None,
+ TokenStream::from_iter([
+ TokenTree::Ident(Ident::new("async", Span::call_site())),
+ TokenTree::Ident(Ident::new("fn", Span::call_site())),
+ TokenTree::Ident(Ident::new("f", Span::call_site())),
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
+ ]),
+ ))]);
+ snapshot!(tokens as Stmt, @r###"
+ Stmt::Item(Item::Fn {
+ vis: Visibility::Inherited,
+ sig: Signature {
+ asyncness: Some,
+ ident: "f",
+ generics: Generics,
+ output: ReturnType::Default,
+ },
+ block: Block {
+ stmts: [],
+ },
+ })
+ "###);
+
+ let tokens = Group::new(Delimiter::None, quote!(let None = None)).to_token_stream();
+ let stmts = Block::parse_within.parse2(tokens).unwrap();
+ snapshot!(stmts, @r###"
+ [
+ Stmt::Expr(
+ Expr::Group {
+ expr: Expr::Let {
+ pat: Pat::Ident {
+ ident: "None",
+ },
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "None",
+ },
+ ],
+ },
+ },
+ },
+ },
+ None,
+ ),
+ ]
+ "###);
+}
+
+#[test]
+fn test_let_dot_dot() {
+ let tokens = quote! {
+ let .. = 10;
+ };
+
+ snapshot!(tokens as Stmt, @r###"
+ Stmt::Local {
+ pat: Pat::Rest,
+ init: Some(LocalInit {
+ expr: Expr::Lit {
+ lit: 10,
+ },
+ }),
+ }
+ "###);
+}
+
+#[test]
+fn test_let_else() {
+ let tokens = quote! {
+ let Some(x) = None else { return 0; };
+ };
+
+ snapshot!(tokens as Stmt, @r###"
+ Stmt::Local {
+ pat: Pat::TupleStruct {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Some",
+ },
+ ],
+ },
+ elems: [
+ Pat::Ident {
+ ident: "x",
+ },
+ ],
+ },
+ init: Some(LocalInit {
+ expr: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "None",
+ },
+ ],
+ },
+ },
+ diverge: Some(Expr::Block {
+ block: Block {
+ stmts: [
+ Stmt::Expr(
+ Expr::Return {
+ expr: Some(Expr::Lit {
+ lit: 0,
+ }),
+ },
+ Some,
+ ),
+ ],
+ },
+ }),
+ }),
+ }
+ "###);
+}
+
+#[test]
+fn test_macros() {
+ let tokens = quote! {
+ fn main() {
+ macro_rules! mac {}
+ thread_local! { static FOO }
+ println!("");
+ vec![]
+ }
+ };
+
+ snapshot!(tokens as Stmt, @r###"
+ Stmt::Item(Item::Fn {
+ vis: Visibility::Inherited,
+ sig: Signature {
+ ident: "main",
+ generics: Generics,
+ output: ReturnType::Default,
+ },
+ block: Block {
+ stmts: [
+ Stmt::Item(Item::Macro {
+ ident: Some("mac"),
+ mac: Macro {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "macro_rules",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Brace,
+ tokens: TokenStream(``),
+ },
+ }),
+ Stmt::Macro {
+ mac: Macro {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "thread_local",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Brace,
+ tokens: TokenStream(`static FOO`),
+ },
+ },
+ Stmt::Macro {
+ mac: Macro {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "println",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Paren,
+ tokens: TokenStream(`""`),
+ },
+ semi_token: Some,
+ },
+ Stmt::Expr(
+ Expr::Macro {
+ mac: Macro {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "vec",
+ },
+ ],
+ },
+ delimiter: MacroDelimiter::Bracket,
+ tokens: TokenStream(``),
+ },
+ },
+ None,
+ ),
+ ],
+ },
+ })
+ "###);
+}
+
+#[test]
+fn test_early_parse_loop() {
+ // The following is an Expr::Loop followed by Expr::Tuple. It is not an
+ // Expr::Call.
+ let tokens = quote! {
+ loop {}
+ ()
+ };
+
+ let stmts = Block::parse_within.parse2(tokens).unwrap();
+
+ snapshot!(stmts, @r###"
+ [
+ Stmt::Expr(
+ Expr::Loop {
+ body: Block {
+ stmts: [],
+ },
+ },
+ None,
+ ),
+ Stmt::Expr(
+ Expr::Tuple,
+ None,
+ ),
+ ]
+ "###);
+
+ let tokens = quote! {
+ 'a: loop {}
+ ()
+ };
+
+ let stmts = Block::parse_within.parse2(tokens).unwrap();
+
+ snapshot!(stmts, @r###"
+ [
+ Stmt::Expr(
+ Expr::Loop {
+ label: Some(Label {
+ name: Lifetime {
+ ident: "a",
+ },
+ }),
+ body: Block {
+ stmts: [],
+ },
+ },
+ None,
+ ),
+ Stmt::Expr(
+ Expr::Tuple,
+ None,
+ ),
+ ]
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_token_trees.rs b/rust/hw/char/pl011/vendor/syn/tests/test_token_trees.rs
new file mode 100644
index 0000000000..f5a067fd23
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_token_trees.rs
@@ -0,0 +1,32 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::TokenStream;
+use quote::quote;
+use syn::Lit;
+
+#[test]
+fn test_struct() {
+ let input = "
+ #[derive(Debug, Clone)]
+ pub struct Item {
+ pub ident: Ident,
+ pub attrs: Vec<Attribute>,
+ }
+ ";
+
+ snapshot!(input as TokenStream, @r###"
+ TokenStream(
+ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
+ )
+ "###);
+}
+
+#[test]
+fn test_literal_mangling() {
+ let code = "0_4";
+ let parsed: Lit = syn::parse_str(code).unwrap();
+ assert_eq!(code, quote!(#parsed).to_string());
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_ty.rs b/rust/hw/char/pl011/vendor/syn/tests/test_ty.rs
new file mode 100644
index 0000000000..f1ced9ea37
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_ty.rs
@@ -0,0 +1,397 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::{quote, ToTokens as _};
+use syn::punctuated::Punctuated;
+use syn::{parse_quote, token, Token, Type, TypeTuple};
+
+#[test]
+fn test_mut_self() {
+ syn::parse_str::<Type>("fn(mut self)").unwrap();
+ syn::parse_str::<Type>("fn(mut self,)").unwrap();
+ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
+ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
+ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
+ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
+}
+
+#[test]
+fn test_macro_variable_type() {
+ // mimics the token stream corresponding to `$ty<T>`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
+ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("T", Span::call_site())),
+ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "ty",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ }
+ "###);
+
+ // mimics the token stream corresponding to `$ty::<T>`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
+ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("T", Span::call_site())),
+ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "ty",
+ arguments: PathArguments::AngleBracketed {
+ colon2_token: Some,
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_group_angle_brackets() {
+ // mimics the token stream corresponding to `Option<$ty>`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Ident(Ident::new("Option", Span::call_site())),
+ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
+ TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
+ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Option",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Group {
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Vec",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "u8",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_group_colons() {
+ // mimics the token stream corresponding to `$ty::Item`
+ let tokens = TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
+ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("Item", Span::call_site())),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Vec",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "u8",
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ Token![::],
+ PathSegment {
+ ident: "Item",
+ },
+ ],
+ },
+ }
+ "###);
+
+ let tokens = TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })),
+ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("Element", Span::call_site())),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ qself: Some(QSelf {
+ ty: Type::Slice {
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ },
+ ],
+ },
+ },
+ },
+ position: 0,
+ }),
+ path: Path {
+ leading_colon: Some,
+ segments: [
+ PathSegment {
+ ident: "Element",
+ },
+ ],
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_trait_object() {
+ let tokens = quote!(dyn for<'a> Trait<'a> + 'static);
+ snapshot!(tokens as Type, @r###"
+ Type::TraitObject {
+ dyn_token: Some,
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ lifetimes: Some(BoundLifetimes {
+ lifetimes: [
+ GenericParam::Lifetime(LifetimeParam {
+ lifetime: Lifetime {
+ ident: "a",
+ },
+ }),
+ ],
+ }),
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Trait",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ GenericArgument::Lifetime(Lifetime {
+ ident: "a",
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ }),
+ Token![+],
+ TypeParamBound::Lifetime {
+ ident: "static",
+ },
+ ],
+ }
+ "###);
+
+ let tokens = quote!(dyn 'a + Trait);
+ snapshot!(tokens as Type, @r###"
+ Type::TraitObject {
+ dyn_token: Some,
+ bounds: [
+ TypeParamBound::Lifetime {
+ ident: "a",
+ },
+ Token![+],
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Trait",
+ },
+ ],
+ },
+ }),
+ ],
+ }
+ "###);
+
+ // None of the following are valid Rust types.
+ syn::parse_str::<Type>("for<'a> dyn Trait<'a>").unwrap_err();
+ syn::parse_str::<Type>("dyn for<'a> 'a + Trait").unwrap_err();
+}
+
+#[test]
+fn test_trailing_plus() {
+ #[rustfmt::skip]
+ let tokens = quote!(impl Trait +);
+ snapshot!(tokens as Type, @r###"
+ Type::ImplTrait {
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Trait",
+ },
+ ],
+ },
+ }),
+ Token![+],
+ ],
+ }
+ "###);
+
+ #[rustfmt::skip]
+ let tokens = quote!(dyn Trait +);
+ snapshot!(tokens as Type, @r###"
+ Type::TraitObject {
+ dyn_token: Some,
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Trait",
+ },
+ ],
+ },
+ }),
+ Token![+],
+ ],
+ }
+ "###);
+
+ #[rustfmt::skip]
+ let tokens = quote!(Trait +);
+ snapshot!(tokens as Type, @r###"
+ Type::TraitObject {
+ bounds: [
+ TypeParamBound::Trait(TraitBound {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "Trait",
+ },
+ ],
+ },
+ }),
+ Token![+],
+ ],
+ }
+ "###);
+}
+
+#[test]
+fn test_tuple_comma() {
+ let mut expr = TypeTuple {
+ paren_token: token::Paren::default(),
+ elems: Punctuated::new(),
+ };
+ snapshot!(expr.to_token_stream() as Type, @"Type::Tuple");
+
+ expr.elems.push_value(parse_quote!(_));
+ // Must not parse to Type::Paren
+ snapshot!(expr.to_token_stream() as Type, @r###"
+ Type::Tuple {
+ elems: [
+ Type::Infer,
+ Token![,],
+ ],
+ }
+ "###);
+
+ expr.elems.push_punct(<Token![,]>::default());
+ snapshot!(expr.to_token_stream() as Type, @r###"
+ Type::Tuple {
+ elems: [
+ Type::Infer,
+ Token![,],
+ ],
+ }
+ "###);
+
+ expr.elems.push_value(parse_quote!(_));
+ snapshot!(expr.to_token_stream() as Type, @r###"
+ Type::Tuple {
+ elems: [
+ Type::Infer,
+ Token![,],
+ Type::Infer,
+ ],
+ }
+ "###);
+
+ expr.elems.push_punct(<Token![,]>::default());
+ snapshot!(expr.to_token_stream() as Type, @r###"
+ Type::Tuple {
+ elems: [
+ Type::Infer,
+ Token![,],
+ Type::Infer,
+ Token![,],
+ ],
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/test_visibility.rs b/rust/hw/char/pl011/vendor/syn/tests/test_visibility.rs
new file mode 100644
index 0000000000..e6e8bd7291
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/test_visibility.rs
@@ -0,0 +1,185 @@
+#![allow(clippy::uninlined_format_args)]
+
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::quote;
+use syn::parse::{Parse, ParseStream};
+use syn::{DeriveInput, Result, Visibility};
+
+#[derive(Debug)]
+struct VisRest {
+ vis: Visibility,
+ rest: TokenStream,
+}
+
+impl Parse for VisRest {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(VisRest {
+ vis: input.parse()?,
+ rest: input.parse()?,
+ })
+ }
+}
+
+macro_rules! assert_vis_parse {
+ ($input:expr, Ok($p:pat)) => {
+ assert_vis_parse!($input, Ok($p) + "");
+ };
+
+ ($input:expr, Ok($p:pat) + $rest:expr) => {
+ let expected = $rest.parse::<TokenStream>().unwrap();
+ let parse: VisRest = syn::parse_str($input).unwrap();
+
+ match parse.vis {
+ $p => {}
+ _ => panic!("expected {}, got {:?}", stringify!($p), parse.vis),
+ }
+
+ // NOTE: Round-trips through `to_string` to avoid potential whitespace
+ // diffs.
+ assert_eq!(parse.rest.to_string(), expected.to_string());
+ };
+
+ ($input:expr, Err) => {
+ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
+ };
+}
+
+#[test]
+fn test_pub() {
+ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
+}
+
+#[test]
+fn test_inherited() {
+ assert_vis_parse!("", Ok(Visibility::Inherited));
+}
+
+#[test]
+fn test_in() {
+ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_pub_crate() {
+ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_pub_self() {
+ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_pub_super() {
+ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
+}
+
+#[test]
+fn test_missing_in() {
+ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
+}
+
+#[test]
+fn test_missing_in_path() {
+ assert_vis_parse!("pub(in)", Err);
+}
+
+#[test]
+fn test_crate_path() {
+ assert_vis_parse!(
+ "pub(crate::A, crate::B)",
+ Ok(Visibility::Public(_)) + "(crate::A, crate::B)"
+ );
+}
+
+#[test]
+fn test_junk_after_in() {
+ assert_vis_parse!("pub(in some::path @@garbage)", Err);
+}
+
+#[test]
+fn test_inherited_vis_named_field() {
+ // mimics `struct S { $vis $field: () }` where $vis is empty
+ let tokens = TokenStream::from_iter([
+ TokenTree::Ident(Ident::new("struct", Span::call_site())),
+ TokenTree::Ident(Ident::new("S", Span::call_site())),
+ TokenTree::Group(Group::new(
+ Delimiter::Brace,
+ TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
+ TokenTree::Group(Group::new(Delimiter::None, quote!(f))),
+ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+ ]),
+ )),
+ ]);
+
+ snapshot!(tokens as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Named {
+ named: [
+ Field {
+ vis: Visibility::Inherited,
+ ident: Some("f"),
+ colon_token: Some,
+ ty: Type::Tuple,
+ },
+ ],
+ },
+ },
+ }
+ "###);
+}
+
+#[test]
+fn test_inherited_vis_unnamed_field() {
+ // mimics `struct S($vis $ty);` where $vis is empty
+ let tokens = TokenStream::from_iter([
+ TokenTree::Ident(Ident::new("struct", Span::call_site())),
+ TokenTree::Ident(Ident::new("S", Span::call_site())),
+ TokenTree::Group(Group::new(
+ Delimiter::Parenthesis,
+ TokenStream::from_iter([
+ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
+ TokenTree::Group(Group::new(Delimiter::None, quote!(str))),
+ ]),
+ )),
+ TokenTree::Punct(Punct::new(';', Spacing::Alone)),
+ ]);
+
+ snapshot!(tokens as DeriveInput, @r###"
+ DeriveInput {
+ vis: Visibility::Inherited,
+ ident: "S",
+ generics: Generics,
+ data: Data::Struct {
+ fields: Fields::Unnamed {
+ unnamed: [
+ Field {
+ vis: Visibility::Inherited,
+ ty: Type::Group {
+ elem: Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "str",
+ },
+ ],
+ },
+ },
+ },
+ },
+ ],
+ },
+ semi_token: Some,
+ },
+ }
+ "###);
+}
diff --git a/rust/hw/char/pl011/vendor/syn/tests/zzz_stable.rs b/rust/hw/char/pl011/vendor/syn/tests/zzz_stable.rs
new file mode 100644
index 0000000000..a1a670d9ed
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/syn/tests/zzz_stable.rs
@@ -0,0 +1,33 @@
+#![cfg(syn_disable_nightly_tests)]
+
+use std::io::{self, Write};
+use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+const MSG: &str = "\
+‖
+‖ WARNING:
+‖ This is not a nightly compiler so not all tests were able to
+‖ run. Syn includes tests that compare Syn's parser against the
+‖ compiler's parser, which requires access to unstable librustc
+‖ data structures and a nightly compiler.
+‖
+";
+
+#[test]
+fn notice() -> io::Result<()> {
+ let header = "WARNING";
+ let index_of_header = MSG.find(header).unwrap();
+ let before = &MSG[..index_of_header];
+ let after = &MSG[index_of_header + header.len()..];
+
+ let mut stderr = StandardStream::stderr(ColorChoice::Auto);
+ stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
+ write!(&mut stderr, "{}", before)?;
+ stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)))?;
+ write!(&mut stderr, "{}", header)?;
+ stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
+ write!(&mut stderr, "{}", after)?;
+ stderr.reset()?;
+
+ Ok(())
+}
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/.cargo-checksum.json b/rust/hw/char/pl011/vendor/unicode-ident/.cargo-checksum.json
new file mode 100644
index 0000000000..9a708d8fed
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"7b10355305359d5feefb120329396a8823ce903cd66612d7d27612d51e6ceced","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","LICENSE-UNICODE":"68f5b9f5ea36881a0942ba02f558e9e1faf76cc09cb165ad801744c61b738844","README.md":"eff1f30712e93cc160101c25bf31738448c284b90636deb3e3a651cb9ad20dd1","benches/xid.rs":"a61f61ecc7d5124c759cdeb55ab74470ab69f2f3ca37613da65f16e0e5e33487","src/lib.rs":"2673969775cff349816e3fb30f62476a802523fe4940482288b75bd747cbe748","src/tables.rs":"ffe8e252eabccf261385865cb781b3d76c9f32f6f9503d00196a30fb92d80b29","tests/compare.rs":"62471ffb157744cac6faae1adafdbdf785349d7eb6dc2ff4b4941c9d618397f9","tests/fst/mod.rs":"69a3aaf59acd8bca962ecc6234be56be8c0934ab79b253162f10eb881523901f","tests/fst/xid_continue.fst":"41fc751514b8bde658544d5fe7e100115d299d41897af855934b9f4ebda9d3a2","tests/fst/xid_start.fst":"ffa5e2bfe7dd5f6738fbe4b7a3e6e2083c9777191c54f8291a80d558ec4e2dd2","tests/roaring/mod.rs":"784f65a48477fab7549620c7843c7ad6da533f69a18abca1172f6acb95045e53","tests/static_size.rs":"4524332c1e424cb987d7cee1f47a98aea9ed7b256303a3828eda5aa1d06da240","tests/tables/mod.rs":"e6949172d10fc4b2431ce7546269bfd4f9146454c8c3e31faf5e5d80c16a8ab6","tests/tables/tables.rs":"011404dab8a3958da6e18a1fe9406c191675e6f49bf30ce813e3d05f582e750b","tests/trie/mod.rs":"d4acbb716bcbaf80660039797f45e138ed8bbd66749fa3b19b1a971574679cc9","tests/trie/trie.rs":"3c1ca56062f1b3ffdf2ae2063d3fee8d362b90082778056181b5c95e2e242ad8"},"package":"3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/Cargo.toml b/rust/hw/char/pl011/vendor/unicode-ident/Cargo.toml
new file mode 100644
index 0000000000..1c40dde818
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/Cargo.toml
@@ -0,0 +1,63 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+rust-version = "1.31"
+name = "unicode-ident"
+version = "1.0.12"
+authors = ["David Tolnay <dtolnay@gmail.com>"]
+description = "Determine whether characters have the XID_Start or XID_Continue
properties according to Unicode Standard Annex #31"
+documentation = "https://docs.rs/unicode-ident"
+readme = "README.md"
+keywords = [
+ "unicode",
+ "xid",
+]
+categories = [
+ "development-tools::procedural-macro-helpers",
+ "no-std",
+ "no-std::no-alloc",
+]
+license = "(MIT OR Apache-2.0) AND Unicode-DFS-2016"
+repository = "https://github.com/dtolnay/unicode-ident"
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[lib]
+doc-scrape-examples = false
+
+[[bench]]
+name = "xid"
+harness = false
+
+[dev-dependencies.criterion]
+version = "0.5"
+default-features = false
+
+[dev-dependencies.fst]
+version = "0.4"
+
+[dev-dependencies.rand]
+version = "0.8"
+features = ["small_rng"]
+
+[dev-dependencies.roaring]
+version = "0.10"
+
+[dev-dependencies.ucd-trie]
+version = "0.1"
+default-features = false
+
+[dev-dependencies.unicode-xid]
+version = "0.2.4"
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-APACHE b/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-APACHE
new file mode 100644
index 0000000000..1b5ec8b78e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-APACHE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-MIT
b/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-MIT
new file mode 100644
index 0000000000..31aa79387f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-UNICODE
b/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-UNICODE
new file mode 100644
index 0000000000..85d0d580d2
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/LICENSE-UNICODE
@@ -0,0 +1,46 @@
+UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE
+
+See Terms of Use <https://www.unicode.org/copyright.html>
+for definitions of Unicode Inc.’s Data Files and Software.
+
+NOTICE TO USER: Carefully read the following legal agreement.
+BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
+DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
+YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
+TERMS AND CONDITIONS OF THIS AGREEMENT.
+IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
+THE DATA FILES OR SOFTWARE.
+
+COPYRIGHT AND PERMISSION NOTICE
+
+Copyright © 1991-2022 Unicode, Inc. All rights reserved.
+Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Unicode data files and any associated documentation
+(the "Data Files") or Unicode software and any associated documentation
+(the "Software") to deal in the Data Files or Software
+without restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, and/or sell copies of
+the Data Files or Software, and to permit persons to whom the Data Files
+or Software are furnished to do so, provided that either
+(a) this copyright and permission notice appear with all copies
+of the Data Files or Software, or
+(b) this copyright and permission notice appear in associated
+Documentation.
+
+THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+Except as contained in this notice, the name of a copyright holder
+shall not be used in advertising or otherwise to promote the sale,
+use or other dealings in these Data Files or Software without prior
+written authorization of the copyright holder.
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/README.md
b/rust/hw/char/pl011/vendor/unicode-ident/README.md
new file mode 100644
index 0000000000..0e9af827f6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/README.md
@@ -0,0 +1,283 @@
+Unicode ident
+=============
+
+[<img alt="github"
src="https://img.shields.io/badge/github-dtolnay/unicode--ident-8da0cb?style=for-the-badge&labelColor=555555&logo=github"
height="20">](https://github.com/dtolnay/unicode-ident)
+[<img alt="crates.io"
src="https://img.shields.io/crates/v/unicode-ident.svg?style=for-the-badge&color=fc8d62&logo=rust"
height="20">](https://crates.io/crates/unicode-ident)
+[<img alt="docs.rs"
src="https://img.shields.io/badge/docs.rs-unicode--ident-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs"
height="20">](https://docs.rs/unicode-ident)
+[<img alt="build status"
src="https://img.shields.io/github/actions/workflow/status/dtolnay/unicode-ident/ci.yml?branch=master&style=for-the-badge"
height="20">](https://github.com/dtolnay/unicode-ident/actions?query=branch%3Amaster)
+
+Implementation of [Unicode Standard Annex #31][tr31] for determining which
+`char` values are valid in programming language identifiers.
+
+[tr31]: https://www.unicode.org/reports/tr31/
+
+This crate is a better optimized implementation of the older `unicode-xid`
+crate. This crate uses less static storage, and is able to classify both ASCII
+and non-ASCII codepoints with better performance, 2–10× faster than
+`unicode-xid`.
+
+<br>
+
+## Comparison of performance
+
+The following table shows a comparison between five Unicode identifier
+implementations.
+
+- `unicode-ident` is this crate;
+- [`unicode-xid`] is a widely used crate run by the "unicode-rs" org;
+- `ucd-trie` and `fst` are two data structures supported by the [`ucd-generate`] tool;
+- [`roaring`] is a Rust implementation of Roaring bitmap.
+
+The *static storage* column shows the total size of `static` tables that the
+crate bakes into your binary, measured in 1000s of bytes.
+
+The remaining columns show the **cost per call** to evaluate whether a single
+`char` has the XID\_Start or XID\_Continue Unicode property, comparing across
+different ratios of ASCII to non-ASCII codepoints in the input data.
+
+[`unicode-xid`]: https://github.com/unicode-rs/unicode-xid
+[`ucd-generate`]: https://github.com/BurntSushi/ucd-generate
+[`roaring`]: https://github.com/RoaringBitmap/roaring-rs
+
+| | static storage | 0% nonascii | 1% | 10% | 100% nonascii |
+|---|---|---|---|---|---|
+| **`unicode-ident`** | 10.1 K | 0.96 ns | 0.95 ns | 1.09 ns | 1.55 ns |
+| **`unicode-xid`** | 11.5 K | 1.88 ns | 2.14 ns | 3.48 ns | 15.63 ns |
+| **`ucd-trie`** | 10.2 K | 1.29 ns | 1.28 ns | 1.36 ns | 2.15 ns |
+| **`fst`** | 139 K | 55.1 ns | 54.9 ns | 53.2 ns | 28.5 ns |
+| **`roaring`** | 66.1 K | 2.78 ns | 3.09 ns | 3.37 ns | 4.70 ns |
+
+Source code for the benchmark is provided in the *bench* directory of this repo
+and may be repeated by running `cargo criterion`.
+
+<br>
+
+## Comparison of data structures
+
+#### unicode-xid
+
+They use a sorted array of character ranges, and do a binary search to look up
+whether a given character lands inside one of those ranges.
+
+```rust
+static XID_Continue_table: [(char, char); 763] = [
+ ('\u{30}', '\u{39}'), // 0-9
+ ('\u{41}', '\u{5a}'), // A-Z
+ …
+ ('\u{e0100}', '\u{e01ef}'),
+];
+```
+
+The static storage used by this data structure scales with the number of
+contiguous ranges of identifier codepoints in Unicode. Every table entry
+consumes 8 bytes, because it consists of a pair of 32-bit `char` values.
+
+In some ranges of the Unicode codepoint space, this is quite a sparse
+representation – there are some ranges where tens of thousands of adjacent
+codepoints are all valid identifier characters. In other places, the
+representation is quite inefficient. A character like `µ` (U+00B5) which is
+surrounded by non-identifier codepoints consumes 64 bits in the table, while it
+would be just 1 bit in a dense bitmap.
+
+On a system with 64-byte cache lines, binary searching the table touches 7 cache
+lines on average. Each cache line fits only 8 table entries. Additionally, the
+branching performed during the binary search is probably mostly unpredictable to
+the branch predictor.
+
+Overall, the crate ends up being about 10× slower on non-ASCII input
+compared to the fastest crate.
+
+A potential improvement would be to pack the table entries more compactly.
+Rust's `char` type is a 21-bit integer padded to 32 bits, which means every
+table entry is holding 22 bits of wasted space, adding up to 3.9 K. They could
+instead fit every table entry into 6 bytes, leaving out some of the padding, for
+a 25% improvement in space used. With some cleverness it may be possible to fit
+in 5 bytes or even 4 bytes by storing a low char and an extent, instead of low
+char and high char. I don't expect that performance would improve much but this
+could be the most efficient for space across all the libraries, needing only
+about 7 K to store.
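+
+As a purely illustrative sketch of that packing idea (hypothetical `pack`,
+`unpack` and `contains` helpers, not part of `unicode-xid` or this crate), each
+range could be stored as two 24-bit little-endian integers and still be binary
+searched:
+
+```rust
+fn pack(low: char, high: char) -> [u8; 6] {
+    let (lo, hi) = (low as u32, high as u32);
+    // 21 significant bits fit comfortably in 3 bytes per endpoint.
+    [lo as u8, (lo >> 8) as u8, (lo >> 16) as u8,
+     hi as u8, (hi >> 8) as u8, (hi >> 16) as u8]
+}
+
+fn unpack(e: &[u8; 6]) -> (u32, u32) {
+    (u32::from(e[0]) | u32::from(e[1]) << 8 | u32::from(e[2]) << 16,
+     u32::from(e[3]) | u32::from(e[4]) << 8 | u32::from(e[5]) << 16)
+}
+
+fn contains(table: &[[u8; 6]], ch: char) -> bool {
+    let c = ch as u32;
+    table
+        .binary_search_by(|e| {
+            let (lo, hi) = unpack(e);
+            if c < lo {
+                core::cmp::Ordering::Greater // entry lies above the target
+            } else if c > hi {
+                core::cmp::Ordering::Less // entry lies below the target
+            } else {
+                core::cmp::Ordering::Equal
+            }
+        })
+        .is_ok()
+}
+
+fn main() {
+    // Sorted, non-overlapping ranges, 6 bytes each instead of 8.
+    let table = [pack('0', '9'), pack('A', 'Z'), pack('a', 'z')];
+    assert!(contains(&table, 'Q'));
+    assert!(!contains(&table, '!'));
+}
+```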
+
+#### ucd-trie
+
+Their data structure is a compressed trie set specifically tailored for Unicode
+codepoints. The design is credited to Raph Levien in [rust-lang/rust#33098].
+
+[rust-lang/rust#33098]: https://github.com/rust-lang/rust/pull/33098
+
+```rust
+pub struct TrieSet {
+ tree1_level1: &'static [u64; 32],
+ tree2_level1: &'static [u8; 992],
+ tree2_level2: &'static [u64],
+ tree3_level1: &'static [u8; 256],
+ tree3_level2: &'static [u8],
+ tree3_level3: &'static [u64],
+}
+```
+
+It represents codepoint sets using a trie to achieve prefix compression. The
+final states of the trie are embedded in leaves or "chunks", where each chunk is
+a 64-bit integer. Each bit position of the integer corresponds to whether a
+particular codepoint is in the set or not. These chunks are not just a compact
+representation of the final states of the trie, but are also a form of suffix
+compression. In particular, if multiple ranges of 64 contiguous codepoints have
+the same Unicode properties, then they all map to the same chunk in the final
+level of the trie.
+
+Being tailored for Unicode codepoints, this trie is partitioned into three
+disjoint sets: tree1, tree2, tree3. The first set corresponds to codepoints \[0,
+0x800), the second \[0x800, 0x10000) and the third \[0x10000, 0x110000). These
+partitions conveniently correspond to the space of 1 or 2 byte UTF-8 encoded
+codepoints, 3 byte UTF-8 encoded codepoints and 4 byte UTF-8 encoded codepoints,
+respectively.
+
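+As a quick sanity check of that correspondence (plain std Rust, independent of
+any of the crates discussed here):
+
+```rust
+fn main() {
+    assert_eq!('\u{7FF}'.len_utf8(), 2);   // last codepoint of tree1
+    assert_eq!('\u{800}'.len_utf8(), 3);   // first codepoint of tree2
+    assert_eq!('\u{FFFF}'.len_utf8(), 3);  // last codepoint of tree2
+    assert_eq!('\u{10000}'.len_utf8(), 4); // first codepoint of tree3
+}
+```
+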
+Lookups in this data structure are significantly more efficient than binary
+search. A lookup touches either 1, 2, or 3 cache lines based on which of the
+trie partitions is being accessed.
+
+One possible performance improvement would be for this crate to expose a way to
+query based on a UTF-8 encoded string, returning the Unicode property
+corresponding to the first character in the string. Without such an API, the
+caller is required to tokenize their UTF-8 encoded input data into `char`, hand
+the `char` into `ucd-trie`, only for `ucd-trie` to undo that work by converting
+back into the variable-length representation for trie traversal.
+
+#### fst
+
+Uses a [finite state transducer][fst]. This representation is built into
+[ucd-generate] but I am not aware of any advantage over the `ucd-trie`
+representation. In particular `ucd-trie` is optimized for storing Unicode
+properties while `fst` is not.
+
+[fst]: https://github.com/BurntSushi/fst
+[ucd-generate]: https://github.com/BurntSushi/ucd-generate
+
+As far as I can tell, the main thing that causes `fst` to have large size and
+slow lookups for this use case relative to `ucd-trie` is that it does not
+specialize for the fact that only 21 of the 32 bits in a `char` are meaningful.
+There are some dense arrays in the structure with large ranges that could never
+possibly be used.
+
+#### roaring
+
+This crate is a pure-Rust implementation of [Roaring Bitmap], a data structure
+designed for storing sets of 32-bit unsigned integers.
+
+[Roaring Bitmap]: https://roaringbitmap.org/about/
+
+Roaring bitmaps are compressed bitmaps which tend to outperform conventional
+compressed bitmaps such as WAH, EWAH or Concise. In some instances, they can be
+hundreds of times faster and they often offer significantly better compression.
+
+In this use case the performance was reasonably competitive but still
+substantially slower than the Unicode-optimized crates. Meanwhile the
+compression was significantly worse, requiring 6× as much storage for the
+data structure.
+
+I also benchmarked the [`croaring`] crate which is an FFI wrapper around the C
+reference implementation of Roaring Bitmap. This crate was consistently about
+15% slower than pure-Rust `roaring`, which could just be FFI overhead. I did not
+investigate further.
+
+[`croaring`]: https://crates.io/crates/croaring
+
+#### unicode-ident
+
+This crate is most similar to the `ucd-trie` library, in that it's based on
+bitmaps stored in the leafs of a trie representation, achieving both prefix
+compression and suffix compression.
+
+The key differences are:
+
+- Uses a single 2-level trie, rather than 3 disjoint partitions of different
+ depth each.
+- Uses significantly larger chunks: 512 bits rather than 64 bits.
+- Compresses the XID\_Start and XID\_Continue properties together
+ simultaneously, rather than duplicating identical trie leaf chunks across the
+ two.
+
+The following diagram shows the XID\_Start and XID\_Continue Unicode boolean
+properties in uncompressed form, in row-major order:
+
+<table>
+<tr><th>XID_Start</th><th>XID_Continue</th></tr>
+<tr>
+<td><img alt="XID_Start bitmap" width="256" src="https://user-images.githubusercontent.com/1940490/168647353-c6eeb922-afec-49b2-9ef5-c03e9d1e0760.png"></td>
+<td><img alt="XID_Continue bitmap" width="256" src="https://user-images.githubusercontent.com/1940490/168647367-f447cca7-2362-4d7d-8cd7-d21c011d329b.png"></td>
+</tr>
+</table>
+
+Uncompressed, these would take 140 K to store, which is beyond what would be
+reasonable. However, as you can see there is a large degree of similarity
+between the two bitmaps and across the rows, which lends well to compression.
+
+This crate stores one 512-bit "row" of the above bitmaps in the leaf level of a
+trie, and a single additional level to index into the leafs. It turns out there
+are 124 unique 512-bit chunks across the two bitmaps so 7 bits are sufficient to
+index them.
+
+The chunk size of 512 bits is selected as the size that minimizes the total size
+of the data structure. A smaller chunk, like 256 or 128 bits, would achieve
+better deduplication but require a larger index. A larger chunk would increase
+redundancy in the leaf bitmaps. 512 bit chunks are the optimum for total size of
+the index plus leaf bitmaps.
+
+In fact since there are only 124 unique chunks, we can use an 8-bit index with a
+spare bit to index at the half-chunk level. This achieves an additional 8.5%
+compression by eliminating redundancies between the second half of any chunk and
+the first half of any other chunk. Note that this is not the same as using
+chunks which are half the size, because it does not necessitate raising the size
+of the trie's first level.
+
+In contrast to binary search or the `ucd-trie` crate, performing lookups in this
+data structure is straight-line code with no need for branching.
+
+```asm
+is_xid_start:
+ mov eax, edi
+ shr eax, 9
+ lea rcx, [rip + unicode_ident::tables::TRIE_START]
+ add rcx, rax
+ xor eax, eax
+ cmp edi, 201728
+ cmovb rax, rcx
+ test rax, rax
+ lea rcx, [rip + .L__unnamed_1]
+ cmovne rcx, rax
+ movzx eax, byte ptr [rcx]
+ shl rax, 5
+ mov ecx, edi
+ shr ecx, 3
+ and ecx, 63
+ add rcx, rax
+ lea rax, [rip + unicode_ident::tables::LEAF]
+ mov al, byte ptr [rax + rcx]
+ and dil, 7
+ mov ecx, edi
+ shr al, cl
+ and al, 1
+ ret
+```
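+
+For comparison, here is a freestanding Rust sketch of the same two-level lookup
+(purely illustrative: the tables are passed in as slices rather than using this
+crate's static `TRIE_START`/`LEAF` tables, and `chunk_size` plays the role of
+the `CHUNK` constant from src/lib.rs):
+
+```rust
+/// `trie` holds one half-chunk index per 512-bit block of codepoints and
+/// `leaf` holds the bitmap chunks; `chunk_size` is in bytes (64 = 512 bits).
+fn lookup(trie: &[u8], leaf: &[u8], chunk_size: usize, ch: char) -> bool {
+    let byte = ch as usize / 8; // which bitmap byte this codepoint falls in
+    let half_chunk = *trie.get(byte / chunk_size).unwrap_or(&0) as usize;
+    let offset = half_chunk * chunk_size / 2 + byte % chunk_size;
+    leaf.get(offset).map_or(false, |b| (*b >> (ch as u32 % 8)) & 1 != 0)
+}
+
+fn main() {
+    // Toy tables: block 0 maps to half-chunk 0, and the leaf bitmap marks
+    // only codepoint U+0100 (bit 0 of leaf byte 32).
+    let mut leaf = [0u8; 64];
+    leaf[32] = 0x01;
+    let trie = [0u8; 1];
+    assert!(lookup(&trie, &leaf, 64, '\u{100}'));
+    assert!(!lookup(&trie, &leaf, 64, '\u{101}'));
+}
+```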
+
+<br>
+
+## License
+
+Use of the Unicode Character Database, as this crate does, is governed by the <a
+href="LICENSE-UNICODE">Unicode License Agreement – Data Files and Software
+(2016)</a>.
+
+All intellectual property within this crate that is **not generated** using the
+Unicode Character Database as input is licensed under either of <a
+href="LICENSE-APACHE">Apache License, Version 2.0</a> or <a
+href="LICENSE-MIT">MIT license</a> at your option.
+
+The **generated** files incorporate tabular data derived from the Unicode
+Character Database, together with intellectual property from the original source
+code content of the crate. One must comply with the terms of both the Unicode
+License Agreement and either of the Apache license or MIT license when those
+generated files are involved.
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be licensed as just described, without any additional terms or conditions.
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/benches/xid.rs
b/rust/hw/char/pl011/vendor/unicode-ident/benches/xid.rs
new file mode 100644
index 0000000000..040c4e1818
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/benches/xid.rs
@@ -0,0 +1,124 @@
+// To run:
+//
+// cargo criterion --features criterion/real_blackbox
+//
+// This benchmarks each of the different libraries at several ratios of ASCII to
+// non-ASCII content. There is one additional benchmark labeled "baseline" which
+// just iterates over characters in a string, converting UTF-8 to 32-bit chars.
+//
+// Criterion will show a time in milliseconds. The non-baseline bench functions
+// each make one million function calls (2 calls per character, 500K characters
+// in the strings created by gen_string). The "time per call" listed in our
+// readme is computed by subtracting this baseline from the other bench
+// functions' time, then dividing by one million (ms -> ns).
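+//
+// As a purely hypothetical example of that arithmetic: if one of the bench
+// functions measures 2.36 ms and the baseline measures 1.08 ms on the same
+// string, the readme entry is (2.36 ms - 1.08 ms) / 1,000,000 calls = 1.28 ns.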
+
+#![allow(clippy::needless_pass_by_value)]
+
+#[path = "../tests/fst/mod.rs"]
+mod fst;
+#[path = "../tests/roaring/mod.rs"]
+mod roaring;
+#[path = "../tests/trie/mod.rs"]
+mod trie;
+
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use rand::distributions::{Bernoulli, Distribution, Uniform};
+use rand::rngs::SmallRng;
+use rand::SeedableRng;
+use std::time::Duration;
+
+fn gen_string(p_nonascii: u32) -> String {
+ let mut rng = SmallRng::from_seed([b'!'; 32]);
+ let pick_nonascii = Bernoulli::from_ratio(p_nonascii, 100).unwrap();
+ let ascii = Uniform::new_inclusive('\0', '\x7f');
+ let nonascii = Uniform::new_inclusive(0x80 as char, char::MAX);
+
+ let mut string = String::new();
+ for _ in 0..500_000 {
+ let distribution = if pick_nonascii.sample(&mut rng) {
+ nonascii
+ } else {
+ ascii
+ };
+ string.push(distribution.sample(&mut rng));
+ }
+
+ string
+}
+
+fn bench(c: &mut Criterion, group_name: &str, string: String) {
+ let mut group = c.benchmark_group(group_name);
+ group.measurement_time(Duration::from_secs(10));
+ group.bench_function("baseline", |b| {
+ b.iter(|| {
+ for ch in string.chars() {
+ black_box(ch);
+ }
+ });
+ });
+ group.bench_function("unicode-ident", |b| {
+ b.iter(|| {
+ for ch in string.chars() {
+ black_box(unicode_ident::is_xid_start(ch));
+ black_box(unicode_ident::is_xid_continue(ch));
+ }
+ });
+ });
+ group.bench_function("unicode-xid", |b| {
+ b.iter(|| {
+ for ch in string.chars() {
+ black_box(unicode_xid::UnicodeXID::is_xid_start(ch));
+ black_box(unicode_xid::UnicodeXID::is_xid_continue(ch));
+ }
+ });
+ });
+ group.bench_function("ucd-trie", |b| {
+ b.iter(|| {
+ for ch in string.chars() {
+ black_box(trie::XID_START.contains_char(ch));
+ black_box(trie::XID_CONTINUE.contains_char(ch));
+ }
+ });
+ });
+ group.bench_function("fst", |b| {
+ let xid_start_fst = fst::xid_start_fst();
+ let xid_continue_fst = fst::xid_continue_fst();
+ b.iter(|| {
+ for ch in string.chars() {
+ let ch_bytes = (ch as u32).to_be_bytes();
+ black_box(xid_start_fst.contains(ch_bytes));
+ black_box(xid_continue_fst.contains(ch_bytes));
+ }
+ });
+ });
+ group.bench_function("roaring", |b| {
+ let xid_start_bitmap = roaring::xid_start_bitmap();
+ let xid_continue_bitmap = roaring::xid_continue_bitmap();
+ b.iter(|| {
+ for ch in string.chars() {
+ black_box(xid_start_bitmap.contains(ch as u32));
+ black_box(xid_continue_bitmap.contains(ch as u32));
+ }
+ });
+ });
+ group.finish();
+}
+
+fn bench0(c: &mut Criterion) {
+ bench(c, "0%-nonascii", gen_string(0));
+}
+
+fn bench1(c: &mut Criterion) {
+ bench(c, "1%-nonascii", gen_string(1));
+}
+
+fn bench10(c: &mut Criterion) {
+ bench(c, "10%-nonascii", gen_string(10));
+}
+
+fn bench100(c: &mut Criterion) {
+ bench(c, "100%-nonascii", gen_string(100));
+}
+
+criterion_group!(benches, bench0, bench1, bench10, bench100);
+criterion_main!(benches);
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/meson.build
b/rust/hw/char/pl011/vendor/unicode-ident/meson.build
new file mode 100644
index 0000000000..86b51af097
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/meson.build
@@ -0,0 +1,14 @@
+_unicode_ident_rs = static_library(
+ 'unicode_ident',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2021',
+ ],
+ dependencies: [],
+)
+
+dep_unicode_ident = declare_dependency(
+ link_with: _unicode_ident_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/src/lib.rs
b/rust/hw/char/pl011/vendor/unicode-ident/src/lib.rs
new file mode 100644
index 0000000000..f8908866e6
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/src/lib.rs
@@ -0,0 +1,269 @@
+//! [![github]](https://github.com/dtolnay/unicode-ident) [![crates-io]](https://crates.io/crates/unicode-ident) [![docs-rs]](https://docs.rs/unicode-ident)
+//!
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
+//!
+//! <br>
+//!
+//! Implementation of [Unicode Standard Annex #31][tr31] for determining which
+//! `char` values are valid in programming language identifiers.
+//!
+//! [tr31]: https://www.unicode.org/reports/tr31/
+//!
+//! This crate is a better optimized implementation of the older `unicode-xid`
+//! crate. This crate uses less static storage, and is able to classify both
+//! ASCII and non-ASCII codepoints with better performance, 2–10×
+//! faster than `unicode-xid`.
+//!
+//! <br>
+//!
+//! ## Comparison of performance
+//!
+//! The following table shows a comparison between five Unicode identifier
+//! implementations.
+//!
+//! - `unicode-ident` is this crate;
+//! - [`unicode-xid`] is a widely used crate run by the "unicode-rs" org;
+//! - `ucd-trie` and `fst` are two data structures supported by the
+//! [`ucd-generate`] tool;
+//! - [`roaring`] is a Rust implementation of Roaring bitmap.
+//!
+//! The *static storage* column shows the total size of `static` tables that the
+//! crate bakes into your binary, measured in 1000s of bytes.
+//!
+//! The remaining columns show the **cost per call** to evaluate whether a
+//! single `char` has the XID\_Start or XID\_Continue Unicode property,
+//! comparing across different ratios of ASCII to non-ASCII codepoints in the
+//! input data.
+//!
+//! [`unicode-xid`]: https://github.com/unicode-rs/unicode-xid
+//! [`ucd-generate`]: https://github.com/BurntSushi/ucd-generate
+//! [`roaring`]: https://github.com/RoaringBitmap/roaring-rs
+//!
+//! | | static storage | 0% nonascii | 1% | 10% | 100% nonascii |
+//! |---|---|---|---|---|---|
+//! | **`unicode-ident`** | 10.1 K | 0.96 ns | 0.95 ns | 1.09 ns | 1.55 ns |
+//! | **`unicode-xid`** | 11.5 K | 1.88 ns | 2.14 ns | 3.48 ns | 15.63 ns |
+//! | **`ucd-trie`** | 10.2 K | 1.29 ns | 1.28 ns | 1.36 ns | 2.15 ns |
+//! | **`fst`** | 139 K | 55.1 ns | 54.9 ns | 53.2 ns | 28.5 ns |
+//! | **`roaring`** | 66.1 K | 2.78 ns | 3.09 ns | 3.37 ns | 4.70 ns |
+//!
+//! Source code for the benchmark is provided in the *bench* directory of this
+//! repo and may be repeated by running `cargo criterion`.
+//!
+//! <br>
+//!
+//! ## Comparison of data structures
+//!
+//! #### unicode-xid
+//!
+//! They use a sorted array of character ranges, and do a binary search to look
+//! up whether a given character lands inside one of those ranges.
+//!
+//! ```rust
+//! # const _: &str = stringify! {
+//! static XID_Continue_table: [(char, char); 763] = [
+//! ('\u{30}', '\u{39}'), // 0-9
+//! ('\u{41}', '\u{5a}'), // A-Z
+//! # "
+//! …
+//! # "
+//! ('\u{e0100}', '\u{e01ef}'),
+//! ];
+//! # };
+//! ```
+//!
+//! The static storage used by this data structure scales with the number of
+//! contiguous ranges of identifier codepoints in Unicode. Every table entry
+//! consumes 8 bytes, because it consists of a pair of 32-bit `char` values.
+//!
+//! In some ranges of the Unicode codepoint space, this is quite a sparse
+//! representation – there are some ranges where tens of thousands of
+//! adjacent codepoints are all valid identifier characters. In other places,
+//! the representation is quite inefficient. A character like `µ` (U+00B5)
+//! which is surrounded by non-identifier codepoints consumes 64 bits in the
+//! table, while it would be just 1 bit in a dense bitmap.
+//!
+//! On a system with 64-byte cache lines, binary searching the table touches 7
+//! cache lines on average. Each cache line fits only 8 table entries.
+//! Additionally, the branching performed during the binary search is probably
+//! mostly unpredictable to the branch predictor.
+//!
+//! Overall, the crate ends up being about 10× slower on non-ASCII input
+//! compared to the fastest crate.
+//!
+//! A potential improvement would be to pack the table entries more compactly.
+//! Rust's `char` type is a 21-bit integer padded to 32 bits, which means every
+//! table entry is holding 22 bits of wasted space, adding up to 3.9 K. They
+//! could instead fit every table entry into 6 bytes, leaving out some of the
+//! padding, for a 25% improvement in space used. With some cleverness it may be
+//! possible to fit in 5 bytes or even 4 bytes by storing a low char and an
+//! extent, instead of low char and high char. I don't expect that performance
+//! would improve much but this could be the most efficient for space across all
+//! the libraries, needing only about 7 K to store.
+//!
+//! #### ucd-trie
+//!
+//! Their data structure is a compressed trie set specifically tailored for
+//! Unicode codepoints. The design is credited to Raph Levien in
+//! [rust-lang/rust#33098].
+//!
+//! [rust-lang/rust#33098]: https://github.com/rust-lang/rust/pull/33098
+//!
+//! ```rust
+//! pub struct TrieSet {
+//! tree1_level1: &'static [u64; 32],
+//! tree2_level1: &'static [u8; 992],
+//! tree2_level2: &'static [u64],
+//! tree3_level1: &'static [u8; 256],
+//! tree3_level2: &'static [u8],
+//! tree3_level3: &'static [u64],
+//! }
+//! ```
+//!
+//! It represents codepoint sets using a trie to achieve prefix compression. The
+//! final states of the trie are embedded in leaves or "chunks", where each
+//! chunk is a 64-bit integer. Each bit position of the integer corresponds to
+//! whether a particular codepoint is in the set or not. These chunks are not
+//! just a compact representation of the final states of the trie, but are also
+//! a form of suffix compression. In particular, if multiple ranges of 64
+//! contiguous codepoints have the same Unicode properties, then they all map to
+//! the same chunk in the final level of the trie.
+//!
+//! Being tailored for Unicode codepoints, this trie is partitioned into three
+//! disjoint sets: tree1, tree2, tree3. The first set corresponds to codepoints
+//! \[0, 0x800), the second \[0x800, 0x10000) and the third \[0x10000,
+//! 0x110000). These partitions conveniently correspond to the space of 1 or 2
+//! byte UTF-8 encoded codepoints, 3 byte UTF-8 encoded codepoints and 4 byte
+//! UTF-8 encoded codepoints, respectively.
+//!
+//! Lookups in this data structure are significantly more efficient than binary
+//! search. A lookup touches either 1, 2, or 3 cache lines based on which of the
+//! trie partitions is being accessed.
+//!
+//! One possible performance improvement would be for this crate to expose a way
+//! to query based on a UTF-8 encoded string, returning the Unicode property
+//! corresponding to the first character in the string. Without such an API, the
+//! caller is required to tokenize their UTF-8 encoded input data into `char`,
+//! hand the `char` into `ucd-trie`, only for `ucd-trie` to undo that work by
+//! converting back into the variable-length representation for trie traversal.
+//!
+//! #### fst
+//!
+//! Uses a [finite state transducer][fst]. This representation is built into
+//! [ucd-generate] but I am not aware of any advantage over the `ucd-trie`
+//! representation. In particular `ucd-trie` is optimized for storing Unicode
+//! properties while `fst` is not.
+//!
+//! [fst]: https://github.com/BurntSushi/fst
+//! [ucd-generate]: https://github.com/BurntSushi/ucd-generate
+//!
+//! As far as I can tell, the main thing that causes `fst` to have large size
+//! and slow lookups for this use case relative to `ucd-trie` is that it does
+//! not specialize for the fact that only 21 of the 32 bits in a `char` are
+//! meaningful. There are some dense arrays in the structure with large ranges
+//! that could never possibly be used.
+//!
+//! #### roaring
+//!
+//! This crate is a pure-Rust implementation of [Roaring Bitmap], a data
+//! structure designed for storing sets of 32-bit unsigned integers.
+//!
+//! [Roaring Bitmap]: https://roaringbitmap.org/about/
+//!
+//! Roaring bitmaps are compressed bitmaps which tend to outperform conventional
+//! compressed bitmaps such as WAH, EWAH or Concise. In some instances, they can
+//! be hundreds of times faster and they often offer significantly better
+//! compression.
+//!
+//! In this use case the performance was reasonably competitive but still
+//! substantially slower than the Unicode-optimized crates. Meanwhile the
+//! compression was significantly worse, requiring 6× as much storage for
+//! the data structure.
+//!
+//! I also benchmarked the [`croaring`] crate which is an FFI wrapper around the
+//! C reference implementation of Roaring Bitmap. This crate was consistently
+//! about 15% slower than pure-Rust `roaring`, which could just be FFI overhead.
+//! I did not investigate further.
+//!
+//! [`croaring`]: https://crates.io/crates/croaring
+//!
+//! #### unicode-ident
+//!
+//! This crate is most similar to the `ucd-trie` library, in that it's based on
+//! bitmaps stored in the leafs of a trie representation, achieving both prefix
+//! compression and suffix compression.
+//!
+//! The key differences are:
+//!
+//! - Uses a single 2-level trie, rather than 3 disjoint partitions of different
+//!   depth each.
+//! - Uses significantly larger chunks: 512 bits rather than 64 bits.
+//! - Compresses the XID\_Start and XID\_Continue properties together
+//! simultaneously, rather than duplicating identical trie leaf chunks across
+//! the two.
+//!
+//! The following diagram shows the XID\_Start and XID\_Continue Unicode boolean
+//! properties in uncompressed form, in row-major order:
+//!
+//! <table>
+//! <tr><th>XID_Start</th><th>XID_Continue</th></tr>
+//! <tr>
+//! <td><img alt="XID_Start bitmap" width="256" src="https://user-images.githubusercontent.com/1940490/168647353-c6eeb922-afec-49b2-9ef5-c03e9d1e0760.png"></td>
+//! <td><img alt="XID_Continue bitmap" width="256" src="https://user-images.githubusercontent.com/1940490/168647367-f447cca7-2362-4d7d-8cd7-d21c011d329b.png"></td>
+//! </tr>
+//! </table>
+//!
+//! Uncompressed, these would take 140 K to store, which is beyond what would be
+//! reasonable. However, as you can see there is a large degree of similarity
+//! between the two bitmaps and across the rows, which lends well to
+//! compression.
+//!
+//! This crate stores one 512-bit "row" of the above bitmaps in the leaf level
+//! of a trie, and a single additional level to index into the leafs. It turns
+//! out there are 124 unique 512-bit chunks across the two bitmaps so 7 bits are
+//! sufficient to index them.
+//!
+//! The chunk size of 512 bits is selected as the size that minimizes the total
+//! size of the data structure. A smaller chunk, like 256 or 128 bits, would
+//! achieve better deduplication but require a larger index. A larger chunk
+//! would increase redundancy in the leaf bitmaps. 512 bit chunks are the
+//! optimum for total size of the index plus leaf bitmaps.
+//!
+//! In fact since there are only 124 unique chunks, we can use an 8-bit index
+//! with a spare bit to index at the half-chunk level. This achieves an
+//! additional 8.5% compression by eliminating redundancies between the second
+//! half of any chunk and the first half of any other chunk. Note that this is
+//! not the same as using chunks which are half the size, because it does not
+//! necessitate raising the size of the trie's first level.
+//!
+//! In contrast to binary search or the `ucd-trie` crate, performing lookups in
+//! this data structure is straight-line code with no need for branching.
+
+#![no_std]
+#![doc(html_root_url = "https://docs.rs/unicode-ident/1.0.12")]
+#![allow(clippy::doc_markdown, clippy::must_use_candidate)]
+
+#[rustfmt::skip]
+mod tables;
+
+use crate::tables::{ASCII_CONTINUE, ASCII_START, CHUNK, LEAF, TRIE_CONTINUE, TRIE_START};
+
+pub fn is_xid_start(ch: char) -> bool {
+ if ch.is_ascii() {
+ return ASCII_START.0[ch as usize];
+ }
+ let chunk = *TRIE_START.0.get(ch as usize / 8 / CHUNK).unwrap_or(&0);
+ let offset = chunk as usize * CHUNK / 2 + ch as usize / 8 % CHUNK;
+ unsafe { LEAF.0.get_unchecked(offset) }.wrapping_shr(ch as u32 % 8) & 1 != 0
+}
+
+pub fn is_xid_continue(ch: char) -> bool {
+ if ch.is_ascii() {
+ return ASCII_CONTINUE.0[ch as usize];
+ }
+ let chunk = *TRIE_CONTINUE.0.get(ch as usize / 8 / CHUNK).unwrap_or(&0);
+ let offset = chunk as usize * CHUNK / 2 + ch as usize / 8 % CHUNK;
+ unsafe { LEAF.0.get_unchecked(offset) }.wrapping_shr(ch as u32 % 8) & 1 != 0
+}
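+
+// A short usage sketch (illustrative only, not part of the vendored crate):
+// a simple UAX #31-style identifier check built from the two functions above.
+//
+//     fn is_ident(s: &str) -> bool {
+//         let mut chars = s.chars();
+//         match chars.next() {
+//             Some(first) => is_xid_start(first) && chars.all(is_xid_continue),
+//             None => false,
+//         }
+//     }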
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/src/tables.rs
b/rust/hw/char/pl011/vendor/unicode-ident/src/tables.rs
new file mode 100644
index 0000000000..bb691b5e9c
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/src/tables.rs
@@ -0,0 +1,651 @@
+// @generated by ../generate. To regenerate, run the following in the repo root:
+//
+// $ curl -LO https://www.unicode.org/Public/zipped/15.0.0/UCD.zip
+// $ unzip UCD.zip -d UCD
+// $ cargo run --manifest-path generate/Cargo.toml
+
+const T: bool = true;
+const F: bool = false;
+
+#[repr(C, align(8))]
+pub(crate) struct Align8<T>(pub(crate) T);
+#[repr(C, align(64))]
+pub(crate) struct Align64<T>(pub(crate) T);
+
+pub(crate) static ASCII_START: Align64<[bool; 128]> = Align64([
+ F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F,
F, F, F, F, F, F, F,
+ F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F,
F, F, F, F, F, F, F,
+ F, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T,
T, T, F, F, F, F, F,
+ F, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T,
T, T, F, F, F, F, F,
+]);
+
+pub(crate) static ASCII_CONTINUE: Align64<[bool; 128]> = Align64([
+ F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F,
F, F, F, F, F, F, F,
+ F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, F, T, T, T, T, T, T, T, T, T,
T, F, F, F, F, F, F,
+ F, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T,
T, T, F, F, F, F, T,
+ F, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T,
T, T, F, F, F, F, F,
+]);
+
+pub(crate) const CHUNK: usize = 64;
+
+pub(crate) static TRIE_START: Align8<[u8; 402]> = Align8([
+ 0x04, 0x0B, 0x0F, 0x13, 0x17, 0x1B, 0x1F, 0x23, 0x27, 0x2D, 0x31, 0x34,
0x38, 0x3C, 0x40, 0x02,
+ 0x45, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49, 0x00, 0x4D, 0x00, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x06, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x51, 0x54, 0x58, 0x5C, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x09,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x60, 0x64, 0x66,
+ 0x6A, 0x6E, 0x72, 0x28, 0x76, 0x78, 0x7C, 0x80, 0x84, 0x88, 0x8C, 0x90,
0x94, 0x98, 0x9E, 0xA2,
+ 0x05, 0x2B, 0xA6, 0x00, 0x00, 0x00, 0x00, 0x99, 0x05, 0x05, 0xA8, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x05, 0xAE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x05, 0xB1, 0x00, 0xB5, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x32, 0x05, 0x05, 0xB9, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9C, 0x43, 0xBB, 0x00, 0x00,
0x00, 0x00, 0xBE, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC6, 0xC8,
0x00, 0x00, 0x00, 0xAF,
+ 0xCE, 0xD2, 0xD6, 0xBC, 0xDA, 0x00, 0x00, 0xDE, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0xE0, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x52,
0xE3, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0xE6, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0xE1, 0x05, 0xE9, 0x00, 0x00, 0x00, 0x00,
0x05, 0xEB, 0x00, 0x00,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0xE4, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0xE7,
+]);
+
+pub(crate) static TRIE_CONTINUE: Align8<[u8; 1793]> = Align8([
+ 0x08, 0x0D, 0x11, 0x15, 0x19, 0x1D, 0x21, 0x25, 0x2A, 0x2F, 0x31, 0x36,
0x3A, 0x3E, 0x42, 0x02,
+ 0x47, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4B, 0x00, 0x4F, 0x00, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x06, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x51, 0x56, 0x5A, 0x5E, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x09,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x62, 0x64, 0x68,
+ 0x6C, 0x70, 0x74, 0x28, 0x76, 0x7A, 0x7E, 0x82, 0x86, 0x8A, 0x8E, 0x92,
0x96, 0x9B, 0xA0, 0xA4,
+ 0x05, 0x2B, 0xA6, 0x00, 0x00, 0x00, 0x00, 0x99, 0x05, 0x05, 0xAB, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x05, 0xAE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x05, 0xB3, 0x00, 0xB7, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x32, 0x05, 0x05, 0xB9, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9C, 0x43, 0xBB, 0x00, 0x00,
0x00, 0x00, 0xC1, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xA9, 0xAC, 0xC4, 0xC6, 0xCA,
0x00, 0xCC, 0x00, 0xAF,
+ 0xD0, 0xD4, 0xD8, 0xBC, 0xDC, 0x00, 0x00, 0xDE, 0x00, 0x00, 0x00, 0x00,
0x00, 0xBF, 0x00, 0x00,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0xE0, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x52,
0xE3, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0xE6, 0x05, 0x05, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0xE1, 0x05, 0xE9, 0x00, 0x00, 0x00, 0x00,
0x05, 0xEB, 0x00, 0x00,
+ 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0xE4, 0x05, 0x05,
0x05, 0x05, 0x05, 0x05,
+ 0x05, 0xE7, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xC2,
+]);
+
+pub(crate) static LEAF: Align64<[u8; 7584]> = Align64([
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0x3F, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x3F, 0xFF, 0xAA,
0xFF, 0xFF, 0xFF, 0x3F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xDF, 0x5F, 0xDC, 0x1F, 0xCF, 0x0F,
0xFF, 0x1F, 0xDC, 0x1F,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x20, 0x04, 0xFF, 0xFF, 0x7F, 0xFF,
0xFF, 0xFF, 0x7F, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0xA0, 0x04, 0xFF, 0xFF, 0x7F, 0xFF,
0xFF, 0xFF, 0x7F, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0xFF, 0xFF, 0x7F, 0xF8, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x0F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xC3, 0xFF, 0x03, 0x00,
0x1F, 0x50, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xDF, 0xB8,
+ 0x40, 0xD7, 0xFF, 0xFF, 0xFB, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xBF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xC3, 0xFF, 0x03, 0x00,
0x1F, 0x50, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xDF, 0xB8,
+ 0xC0, 0xD7, 0xFF, 0xFF, 0xFB, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xBF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0x03, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0x7F, 0x02,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0x87, 0x07, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFB, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0x7F, 0x02,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x01, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xBF, 0xB6, 0x00, 0xFF, 0xFF,
0xFF, 0x87, 0x07, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x00,
0x00, 0xC0, 0xFE, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x2F, 0x00,
0x60, 0xC0, 0x00, 0x9C,
+ 0x00, 0x00, 0xFD, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xE0, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x02, 0x00, 0x00, 0xFC, 0xFF, 0xFF,
0xFF, 0x07, 0x30, 0x04,
+ 0x00, 0x00, 0xFF, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xC3, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xEF, 0x9F,
0xFF, 0xFD, 0xFF, 0x9F,
+ 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xE7, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x3F, 0x24,
+ 0xFF, 0xFF, 0x3F, 0x04, 0x10, 0x01, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x01,
0xFF, 0x07, 0xFF, 0xFF,
+ 0xFF, 0x7E, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xF0, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x23, 0x00, 0x00, 0x01, 0xFF,
0x03, 0x00, 0xFE, 0xFF,
+ 0xE1, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xC5, 0x23, 0x00, 0x40, 0x00, 0xB0,
0x03, 0x00, 0x03, 0x10,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x0F,
0xFF, 0x07, 0xFF, 0xFF,
+ 0xFF, 0x7E, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFB, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xCF, 0xFF, 0xFE, 0xFF,
+ 0xEF, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xC5, 0xF3, 0x9F, 0x79, 0x80, 0xB0,
0xCF, 0xFF, 0x03, 0x50,
+ 0xE0, 0x87, 0xF9, 0xFF, 0xFF, 0xFD, 0x6D, 0x03, 0x00, 0x00, 0x00, 0x5E,
0x00, 0x00, 0x1C, 0x00,
+ 0xE0, 0xBF, 0xFB, 0xFF, 0xFF, 0xFD, 0xED, 0x23, 0x00, 0x00, 0x01, 0x00,
0x03, 0x00, 0x00, 0x02,
+ 0xE0, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xED, 0x23, 0x00, 0x00, 0x00, 0xB0,
0x03, 0x00, 0x02, 0x00,
+ 0xE8, 0xC7, 0x3D, 0xD6, 0x18, 0xC7, 0xFF, 0x03, 0x00, 0x00, 0x01, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xEE, 0x87, 0xF9, 0xFF, 0xFF, 0xFD, 0x6D, 0xD3, 0x87, 0x39, 0x02, 0x5E,
0xC0, 0xFF, 0x3F, 0x00,
+ 0xEE, 0xBF, 0xFB, 0xFF, 0xFF, 0xFD, 0xED, 0xF3, 0xBF, 0x3B, 0x01, 0x00,
0xCF, 0xFF, 0x00, 0xFE,
+ 0xEE, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xED, 0xF3, 0x9F, 0x39, 0xE0, 0xB0,
0xCF, 0xFF, 0x02, 0x00,
+ 0xEC, 0xC7, 0x3D, 0xD6, 0x18, 0xC7, 0xFF, 0xC3, 0xC7, 0x3D, 0x81, 0x00,
0xC0, 0xFF, 0x00, 0x00,
+ 0xE0, 0xDF, 0xFD, 0xFF, 0xFF, 0xFD, 0xFF, 0x23, 0x00, 0x00, 0x00, 0x27,
0x03, 0x00, 0x00, 0x00,
+ 0xE1, 0xDF, 0xFD, 0xFF, 0xFF, 0xFD, 0xEF, 0x23, 0x00, 0x00, 0x00, 0x60,
0x03, 0x00, 0x06, 0x00,
+ 0xF0, 0xDF, 0xFD, 0xFF, 0xFF, 0xFF, 0xFF, 0x27, 0x00, 0x40, 0x70, 0x80,
0x03, 0x00, 0x00, 0xFC,
+ 0xE0, 0xFF, 0x7F, 0xFC, 0xFF, 0xFF, 0xFB, 0x2F, 0x7F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xDF, 0xFD, 0xFF, 0xFF, 0xFD, 0xFF, 0xF3, 0xDF, 0x3D, 0x60, 0x27,
0xCF, 0xFF, 0x00, 0x00,
+ 0xEF, 0xDF, 0xFD, 0xFF, 0xFF, 0xFD, 0xEF, 0xF3, 0xDF, 0x3D, 0x60, 0x60,
0xCF, 0xFF, 0x0E, 0x00,
+ 0xFF, 0xDF, 0xFD, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xDF, 0x7D, 0xF0, 0x80,
0xCF, 0xFF, 0x00, 0xFC,
+ 0xEE, 0xFF, 0x7F, 0xFC, 0xFF, 0xFF, 0xFB, 0x2F, 0x7F, 0x84, 0x5F, 0xFF,
0xC0, 0xFF, 0x0C, 0x00,
+ 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x05, 0x00, 0x7F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xD6, 0xF7, 0xFF, 0xFF, 0xAF, 0xFF, 0x05, 0x20, 0x5F, 0x00, 0x00, 0xF0,
0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFE, 0xFF, 0xFF,
0xFF, 0x1F, 0x00, 0x00,
+ 0x00, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0xFF, 0x7F, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0xD6, 0xF7, 0xFF, 0xFF, 0xAF, 0xFF, 0xFF, 0x3F, 0x5F, 0x7F, 0xFF, 0xF3,
0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x03, 0xFF, 0x03, 0xA0, 0xC2, 0xFF, 0xFE, 0xFF, 0xFF,
0xFF, 0x1F, 0xFE, 0xFF,
+ 0xDF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0x1F, 0x40, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x80, 0x00, 0x00, 0x3F, 0x3C,
0x62, 0xC0, 0xE1, 0xFF,
+ 0x03, 0x40, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xBF, 0x20, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xF7,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0xFF, 0xFF, 0x3F, 0x00,
0xFF, 0x00, 0x00, 0x00,
+ 0xBF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFD, 0x07, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xBF, 0x20, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xF7,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3D, 0x7F, 0x3D,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x3D, 0xFF, 0xFF, 0xFF, 0xFF, 0x3D, 0x7F, 0x3D, 0xFF, 0x7F, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0x3D, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x3F, 0x3F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3D, 0x7F, 0x3D,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x3D, 0xFF, 0xFF, 0xFF, 0xFF, 0x3D, 0x7F, 0x3D, 0xFF, 0x7F, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0x3D, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xE7,
0x00, 0xFE, 0x03, 0x00,
+ 0xFF, 0xFF, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x3F, 0x3F,
+ 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x9F, 0xFF, 0xFF,
+ 0xFE, 0xFF, 0xFF, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xC7, 0xFF, 0x01,
+ 0xFF, 0xFF, 0x03, 0x80, 0xFF, 0xFF, 0x03, 0x00, 0xFF, 0xFF, 0x03, 0x00,
0xFF, 0xDF, 0x01, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x80, 0x10,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x9F, 0xFF, 0xFF,
+ 0xFE, 0xFF, 0xFF, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xC7, 0xFF, 0x01,
+ 0xFF, 0xFF, 0x3F, 0x80, 0xFF, 0xFF, 0x1F, 0x00, 0xFF, 0xFF, 0x0F, 0x00,
0xFF, 0xDF, 0x0D, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x8F, 0x30,
0xFF, 0x03, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x01,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x05, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x3F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0x3F, 0x1F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0xB8, 0xFF, 0x03, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x01,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x3F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x7F, 0xFF, 0x0F, 0xFF, 0x0F, 0xC0, 0xFF, 0xFF, 0xFF,
0xFF, 0x3F, 0x1F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF, 0xFF, 0xFF, 0x03, 0xFF, 0x07,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0x7F, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xE0, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0xE0, 0x1F, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xF8, 0xFF, 0xFF, 0xFF, 0x01, 0xC0, 0x00, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF,
0x3F, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F,
0xFF, 0xFF, 0xFF, 0x9F,
+ 0xFF, 0x03, 0xFF, 0x03, 0x80, 0x00, 0xFF, 0xBF, 0xFF, 0x7F, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0xFF, 0x03,
0x00, 0xF8, 0x0F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x0F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x00, 0xFC,
0xFF, 0xFF, 0xFF, 0x3F,
+ 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xE7, 0x00, 0x00, 0x00, 0x00,
0x00, 0xDE, 0x6F, 0x04,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xE3, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x3F,
+ 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xE7, 0x00, 0x00, 0xF7, 0xFF,
0xFF, 0xFF, 0xFF, 0x07,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, 0x27, 0x00,
0xF0, 0x00, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x02, 0x80,
+ 0x00, 0x00, 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x84, 0xFC, 0x2F, 0x3F, 0x50, 0xFD, 0xFF, 0xF3, 0xE0, 0x43, 0x00, 0x00,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x01, 0x00, 0x10, 0x00,
0x00, 0x00, 0x02, 0x80,
+ 0x00, 0x00, 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x1F,
0xE2, 0xFF, 0x01, 0x00,
+ 0x84, 0xFC, 0x2F, 0x3F, 0x50, 0xFD, 0xFF, 0xF3, 0xE0, 0x43, 0x00, 0x00,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x1F, 0x78, 0x0C, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xBF, 0x20, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x80, 0x00, 0x00,
+ 0xFF, 0xFF, 0x7F, 0x00, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x1F, 0xF8, 0x0F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xBF, 0x20, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x80, 0x00, 0x80,
+ 0xFF, 0xFF, 0x7F, 0x00, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xE0, 0x00, 0x00, 0x00, 0xFE, 0x03, 0x3E, 0x1F, 0xFE, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0x7F, 0xE0, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xF7,
+ 0xE0, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x7F, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFF, 0xFF,
+ 0xE0, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0x3E, 0x1F, 0xFE, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0x7F, 0xE6, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xE0, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x7F, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x3F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x1F, 0xFF, 0xFF, 0x00, 0x0C, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x7F, 0x00, 0x80,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x00, 0x00,
+ 0x00, 0x00, 0x80, 0xFF, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xF9, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0xEB, 0x03,
0x00, 0x00, 0xFC, 0xFF,
+ 0xFF, 0x1F, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xF0, 0xBF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x03, 0x00,
+ 0x00, 0x00, 0x80, 0xFF, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xF9, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0xEB, 0x03,
0x00, 0x00, 0xFC, 0xFF,
+ 0xBB, 0xF7, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x0F, 0x00,
+ 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFC, 0x68,
+ 0x00, 0xFC, 0xFF, 0xFF, 0x3F, 0x00, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0xFF, 0x1F,
+ 0xF0, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x80, 0x00, 0x00,
0xDF, 0xFF, 0x00, 0x7C,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x10, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x0F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0xFF, 0x03,
0xFF, 0xFF, 0xFF, 0xE8,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00,
0xFF, 0xFF, 0xFF, 0x1F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x80, 0xFF, 0x03,
0xFF, 0xFF, 0xFF, 0x7F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0xF7, 0x0F, 0x00, 0x00,
0xFF, 0xFF, 0x7F, 0xC4,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x62, 0x3E, 0x05, 0x00, 0x00, 0x38,
0xFF, 0x07, 0x1C, 0x00,
+ 0x7E, 0x7E, 0x7E, 0x00, 0x7F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF7,
0xFF, 0x03, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x07, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0xFF, 0x3F, 0xFF, 0x03,
0xFF, 0xFF, 0x7F, 0xFC,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x38,
0xFF, 0xFF, 0x7C, 0x00,
+ 0x7E, 0x7E, 0x7E, 0x00, 0x7F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF7,
0xFF, 0x03, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x37, 0xFF, 0x03,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x3F, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0x7F, 0x00, 0xF8, 0xA0, 0xFF, 0xFD, 0x7F, 0x5F, 0xDB, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0xF8, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x3F, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0x7F, 0x00, 0xF8, 0xE0, 0xFF, 0xFD, 0x7F, 0x5F, 0xDB, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0xF8, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F,
0xF0, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFF, 0x03,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x8A, 0xAA,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x1F,
+ 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0x07, 0xFE, 0xFF, 0xFF, 0x07,
0xC0, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0x7F, 0xFC, 0xFC, 0xFC, 0x1C,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0x00, 0x00, 0xFF, 0xFF, 0x18, 0x00, 0x00, 0xE0, 0x00, 0x00,
0x00, 0x00, 0x8A, 0xAA,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x1F,
+ 0x00, 0x00, 0xFF, 0x03, 0xFE, 0xFF, 0xFF, 0x87, 0xFE, 0xFF, 0xFF, 0x07,
0xE0, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0xFC, 0xFC, 0xFC, 0x1C,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xEF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF, 0xB7, 0xFF, 0x3F, 0xFF, 0x3F,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x07,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x1F, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xEF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF, 0xB7, 0xFF, 0x3F, 0xFF, 0x3F,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x07,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x1F, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x20,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xE0, 0xFF, 0xFF, 0xFF, 0x07, 0xFF, 0xFF,
0xFF, 0xFF, 0x3F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF, 0x3E, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00,
0x01, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xE0, 0xFF, 0xFF, 0xFF, 0x07, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x07,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF, 0x3E, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF,
0xFF, 0xFF, 0xFF, 0x0F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x0F, 0x00, 0xFF, 0xF7,
+ 0xFF, 0xF7, 0xB7, 0xFF, 0xFB, 0xFF, 0xFB, 0x1B, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0x03, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF,
0xFF, 0xFF, 0xFF, 0x0F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x0F, 0x00, 0xFF, 0xF7,
+ 0xFF, 0xF7, 0xB7, 0xFF, 0xFB, 0xFF, 0xFB, 0x1B, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x3F, 0xFD, 0xFF, 0xFF, 0xFF, 0xFF, 0xBF, 0x91, 0xFF, 0xFF, 0x3F, 0x00,
0xFF, 0xFF, 0x7F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0x37, 0x00,
+ 0xFF, 0xFF, 0x3F, 0x00, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xC0, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0xEF, 0xFE, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0xFF, 0x1F,
+ 0xFF, 0xFF, 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFE, 0xFF, 0xFF,
0x1F, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0xFF, 0xFF, 0x3F, 0x00,
0xFF, 0xFF, 0x07, 0x00,
+ 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x6F, 0xF0, 0xEF, 0xFE, 0xFF, 0xFF, 0x3F, 0x87, 0x00, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0xFF, 0x1F,
+ 0xFF, 0xFF, 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFE, 0xFF, 0xFF,
0x7F, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0xFF, 0xFF, 0x3F, 0x00,
0xFF, 0xFF, 0x07, 0x00,
+ 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x07, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x07, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x1F, 0x80, 0x00, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFF, 0xFF,
+ 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x1F, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0x7F, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1B, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0xE0,
+ 0xFF, 0xFF, 0xFF, 0x1F, 0x80, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00,
0x00, 0x00, 0xFF, 0xFF,
+ 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x1F, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0x7F, 0x00,
+ 0xF8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x26, 0x00,
+ 0xF8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0x01, 0x00, 0x00,
+ 0xF8, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00, 0x90, 0x00, 0xFF, 0xFF,
0xFF, 0xFF, 0x47, 0x00,
+ 0xF8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x1E, 0x00, 0x00, 0x14,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00,
0xC0, 0xFF, 0x3F, 0x80,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x04, 0x00, 0xFF, 0xFF,
0xFF, 0x01, 0xFF, 0x03,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xDF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF,
0xFF, 0xFF, 0x4F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0xDE, 0xFF, 0x17,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFB, 0xFF, 0xFF, 0x0F, 0x00, 0x80, 0x01, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x7F, 0xBD, 0xFF, 0xBF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F,
0x00, 0x00, 0x00, 0x00,
+ 0xE0, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xED, 0x23, 0x00, 0x00, 0x01, 0xE0,
0x03, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFB, 0xFF, 0xFF, 0xFF, 0xFF, 0xC0, 0x03, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x7F, 0xBD, 0xFF, 0xBF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x07, 0xFF, 0x03,
+ 0xEF, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xED, 0xFB, 0x9F, 0x39, 0x81, 0xE0,
0xCF, 0x1F, 0x1F, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x00, 0x80, 0x07, 0x00, 0x80,
0x03, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0xB0, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0xFF, 0xC3,
0x03, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xBF, 0x00, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0x01, 0x00, 0x00, 0x3F,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, 0x00, 0x7F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x11, 0x00, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0x03, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xE7, 0xFF, 0x0F, 0xFF, 0x03, 0x7F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x80,
+ 0x7F, 0xF2, 0x6F, 0xFF, 0xFF, 0xFF, 0x00, 0x80, 0x02, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00,
0x0A, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x03, 0x00, 0x80,
+ 0x7F, 0xF2, 0x6F, 0xFF, 0xFF, 0xFF, 0xBF, 0xF9, 0x0F, 0x00, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFC,
0x1B, 0x00, 0x00, 0x00,
+ 0x01, 0xF8, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x04, 0x00, 0x00, 0x01, 0xF0,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x03, 0x00, 0x20, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x01,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x01, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x80, 0x00, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0x23, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x01,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xEF, 0x6F,
+ 0xFF, 0xFD, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFC, 0xFF,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x7F, 0xFB, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x40, 0x00, 0x00, 0x00,
0xBF, 0xFD, 0xFF, 0xFF,
+ 0xFF, 0x03, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFD, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0xFF, 0x01, 0x00, 0xFF, 0x03,
0x00, 0x00, 0xFC, 0xFF,
+ 0xFF, 0xFF, 0xFC, 0xFF, 0xFF, 0xFE, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x7F, 0xFB, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0xB4, 0xFF, 0x00, 0xFF, 0x03,
0xBF, 0xFD, 0xFF, 0xFF,
+ 0xFF, 0x7F, 0xFB, 0x01, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0x07, 0x00,
+ 0xF4, 0xFF, 0xFD, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0x7F, 0x00,
+ 0xFF, 0xFF, 0xFD, 0xFF, 0xFF, 0xFF, 0xFF, 0xC7, 0x07, 0x00, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x7F, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x7E, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0xFF, 0xFF, 0x3F, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xE0, 0xE3, 0x07, 0xF8,
+ 0xE7, 0x0F, 0x00, 0x00, 0x00, 0x3C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x7F, 0xE0, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x7F,
0x00, 0x00, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0x3F, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00,
0xF8, 0xFF, 0xFF, 0xE0,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x7F,
0xFF, 0x03, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0xFF, 0x03, 0xFF, 0xFF,
0xFF, 0x3F, 0x1F, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x0F, 0x00, 0xFF, 0x03,
0xF8, 0xFF, 0xFF, 0xE0,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x01, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xF8, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x0B, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x87, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0x80, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x1B, 0x00, 0x03, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x0F,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x7F, 0x6F, 0xFF, 0x7F,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x07, 0xFF, 0x1F,
+ 0xFF, 0x01, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFF, 0x03,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x07, 0xFF, 0x1F,
+ 0xFF, 0x01, 0xFF, 0x63, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xDF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xDF, 0x64, 0xDE, 0xFF, 0xEB, 0xEF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xBF, 0xE7, 0xDF, 0xDF, 0xFF, 0xFF, 0xFF, 0x7B, 0x5F, 0xFC, 0xFD, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFD, 0xFF, 0xFF, 0xF7,
0xFF, 0xFF, 0xFF, 0xF7,
+ 0xFF, 0xFF, 0xDF, 0xFF, 0xFF, 0xFF, 0xDF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF,
0xFF, 0x7F, 0xFF, 0xFF,
+ 0xFF, 0xFD, 0xFF, 0xFF, 0xFF, 0xFD, 0xFF, 0xFF, 0xF7, 0x0F, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFD, 0xFF, 0xFF, 0xF7,
0xFF, 0xFF, 0xFF, 0xF7,
+ 0xFF, 0xFF, 0xDF, 0xFF, 0xFF, 0xFF, 0xDF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF,
0xFF, 0x7F, 0xFF, 0xFF,
+ 0xFF, 0xFD, 0xFF, 0xFF, 0xFF, 0xFD, 0xFF, 0xFF, 0xF7, 0xCF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0xF8, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x1F, 0x20, 0x00,
+ 0x10, 0x00, 0x00, 0xF8, 0xFE, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x3F, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x80, 0x3F, 0x00, 0x40, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x7F, 0xFF, 0xFF, 0xF9, 0xDB, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x3F, 0x00, 0x00,
+ 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0xFF, 0x3F, 0xFF, 0x43, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x0F, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x7F, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x03,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0x0F, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x03,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x08, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x00, 0x7F, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF, 0x03,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xEF, 0xFF, 0xFF, 0xFF, 0x96, 0xFE, 0xF7, 0x0A, 0x84, 0xEA, 0x96, 0xAA,
0x96, 0xF7, 0xF7, 0x5E,
+ 0xFF, 0xFB, 0xFF, 0x0F, 0xEE, 0xFB, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x01, 0x00, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
+]);
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/compare.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/compare.rs
new file mode 100644
index 0000000000..14ee22fa5a
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/compare.rs
@@ -0,0 +1,67 @@
+mod fst;
+mod roaring;
+mod trie;
+
+#[test]
+fn compare_all_implementations() {
+    let xid_start_fst = fst::xid_start_fst();
+    let xid_continue_fst = fst::xid_continue_fst();
+    let xid_start_roaring = roaring::xid_start_bitmap();
+    let xid_continue_roaring = roaring::xid_continue_bitmap();
+
+    for ch in '\0'..=char::MAX {
+        let thought_to_be_start = unicode_ident::is_xid_start(ch);
+        let thought_to_be_continue = unicode_ident::is_xid_continue(ch);
+
+        // unicode-xid
+        // FIXME: unicode-xid does not support Unicode 15.1.0 yet.
+        /*
+        assert_eq!(
+            thought_to_be_start,
+            unicode_xid::UnicodeXID::is_xid_start(ch),
+            "{ch:?}",
+        );
+        assert_eq!(
+            thought_to_be_continue,
+            unicode_xid::UnicodeXID::is_xid_continue(ch),
+            "{ch:?}",
+        );
+        */
+
+        // ucd-trie
+        assert_eq!(
+            thought_to_be_start,
+            trie::XID_START.contains_char(ch),
+            "{ch:?}",
+        );
+        assert_eq!(
+            thought_to_be_continue,
+            trie::XID_CONTINUE.contains_char(ch),
+            "{ch:?}",
+        );
+
+        // fst
+        assert_eq!(
+            thought_to_be_start,
+            xid_start_fst.contains((ch as u32).to_be_bytes()),
+            "{ch:?}",
+        );
+        assert_eq!(
+            thought_to_be_continue,
+            xid_continue_fst.contains((ch as u32).to_be_bytes()),
+            "{ch:?}",
+        );
+
+        // roaring
+        assert_eq!(
+            thought_to_be_start,
+            xid_start_roaring.contains(ch as u32),
+            "{ch:?}",
+        );
+        assert_eq!(
+            thought_to_be_continue,
+            xid_continue_roaring.contains(ch as u32),
+            "{ch:?}",
+        );
+    }
+}
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/fst/mod.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/fst/mod.rs
new file mode 100644
index 0000000000..5195efb13d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/fst/mod.rs
@@ -0,0 +1,11 @@
+#![allow(clippy::module_name_repetitions)]
+
+pub fn xid_start_fst() -> fst::Set<&'static [u8]> {
+    let data = include_bytes!("xid_start.fst");
+    fst::Set::from(fst::raw::Fst::new(data.as_slice()).unwrap())
+}
+
+pub fn xid_continue_fst() -> fst::Set<&'static [u8]> {
+    let data = include_bytes!("xid_continue.fst");
+    fst::Set::from(fst::raw::Fst::new(data.as_slice()).unwrap())
+}
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/fst/xid_continue.fst b/rust/hw/char/pl011/vendor/unicode-ident/tests/fst/xid_continue.fst
new file mode 100644
index 0000000000000000000000000000000000000000..3024c8189a9f5a1feeeca3cb0957fa21ac08d9f9
GIT binary patch
literal 73249
zcmeHQ3E)*@7XO#6qWWxOnJ6tvidLk|zl~%qN<t}W6dKC3X;73I*|Ud`HA_m88L2SV
zv1H3Kn36#wm2EPVGw0iv``zum``-V(^x}W_@7{O6^Uim^^E>Bvwx}qv5B#!m!_Pnc
z@5lfC@cnn+ezR`vny*)X^`BK=e)0LgKU?|fisk?M<l~S2`QZoeFMIc$x8Hhm=^L-V
z_Ub=gdHJOmUwHoS&prFhlD{pO|KR!K$Bi8``j6+08hP$He;9H0S!WI(cE<0A4mtg_
z!Ka?o|HOVL^zC!}alLyTd(6>C9eG60!+RWdX!mYiyL33{zysR1Yul!ED@ggNs8)T8
zEw|cwn;JF6g|)Wbu6CWeWcwX<+-c|E?6T`_yZ^S{9(&eruvf!I^n}LHWbdZU_G!Lv
zi<bNC->P++w(Z&<aNt254({0Lkl&@ebEQ7NGjsvy%C@_8KlHF3hxa_<$fJ%v=Gb1n
zk2}6k-xK<s*dI<h`IG?zPaQP)w9|(S{rwrkhM#%X*(3gN&bcE;o%hGlW5$ky@#jzY
z(*+k^G?7dyF^MSt<474?A0veCg-GGzMNcn$>TiF2a={bxAD{Qw+&PcVe&peY9-Q^S
z{rBB_&)qZcy7P`1x8HW_^jmJe>Bbwbzi!&K*Ia$ol~-Io^|HVG`O+zqFS&Tq#EUMx
z;7=36#dy%E0|%UPa=0vauKe90ojM+@MSB1JTDI7?`995>?%kwuqlSAmsK4hP^?tki
zZoBUCo1J&safj_m-8!|m+qPEC8ry8W)s|aSuU4T&I$V|;hl_D#=Pq4Cq%cMZr3_BG
z_>##}F8%XgE}MG!6<1z$^)=T{yYBiMZoKK{Tc+Q7+wC*%xbv==ci(gGefK{w>%oT}
zeq{EebLKua@A3IhEO_#-e|u`-(~A}_dFI*Y{{H+6FTV8hEB|=)wb$QR`sQ12zw_?0
z_ul{D!+(DC@hAUUzT(rBpZ)vuFTPy$pRZPby=LvYZ@&HR`yc+h{>T6Q^z(*|zW~8k
zuql4ltY$jYRGZH%*(|J8Ev!^+n*yuVI$dMpD|WpN@7M=%i@dujifZ(CQVKfSNXSV)
z`_%IlOgUeKT6ydkpJB0SYGfEa2IXt)xbbj4mpW!3z(nw=XDEPv2Bo0MH_-yIsUKAW
zUsQ{P*(00Hk%^Ol*&_f|W08=Jo1l2pxt(WTCsp=~&;R||XBMY2qZ6CY{ryuw*K97d
z)eO$;FT`vHIDu<loWvzFpZVzk6McF*OijZhF#A!MGZ!9{mq(b?^b~5B$Wz&pytN66
zsq>Y9`6ldnq4rHM&jd=a&ohy|nXTL!d0QXGRM6^C+%Tqs>`<t0BlUccHpe^@$%HzU
zFxzuNkds0HJuL*!ybxeHS>g2frn9;6r`)XNqWCYgC(c#Ft>W}BlS|`|A+l+u6@a?o
zLKHFeqGcni#>3)jZr%*q#xj8Sa02-LrX!<)ZxU(vYIm?S8(e>IQmxA!aAE#8?Li(C
z<#sEilg$<xx)tyq8)8e38xCN)8LDf6dx6<G^bdCjXW@y1NLL*BFwoiMzvGt;CS<_?
zi;q8gkAf60y~qFxDv;DM{FrbG`+dUKTrK^<9(S+?H4xIsd+aX-Uoe1%B*4HVq!22H
z1Hffd0sKB;5Kduu0EL)3h5?ir0Pw^D0O}r=V!n{BJiQ2*uNc7e7RyrrmcpA*MpmuR
zl^Qam^0yRa5AiV$k4fp9KQCJT5Ry=c1WKR_rVXU%LvO^-4)4*m3y#;FIvjLB`?hVw
zNbQFiNL>yvAihA2O5m6oeb+T>0r&@MK+KHVmC$S-XudDBXo*@67-&YXYu6qCp)*|T
zKsWJ2K{SvknxPJvD&(iIzX*$g#Klem#Kpi5i~&Q&#-9%p{sa^nn*`huz}x_U$OIrX
z0Wi&Inyyl;0zjh`>adVd@<8b+IxRHsQHgm45mkg!7cVkFEJU#Gx%-Y8w@tr=!d8e{
zamebDi$^*tX}~FnvNG|Y&?+<N7#I~RA`*qNg@g+TfFD%_CL&Toa8v|Ps45i+0FA7o
zOwi8g+6@>;3ZP+9I9_#=Sn1OjxXFjx2%Q4OPI2}0Autqve+I4%8;*YvM#aAtl8W0*
zbKw$&hAA(ADY%87OZh_n)ea@OM=r^=r6(jwo(MsEElX4Zee&_LcZEb*BOvM^IkWH&
zf>grHVbB(`0f>JxAv*#gd|S*IjL&BceqFQ!BA!yzf`BKLGw{LpsEWXa?g1otUkM0%
z0>wQ=7xp-uiWk6902GM_P+0s)vXkYKLi0?%C^ioW&Qq~6WhyWd;yB2LK>!7g0-~cp
z;Zf1*op<q-nRf#PO93=o3W%4=d($PEpQLL`_?+*))k3F2M0kYAD<U-HX@$s3v2y|W
zAR3QjAIIWt**ld6k+Ap?Q}&W0?m3H}D}74#gy;c_{9rvx!#_k9vnUV%9tT2auwp+h
z__#;SFDiA8(BW|@cdjQ96e17P0R$+I=g*`~b8H@Ffkfx&PM|U;FnWN6PxF2JFIiN=
zqGdiB6|E26my=Bqd-MWRYay=cd&A;A*w{Jw;z>Fp$qq>BoO}F+>9q43R>H0dh9R;3
zV6jNrd@=_~v)97ckM9Fw?4-t(*S3Q5q;bWgj<kRY`yQ^5G6p*A0|g9)EBwa%2)WPf
zv#MQxAxR)mFb>EU24qIEg-!cJG#i>+9S&@-1AcbfwW1>At)VH-J|cz<1AdGXYeOBV
zOSkoK2s)fC+~rFz@ww5o)ve^hnEH8UenKp0d+hTUxsrNGE~{EWDE{5)bPxk6ZLlI3
zNh!72YsMT=4VB77(_>|}p=&AtGqu~da6xNawhaGS1~3H}S}ZDf4HmJxF9t{v#0E5q
zjjS!ch*jwwBVEJcp~BP?vP1-E((ugK=uGMfNsKMiz)YN%un=P!U@YT{OGmufaL%VB
zI3WQXs&SSJkQmxvE)bl^>Zz-;1+C~zpy8PSB0v*iWorHbI6<0_G)j}KsZwDp#Lao2
zdv2gy9I)Yr6suTxj2+G(a+t;q$A=9Aje!SnqTs{L2A|2m{d53`y{R!@8xLw1UC~Zt
zZb?`NaYK0XT!Yi^PEH(R48Vho(MVca2q`J2Hj}DkX>;TX*98sck_ep7W8&q^xwKzW
zq;F&cJm&^ACk~t^P)@>{!Lu61bwn&%S}70D3<)HLB$*SIo(Ymfx^162_oq_`#-AC&
zRK4~SL|NF0(nGL4BSS<CB;{#>=oud((S?XiJ8u-`*;#_<$*=2dIOVlpqJEnEBhd>0
zc2b=*C)8m;pfDI5VH~E8dYzv{o%4Wzpz%@IpfWUE(;~eofz97XZ*f1u(>iU*xT	
z=4GIpO0$V<tlbp3bYZ!mVIH)^+R{j$!|XYu_cXd)lcvw{BmRU?DZ4&_wS?44<+N-8
zapR!(KFoEGfHi)yNvIPy$3?jYp<ybqG)8q6u{*g+le@{%gRHFM2uU*`BQrrWzx+&K
z$;LvqXcw8e#=6T)@JOq}pripuCiR7ZiB^<0q?aj(GLXN?8<jYbgGw5Av{%avu5@zL
zoveoYbsn&h0UVkCn`ioOuBpGY6J#oTj+!eJ4`NY=0OPdQ;$5!ufGrWYQf}GF!3_R3
zCpq%8#7pvUX^xwDK2;Qzg4BO$rzvq+?23RvL!2h#Lg{!!>2w4IL~Fi)w5Z5$^GGO_
zf|g+|y%}Y(oA)R0<S7)Td3i#}3duikm`*{mpj`N=xfL`<B{@RkQWCPlret>ueh?nM
zX3Sl*$SMi-NTm_eEgED*A0bm|XBJIoEVJZu$R&-gRl&8<ybJ7-O;l9*4@jhhAzh8d
zR6|*6-djc|+=8J7ai>Kh8XzZDkQligaB_diZNr&u7_UvNmgP(!>E_{l0-n?upz)S8
z+!8>t(m*f309=T1Z2|~tnsoY<;ZBG;!E6*gqy~|3N4Q1lL1@%B4f>{Xsk6Z_8I0}#
zWC{U66@pu-Qxr_Pq7^b5kcFXHd@mP`?9;ijwi#%Fo?~|ZG=HjZ;W>Lt&px$lE44;f
zwzjr#11XF&--~-N!L~(rM9<#~|K}0hJkrQ%gX~wZ5lAWffXCEkgc@`VQ|^>7$EC|@
zws&Lk+t3s_+hDK^3#7r`lB`d)wgv1-9-GX+;Eu2p>>Th?0Z`s2b_OGVqHFu}3yi<X
za)KFRYg|>lRnos;eO8(cE6drUA1jp294D@unV<4@?XUcd|5_CPllkK*PZxjmFDDL^
zwHftfP%BRu5^*wu$4t<e2^O<pxHOMiJV&tYMfkn^rb)}DS<;ii-mGJr0%i|++4gjV
zYX}}C1Y?jXAh;gNpeN50_KM_RkgOpfd=-)@M>;D+ytE3JhB!7e@@900m+jFwNtKcP
zIci^YSWLeqOltF^++%cmj6c$u8X~106AKCogne}AEwdtk5k~?r>PUJ`o<6q_5R01E
zEGC9BJPGc{WK+VFynAF>*$QNZE0HcT;4(WFmdMeOttcKxCTl5}5G;%!DKZewCh7`Q
zK=k979RnA{?9h-Z&oUu@EnlG%PaY?0asJoMwxmB+#Bp$iYO@%Li#<%1l(Pu(;MAna
z12Rw3m$%}wFsHT)Q>eXQHm5&nSmX@nW*2%~tl^{?v0@T`LOSdK-BwSPv*1NMyUDF#
z(}Cw`5ymqyYp1JD0cBj`u~KZ}`7X&uMDnq8B?ZXC8RF^_mb9}Q+J=ty$9#4l9k@Gy
z$BozS_#4T@Y=4V#wht{~wT(p<Sn~FpOAU7VoJRD-uFU*=LrO&L&QV*+mZXFoDYHs*
z3vLEb<n%d8BYSv8X~>L1^<A75n>L)hV0CI)Z=gi}95#gOz$eWO6P5QDXKpIE-(bs%
z6Ui1?Gt$$ZZRT_@-At<^seUw@@h35dkNW;0Teqm`>-oMgFcptJPgA7G%VjNx(GM>w
ze#8sv8+nt}M}}4T!j|1q3Weo(3x+KRqd>I~fhV2X7|iiRUEU)f4XW;F)LG#BB(bIi
zr0Qx5=>gPeL8-!{jCWgejBx7{OVKcrq!8|hRMkHG6>A3R*eMR`%LIQ>tfoj)BP9X0
zX9z%-n&GDy1!v4CWjyF9Ig^`As%LRlDAX^per0`%!V2nC@?sUrP;qXpGycC`oncZ^
z-0<&un{0OC6e*{oR*fiC!z<MA`ZT;cjYzV?_yrcP0Kls)sV){}mq65A0$O+}VYDjP
zHIsOgda6|PnG(G7E|R>evNim9$nxUIQHk7z)&dK&47_-dpnG!XT46jqo9ixN@vjt`
zSpCj$C8P!uJtf962-abu7m?K^B6Qh5hMsL@9cJw+-lVxFehj`y28C@$Xe%Pbevrlk
z_}F0!0$MIZV(lcKa?N}4NVx63it)9Eap>t}7MFp-mDA+@)b8E7bp9Pf*e$Eq<aT70
z?Ck1wX73400e6|D8Nsa0DC#mob3{U+rJ$KLYBR=0i=;vYZ2y4ysNVHdRqx14p3eQX
z>`EoXo&ut-+28{FY0Y3$%FDTEBNt0Am0wyfnD%{zdA)Kf-AH~aa~aomuB>L8P8jo5
zxK&v*G^q8$xqy;Y;b7d$IhY2)C-Lu*T&9xAv}`y`@mJz0Vx(CjypyVYK`gVl?FAFY
zk3DZB)?aSd*0na{@}}Ib(6p4Cl@gCPph*Yb54r)o`^}wASkx#I&I2;e7dR#=4pPPE
zBD2a&r(w<uhvNAxhbyweRYMMUD^-Tm=W5~xv*+o$!Z@=m<y|<#5ucbR)C2I6$N;=3
zGGpCyzno{A%&w?bF{t|dE!%CgZ>>e!tr|D7?y7Nx_;=I}b$7mVmwvmit~a6ns&UO5
z7UCX-YV+>K>092r>8<-5(PHg>ueZLW?TU8v`kN2zcJP``Ju9E+GQ9g+hpienx94L=
z^*wggxOT_w+_!nZ@%?{1dFa5YgFZfO^3aaM)|@$S#7XD&JFnB2RpVAq5EgC0B3$wd
ze75onTeoW2B8%$cT2vc00%+WXr%n|iSj(Q4rJ8lU^s;{HkTp?^yul_JzMs}jo1sNv
z&#P>ltn!dfo{>O+3o7P?qYRYLa$8=dCJ;wxISwqp0h$a!T7Kwouv<#bs>p?$t06*S
z>`rFrhLfTt4RrM|4YQ{pZ|7)zY|N=fw(-q#TGgX>iuk0Q?o@z_u$B=yAjj7ocG>c!
zX?@LBma;paEWo!;)nsJ<VoQ7MY4RSKoJS$vX6Np*T!yn-ZIWZdUUbRtf0yQ<OJR5}
zLCiul$@}@^6SJyC*Y)%$;PB8`Pb9bM8@dt=^s-Y^I2G9WZbcLuCrefjoHis#usH`d
zR-#M#Ay1aPXl4C?N$}nzES7~TSb%<u(q%D9#s`zaVGA&WPc2yiDJr(Qh?K79v(PE@
zNzZ%LQ(t-cicbwz)irDBd1PJI5>$Zp7Ct5F;5o3@T{?l_hU=$c)kMtyGhJOcw=#5P
zF6BB45g<!ndoDTA^g8)5%KEsKqd)`{V97)<2zz0wO|&k27k6O&W7%jPOER+n-JVaA
zikT|5zJ#)~IET5+p64|rTy<k1pazj{evFY}hLh?LzXhcZEABMC$6n(VRHE7ctSA(i
zA`48|4du8+uF9t`3wrGj-+vp#+Gj)?U?IVxevzr{bkKn;H9fYHI(NU<*vgpwQko!(
z7sw*{35M1|+jiJRyfSMCPu_>_zmTy?onS;V`@)#jdAY85xcT75n{;dB=7M&+-T2}~
z3k1hTTEN+*Ez&w69jBN4;(l^%N>D7%$ZwgL;K+%)<acEySMz!Ra;E*}OJeKu$ZSRl
z2S?7%BQqN)8WO1jFn&g|Ty4D-R^P3llWo|Nb64;{TV%3E5^Gk{CA6Rmt4dG1r*&K(
zwP_m>TZ0!z27<*kj>l#7)R>|#p)HYV#TN{2Kbirlqg2_RVbNwmWasY6qN8@$KE7ZN
zt=ALUxiUFLzcZ}9?a}f2IF=xF`%xdnwIF|lb-3UgWMuI>Ilnu4OoceS-c!OaE{G{H
z2Y$Z(n`oDf;jPJ06herdqC)koJEh=Bl{6C8_2khUx?7Lvg`fOcPg}Mdq)qDAjHv3|
zIS)Uiy+m=piK>1%k=}31^$pGU#fNRtGQ%fE)lXWde#co~z;f_{b0t+(-ppS1*8}V2
zPN&`U&ZhNpCC9I)U-uqzH5QPNCx$Hjp(7zDdv<&4mU_;UyPW#p(%UT$1DTX{mlmr%
z9b5V37a_*>v&v)RIQeE8UcNB)xX}*cd00thSEvs3uk?_-%)PK6C{~GAXGM{U$2pNj
zBiE_M=*sms+<220l@@2|fMC9ZEGU+M6zV=783ktEc%3qVd(~CiS^t<MYzs73SSvVx
z0j@&$4WYf77|?E{T1@mWMAey*vjKoJ<moUh8i9+eLCcLY+8UKCn^;p+T2{C6u8_rr
z5{zuy)%q=ul-~#Lml^Ah_mNdVzLXECT`=M+Q5l3J(@jow$%rl^NG^HL(7C3!j}q_f
z*y#{DrGTzX48c)*fTY^O=OkK;nkK}|3|I>~evCIr?#;H-RcmbU5f%6Z$keYF!jy;i
z=-#a>?Jx}OXS0g7ZG;cl%$?FE52-8Bcf$pw)2x9yfIKb&jL>5tP&{*L|5mL*dv^dj
z9Rh%_9iR(;3tSfnMWBMB@E99>_&Fu<mz<zpaOtn;{L&Y*q4ODx1ZSFjB07wtMHJUr
zTui@DqLSw8Q#sK&`*e#JGpB1|AIGnp;|K$H$o6%)DY++lNgm8bnvi@1Z41SSLrdAG
zH(aA1weUxi&uY!+UjWO%@H{4;Ie88^yU3y0bVhcq=qiLy<_ZbvpKW(5JiBe1)(IiN
znl+HO+CTv%0^ryGk-t`a;X?7VSFei~IXw}w>2V=TJ2ATCrD#vV$XA8jAZ9nYI-jl;
z(?IRfa}?$7Iu|8{ngsZ*Z7tF7pno6y8wq$N=IfR2VbU?57oXj=kX>e+ClEbr53eVz
zrzl-4Ol$q?&i8j~KlK!HX?8uDPfMSzT(O)3F6r`d<jhK|W@VVi1!rhvwx+F<v0xK`
zEZGEP`<TfTpk1yFbMyV`&NKJszAouJ%#w;~ZWS@90+=ACRdC7Vh})AX`E+5)UYm~Q
zC?bF6_UuSFdeEt3)#xoMl~M>Q<w}qaG1pq{*y2>#s$SY5ih**)Mxo;Q3CbQ~bZbe~
z=(IzOq)xJtFBOd_{5>{pi`HMtvP>0`7_=+Xu?dG7{u(OLooTN{W5MWj^v(n^h}DQ;
z^+=3;lf+x(@)FWR8U?-iCW>%edrhJ`Cd1C)Oz2uDBs(R|x=SM$HETFS+cq^)$pyal
z_9z;l!ck;DbWs)w-@}~+=xT~9KFLc@Vm)f$rEx@q6BVO`yhFB2GBr#rLcmY5OV|bJ
z_s`KC$-IV%^?rO{QFPdtj4YzFdVHTOY}9(M@xN~C^wb+YQ(qB%0XZW5+b`M`*;xm1
zXu3=9QM7QOXkG8u^tPgR41wtG&gv+cUNj*nksDPtU6)f0Hjb}eqp8IVxKz$gtp<|`
zct9?Ff%Kj@_z;j@qw@>wk_kvq|FR%Ac=Ih$J~_znx%%rhWNjvLr{>OyN2nxVWRlod
zyDI|kw~Tg>?wNE&WFuxd?1GffEw%TZ8=PSa6)lzS1mtMviSvcY;K=u%{GdN`sL<oL
z^KH;WrGaCP)l>7`J*#xL$zjRgc=VZN6+c$O9nvHEHJAO-?`e7RuUCH+5%r@1#f*8_
zG{gTun}Nrj795l)UMREt6jY}|B0b3|CN2Ro-e=HYe!*$kGW;9yX1x7qQgnb1sL^En
zn#d=`6;O}wZ>%rXRQn02C6>m~6T+@!vNGG`N&#*q^?KZRFb|619Z6XOW^ochF0L9b
z(h3=ocjwx^*<h|%+;k@2Z<?>UlkffWy`FT=y9)}~z&GnC+Gu;{F-7ptUdMcb&M(~<
zE*-+<%~%LYOF&nw5P-=#%H!&5uAO$>_4x|M4L8C~Hye%IGTqp~RdQ<q`@OWAPF0H~
z0;EhE@t&`)D>K(kReNgIuKVWO@4o-xzw4#olOIc4E^~f5#*LPmDKDUw6Cq~WWR4q}
zH;e1Do}z+M7~_{_QDFr-lbV*4U~yE=T1qETq_%ae%r>gpkM3$`%2v?#xZb_mG|Ykz
zxS`YEND3VB!EjW7ruGRmE>7;DTqb@`Q-tGfaK>}PxtcwVEG#o&%l2`T5dNhLw$tGZ
zo3(V|#MUMKOG5Z3jfx-t_tVex=SVhWD@*ydHp2*=Q+3xZW}Otpt;=W#XR7fm$(i%V
z(PPHq9>vGTjsmFA7a9e1`4wVs;TGh|e7lohAL2`|l1k2nds_u=IW|$|k%u38P*4|L
zJZU1c2RM5Xvw6v$*cX_LNVdqPPs^*7JghT8)-3r)S&A3F=~?&PJM)eik%VBbNoH0e
zvzbWyBq@AeXe=<1MaI(mGDzTq+ipi@TUs^+;I5f>-y`a#aGGYAE6YfOsw3^2jOfC=
zF0U;e=E(QuC8HTVZ62bY2{A%)si%;);Db4grAezS=@6y_kte;AAj+dqhM6PB>QMv8
z+5<8hL)M}8Qe82o1gsT-*agsFcM;$NRWlLE)uQ{Ain{p4P>GKzeqwQeyhcetUgDgN
zDtp&W^}P3<&Go9YDmDplpCo5{bdE9aMsbJ5p4ulYfDPVfVC*OV^67T;i^6)G!;qgd
z(wb%0OnNmhg+8?Sgk~S{k{_$-n5*GqLO4U|`a`t*9*&xqj2PkB&=A9z3874<iZ(k0
z_EIdSbVjD8V{{&->9Mckh?hgG6jqx3sf8f9`T^3HStaYMT2dX@h&gyBj~3h~JRK#G
zYrXhE3tp~NGaN-nqX?&soYD=KD|~sTy(zcY^c~I}$un1kw8(>5Ey+d61fV&iq0Wz7
zia#@_rqAm6OR62xB-zW%^Q@~!r2lysfz_-l8#fYu`~a75AaSdvw19a$HOMOKP`?2%
zzIfILfw-PjKabdM91?p*#wU=WdG?aL{Ft7Keb-LCEgZ=Xz6u)>0i$*ueNSZ+f-AZ;
zyGUHwFM6rD*4D0x4!x8Ww<Ll4Z0aNnM<>Lym&_t2yesgWVO2!pf7-k$(qF&i*tO)n
z5O~fq-x1R-MlXoh+C1uI;_TH#;MROiV~GVtDybKBWw{+&#v?tdz|GV<mUp-$ysIIz
z5EB-%CLwOroihiyhZm!kweRLV?ouWgze6z<G4@2Sl2;2PeLA%H$kL3LJd(Z`J#C17
zO{H?Q)Tk6qrf@c+WP>ug+-{xjWJh17{0<jV9GHf~i=rG92_ACV;8O>j+`nI+<9cJ#
z!;w9EbnV!ICOc@NwWf_5?iG>t9N^jXM8Y$}8K5CFQbcRaUc(|QZFmQ_rHgiy0s(NW
z#r%2b1VBB?mCza4*SdCt?r6lJQyE0wC{U(nK&t0{a3W|t1p@gGtbO!80CPV;fc2E$
zKfk{g&Z~!YQmb0UE*0&nuiWCot)JYwPL1bk9=C1%+Go`{n+(|Tgx_@8twp_d^+wf$
zJ?b^srO}l2)0)g~@@$hS>-oQt&E#6L<&b@EZ`ok~jjia}3)|AaN4J+(essWH2eml3
z_91+`d*wenZ`-9|w~mK?-s8KT)sH&==v$9%*88I4_v<^M--iApP8l_D*{Pid-+ubV
zLuZ{aarnHmrmTN!#BS$q7<JBQGWNo8Kd(VHvClq54yg}mNDJ0vT}Tl<%CD;h&2?~{
z^M|ug))2+!9;+se8;PK*9NFVEd7SDu!QZO3*$P(BwRd>7Xe5=vDy4HUtket7h4wB=
z;-mIx{&BSn#k4=roDG11bSI?%gwp}AMi|}He9^cujP7YLyg=ha=`<Tx_hoU$>0aUZ
zu|IjVjKW{6$d^n_oCp^y71O8>SzbD_R`e%Hk1v?8+B2cJnxFG3Sa>U_gVQ<DG~;LS
zEPF0Yo|=onDqP7ZRP(j(BZNN$RYOpX{8V&dE3F%ryXeR1eF?CoDNiYjQUb)5)Je;c
zQZk7)G$wY#M)FH(Nn=r-CTo#TP_PFpbK`VwoHxDsG|GJ+ozfmB<PlT8a3DDbM-GLN
z3!>j`(*$GW;=DPjv_1K@D1uc|H*uw05DDwb4K7@;*@9Q>jX7rs2U3I6Iw_;X+Ww8e
ziceR5_V3TX_;S^MzCy06wd=??1(v^Z)=|#8E5S`NFsA#WEg7;WR#85*RyF*uQm*_`
zP3+<Ov--Po%Y2)CWt)nZYo%^vN$u2^Wc8Idw^LPb0ApQK!rq(5u@WE0?14+EV&ni|
z>#cr;#+p|BuGyA0=%d)uUz)Hb6Qt6#;%w<x#24im-|fW5-nlsgI@=6rr5rO_A#*Cw
zOgww1YRUf_Qa#=IX3G*f%(=?7s5G9@w=c<NS~b?n|JSir>O-1KD7oo--?H_$xj!?q
suA<tKY89VXf2ZbN^{3ochen|^*s5Ylhep+zz;8#VuK2mKPJ;{n4>vWH!~g&Q
literal 0
HcmV?d00001
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/fst/xid_start.fst b/rust/hw/char/pl011/vendor/unicode-ident/tests/fst/xid_start.fst
new file mode 100644
index 0000000000000000000000000000000000000000..3f5a46b168cc233fe408f420dfdc921768e85ba2
GIT binary patch
literal 65487
zcmeHQ2b>f&7e9)?o%j(HD^i4`Nbf}j4mm(Y5Rl#xMC2%f0%8MHz=C2yMMO{$3l^{!
zis)zWidaEGuwX$zK)~|8w3$gV$+Vrl-SxZW$8C~HGPB8><mHwBE0L83ANc3*zy3UU
zVE?|odw$=&Yv*q}e*NX=pSJ(_!}s59+xqR6&EI^zY2#O4e)0LI8$Mb8@kbwi@cw)6
zzVr55Z@%&RYp?$Im6zAO_{{Q4rd>RB%H)eKoHX%*3FF6&Ef{nD=<`OMJ95N1!w2;5
z*SAmaUT630(fzD$UC-=t#_6YZ?$j~A!>R2d(N9*f;w4HRQmS;B?3_c(9`>JdsQlqa
zR5<deqmMb3^?Y2#<11CJQuTyt)pKjqtX2EOI(6&SZ_uz&<0ef*-np4mzd1>^p!t@q
zTAy_CDQ()e%WHpXhy0G6I-hp>8C}lo3f<1?-lON)y?XcQ+pqtCfrADQ89HqEIU`1%
zJL<gA=Z`5E3**L5xM1R>3ok;G(_rRP`v?}@{ozA+pBX6crL}8TuX^!?=bwA_nWvw6
z@`;rz9)Il7N0vYQ(1Q;wyMO6@_ujK)@!gB=T6pJzJ8r*i{;jv%eAB!eZ<u@ib=O`q
zXZEbCuex&PjOkZge%YmF+8s7@$lyT(%@my1u5FuBPClu1tClUAH*4CYaifL}>es7V
z=fv8zYSzfDUhRacRVr6HzT$Dm9&_|jM^-rE@bakKe-0~qXij#S(xnb5S)zEcEHee0
zX}4MP76!`mAv}xjCQq4q@w7`Wz3lQUrq7sp<yBYDnmy;5Yp=V0?hQB2yXodzZk>PI
z?RPA=bKzZ!?q0m)o_p_GdjGNq9(?HG<&Ql2*yAf!KJnyJPe1eQbI-r<;;Pka*1ojv
z<yZdu>T9pR@#b4^zw_>U?|<;&M<1{MWW%SQeg4IlUv1p<^*5Whe7kkqci;c;<My9^
z{^i#lzwO+$`}aM2_x-W|z`;NN`uiV1cnJQDpB=x@byHj_UtU*4tl~163SuS8wn(sa
z8MpsX^5wbp@QGb+CD4YHld8R5dhgSbmP_4gITx>$v!~-Va_SViw_8I|Sl@JgghNW<
z`{7+4a@c(j1-#osIdJxSXSD=%tB+y11be+iGg3|hsd@mL^a^#WF504J%(Py%)NIvg
zf2t@h=zYn7MRQoO7Qw*GUVr^DMP>4m&Eo+%p!ly^0DTx?=ua6MX3wId1iw9aM$$9k
z8>Ks-B|#^$@aq9S_5QZw`|mdBj<qrKYZJyhM_-kuTn7hf1T~ou2M0}SwWw}A;0I%W
zx-*<=HT0$u3f!dJ_vTx8!L9S1EBN#QumIgz_{0$AjtBSd@mO-0faHm!ok6{qAmj`t
zOJ#vkP(m3ewZ4?s%1`1T6X7lw3u!6odrY-!FT9H+7tuVrJIqujudX8@^IJ-oSi&Tg
zaD-{YOtQ4R+PTMt5iyW)I*JY!`ExY>e~g`z6UREdwai-cYQkPcOI-3We$WFZ50g8`
zIk$k!w3N7-1}Z?ak~hI2d;V+JPAp5^wDJ8lL}K+MmsaV8RX5GE1XZe#YPc<x>fR=r
z<cW-1hz-P+nl>ZCPbT}6hQ1}wIr32HGJtySCzmY=DZqe%04^yhB?7=wQX;K_X}AHA
zSmCcLNMrFcUCaPxmoc9#6~V08PJRwfqH97<T=eP(wru{8&h|4;dCxbNP1>SKeAFI7
z#Ez|7wP@b73Fi&;GDTF5NfYT0WOGWArAh<1rQ+mG1}+eX8a1I-Z8)(GZE5~PCLbDV
zu467$Ew#B;z*2Eg#NzTYFc8l;P{BCvWQ5ugz(}R&+)+w_l|7Fo(CFxM<lFK?QNWC)
z%l9u`vUt(LJBdgcOO%;7`KpQdy&OD<y^+3?W(k4m!&|Q+nKA%XmBE%@;93keXLwR@
z%LdjM=)W5_0=K3^&}8sd&4G!Nan_nOoa~Sf9idZaoI34v{0Hkb@ZYY2%noNYpZ=OM
zuOc+Scktt{uzS~*&0lY_-bzbppkIc`1ltSo@L_|oF2J?u>fg-6v9Mo1&G^LxMPFs@
zv7oA|@Zh;#GiN~Y3r82^maUvJU8*%Hv~*QuH3nN-B}^$+Rtf7cP<0h<SD@=rC>uKt
z#!nFPBzeI^sQ^uie-=aa$Q6}qY+aEx>#C~AttVnCBK&e==|HzOfptB??3W~CVZG#K
z04pnr{8G?XTC=ZCK4z@|sKQYkO<VbB72=7tlIk~O#%QUap|qA_;5?a|1+#UOlVJ*2
z;!m~nz<G2<sL6_<GErE+PIc33tZf(1^fHr5kS^g@UpbTMrSq&UH?SVhnO&DAT0ghY
zNE$pY@R7<wWm>g{lYr`?K~_dPuQ&7qM;k3}xp=Fks%aRby=B4JapScYghDhS!kp1%
zRkRea`C7$ZfSU{CWn~GsjFPlY!Q>V6eFHsiKxHK!*OPS#HHk3Hh1-N^39ubWP<{S;
zZ!ilI-}AAg2f!vKqs|>pH1o`^q+N0}?z(BLs}?eg5>iSI!uX3U&-Q+d4MfN#$w-70
zSRcY$$sb}t;=Ht40}^~Rf(vLXyvy<vlCPiy>+^vq^wGVAOAsl)TTs1~Vqrq)avirW
zA+{yHrX}R`7Fg9lXty6%HxQdB%OuKIE?j5^B|vPMY?dE-QC0zjEyOWs(#mVdDSQ@y
zuW1Po+Y$h7T>?)6u`dBoEKDG%i3!?~a)OKF0hl#j5|sSdCQ7w|iov{zNFM!dn}C?-
z0Z;=yJzEq^^VO%Xykr`3>e{JRqsDkz+D$~!3c#$Qu$1Z5I4B{L`v|)9TAEtbuGEh!
zNOSAH(5Hf+<gZe8aws`r>pldTOuP}EU0{2Cm4m@`LuT_;m-T9p(^UGg8q{jqhvx-H
zB4Jstg4ukPF7+|mu;Ts%GgiKlq?*;(_*oQ-XMA>&sKjNJpQo{64q~`0ZLgLczOc<p
z*Rl3Nvd72zWcj*e;SAWgUcL<6*4~F|<lSBALYKXi6kevgqLiV_BdiZ;+9Vz}bGbo)
z?l!tNd5W0iKY>>)WsY1~TLv3M{u9O^hTqANulXL1Xp4AE@138C-bxz^GjDHU+%u@n
zvE)tUqLIWLES#|mnDVxU0iXhRp)XbQZE3wVenI0%GwvFsanTSu4Uvpn)%~E(k;_+V
z5zhO?{(0DuOV~i%TZXhrSPocr>(~-R^_i3p>Y5JktL)<EeHL5sxVKA{<=z+47mAK<
z$ko|3Bh<0a#YniS849$tWQ$^@M!owu*}%rP*cj4jrKZqT=|BkLGaTHvmpOtGKTv!Q
z58-2H3qgap)?t^>xcubQK{e*F8JlxWWkYpz0ALH{c|)Ufr_6B@A3Ff-W#`<_Cq<>E
zOlS&oZ@`srqz%wKadI*;o#DcqjCa-aAw1v9XkA8<U9g9y&hLNh!_M!L-#hVn|LjxZ
z8^HbDu~KL)cQ}sZZ|t)q4j$Ow(tR^edFm(c%U&@b<8bD`#7q9TiU1}LdD+6-GT|iV
zU`#^^-Ju8c1hbnvT?s?!CmnxdsuaR1A$S~<C3ZRn?sbfF#PJw_xgHammE4a>0bn0w
z;7-UmhrU!PKynD5J`t5uE5w$!cS4t-PKHPr){SB!5NC4Lptp9-Tt2&w`*`W%*qn&`
z(qaG(00NW+U{*!s62K8q0gePV&CVnP5Wx$rK+zCL%5B{lSP)G+D*QjFe{6G<0UiCm
zo4|2MKpfs&x@fLigohA<S1TMm1oplVR0N#b{*(Cnpr12<9_2@rp<y^O47a!B_7fbO
zgJW}WXb!ifB}L?zz+q!tf*u7t#<eNduN5cc2CySSEdUWVM&1;?WjlOYhl$fm(jpX$
za9XgdacB;5Y7h<sbAh9C0Eg$GewG-Y12{kjaD)!KA5#LYl;<zgb0ICxLh6{YirGM@
zetWtPbPR=!p`bCgl_>^=p_7IMA*RU#8}EqMpk|CfEh~;BMe2U<QnTpJxAy7Qv{bU2
z5kpXhT%-|CCX65h1VRSAm$~GqT08h1c#{i7<n1CVDfH;<hLFC2;1HGsC?ZB|!e2jZ
zBB5%aq>5#PUdUXc%gSP=VnGdVEnk)*^eYwpDiG;-R|*H9h0ms|GMVtQvP>|6QBNm6
zSZ&Io*=K{zt6N7kmbKim+|X-yUR94AXR0ezcG6DDTNCK=f_56Rl#k_t#@@wj?h?q-
zMSTFYCbNmpetLP|B?7w*>p%KXfT#4^h_z;ffn_&-WateMXp^WqJv?u3%Mi$EC5+L6
z!CHYh?}czGaD;X?NFaT5BDABn+|L<)8ORud=yiKS(<JfF-+!`QHN2UX1_sslJZnIU
zB<|f9E0RD(5@8?4LMu&O2zCBTrje%QIv7;H^Q_#V>2!e41%_jR+&*lMtHA)s;TO_W
z1xA5$osE!6={^XFMCu7a%6$^mWyDL^;S99Bd}u75oLY`}SUN5`6_k|i=P&s+S&9Kx
z*%JcSO)$zGr0jGlhci?%f>9kT*iz>@1uz9PKTVbp&0}B{lVBT+VU=H5M<HyVeddYq
z;I)_NA(w|)Aj_w=&oeluO9wzVKcq54B8;*#+ENOmD}xujvSF>keRRS9ZA%LiYO;%D
zC=kf>830U72D4#~^gKca%O(H>d|Q4A!`up$(j9S1LrY#o0%~_uc3jIBd4MO|kl%E#
z3*lD8L6bRVGBXxqK*<aynLAPtSQ7$j0^EJxo;+@C4BT4G!paC_+9AunSeFLQC{e!1
z3q0BWExH7H*K4N#b=L?GeNcG@73XqogUaWdy#0#+z%3VwU`I-kBn@ETq)KQ&aMB3<
z@~<Ob>j%-3s5V}LN~qyCe%X%1+d|q;aTY>S9F9>C;w>09e|hoMF1ubP(FyAu6x4b`
zkp3U^xi!O?{w+&R%waz$17SkUNk(vm{0{B&+ENXf6j#CFWh>Eu3UUwyappLmfQ!ls
zf<2H7${A8PoCuV01r``2I-#1*!1gIc2(BPLsx}=e`T}|*(+M$w=MP20n%FgQuf<~Y
zn@o-!!pKqyUE?~01!Eu#OhagfL5?7T4kkc0>Cn{Sa;-p0;fPTMatLdYkjl_C?p03>
zqvWF?agicM290FC<$e-<i4*pQT}NxvfrhfU<#IeeoS_-f%P*Za^+FlzPd|F`mSR9_
z++lGj0azf;r2;yEcodm;SSdGt5Oj4~kb>7k;P7os48-i8*n;B|Jj+KNYYd%bmi(B}
z6MgB!EXig~&5nXX*HO7U0JDyR@^|<iOuh)fB4`1y7+OF;w}dA!CDA=u#j^5?pH=ex
zQm<wADVtM}T`s2}8UMDGpI+hVqrN#NtKz&$IR#~_CgUE->T_u0@IB9}b};vjS_e;D
zU+<a*4>ias7~1&TrX^aGZS`>L)hG98+bu7rpniu2onAOCr(i*sO<g;jmD{8G**U#m
z?lZsNg9Dx(v};Js;pdGQFlzAVoPxu~b6&Hs<f$Sni32J6Y7j*gQR7DS>%|d%&Y{<8
z)d8QFnE!wiNHOXg<?}uX!ulw|=$xlEq<FNtsvmi+nVEVmtoSrhQwc#OAfI@oXrFoQ
z+MJ?U|IL9CO>q)Ez;mf{i$a{xnj@n+z^D|@T0mJe5Q$KnLc}Hd6li@kV$=m>^<77V
z-yJDw-m1-;v>TT^(ye1@gqlYiY*Q;&yj;Iyp5&pXp7LV(i3xP#IlH)DQB@pB0vz&L
zP&d;!r*k<CoNW0XkeJDSG~msAYa=bYUTp7pQ?}_@ICKtMhAPdQ_eqOZ4R*kHdbI;4
zyN>$gpbr#aR3}^&Wv8S!D|#)^@U>3=ekt@+Iulfk&lW`A8YDiNH_3*TPGz>*DvUOO
z5UiURg9KkDt;Be?rU1;O9{N5;l1Wxqt5<Q~RYlt*#HPJ%K9A!qu$_dqgecx&4@(5C
zChmycWXQ=Uiu)jI$^Hs~k$}aIx0gfUYJ9*TBZBA?lu+KlV|fF17EJtu?n~C(N9@-|
zrtwASwmB&l``vOhWf=16aw91oZ5O>{1$4LPMem+xU$#T=I3;q52U&AR^suQ$vHft&
zDCLeK=Fp81r!o1>_U<8pVu>rgV1T*Ud($%5SL#Wo-6^eFQjbnoG*3ONRR-I27bk|@
z1jGRww~-Ln0RmGn8-F0NVMuTog9n~7f5JgmJjU7;y!k8-)GE6N?}P^B5J-$q>w^{~
zf9~x&ytMW??u{N+*P$YFIBP;U;1%m<n>}@{8i{=e#_&+gZrEK%pP$&>F~2d!9`qC@
zyihYNt*hs8+tW7R^KHi_Dg?8|6}5)E?!=A55TMVB&lh<7J#i6nFP<<30q!z%7=tWO
z*$-(W@#rTN;C61%*+~;I^090vr(eZA=kyqAcZ6rP9J5a@gMIdeMV#3u2iHV8qo!ZQ
zo$DI=c;_NcS(D_jh|=YxXVAnHzT}Z^tzia8)G!A5Ma4BPxm`%No~y2$IU_K3v7SZ1
z&R+=;n1-w(l0p!Wc7?~3?u3yLg)@qBMByitd+z06Cd@-$YL8X~n&m<VlawAZGcqVQ
z&`|P9Ppnj9B$_QgQ~morWz&*inmsAD{EYR3>V+BEc<S!coBq(?hpLcSilRn!LwbP0
zTE|yB?$~I1oim=5tR&}X6h6he>i_N5G5fa>Yjn~D3lJ+PX~W^!u9fm=ra%7hy&kkV
z)R|`Tq=_w~NICF01b@#2Wvo4ZVG|@agcV|(wRHlGdos6-De}7=4SqR>gt=6_m<<gJ
zKbom)q1gB(dA-B3BU3ayu!>&LIp3_1ci=Pd$OIt)!vM14TmWoM!akDQvEI8FSCoP!
z28)yExa?WCV&6QLyj)0fSd!{4bd{hPt|=+>0v}#gUHYBq9E!w?#x0!9M51}^>Y74n
z208M8ChQAa^<5(f7(_}_7^aA0T|hHvj@evn=0W4%T2>x5P;K`fyi>2x3+k(nc~##>
zK{KPxIW^?!p5j3966Pxv75V<&lvaq_Cc}5?aA4X{yD2{F;fW<0_huo3A4HcQxMzgE
z3GQO@a%f70A{~;07leb*)J~E>@OCC-B2onDEOJ=qsG<*YSW!Qx^^ngMd-WjuCJs<w
z`K|}PKZ$-3q%vyiOZtgacz${C(_((IJx_JxAqQU9)i9UwFzrFOPpOkZQZ9N14Gu6h
z94{u?`7O}1Rb+TEf$FAXB~&fIFveE}{KSeXtETQdMxHD|Idm%K#zKg3pXs%ybk|Yb
z=b6W%aq&%@uBWYATvB2E^jZ5+`*x;Pj;V);A|YFr@dA_vTR8-%ypgR|2BQwdEY4E;
z7vOyYPM_MrK=51&1FdhmSt-=AaM&as#`Sk!8wzBw`$^bL$wW+&UP@FsQsaUEdMQ3t
zEHs2VZ*_!%aNxf}Lo3<eN;u!TXn2|<8AW>XH2LIRqNVXtgl##*&&jeaV;;vZ@-`uJ
zYjT1iIy$4oQw>}teg#%!G(Y1azkcGmp3JCs3;bTG(dV5zl9&f}3ic*tmr<5xGx(xn
z0j(V?>C`RNVAdMgm<Rm0OQ{+3le0<7=_9Uabg1Zv(68P~{{yq6Z>QmJq;(DcQx26k
z3ZLkTm9&uw&~YMX4qf-Cap*BFv<7D7puA|h%`sF_v}%zlx;Oe$Ob~gcH^oFXpc;L?
zPMP4P{npKK{AFlip%Fov+$h!9Fp9m=zl^|L6soAtyeMAu(Xsq<sC&*+4$Q{dm)5-;
z#JK^-QQH2~&lnp9{T2(|b-<{p>9E$g#y5isP<|{^;n|7HmOD{7iz+%!xA6`sI`-!J
zbynfZ4@Sd1$KQOBi68?VRSCSg%wh*Nf>)jvfb-C<@(d|JEzBbt>WTp|JlXSM;FmIF
z^xcn%I)@tdw5=>Vyr_$$4+fCBCVERkCSEW^Mu1Rx_*RiDj%+8H0CFtV<($u}VjW;b
zv9(m3uGtqoWqcfDH2y6v%o11scCZXBD733Opx2U0Uz%y1s`Gs!yiu~(5%tHDbww>x
z@>+2;t)nqpR@x>#iv}e;b%`FG6a9^eavq7^KhgFmFX1Wh>R{`Z&C)vA-5YqJUa}N=
zKFT#ZiX_-Vf*;8jCBeR+{{R^1)kh3(kJr?t^R-YI_#S#DRi7-;DtD$Y!o-){*@Eop
z-dp%<ufOr80x$mg7hit0ansk|Y~J$i)@>yGGZS0<wsY6+-}mg@_lNZwp#AAgl?gnX
z-m6NlKM!t3l<4M|4ziy7j={_y+(zojlCoLE=>LsAbx6WK%%g0l5T|e~HHJ#}J60K?
z!=!5&mPxfxStWM@W6`^3kM2zj0pv_DdIm35Wxg@!qsIpuDgxjb>Eed~o>+Mwh9p4v
zhe%ccOHF{|Xmy4WTfC1O1n4Ofz)k;M+P|deLTsjD|AB*llGhRa6)!C%)|;axwoRZU
zpoKAVEU|%RNa?gt)dbwW>9Qy}n&Fa*rx9l)$Kk{=a-!9f{s*i>d{>jbuO4&P`&
zl~FgmM;}@K@B_>4Te8>_OhnzgRYVF$L)`0Zyrrws)3oGNA#lV-Z&nou2;O3VCHDXj
z8~|7*D24$?6K1Mmc;Us!aYN9a3?<gY9y<Y2*LsmBSDwJBzEeJtHMhkvO3c7ZRVs5o
z^$G-N2U}9}V9B<=fQE(uut2peLuxBfq++ti6sCeVA+*J-Z#(8u2<dh}`}8v}dBXoG
zsq+_wP`nQcI`$mXv${rP-eva}1(b{{9+ZR1>$3U*quCF55j+;4!5^5}MSmu?Sf)$s
zcep^;2o>!(bZapTZN6INIed)Lx+$RjWCf<M{XTB3))MT11lX&vqP2x1&CqvL*QeFy
z5k;PjeraGi%u!k#l*=21FnrF*3rrG&)v$)!p+#ZDgKBM=(|zSi(P39ARc6muh3;ul
zo;B^;pc;w2n=<&D!)ohc;8b2a?C>O}G7WpT<kv@SBaMDHzI-H4=jr@(3lPF7p{aBV
zEulP`AsZ*yfe3c1+WuJa2rMAX$H*zxxUU!<Hf6=$Z<zbB^e|NV&BomtOVG~nd;`1&
z8XfP|6f>)$WOm1|q$`f_<lWM%*R;Bpf^%esh))cxOwts6I?4`M7L=Ruqr5I@W?`Qn
z@b{-xHPq2>m<#v%DjSENhi;%72|h93wa^jvQ!_)yMe_<G^CxzO9GHwq{YRcNeCXhT
z{rmRn*#nC|yPV$n)RS8iqfy8AxmIqq6FksTEl71Ji7}Q9fa*}CDx3gN4XWEvD?qph
zXbc=gg<2e9WGQF?*lQLre9kEVZLmP79ppiKjHwKmYKYPvxP>7QQ&J$oL$Ob$1E~!o
zRO?YnMHfw;0#k|PA6&SoaZnk$0Y{UpT>Oy|tx8QQ^=PRMWmaU@KXh8zQstohPZc&C
zz2n&J$M@SbvdWAq^Q-jR#Qt@tW~G9Cx2UnJ=0&v|)*-c5*CT(sHn3W*YB;0up-q2j
z&hph;&1t=&^~jTpwK=0*>Gs=Bt&{&l$Fn*&I(^X@_nvuwx24^$>$&ah2EFI?y{P}?
z1A7ggGqm5P2ZsMTV)dvR=f66p(byf^fBa$Fw_7%UL!>pUiL9LfmN_(}9&aa%FTiAk
zP2kyiwMMRm2pbU)g@c_`tB&#N_(uzAhMi2U>&9;qVjiH09^lTlR5doYE)XCyD-C_@
zl$_{`Etq);0nvb=02s%cl#JlT&R5lug}YXy;BJ1bZ;=sF03=N#Ql__NXAus8t3W%i
z(3-t_*vvV^s!yTKDIltJxn*W4c-)M+mxd=HnHZCXIcREvOB^F388cG#3W?bXBzA+*
z{Rhy&bfLi%Jx8XKzA9!^oof;ye}FI|kFbEc+-k)!S4l8tGY`r9(Z}mQ+2F0)B(~DT
ztm&`6(LX67yHwsbZju*b>)n3a{9B700wUlT$V`b8#UeDT<8}`~yHiPTskygG<(rE*
zczg!cvN9RM#K`Xms7^3wSCR?M+cVj3BleIuMF(&uaLqoR$&3e8#x4!BXo%1Bd2=$T
za<Q!Jo!L9v(ZBx`Bk3srww4Uu>h02R<@nWJcbH#|qU*s&0NZ3K@i&WB^$cIt<z?Jn
zn6Uo=O;`OtWL=zvP-SA&=(FiY{pswtE_t0h3Ko}s1*=x5X2qpgxj}FGWwiXV48J_2
zl5SR=dGcna9b$6ry4_;G^k%u@GIe#l80^I{rUY7vwR~+4c>Nn$-0P}y%~Fy@4*SM~
s>&5iT(z2VXxy7>HDOIy#{kcTNT(xA@x}8TAr)&L-z?S^%5gYgX52PAb82|tP
literal 0
HcmV?d00001
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/roaring/mod.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/roaring/mod.rs
new file mode 100644
index 0000000000..3f558fffe8
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/roaring/mod.rs
@@ -0,0 +1,21 @@
+use roaring::RoaringBitmap;
+
+pub fn xid_start_bitmap() -> RoaringBitmap {
+ let mut bitmap = RoaringBitmap::new();
+ for ch in '\0'..=char::MAX {
+ if unicode_ident::is_xid_start(ch) {
+ bitmap.insert(ch as u32);
+ }
+ }
+ bitmap
+}
+
+pub fn xid_continue_bitmap() -> RoaringBitmap {
+ let mut bitmap = RoaringBitmap::new();
+ for ch in '\0'..=char::MAX {
+ if unicode_ident::is_xid_continue(ch) {
+ bitmap.insert(ch as u32);
+ }
+ }
+ bitmap
+}
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/static_size.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/static_size.rs
new file mode 100644
index 0000000000..2df35373c9
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/static_size.rs
@@ -0,0 +1,95 @@
+#![allow(clippy::let_underscore_untyped, clippy::unreadable_literal)]
+
+use std::mem::size_of_val;
+
+#[test]
+fn test_size() {
+ #[allow(dead_code)]
+ #[path = "../src/tables.rs"]
+ mod tables;
+
+ let size = size_of_val(&tables::ASCII_START)
+ + size_of_val(&tables::ASCII_CONTINUE)
+ + size_of_val(&tables::TRIE_START)
+ + size_of_val(&tables::TRIE_CONTINUE)
+ + size_of_val(&tables::LEAF);
+ assert_eq!(10080, size);
+}
+
+#[test]
+fn test_xid_size() {
+ #[deny(dead_code)]
+ #[path = "tables/mod.rs"]
+ mod tables;
+
+ let size = size_of_val(tables::XID_START) + size_of_val(tables::XID_CONTINUE);
+ assert_eq!(11544, size);
+
+ let _ = tables::BY_NAME;
+}
+
+#[cfg(target_pointer_width = "64")]
+#[test]
+fn test_trieset_size() {
+ #[deny(dead_code)]
+ #[allow(clippy::redundant_static_lifetimes)]
+ #[path = "trie/trie.rs"]
+ mod trie;
+
+ let ucd_trie::TrieSet {
+ tree1_level1,
+ tree2_level1,
+ tree2_level2,
+ tree3_level1,
+ tree3_level2,
+ tree3_level3,
+ } = *trie::XID_START;
+
+ let start_size = size_of_val(trie::XID_START)
+ + size_of_val(tree1_level1)
+ + size_of_val(tree2_level1)
+ + size_of_val(tree2_level2)
+ + size_of_val(tree3_level1)
+ + size_of_val(tree3_level2)
+ + size_of_val(tree3_level3);
+
+ let ucd_trie::TrieSet {
+ tree1_level1,
+ tree2_level1,
+ tree2_level2,
+ tree3_level1,
+ tree3_level2,
+ tree3_level3,
+ } = *trie::XID_CONTINUE;
+
+ let continue_size = size_of_val(trie::XID_CONTINUE)
+ + size_of_val(tree1_level1)
+ + size_of_val(tree2_level1)
+ + size_of_val(tree2_level2)
+ + size_of_val(tree3_level1)
+ + size_of_val(tree3_level2)
+ + size_of_val(tree3_level3);
+
+ assert_eq!(10200, start_size + continue_size);
+
+ let _ = trie::BY_NAME;
+}
+
+#[test]
+fn test_fst_size() {
+ let xid_start_fst = include_bytes!("fst/xid_start.fst");
+ let xid_continue_fst = include_bytes!("fst/xid_continue.fst");
+ let size = xid_start_fst.len() + xid_continue_fst.len();
+ assert_eq!(138736, size);
+}
+
+#[test]
+fn test_roaring_size() {
+ #[path = "roaring/mod.rs"]
+ mod roaring;
+
+ let xid_start_bitmap = roaring::xid_start_bitmap();
+ let xid_continue_bitmap = roaring::xid_continue_bitmap();
+ let size = xid_start_bitmap.serialized_size() + xid_continue_bitmap.serialized_size();
+ assert_eq!(66104, size);
+}
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/tables/mod.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/tables/mod.rs
new file mode 100644
index 0000000000..72bfd8bd7b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/tables/mod.rs
@@ -0,0 +1,7 @@
+#![allow(clippy::module_inception)]
+
+#[allow(clippy::redundant_static_lifetimes)]
+#[rustfmt::skip]
+mod tables;
+
+pub(crate) use self::tables::*;
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/tables/tables.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/tables/tables.rs
new file mode 100644
index 0000000000..ba7b0612db
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/tables/tables.rs
@@ -0,0 +1,347 @@
+// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
+//
+// ucd-generate property-bool UCD --include XID_Start,XID_Continue
+//
+// Unicode version: 15.1.0.
+//
+// ucd-generate 0.3.0 is available on crates.io.
+
+pub const BY_NAME: &'static [(&'static str, &'static [(u32, u32)])] = &[
+ ("XID_Continue", XID_CONTINUE), ("XID_Start", XID_START),
+];
+
+pub const XID_CONTINUE: &'static [(u32, u32)] = &[
+ (48, 57), (65, 90), (95, 95), (97, 122), (170, 170), (181, 181), (183, 183),
+ (186, 186), (192, 214), (216, 246), (248, 705), (710, 721), (736, 740),
+ (748, 748), (750, 750), (768, 884), (886, 887), (891, 893), (895, 895),
+ (902, 906), (908, 908), (910, 929), (931, 1013), (1015, 1153), (1155, 1159),
+ (1162, 1327), (1329, 1366), (1369, 1369), (1376, 1416), (1425, 1469),
+ (1471, 1471), (1473, 1474), (1476, 1477), (1479, 1479), (1488, 1514),
+ (1519, 1522), (1552, 1562), (1568, 1641), (1646, 1747), (1749, 1756),
+ (1759, 1768), (1770, 1788), (1791, 1791), (1808, 1866), (1869, 1969),
+ (1984, 2037), (2042, 2042), (2045, 2045), (2048, 2093), (2112, 2139),
+ (2144, 2154), (2160, 2183), (2185, 2190), (2200, 2273), (2275, 2403),
+ (2406, 2415), (2417, 2435), (2437, 2444), (2447, 2448), (2451, 2472),
+ (2474, 2480), (2482, 2482), (2486, 2489), (2492, 2500), (2503, 2504),
+ (2507, 2510), (2519, 2519), (2524, 2525), (2527, 2531), (2534, 2545),
+ (2556, 2556), (2558, 2558), (2561, 2563), (2565, 2570), (2575, 2576),
+ (2579, 2600), (2602, 2608), (2610, 2611), (2613, 2614), (2616, 2617),
+ (2620, 2620), (2622, 2626), (2631, 2632), (2635, 2637), (2641, 2641),
+ (2649, 2652), (2654, 2654), (2662, 2677), (2689, 2691), (2693, 2701),
+ (2703, 2705), (2707, 2728), (2730, 2736), (2738, 2739), (2741, 2745),
+ (2748, 2757), (2759, 2761), (2763, 2765), (2768, 2768), (2784, 2787),
+ (2790, 2799), (2809, 2815), (2817, 2819), (2821, 2828), (2831, 2832),
+ (2835, 2856), (2858, 2864), (2866, 2867), (2869, 2873), (2876, 2884),
+ (2887, 2888), (2891, 2893), (2901, 2903), (2908, 2909), (2911, 2915),
+ (2918, 2927), (2929, 2929), (2946, 2947), (2949, 2954), (2958, 2960),
+ (2962, 2965), (2969, 2970), (2972, 2972), (2974, 2975), (2979, 2980),
+ (2984, 2986), (2990, 3001), (3006, 3010), (3014, 3016), (3018, 3021),
+ (3024, 3024), (3031, 3031), (3046, 3055), (3072, 3084), (3086, 3088),
+ (3090, 3112), (3114, 3129), (3132, 3140), (3142, 3144), (3146, 3149),
+ (3157, 3158), (3160, 3162), (3165, 3165), (3168, 3171), (3174, 3183),
+ (3200, 3203), (3205, 3212), (3214, 3216), (3218, 3240), (3242, 3251),
+ (3253, 3257), (3260, 3268), (3270, 3272), (3274, 3277), (3285, 3286),
+ (3293, 3294), (3296, 3299), (3302, 3311), (3313, 3315), (3328, 3340),
+ (3342, 3344), (3346, 3396), (3398, 3400), (3402, 3406), (3412, 3415),
+ (3423, 3427), (3430, 3439), (3450, 3455), (3457, 3459), (3461, 3478),
+ (3482, 3505), (3507, 3515), (3517, 3517), (3520, 3526), (3530, 3530),
+ (3535, 3540), (3542, 3542), (3544, 3551), (3558, 3567), (3570, 3571),
+ (3585, 3642), (3648, 3662), (3664, 3673), (3713, 3714), (3716, 3716),
+ (3718, 3722), (3724, 3747), (3749, 3749), (3751, 3773), (3776, 3780),
+ (3782, 3782), (3784, 3790), (3792, 3801), (3804, 3807), (3840, 3840),
+ (3864, 3865), (3872, 3881), (3893, 3893), (3895, 3895), (3897, 3897),
+ (3902, 3911), (3913, 3948), (3953, 3972), (3974, 3991), (3993, 4028),
+ (4038, 4038), (4096, 4169), (4176, 4253), (4256, 4293), (4295, 4295),
+ (4301, 4301), (4304, 4346), (4348, 4680), (4682, 4685), (4688, 4694),
+ (4696, 4696), (4698, 4701), (4704, 4744), (4746, 4749), (4752, 4784),
+ (4786, 4789), (4792, 4798), (4800, 4800), (4802, 4805), (4808, 4822),
+ (4824, 4880), (4882, 4885), (4888, 4954), (4957, 4959), (4969, 4977),
+ (4992, 5007), (5024, 5109), (5112, 5117), (5121, 5740), (5743, 5759),
+ (5761, 5786), (5792, 5866), (5870, 5880), (5888, 5909), (5919, 5940),
+ (5952, 5971), (5984, 5996), (5998, 6000), (6002, 6003), (6016, 6099),
+ (6103, 6103), (6108, 6109), (6112, 6121), (6155, 6157), (6159, 6169),
+ (6176, 6264), (6272, 6314), (6320, 6389), (6400, 6430), (6432, 6443),
+ (6448, 6459), (6470, 6509), (6512, 6516), (6528, 6571), (6576, 6601),
+ (6608, 6618), (6656, 6683), (6688, 6750), (6752, 6780), (6783, 6793),
+ (6800, 6809), (6823, 6823), (6832, 6845), (6847, 6862), (6912, 6988),
+ (6992, 7001), (7019, 7027), (7040, 7155), (7168, 7223), (7232, 7241),
+ (7245, 7293), (7296, 7304), (7312, 7354), (7357, 7359), (7376, 7378),
+ (7380, 7418), (7424, 7957), (7960, 7965), (7968, 8005), (8008, 8013),
+ (8016, 8023), (8025, 8025), (8027, 8027), (8029, 8029), (8031, 8061),
+ (8064, 8116), (8118, 8124), (8126, 8126), (8130, 8132), (8134, 8140),
+ (8144, 8147), (8150, 8155), (8160, 8172), (8178, 8180), (8182, 8188),
+ (8204, 8205), (8255, 8256), (8276, 8276), (8305, 8305), (8319, 8319),
+ (8336, 8348), (8400, 8412), (8417, 8417), (8421, 8432), (8450, 8450),
+ (8455, 8455), (8458, 8467), (8469, 8469), (8472, 8477), (8484, 8484),
+ (8486, 8486), (8488, 8488), (8490, 8505), (8508, 8511), (8517, 8521),
+ (8526, 8526), (8544, 8584), (11264, 11492), (11499, 11507), (11520, 11557),
+ (11559, 11559), (11565, 11565), (11568, 11623), (11631, 11631),
+ (11647, 11670), (11680, 11686), (11688, 11694), (11696, 11702),
+ (11704, 11710), (11712, 11718), (11720, 11726), (11728, 11734),
+ (11736, 11742), (11744, 11775), (12293, 12295), (12321, 12335),
+ (12337, 12341), (12344, 12348), (12353, 12438), (12441, 12442),
+ (12445, 12447), (12449, 12543), (12549, 12591), (12593, 12686),
+ (12704, 12735), (12784, 12799), (13312, 19903), (19968, 42124),
+ (42192, 42237), (42240, 42508), (42512, 42539), (42560, 42607),
+ (42612, 42621), (42623, 42737), (42775, 42783), (42786, 42888),
+ (42891, 42954), (42960, 42961), (42963, 42963), (42965, 42969),
+ (42994, 43047), (43052, 43052), (43072, 43123), (43136, 43205),
+ (43216, 43225), (43232, 43255), (43259, 43259), (43261, 43309),
+ (43312, 43347), (43360, 43388), (43392, 43456), (43471, 43481),
+ (43488, 43518), (43520, 43574), (43584, 43597), (43600, 43609),
+ (43616, 43638), (43642, 43714), (43739, 43741), (43744, 43759),
+ (43762, 43766), (43777, 43782), (43785, 43790), (43793, 43798),
+ (43808, 43814), (43816, 43822), (43824, 43866), (43868, 43881),
+ (43888, 44010), (44012, 44013), (44016, 44025), (44032, 55203),
+ (55216, 55238), (55243, 55291), (63744, 64109), (64112, 64217),
+ (64256, 64262), (64275, 64279), (64285, 64296), (64298, 64310),
+ (64312, 64316), (64318, 64318), (64320, 64321), (64323, 64324),
+ (64326, 64433), (64467, 64605), (64612, 64829), (64848, 64911),
+ (64914, 64967), (65008, 65017), (65024, 65039), (65056, 65071),
+ (65075, 65076), (65101, 65103), (65137, 65137), (65139, 65139),
+ (65143, 65143), (65145, 65145), (65147, 65147), (65149, 65149),
+ (65151, 65276), (65296, 65305), (65313, 65338), (65343, 65343),
+ (65345, 65370), (65381, 65470), (65474, 65479), (65482, 65487),
+ (65490, 65495), (65498, 65500), (65536, 65547), (65549, 65574),
+ (65576, 65594), (65596, 65597), (65599, 65613), (65616, 65629),
+ (65664, 65786), (65856, 65908), (66045, 66045), (66176, 66204),
+ (66208, 66256), (66272, 66272), (66304, 66335), (66349, 66378),
+ (66384, 66426), (66432, 66461), (66464, 66499), (66504, 66511),
+ (66513, 66517), (66560, 66717), (66720, 66729), (66736, 66771),
+ (66776, 66811), (66816, 66855), (66864, 66915), (66928, 66938),
+ (66940, 66954), (66956, 66962), (66964, 66965), (66967, 66977),
+ (66979, 66993), (66995, 67001), (67003, 67004), (67072, 67382),
+ (67392, 67413), (67424, 67431), (67456, 67461), (67463, 67504),
+ (67506, 67514), (67584, 67589), (67592, 67592), (67594, 67637),
+ (67639, 67640), (67644, 67644), (67647, 67669), (67680, 67702),
+ (67712, 67742), (67808, 67826), (67828, 67829), (67840, 67861),
+ (67872, 67897), (67968, 68023), (68030, 68031), (68096, 68099),
+ (68101, 68102), (68108, 68115), (68117, 68119), (68121, 68149),
+ (68152, 68154), (68159, 68159), (68192, 68220), (68224, 68252),
+ (68288, 68295), (68297, 68326), (68352, 68405), (68416, 68437),
+ (68448, 68466), (68480, 68497), (68608, 68680), (68736, 68786),
+ (68800, 68850), (68864, 68903), (68912, 68921), (69248, 69289),
+ (69291, 69292), (69296, 69297), (69373, 69404), (69415, 69415),
+ (69424, 69456), (69488, 69509), (69552, 69572), (69600, 69622),
+ (69632, 69702), (69734, 69749), (69759, 69818), (69826, 69826),
+ (69840, 69864), (69872, 69881), (69888, 69940), (69942, 69951),
+ (69956, 69959), (69968, 70003), (70006, 70006), (70016, 70084),
+ (70089, 70092), (70094, 70106), (70108, 70108), (70144, 70161),
+ (70163, 70199), (70206, 70209), (70272, 70278), (70280, 70280),
+ (70282, 70285), (70287, 70301), (70303, 70312), (70320, 70378),
+ (70384, 70393), (70400, 70403), (70405, 70412), (70415, 70416),
+ (70419, 70440), (70442, 70448), (70450, 70451), (70453, 70457),
+ (70459, 70468), (70471, 70472), (70475, 70477), (70480, 70480),
+ (70487, 70487), (70493, 70499), (70502, 70508), (70512, 70516),
+ (70656, 70730), (70736, 70745), (70750, 70753), (70784, 70853),
+ (70855, 70855), (70864, 70873), (71040, 71093), (71096, 71104),
+ (71128, 71133), (71168, 71232), (71236, 71236), (71248, 71257),
+ (71296, 71352), (71360, 71369), (71424, 71450), (71453, 71467),
+ (71472, 71481), (71488, 71494), (71680, 71738), (71840, 71913),
+ (71935, 71942), (71945, 71945), (71948, 71955), (71957, 71958),
+ (71960, 71989), (71991, 71992), (71995, 72003), (72016, 72025),
+ (72096, 72103), (72106, 72151), (72154, 72161), (72163, 72164),
+ (72192, 72254), (72263, 72263), (72272, 72345), (72349, 72349),
+ (72368, 72440), (72704, 72712), (72714, 72758), (72760, 72768),
+ (72784, 72793), (72818, 72847), (72850, 72871), (72873, 72886),
+ (72960, 72966), (72968, 72969), (72971, 73014), (73018, 73018),
+ (73020, 73021), (73023, 73031), (73040, 73049), (73056, 73061),
+ (73063, 73064), (73066, 73102), (73104, 73105), (73107, 73112),
+ (73120, 73129), (73440, 73462), (73472, 73488), (73490, 73530),
+ (73534, 73538), (73552, 73561), (73648, 73648), (73728, 74649),
+ (74752, 74862), (74880, 75075), (77712, 77808), (77824, 78895),
+ (78912, 78933), (82944, 83526), (92160, 92728), (92736, 92766),
+ (92768, 92777), (92784, 92862), (92864, 92873), (92880, 92909),
+ (92912, 92916), (92928, 92982), (92992, 92995), (93008, 93017),
+ (93027, 93047), (93053, 93071), (93760, 93823), (93952, 94026),
+ (94031, 94087), (94095, 94111), (94176, 94177), (94179, 94180),
+ (94192, 94193), (94208, 100343), (100352, 101589), (101632, 101640),
+ (110576, 110579), (110581, 110587), (110589, 110590), (110592, 110882),
+ (110898, 110898), (110928, 110930), (110933, 110933), (110948, 110951),
+ (110960, 111355), (113664, 113770), (113776, 113788), (113792, 113800),
+ (113808, 113817), (113821, 113822), (118528, 118573), (118576, 118598),
+ (119141, 119145), (119149, 119154), (119163, 119170), (119173, 119179),
+ (119210, 119213), (119362, 119364), (119808, 119892), (119894, 119964),
+ (119966, 119967), (119970, 119970), (119973, 119974), (119977, 119980),
+ (119982, 119993), (119995, 119995), (119997, 120003), (120005, 120069),
+ (120071, 120074), (120077, 120084), (120086, 120092), (120094, 120121),
+ (120123, 120126), (120128, 120132), (120134, 120134), (120138, 120144),
+ (120146, 120485), (120488, 120512), (120514, 120538), (120540, 120570),
+ (120572, 120596), (120598, 120628), (120630, 120654), (120656, 120686),
+ (120688, 120712), (120714, 120744), (120746, 120770), (120772, 120779),
+ (120782, 120831), (121344, 121398), (121403, 121452), (121461, 121461),
+ (121476, 121476), (121499, 121503), (121505, 121519), (122624, 122654),
+ (122661, 122666), (122880, 122886), (122888, 122904), (122907, 122913),
+ (122915, 122916), (122918, 122922), (122928, 122989), (123023, 123023),
+ (123136, 123180), (123184, 123197), (123200, 123209), (123214, 123214),
+ (123536, 123566), (123584, 123641), (124112, 124153), (124896, 124902),
+ (124904, 124907), (124909, 124910), (124912, 124926), (124928, 125124),
+ (125136, 125142), (125184, 125259), (125264, 125273), (126464, 126467),
+ (126469, 126495), (126497, 126498), (126500, 126500), (126503, 126503),
+ (126505, 126514), (126516, 126519), (126521, 126521), (126523, 126523),
+ (126530, 126530), (126535, 126535), (126537, 126537), (126539, 126539),
+ (126541, 126543), (126545, 126546), (126548, 126548), (126551, 126551),
+ (126553, 126553), (126555, 126555), (126557, 126557), (126559, 126559),
+ (126561, 126562), (126564, 126564), (126567, 126570), (126572, 126578),
+ (126580, 126583), (126585, 126588), (126590, 126590), (126592, 126601),
+ (126603, 126619), (126625, 126627), (126629, 126633), (126635, 126651),
+ (130032, 130041), (131072, 173791), (173824, 177977), (177984, 178205),
+ (178208, 183969), (183984, 191456), (191472, 192093), (194560, 195101),
+ (196608, 201546), (201552, 205743), (917760, 917999),
+];
+
+pub const XID_START: &'static [(u32, u32)] = &[
+ (65, 90), (97, 122), (170, 170), (181, 181), (186, 186), (192, 214),
+ (216, 246), (248, 705), (710, 721), (736, 740), (748, 748), (750, 750),
+ (880, 884), (886, 887), (891, 893), (895, 895), (902, 902), (904, 906),
+ (908, 908), (910, 929), (931, 1013), (1015, 1153), (1162, 1327),
+ (1329, 1366), (1369, 1369), (1376, 1416), (1488, 1514), (1519, 1522),
+ (1568, 1610), (1646, 1647), (1649, 1747), (1749, 1749), (1765, 1766),
+ (1774, 1775), (1786, 1788), (1791, 1791), (1808, 1808), (1810, 1839),
+ (1869, 1957), (1969, 1969), (1994, 2026), (2036, 2037), (2042, 2042),
+ (2048, 2069), (2074, 2074), (2084, 2084), (2088, 2088), (2112, 2136),
+ (2144, 2154), (2160, 2183), (2185, 2190), (2208, 2249), (2308, 2361),
+ (2365, 2365), (2384, 2384), (2392, 2401), (2417, 2432), (2437, 2444),
+ (2447, 2448), (2451, 2472), (2474, 2480), (2482, 2482), (2486, 2489),
+ (2493, 2493), (2510, 2510), (2524, 2525), (2527, 2529), (2544, 2545),
+ (2556, 2556), (2565, 2570), (2575, 2576), (2579, 2600), (2602, 2608),
+ (2610, 2611), (2613, 2614), (2616, 2617), (2649, 2652), (2654, 2654),
+ (2674, 2676), (2693, 2701), (2703, 2705), (2707, 2728), (2730, 2736),
+ (2738, 2739), (2741, 2745), (2749, 2749), (2768, 2768), (2784, 2785),
+ (2809, 2809), (2821, 2828), (2831, 2832), (2835, 2856), (2858, 2864),
+ (2866, 2867), (2869, 2873), (2877, 2877), (2908, 2909), (2911, 2913),
+ (2929, 2929), (2947, 2947), (2949, 2954), (2958, 2960), (2962, 2965),
+ (2969, 2970), (2972, 2972), (2974, 2975), (2979, 2980), (2984, 2986),
+ (2990, 3001), (3024, 3024), (3077, 3084), (3086, 3088), (3090, 3112),
+ (3114, 3129), (3133, 3133), (3160, 3162), (3165, 3165), (3168, 3169),
+ (3200, 3200), (3205, 3212), (3214, 3216), (3218, 3240), (3242, 3251),
+ (3253, 3257), (3261, 3261), (3293, 3294), (3296, 3297), (3313, 3314),
+ (3332, 3340), (3342, 3344), (3346, 3386), (3389, 3389), (3406, 3406),
+ (3412, 3414), (3423, 3425), (3450, 3455), (3461, 3478), (3482, 3505),
+ (3507, 3515), (3517, 3517), (3520, 3526), (3585, 3632), (3634, 3634),
+ (3648, 3654), (3713, 3714), (3716, 3716), (3718, 3722), (3724, 3747),
+ (3749, 3749), (3751, 3760), (3762, 3762), (3773, 3773), (3776, 3780),
+ (3782, 3782), (3804, 3807), (3840, 3840), (3904, 3911), (3913, 3948),
+ (3976, 3980), (4096, 4138), (4159, 4159), (4176, 4181), (4186, 4189),
+ (4193, 4193), (4197, 4198), (4206, 4208), (4213, 4225), (4238, 4238),
+ (4256, 4293), (4295, 4295), (4301, 4301), (4304, 4346), (4348, 4680),
+ (4682, 4685), (4688, 4694), (4696, 4696), (4698, 4701), (4704, 4744),
+ (4746, 4749), (4752, 4784), (4786, 4789), (4792, 4798), (4800, 4800),
+ (4802, 4805), (4808, 4822), (4824, 4880), (4882, 4885), (4888, 4954),
+ (4992, 5007), (5024, 5109), (5112, 5117), (5121, 5740), (5743, 5759),
+ (5761, 5786), (5792, 5866), (5870, 5880), (5888, 5905), (5919, 5937),
+ (5952, 5969), (5984, 5996), (5998, 6000), (6016, 6067), (6103, 6103),
+ (6108, 6108), (6176, 6264), (6272, 6312), (6314, 6314), (6320, 6389),
+ (6400, 6430), (6480, 6509), (6512, 6516), (6528, 6571), (6576, 6601),
+ (6656, 6678), (6688, 6740), (6823, 6823), (6917, 6963), (6981, 6988),
+ (7043, 7072), (7086, 7087), (7098, 7141), (7168, 7203), (7245, 7247),
+ (7258, 7293), (7296, 7304), (7312, 7354), (7357, 7359), (7401, 7404),
+ (7406, 7411), (7413, 7414), (7418, 7418), (7424, 7615), (7680, 7957),
+ (7960, 7965), (7968, 8005), (8008, 8013), (8016, 8023), (8025, 8025),
+ (8027, 8027), (8029, 8029), (8031, 8061), (8064, 8116), (8118, 8124),
+ (8126, 8126), (8130, 8132), (8134, 8140), (8144, 8147), (8150, 8155),
+ (8160, 8172), (8178, 8180), (8182, 8188), (8305, 8305), (8319, 8319),
+ (8336, 8348), (8450, 8450), (8455, 8455), (8458, 8467), (8469, 8469),
+ (8472, 8477), (8484, 8484), (8486, 8486), (8488, 8488), (8490, 8505),
+ (8508, 8511), (8517, 8521), (8526, 8526), (8544, 8584), (11264, 11492),
+ (11499, 11502), (11506, 11507), (11520, 11557), (11559, 11559),
+ (11565, 11565), (11568, 11623), (11631, 11631), (11648, 11670),
+ (11680, 11686), (11688, 11694), (11696, 11702), (11704, 11710),
+ (11712, 11718), (11720, 11726), (11728, 11734), (11736, 11742),
+ (12293, 12295), (12321, 12329), (12337, 12341), (12344, 12348),
+ (12353, 12438), (12445, 12447), (12449, 12538), (12540, 12543),
+ (12549, 12591), (12593, 12686), (12704, 12735), (12784, 12799),
+ (13312, 19903), (19968, 42124), (42192, 42237), (42240, 42508),
+ (42512, 42527), (42538, 42539), (42560, 42606), (42623, 42653),
+ (42656, 42735), (42775, 42783), (42786, 42888), (42891, 42954),
+ (42960, 42961), (42963, 42963), (42965, 42969), (42994, 43009),
+ (43011, 43013), (43015, 43018), (43020, 43042), (43072, 43123),
+ (43138, 43187), (43250, 43255), (43259, 43259), (43261, 43262),
+ (43274, 43301), (43312, 43334), (43360, 43388), (43396, 43442),
+ (43471, 43471), (43488, 43492), (43494, 43503), (43514, 43518),
+ (43520, 43560), (43584, 43586), (43588, 43595), (43616, 43638),
+ (43642, 43642), (43646, 43695), (43697, 43697), (43701, 43702),
+ (43705, 43709), (43712, 43712), (43714, 43714), (43739, 43741),
+ (43744, 43754), (43762, 43764), (43777, 43782), (43785, 43790),
+ (43793, 43798), (43808, 43814), (43816, 43822), (43824, 43866),
+ (43868, 43881), (43888, 44002), (44032, 55203), (55216, 55238),
+ (55243, 55291), (63744, 64109), (64112, 64217), (64256, 64262),
+ (64275, 64279), (64285, 64285), (64287, 64296), (64298, 64310),
+ (64312, 64316), (64318, 64318), (64320, 64321), (64323, 64324),
+ (64326, 64433), (64467, 64605), (64612, 64829), (64848, 64911),
+ (64914, 64967), (65008, 65017), (65137, 65137), (65139, 65139),
+ (65143, 65143), (65145, 65145), (65147, 65147), (65149, 65149),
+ (65151, 65276), (65313, 65338), (65345, 65370), (65382, 65437),
+ (65440, 65470), (65474, 65479), (65482, 65487), (65490, 65495),
+ (65498, 65500), (65536, 65547), (65549, 65574), (65576, 65594),
+ (65596, 65597), (65599, 65613), (65616, 65629), (65664, 65786),
+ (65856, 65908), (66176, 66204), (66208, 66256), (66304, 66335),
+ (66349, 66378), (66384, 66421), (66432, 66461), (66464, 66499),
+ (66504, 66511), (66513, 66517), (66560, 66717), (66736, 66771),
+ (66776, 66811), (66816, 66855), (66864, 66915), (66928, 66938),
+ (66940, 66954), (66956, 66962), (66964, 66965), (66967, 66977),
+ (66979, 66993), (66995, 67001), (67003, 67004), (67072, 67382),
+ (67392, 67413), (67424, 67431), (67456, 67461), (67463, 67504),
+ (67506, 67514), (67584, 67589), (67592, 67592), (67594, 67637),
+ (67639, 67640), (67644, 67644), (67647, 67669), (67680, 67702),
+ (67712, 67742), (67808, 67826), (67828, 67829), (67840, 67861),
+ (67872, 67897), (67968, 68023), (68030, 68031), (68096, 68096),
+ (68112, 68115), (68117, 68119), (68121, 68149), (68192, 68220),
+ (68224, 68252), (68288, 68295), (68297, 68324), (68352, 68405),
+ (68416, 68437), (68448, 68466), (68480, 68497), (68608, 68680),
+ (68736, 68786), (68800, 68850), (68864, 68899), (69248, 69289),
+ (69296, 69297), (69376, 69404), (69415, 69415), (69424, 69445),
+ (69488, 69505), (69552, 69572), (69600, 69622), (69635, 69687),
+ (69745, 69746), (69749, 69749), (69763, 69807), (69840, 69864),
+ (69891, 69926), (69956, 69956), (69959, 69959), (69968, 70002),
+ (70006, 70006), (70019, 70066), (70081, 70084), (70106, 70106),
+ (70108, 70108), (70144, 70161), (70163, 70187), (70207, 70208),
+ (70272, 70278), (70280, 70280), (70282, 70285), (70287, 70301),
+ (70303, 70312), (70320, 70366), (70405, 70412), (70415, 70416),
+ (70419, 70440), (70442, 70448), (70450, 70451), (70453, 70457),
+ (70461, 70461), (70480, 70480), (70493, 70497), (70656, 70708),
+ (70727, 70730), (70751, 70753), (70784, 70831), (70852, 70853),
+ (70855, 70855), (71040, 71086), (71128, 71131), (71168, 71215),
+ (71236, 71236), (71296, 71338), (71352, 71352), (71424, 71450),
+ (71488, 71494), (71680, 71723), (71840, 71903), (71935, 71942),
+ (71945, 71945), (71948, 71955), (71957, 71958), (71960, 71983),
+ (71999, 71999), (72001, 72001), (72096, 72103), (72106, 72144),
+ (72161, 72161), (72163, 72163), (72192, 72192), (72203, 72242),
+ (72250, 72250), (72272, 72272), (72284, 72329), (72349, 72349),
+ (72368, 72440), (72704, 72712), (72714, 72750), (72768, 72768),
+ (72818, 72847), (72960, 72966), (72968, 72969), (72971, 73008),
+ (73030, 73030), (73056, 73061), (73063, 73064), (73066, 73097),
+ (73112, 73112), (73440, 73458), (73474, 73474), (73476, 73488),
+ (73490, 73523), (73648, 73648), (73728, 74649), (74752, 74862),
+ (74880, 75075), (77712, 77808), (77824, 78895), (78913, 78918),
+ (82944, 83526), (92160, 92728), (92736, 92766), (92784, 92862),
+ (92880, 92909), (92928, 92975), (92992, 92995), (93027, 93047),
+ (93053, 93071), (93760, 93823), (93952, 94026), (94032, 94032),
+ (94099, 94111), (94176, 94177), (94179, 94179), (94208, 100343),
+ (100352, 101589), (101632, 101640), (110576, 110579), (110581, 110587),
+ (110589, 110590), (110592, 110882), (110898, 110898), (110928, 110930),
+ (110933, 110933), (110948, 110951), (110960, 111355), (113664, 113770),
+ (113776, 113788), (113792, 113800), (113808, 113817), (119808, 119892),
+ (119894, 119964), (119966, 119967), (119970, 119970), (119973, 119974),
+ (119977, 119980), (119982, 119993), (119995, 119995), (119997, 120003),
+ (120005, 120069), (120071, 120074), (120077, 120084), (120086, 120092),
+ (120094, 120121), (120123, 120126), (120128, 120132), (120134, 120134),
+ (120138, 120144), (120146, 120485), (120488, 120512), (120514, 120538),
+ (120540, 120570), (120572, 120596), (120598, 120628), (120630, 120654),
+ (120656, 120686), (120688, 120712), (120714, 120744), (120746, 120770),
+ (120772, 120779), (122624, 122654), (122661, 122666), (122928, 122989),
+ (123136, 123180), (123191, 123197), (123214, 123214), (123536, 123565),
+ (123584, 123627), (124112, 124139), (124896, 124902), (124904, 124907),
+ (124909, 124910), (124912, 124926), (124928, 125124), (125184, 125251),
+ (125259, 125259), (126464, 126467), (126469, 126495), (126497, 126498),
+ (126500, 126500), (126503, 126503), (126505, 126514), (126516, 126519),
+ (126521, 126521), (126523, 126523), (126530, 126530), (126535, 126535),
+ (126537, 126537), (126539, 126539), (126541, 126543), (126545, 126546),
+ (126548, 126548), (126551, 126551), (126553, 126553), (126555, 126555),
+ (126557, 126557), (126559, 126559), (126561, 126562), (126564, 126564),
+ (126567, 126570), (126572, 126578), (126580, 126583), (126585, 126588),
+ (126590, 126590), (126592, 126601), (126603, 126619), (126625, 126627),
+ (126629, 126633), (126635, 126651), (131072, 173791), (173824, 177977),
+ (177984, 178205), (178208, 183969), (183984, 191456), (191472, 192093),
+ (194560, 195101), (196608, 201546), (201552, 205743),
+];
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/trie/mod.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/trie/mod.rs
new file mode 100644
index 0000000000..3e31c5cc57
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/trie/mod.rs
@@ -0,0 +1,7 @@
+#![allow(clippy::module_inception)]
+
+#[allow(dead_code, clippy::redundant_static_lifetimes, clippy::unreadable_literal)]
+#[rustfmt::skip]
+mod trie;
+
+pub(crate) use self::trie::*;
diff --git a/rust/hw/char/pl011/vendor/unicode-ident/tests/trie/trie.rs b/rust/hw/char/pl011/vendor/unicode-ident/tests/trie/trie.rs
new file mode 100644
index 0000000000..fc805f497f
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/unicode-ident/tests/trie/trie.rs
@@ -0,0 +1,445 @@
+// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
+//
+// ucd-generate property-bool UCD --include XID_Start,XID_Continue --trie-set
+//
+// Unicode version: 15.1.0.
+//
+// ucd-generate 0.3.0 is available on crates.io.
+
+pub const BY_NAME: &'static [(&'static str, &'static ::ucd_trie::TrieSet)] = &[
+ ("XID_Continue", XID_CONTINUE), ("XID_Start", XID_START),
+];
+
+pub const XID_CONTINUE: &'static ::ucd_trie::TrieSet = &::ucd_trie::TrieSet {
+ tree1_level1: &[
+ 0x3FF000000000000, 0x7FFFFFE87FFFFFE, 0x4A0040000000000,
+ 0xFF7FFFFFFF7FFFFF, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF,
+ 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF,
+ 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0x501F0003FFC3,
+ 0xFFFFFFFFFFFFFFFF, 0xB8DFFFFFFFFFFFFF, 0xFFFFFFFBFFFFD7C0,
+ 0xFFBFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF,
+ 0xFFFFFFFFFFFFFCFB, 0xFFFFFFFFFFFFFFFF, 0xFFFEFFFFFFFFFFFF,
+ 0xFFFFFFFF027FFFFF, 0xBFFFFFFFFFFE01FF, 0x787FFFFFF00B6,
+ 0xFFFFFFFF07FF0000, 0xFFFFC3FFFFFFFFFF, 0xFFFFFFFFFFFFFFFF,
+ 0x9FFFFDFF9FEFFFFF, 0xFFFFFFFFFFFF0000, 0xFFFFFFFFFFFFE7FF,
+ 0x3FFFFFFFFFFFF, 0x243FFFFFFFFFFFFF,
+ ],
+ tree2_level1: &[
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
+ 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 4, 32, 33, 34, 4, 4, 4, 4, 4,
+ 35, 36, 37, 38, 39, 40, 41, 42, 4, 4, 4, 4, 4, 4, 4, 4, 43, 44, 45, 46,
+ 47, 4, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 4, 61, 4, 62,
+ 63, 64, 65, 66, 4, 4, 4, 4, 4, 4, 4, 4, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 4, 4, 4, 79, 80, 81, 82, 83, 78, 78, 78,
+ 78, 78, 78, 78, 78, 84, 42, 85, 4, 86, 4, 87, 88, 78, 78, 78, 78, 78, 78,
+ 78, 78, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 78, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 89, 90, 4, 4, 4, 4, 91, 92, 4, 93, 94, 4, 95, 96, 97, 62, 4,
+ 98, 99, 100, 4, 101, 102, 103, 4, 104, 105, 106, 4, 107, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 108, 109, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 78, 4, 4, 4, 4, 4, 99, 4, 110, 111, 112, 93, 113, 4, 114, 4,
+ 4, 115, 116, 117, 118, 119, 120, 4, 121, 122, 123, 124, 125,
+ ],
+ tree2_level2: &[
+ 0x3FFFFFFFFFFF, 0xFFFF07FF0FFFFFFF, 0xFFFFFFFFFF007EFF,
+ 0xFFFFFFFBFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0xFFFEFFCFFFFFFFFF,
+ 0xF3C5FDFFFFF99FEF, 0x5003FFCFB080799F, 0xD36DFDFFFFF987EE,
+ 0x3FFFC05E023987, 0xF3EDFDFFFFFBBFEE, 0xFE00FFCF00013BBF,
+ 0xF3EDFDFFFFF99FEE, 0x2FFCFB0E0399F, 0xC3FFC718D63DC7EC, 0xFFC000813DC7,
+ 0xF3FFFDFFFFFDDFFF, 0xFFCF27603DDF, 0xF3EFFDFFFFFDDFEF, 0xEFFCF60603DDF,
+ 0xFFFFFFFFFFFDDFFF, 0xFC00FFCF80F07DDF, 0x2FFBFFFFFC7FFFEE,
+ 0xCFFC0FF5F847F, 0x7FFFFFFFFFFFFFE, 0x3FF7FFF, 0x3FFFFFAFFFFFF7D6,
+ 0xF3FF7F5F, 0xC2A003FF03000001, 0xFFFE1FFFFFFFFEFF, 0x1FFFFFFFFEFFFFDF,
+ 0x40, 0xFFFFFFFFFFFF03FF, 0xFFFFFFFF3FFFFFFF, 0xF7FFFFFFFFFF20BF,
+ 0xFFFFFFFF3D7F3DFF, 0x7F3DFFFFFFFF3DFF, 0xFFFFFFFFFF7FFF3D,
+ 0xFFFFFFFFFF3DFFFF, 0x3FE00E7FFFFFF, 0xFFFFFFFF0000FFFF,
+ 0x3F3FFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFE, 0xFFFF9FFFFFFFFFFF,
+ 0xFFFFFFFF07FFFFFE, 0x1FFC7FFFFFFFFFF, 0x1FFFFF803FFFFF, 0xDDFFF000FFFFF,
+ 0x3FF308FFFFF, 0xFFFFFFFF03FFB800, 0x1FFFFFFFFFFFFFF, 0xFFFF07FFFFFFFFFF,
+ 0x3FFFFFFFFFFFFF, 0xFFF0FFF7FFFFFFF, 0x1F3FFFFFFFFFC0, 0xFFFF0FFFFFFFFFFF,
+ 0x7FF03FF, 0xFFFFFFFF0FFFFFFF, 0x9FFFFFFF7FFFFFFF, 0xBFFF008003FF03FF,
+ 0x7FFF, 0xFF80003FF1FFF, 0xFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFF,
+ 0x3FFFFFFFFFFFE3FF, 0xE7FFFFFFFFFF01FF, 0x7FFFFFFFFF70000,
+ 0xFFFFFFFF3F3FFFFF, 0x3FFFFFFFAAFF3F3F, 0x5FDFFFFFFFFFFFFF,
+ 0x1FDC1FFF0FCF1FDC, 0x8000000000003000, 0x8002000000100001, 0x1FFF0000,
+ 0x1FFE21FFF0000, 0xF3FFFD503F2FFC84, 0xFFFFFFFF000043E0, 0x1FF, 0,
+ 0xFF81FFFFFFFFF, 0xFFFF20BFFFFFFFFF, 0x800080FFFFFFFFFF,
+ 0x7F7F7F7F007FFFFF, 0xFFFFFFFF7F7F7F7F, 0x1F3EFFFE000000E0,
+ 0xFFFFFFFEE67FFFFF, 0xFFFEFFFFFFFFFFE0, 0xFFFFFFFF00007FFF,
+ 0xFFFF000000000000, 0x1FFF, 0x3FFFFFFFFFFF0000, 0xFFFFFFF1FFF,
+ 0xBFF0FFFFFFFFFFFF, 0x3FFFFFFFFFFFF, 0xFFFFFFFCFF800000,
+ 0xFFFFFFFFFFFFF9FF, 0xFFFC000003EB07FF, 0x10FFFFFFFFFF,
+ 0xE8FFFFFF03FF003F, 0xFFFF3FFFFFFFFFFF, 0x1FFFFFFF000FFFFF,
+ 0x7FFFFFFF03FF8001, 0x7FFFFFFFFFFFFF, 0xFC7FFFFF03FF3FFF,
+ 0x7CFFFF38000007, 0xFFFF7F7F007E7E7E, 0xFFFF03FFF7FFFFFF,
+ 0x3FF37FFFFFFFFFF, 0xFFFF000FFFFFFFFF, 0xFFFFFFFFFFFF87F, 0x3FFFFFF,
+ 0x5F7FFDFFE0F8007F, 0xFFFFFFFFFFFFFFDB, 0xFFFFFFFFFFF80000,
+ 0xFFFFFFF03FFFFFFF, 0x3FFFFFFFFFFFFFFF, 0xFFFFFFFFFFFF0000,
+ 0xFFFFFFFFFFFCFFFF, 0x3FF0000000000FF, 0x18FFFF0000FFFF,
+ 0xAA8A00000000E000, 0x1FFFFFFFFFFFFFFF, 0x87FFFFFE03FF0000,
+ 0xFFFFFFE007FFFFFE, 0x7FFFFFFFFFFFFFFF, 0x1CFCFCFC,
+ ],
+ tree3_level1: &[
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 5, 9, 10, 11, 12, 13, 14, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 15, 16, 17, 7, 18, 19, 7, 20, 21, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 22, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ ],
+ tree3_level2: &[
+ 0, 1, 2, 3, 4, 5, 4, 6, 4, 4, 7, 8, 9, 10, 11, 12, 2, 2, 13, 14, 15, 16,
+ 17, 4, 2, 2, 2, 2, 18, 19, 20, 4, 21, 22, 23, 24, 25, 4, 26, 4, 27, 28,
+ 29, 30, 31, 32, 33, 4, 2, 34, 35, 35, 36, 4, 4, 4, 4, 4, 37, 38, 39, 40,
+ 41, 42, 2, 43, 3, 44, 45, 46, 2, 47, 48, 49, 50, 51, 52, 53, 4, 4, 2, 54,
+ 2, 55, 4, 4, 56, 57, 2, 58, 59, 60, 61, 62, 4, 4, 3, 4, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 59, 4, 4, 4, 4, 72, 73, 74, 4, 75, 76, 77, 4, 4, 4, 4,
+ 78, 79, 80, 81, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 82, 4, 2, 83,
+ 2, 2, 2, 84, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 85, 86, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 87, 88, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 62, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2,
+ 2, 2, 2, 2, 2, 2, 2, 59, 89, 69, 90, 18, 91, 92, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 2, 4, 4, 2, 93, 94, 95, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 96, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 88, 34, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 97, 2, 2, 2, 2, 98, 99, 2, 2, 2, 2, 2, 100, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 2, 101, 102, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 103, 62, 4, 4, 4, 4, 4, 4, 4, 104, 105, 4, 4, 106, 4,
+ 4, 4, 4, 4, 4, 2, 107, 108, 109, 110, 111, 2, 2, 2, 2, 112, 113, 114, 115,
+ 116, 117, 4, 4, 4, 4, 4, 4, 4, 4, 118, 119, 120, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 121, 4, 4, 4, 122, 123, 124, 4, 125, 126, 4, 4,
+ 4, 4, 127, 128, 4, 4, 4, 4, 4, 4, 4, 129, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 130, 2, 2, 2, 131, 2, 132, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 133, 134, 135, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 136, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 137, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 128, 2, 2, 2,
+ 11, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 138, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 139, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 140, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2,
+ 2, 2, 2, 2, 140, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 141, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 87, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 87, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ ],
+ tree3_level3: &[
+ 0xB7FFFF7FFFFFEFFF, 0x3FFF3FFF, 0xFFFFFFFFFFFFFFFF, 0x7FFFFFFFFFFFFFF, 0,
+ 0x1FFFFFFFFFFFFF, 0x2000000000000000, 0xFFFFFFFF1FFFFFFF, 0x10001FFFF,
+ 0xFFFFE000FFFFFFFF, 0x7FFFFFFFFFF07FF, 0xFFFFFFFF3FFFFFFF, 0x3EFF0F,
+ 0xFFFF03FF3FFFFFFF, 0xFFFFFFFFF0FFFFF, 0xFFFF00FFFFFFFFFF,
+ 0xF7FF000FFFFFFFFF, 0x1BFBFFFBFFB7F7FF, 0x7FFFFFFFFFFFFF, 0xFF003FFFFF,
+ 0x7FDFFFFFFFFFFBF, 0x91BFFFFFFFFFFD3F, 0x7FFFFF003FFFFF, 0x7FFFFFFF,
+ 0x37FFFF00000000, 0x3FFFFFF003FFFFF, 0xC0FFFFFFFFFFFFFF,
+ 0x873FFFFFFEEFF06F, 0x1FFFFFFF00000000, 0x1FFFFFFF, 0x7FFFFFFEFF,
+ 0x3FFFFFFFFFFFFF, 0x7FFFF003FFFFF, 0x3FFFF, 0x1FF, 0x7FFFFFFFFFFFF,
+ 0x3FF00FFFFFFFFFF, 0x31BFFFFFFFFFF, 0xE000000000000000,
+ 0xFFFF00801FFFFFFF, 0xFFFF00000001FFFF, 0xFFFF00000000003F,
+ 0x7FFFFF0000001F, 0x803FFFC00000007F, 0x3FF01FFFFFF0004,
+ 0xFFDFFFFFFFFFFFFF, 0x4FFFFFFFFF00F0, 0x17FFDE1F, 0xC0FFFFFFFFFBFFFF, 0x3,
+ 0xFFFF01FFBFFFBD7F, 0x3FF07FFFFFFFFFF, 0xFBEDFDFFFFF99FEF,
+ 0x1F1FCFE081399F, 0x3C3FF07FF, 0x3FF00BF, 0xFF3FFFFFFFFFFFFF, 0x3F000001,
+ 0x3FF0011, 0x1FFFFFFFFFFFFFF, 0x3FF, 0x3FF0FFFE7FFFFFF, 0x7F,
+ 0xFFFFFFFF00000000, 0x800003FFFFFFFFFF, 0xF9BFFFFFFF6FF27F, 0x3FF000F,
+ 0xFFFFFCFF00000000, 0x1BFCFFFFFF, 0x7FFFFFFFFFFFFFFF, 0xFFFFFFFFFFFF0080,
+ 0xFFFF000023FFFFFF, 0xFF7FFFFFFFFFFDFF, 0xFFFC000003FF0001,
+ 0x7FFEFFFFFCFFFF, 0xB47FFFFFFFFFFB7F, 0xFFFFFDBF03FF00FF, 0x3FF01FB7FFF,
+ 0x7FFFFF00000000, 0xC7FFFFFFFFFDFFFF, 0x3FF0007, 0x1000000000000,
+ 0x3FFFFFF, 0x7FFFFFFFFFFF, 0xF, 0xFFFFFFFFFFFF0000, 0x1FFFFFFFFFFFF,
+ 0xFFFFFFFFFFFF, 0x3FFFFF, 0xFFFF03FF7FFFFFFF, 0x1F3FFFFFFF03FF,
+ 0xE0FFFFF803FF000F, 0xFFFF, 0xFFFFFFFFFFFF87FF, 0xFFFF80FF,
+ 0x3001B00000000, 0xFFFFFFFFFFFFFF, 0x6FEF000000000000, 0x40007FFFFFFFF,
+ 0xFFFF00F000270000, 0xFFFFFFFFFFFFFFF, 0x1FFF07FFFFFFFFFF, 0x63FF01FF,
+ 0xFFFF3FFFFFFFFFFF, 0xF807E3E000000000, 0x3C0000000FE7, 0x1C,
+ 0xFFFFFFFFFFDFFFFF, 0xEBFFDE64DFFFFFFF, 0xFFFFFFFFFFFFFFEF,
+ 0x7BFFFFFFDFDFE7BF, 0xFFFFFFFFFFFDFC5F, 0xFFFFFF3FFFFFFFFF,
+ 0xF7FFFFFFF7FFFFFD, 0xFFDFFFFFFFDFFFFF, 0xFFFF7FFFFFFF7FFF,
+ 0xFFFFFDFFFFFFFDFF, 0xFFFFFFFFFFFFCFF7, 0xF87FFFFFFFFFFFFF,
+ 0x201FFFFFFFFFFF, 0xFFFEF8000010, 0x7E07FFFFFFF, 0xFFFF07DBF9FFFF7F,
+ 0x3FFFFFFFFFFF, 0x8000, 0x3FFF1FFFFFFFFFFF, 0x43FF, 0x7FFFFFFF0000,
+ 0x3FFFFFFFFFFFFFF, 0x3FFFFFFFFFF0000, 0x7FFF6F7F00000000, 0x7F001F,
+ 0x3FF0FFF, 0xAF7FE96FFFFFFEF, 0x5EF7F796AA96EA84, 0xFFFFBEE0FFFFBFF,
+ 0x3FF000000000000, 0xFFFFFFFF, 0xFFFF0003FFFFFFFF, 0xFFFF0001FFFFFFFF,
+ 0x3FFFFFFF, 0xFFFFFFFFFFFF07FF,
+ ],
+};
+
+pub const XID_START: &'static ::ucd_trie::TrieSet = &::ucd_trie::TrieSet {
+ tree1_level1: &[
+ 0, 0x7FFFFFE07FFFFFE, 0x420040000000000, 0xFF7FFFFFFF7FFFFF,
+ 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF,
+ 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF,
+ 0xFFFFFFFFFFFFFFFF, 0x501F0003FFC3, 0, 0xB8DF000000000000,
+ 0xFFFFFFFBFFFFD740, 0xFFBFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF,
+ 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFC03, 0xFFFFFFFFFFFFFFFF,
+ 0xFFFEFFFFFFFFFFFF, 0xFFFFFFFF027FFFFF, 0x1FF, 0x787FFFFFF0000,
+ 0xFFFFFFFF00000000, 0xFFFEC000000007FF, 0xFFFFFFFFFFFFFFFF,
+ 0x9C00C060002FFFFF, 0xFFFFFFFD0000, 0xFFFFFFFFFFFFE000, 0x2003FFFFFFFFF,
+ 0x43007FFFFFFFC00,
+ ],
+ tree2_level1: &[
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
+ 21, 22, 23, 24, 23, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 35, 35,
+ 35, 35, 36, 37, 38, 39, 40, 41, 42, 43, 35, 35, 35, 35, 35, 35, 35, 35,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 3, 58, 59, 60, 30,
+ 61, 62, 63, 64, 65, 66, 67, 68, 35, 35, 35, 30, 35, 35, 35, 35, 69, 70,
+ 71, 72, 30, 73, 74, 30, 75, 76, 77, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 35, 35, 35, 78,
+ 79, 80, 81, 82, 30, 30, 30, 30, 30, 30, 30, 30, 83, 43, 84, 85, 86, 35,
+ 87, 88, 30, 30, 30, 30, 30, 30, 30, 30, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 30, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 89, 90, 35, 35, 35, 35, 91, 92,
+ 93, 94, 95, 35, 96, 97, 98, 49, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 35, 111, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 112, 113, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 35, 35, 35, 35, 35, 114, 35, 115, 116,
+ 117, 118, 119, 35, 120, 35, 35, 121, 122, 123, 124, 30, 125, 35, 126, 127,
+ 128, 129, 130,
+ ],
+ tree2_level2: &[
+ 0x110043FFFFF, 0xFFFF07FF01FFFFFF, 0xFFFFFFFF00007EFF, 0x3FF,
+ 0x23FFFFFFFFFFFFF0, 0xFFFE0003FF010000, 0x23C5FDFFFFF99FE1,
+ 0x10030003B0004000, 0x36DFDFFFFF987E0, 0x1C00005E000000,
+ 0x23EDFDFFFFFBBFE0, 0x200000300010000, 0x23EDFDFFFFF99FE0,
+ 0x20003B0000000, 0x3FFC718D63DC7E8, 0x10000, 0x23FFFDFFFFFDDFE0,
+ 0x327000000, 0x23EFFDFFFFFDDFE1, 0x6000360000000, 0x27FFFFFFFFFDDFF0,
+ 0xFC00000380704000, 0x2FFBFFFFFC7FFFE0, 0x7F, 0x5FFFFFFFFFFFE,
+ 0x2005FFAFFFFFF7D6, 0xF000005F, 0x1, 0x1FFFFFFFFEFF, 0x1F00, 0,
+ 0x800007FFFFFFFFFF, 0xFFE1C0623C3F0000, 0xFFFFFFFF00004003,
+ 0xF7FFFFFFFFFF20BF, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFF3D7F3DFF,
+ 0x7F3DFFFFFFFF3DFF, 0xFFFFFFFFFF7FFF3D, 0xFFFFFFFFFF3DFFFF, 0x7FFFFFF,
+ 0xFFFFFFFF0000FFFF, 0x3F3FFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFE,
+ 0xFFFF9FFFFFFFFFFF, 0xFFFFFFFF07FFFFFE, 0x1FFC7FFFFFFFFFF,
+ 0x3FFFF8003FFFF, 0x1DFFF0003FFFF, 0xFFFFFFFFFFFFF, 0x10800000,
+ 0xFFFFFFFF00000000, 0x1FFFFFFFFFFFFFF, 0xFFFF05FFFFFFFFFF,
+ 0x3FFFFFFFFFFFFF, 0x7FFFFFFF, 0x1F3FFFFFFF0000, 0xFFFF0FFFFFFFFFFF,
+ 0xFFFFFFFF007FFFFF, 0x1FFFFF, 0x8000000000, 0xFFFFFFFFFFFE0, 0x1FE0,
+ 0xFC00C001FFFFFFF8, 0x3FFFFFFFFF, 0xFFFFFFFFF, 0x3FFFFFFFFC00E000,
+ 0xE7FFFFFFFFFF01FF, 0x46FDE0000000000, 0xFFFFFFFF3F3FFFFF,
+ 0x3FFFFFFFAAFF3F3F, 0x5FDFFFFFFFFFFFFF, 0x1FDC1FFF0FCF1FDC,
+ 0x8002000000000000, 0x1FFF0000, 0xF3FFFD503F2FFC84, 0xFFFFFFFF000043E0,
+ 0x1FF, 0xC781FFFFFFFFF, 0xFFFF20BFFFFFFFFF, 0x80FFFFFFFFFF,
+ 0x7F7F7F7F007FFFFF, 0x7F7F7F7F, 0x1F3E03FE000000E0, 0xFFFFFFFEE07FFFFF,
+ 0xF7FFFFFFFFFFFFFF, 0xFFFEFFFFFFFFFFE0, 0xFFFFFFFF00007FFF,
+ 0xFFFF000000000000, 0x1FFF, 0x3FFFFFFFFFFF0000, 0xC00FFFF1FFF,
+ 0x80007FFFFFFFFFFF, 0xFFFFFFFF3FFFFFFF, 0xFFFFFFFFFFFF,
+ 0xFFFFFFFCFF800000, 0xFFFFFFFFFFFFF9FF, 0xFFFC000003EB07FF, 0x7FFFFF7BB,
+ 0xFFFFFFFFFFFFC, 0x68FC000000000000, 0xFFFF003FFFFFFC00,
+ 0x1FFFFFFF0000007F, 0x7FFFFFFFFFFF0, 0x7C00FFDF00008000, 0x1FFFFFFFFFF,
+ 0xC47FFFFF00000FF7, 0x3E62FFFFFFFFFFFF, 0x1C07FF38000005,
+ 0xFFFF7F7F007E7E7E, 0xFFFF03FFF7FFFFFF, 0x7FFFFFFFF, 0xFFFF000FFFFFFFFF,
+ 0xFFFFFFFFFFFF87F, 0xFFFF3FFFFFFFFFFF, 0x3FFFFFF, 0x5F7FFDFFA0F8007F,
+ 0xFFFFFFFFFFFFFFDB, 0x3FFFFFFFFFFFF, 0xFFFFFFFFFFF80000,
+ 0xFFFFFFF03FFFFFFF, 0x3FFFFFFFFFFFFFFF, 0xFFFFFFFFFFFF0000,
+ 0xFFFFFFFFFFFCFFFF, 0x3FF0000000000FF, 0xAA8A000000000000,
+ 0x1FFFFFFFFFFFFFFF, 0x7FFFFFE00000000, 0xFFFFFFC007FFFFFE,
+ 0x7FFFFFFF3FFFFFFF, 0x1CFCFCFC,
+ ],
+ tree3_level1: &[
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 5, 9, 10, 5, 11, 12, 5, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 13, 14, 15, 7, 16, 17, 7, 18, 19, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ ],
+ tree3_level2: &[
+ 0, 1, 2, 3, 4, 5, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 2, 2, 12, 13, 14, 15,
+ 16, 4, 2, 2, 2, 2, 17, 18, 19, 4, 20, 21, 22, 23, 24, 4, 25, 4, 26, 27,
+ 28, 29, 30, 31, 32, 4, 2, 33, 34, 34, 35, 4, 4, 4, 4, 4, 36, 4, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 22, 52, 53, 4, 4, 5,
+ 54, 55, 56, 4, 4, 57, 58, 55, 59, 60, 4, 61, 62, 4, 4, 63, 4, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 4, 4, 4, 4, 74, 75, 76, 4, 77, 78, 79, 4, 4,
+ 4, 4, 80, 81, 4, 82, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 83, 4,
+ 2, 57, 2, 2, 2, 84, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 85, 86,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 55, 87, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 62, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 2, 2, 2, 2, 2, 2, 2, 2, 73, 88, 89, 90, 55, 91, 76, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 2, 4, 4, 2, 92, 93, 94, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 95, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 96, 33, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 97, 2, 2, 2, 2, 98, 99, 2, 2, 2, 2, 2, 100, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 2, 101, 102, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 103, 104, 105, 106, 107,
+ 2, 2, 2, 2, 108, 109, 110, 111, 112, 113, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 114, 4, 4, 4, 115, 116,
+ 4, 4, 117, 118, 4, 4, 4, 4, 90, 63, 4, 4, 4, 4, 4, 4, 4, 119, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 120, 2, 2, 2, 121, 2, 122, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 123, 124, 125, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 126, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 127, 2, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 128, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 129, 2, 2, 2, 2, 2, 2, 2, 2, 2, 130, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 130, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 131, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 55, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4,
+ ],
+ tree3_level3: &[
+ 0xB7FFFF7FFFFFEFFF, 0x3FFF3FFF, 0xFFFFFFFFFFFFFFFF, 0x7FFFFFFFFFFFFFF, 0,
+ 0x1FFFFFFFFFFFFF, 0xFFFFFFFF1FFFFFFF, 0x1FFFF, 0xFFFFE000FFFFFFFF,
+ 0x3FFFFFFFFF07FF, 0xFFFFFFFF3FFFFFFF, 0x3EFF0F, 0xFFFF00003FFFFFFF,
+ 0xFFFFFFFFF0FFFFF, 0xFFFF00FFFFFFFFFF, 0xF7FF000FFFFFFFFF,
+ 0x1BFBFFFBFFB7F7FF, 0x7FFFFFFFFFFFFF, 0xFF003FFFFF, 0x7FDFFFFFFFFFFBF,
+ 0x91BFFFFFFFFFFD3F, 0x7FFFFF003FFFFF, 0x7FFFFFFF, 0x37FFFF00000000,
+ 0x3FFFFFF003FFFFF, 0xC0FFFFFFFFFFFFFF, 0x3FFFFFFEEF0001,
+ 0x1FFFFFFF00000000, 0x1FFFFFFF, 0x1FFFFFFEFF, 0x3FFFFFFFFFFFFF,
+ 0x7FFFF003FFFFF, 0x3FFFF, 0x1FF, 0x7FFFFFFFFFFFF, 0xFFFFFFFFF,
+ 0x303FFFFFFFFFF, 0xFFFF00801FFFFFFF, 0xFFFF00000000003F,
+ 0xFFFF000000000003, 0x7FFFFF0000001F, 0xFFFFFFFFFFFFF8, 0x26000000000000,
+ 0xFFFFFFFFFFF8, 0x1FFFFFF0000, 0x7FFFFFFFF8, 0x47FFFFFFFF0090,
+ 0x7FFFFFFFFFFF8, 0x1400001E, 0x80000FFFFFFBFFFF, 0x1, 0xFFFF01FFBFFFBD7F,
+ 0x23EDFDFFFFF99FE0, 0x3E0010000, 0x380000780, 0xFFFFFFFFFFFF, 0xB0,
+ 0x7FFFFFFFFFFF, 0xF000000, 0x10, 0x10007FFFFFFFFFF, 0x7FFFFFF, 0x7F,
+ 0xFFFFFFFFFFF, 0xFFFFFFFF00000000, 0x80000000FFFFFFFF, 0x8000FFFFFF6FF27F,
+ 0x2, 0xFFFFFCFF00000000, 0xA0001FFFF, 0x407FFFFFFFFF801,
+ 0xFFFFFFFFF0010000, 0xFFFF0000200003FF, 0x1FFFFFFFFFFFFFF, 0x7FFFFFFFFDFF,
+ 0xFFFC000000000001, 0xFFFF, 0x1FFFFFFFFFB7F, 0xFFFFFDBF00000040,
+ 0x10003FF, 0x7FFFF00000000, 0xFFFFFFFFDFFF4, 0x1000000000000, 0x3FFFFFF,
+ 0xF, 0xFFFFFFFFFFFF0000, 0x1FFFFFFFFFFFF, 0x7E, 0xFFFF00007FFFFFFF,
+ 0x7FFFFFFFFFFFFFFF, 0x3FFFFFFF0000, 0xE0FFFFF80000000F, 0x107FF,
+ 0xFFF80000, 0xB00000000, 0xFFFFFFFFFFFFFF, 0x3FFFFF, 0x6FEF000000000000,
+ 0x40007FFFFFFFF, 0xFFFF00F000270000, 0xFFFFFFFFFFFFFFF,
+ 0x1FFF07FFFFFFFFFF, 0x3FF01FF, 0xFFFFFFFFFFDFFFFF, 0xEBFFDE64DFFFFFFF,
+ 0xFFFFFFFFFFFFFFEF, 0x7BFFFFFFDFDFE7BF, 0xFFFFFFFFFFFDFC5F,
+ 0xFFFFFF3FFFFFFFFF, 0xF7FFFFFFF7FFFFFD, 0xFFDFFFFFFFDFFFFF,
+ 0xFFFF7FFFFFFF7FFF, 0xFFFFFDFFFFFFFDFF, 0xFF7, 0x7E07FFFFFFF,
+ 0xFFFF000000000000, 0x3FFFFFFFFFFF, 0x3F801FFFFFFFFFFF, 0x4000,
+ 0xFFFFFFF0000, 0x7FFF6F7F00000000, 0x1F, 0x80F, 0xAF7FE96FFFFFFEF,
+ 0x5EF7F796AA96EA84, 0xFFFFBEE0FFFFBFF, 0xFFFFFFFF, 0x3FFFFFFFFFFFFFF,
+ 0xFFFF0003FFFFFFFF, 0xFFFF0001FFFFFFFF, 0x3FFFFFFF, 0xFFFFFFFFFFFF07FF,
+ ],
+};
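For orientation, the `tree2_*`/`tree3_*` arrays closed above form a multi-level code-point set in the ucd-trie style: a small index array selects a 64-bit bitset chunk, and one bit per code point records membership. A minimal sketch of that lookup idea follows, using illustrative names and toy data rather than this crate's exact layout:

```rust
// Two-level bitset lookup in the style of the tables above.
// Names and toy data are illustrative, not this crate's exact code.
struct TwoLevelSet<'a> {
    level1: &'a [u8],  // one chunk index per 64 code points
    level2: &'a [u64], // 64-bit bitsets selected through level1
}

impl<'a> TwoLevelSet<'a> {
    fn contains(&self, cp: u32) -> bool {
        let chunk = self.level1[(cp >> 6) as usize] as usize;
        (self.level2[chunk] >> (cp & 63)) & 1 == 1
    }
}

fn main() {
    // Chunk 0 is the empty bitset; chunk 1 marks only code point 65 ('A').
    let set = TwoLevelSet {
        level1: &[0, 1], // code points 0..63 -> chunk 0, 64..127 -> chunk 1
        level2: &[0, 1 << (65 & 63)],
    };
    assert!(set.contains('A' as u32));
    assert!(!set.contains('B' as u32));
    assert!(!set.contains(0));
}
```

The three-level variant seen above adds one more indirection for code points beyond U+FFFF, but the per-bit membership test is the same.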
diff --git a/rust/hw/char/pl011/vendor/version_check/.cargo-checksum.json b/rust/hw/char/pl011/vendor/version_check/.cargo-checksum.json
new file mode 100644
index 0000000000..dd79d121d3
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"f25d88044914cb3466df43bc39a199e1589dda1aad3226c9c7e7ac4d2f8751d0","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"b7e650f3fce5c53249d1cdc608b54df156a97edd636cf9d23498d0cfe7aec63e","README.md":"ac2a0a360812436bd5798f5fe2affe7d6ed9eb7f15d6e4d73931e95b437560f2","src/channel.rs":"d2443d503d4cc469a171a51a26eca3ec0d2a58b5f7375a84542c36f1421766a8","src/date.rs":"09580a0a2008fad2ccbc43fb42a88f42221b98b01692702022a296dc9c86bf37","src/lib.rs":"760f0d29567ecaa61287088cf23cf74b3c0efbbcd3077cea5fb7c88359e96c7e","src/version.rs":"dba18a25983ec6e37b952f4cdc5219c9e5abba2c3a76cef87465e1fba6f8ac89"},"package":"49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"}
\ No newline at end of file
diff --git a/rust/hw/char/pl011/vendor/version_check/Cargo.toml b/rust/hw/char/pl011/vendor/version_check/Cargo.toml
new file mode 100644
index 0000000000..39fedbd48b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/Cargo.toml
@@ -0,0 +1,24 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+name = "version_check"
+version = "0.9.4"
+authors = ["Sergio Benitez <sb@sergio.bz>"]
+exclude = ["static"]
+description = "Tiny crate to check the version of the installed/running rustc."
+documentation = "https://docs.rs/version_check/"
+readme = "README.md"
+keywords = ["version", "rustc", "minimum", "check"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/SergioBenitez/version_check"
+
+[dependencies]
diff --git a/rust/hw/char/pl011/vendor/version_check/LICENSE-APACHE b/rust/hw/char/pl011/vendor/version_check/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/rust/hw/char/pl011/vendor/version_check/LICENSE-MIT b/rust/hw/char/pl011/vendor/version_check/LICENSE-MIT
new file mode 100644
index 0000000000..dfc0e73b19
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/LICENSE-MIT
@@ -0,0 +1,19 @@
+The MIT License (MIT)
+Copyright (c) 2017-2018 Sergio Benitez
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/rust/hw/char/pl011/vendor/version_check/README.md b/rust/hw/char/pl011/vendor/version_check/README.md
new file mode 100644
index 0000000000..8637d2ab1d
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/README.md
@@ -0,0 +1,80 @@
+# version\_check
+
+[![Build Status](https://github.com/SergioBenitez/version_check/workflows/CI/badge.svg)](https://github.com/SergioBenitez/version_check/actions)
+[![Current Crates.io Version](https://img.shields.io/crates/v/version_check.svg)](https://crates.io/crates/version_check)
+[![rustdocs on docs.rs](https://docs.rs/version_check/badge.svg)](https://docs.rs/version_check)
+
+This tiny crate checks that the running or installed `rustc` meets some version
+requirements. The version is queried by calling the Rust compiler with
+`--version`. The path to the compiler is determined first via the `RUSTC`
+environment variable. If it is not set, then `rustc` is used. If that fails, no
+determination is made, and calls return `None`.
+
+## Usage
+
+Add to your `Cargo.toml` file, typically as a build dependency:
+
+```toml
+[build-dependencies]
+version_check = "0.9"
+```
+
+`version_check` is compatible and compiles with Rust 1.0.0 and beyond.
+
+## Examples
+
+Set a `cfg` flag in `build.rs` if the running compiler was determined to be
+at least version `1.13.0`:
+
+```rust
+extern crate version_check as rustc;
+
+if rustc::is_min_version("1.13.0").unwrap_or(false) {
+ println!("cargo:rustc-cfg=question_mark_operator");
+}
+```
+
+Check that the running compiler was released on or after `2018-12-18`:
+
+```rust
+extern crate version_check as rustc;
+
+match rustc::is_min_date("2018-12-18") {
+ Some(true) => "Yep! It's recent!",
+ Some(false) => "No, it's older.",
+ None => "Couldn't determine the rustc version."
+};
+```
+
+Check that the running compiler supports feature flags:
+
+```rust
+extern crate version_check as rustc;
+
+match rustc::is_feature_flaggable() {
+ Some(true) => "Yes! It's a dev or nightly release!",
+ Some(false) => "No, it's stable or beta.",
+ None => "Couldn't determine the rustc version."
+};
+```
+
+See the [rustdocs](https://docs.rs/version_check) for more examples and complete
+documentation.
+
+## Alternatives
+
+This crate is dead simple with no dependencies. If you need something more and
+don't care about panicking if the version cannot be obtained, or if you don't
+mind adding dependencies, see [rustc_version]. If you'd instead prefer a feature
+detection library that works by dynamically invoking `rustc` with a
+representative code sample, see [autocfg].
+
+[rustc_version]: https://crates.io/crates/rustc_version
+[autocfg]: https://crates.io/crates/autocfg
+
+## License
+
+`version_check` is licensed under either of the following, at your option:
+
+ * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT License ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
diff --git a/rust/hw/char/pl011/vendor/version_check/meson.build b/rust/hw/char/pl011/vendor/version_check/meson.build
new file mode 100644
index 0000000000..9a37495615
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/meson.build
@@ -0,0 +1,14 @@
+_version_check_rs = static_library(
+ 'version_check',
+ files('src/lib.rs'),
+ gnu_symbol_visibility: 'hidden',
+ rust_abi: 'rust',
+ rust_args: rust_args + [
+ '--edition', '2021',
+ ],
+ dependencies: [],
+)
+
+dep_version_check = declare_dependency(
+ link_with: _version_check_rs,
+)
diff --git a/rust/hw/char/pl011/vendor/version_check/src/channel.rs b/rust/hw/char/pl011/vendor/version_check/src/channel.rs
new file mode 100644
index 0000000000..f84c508d16
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/src/channel.rs
@@ -0,0 +1,193 @@
+use std::fmt;
+
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+enum Kind {
+ Dev,
+ Nightly,
+ Beta,
+ Stable,
+}
+
+/// Release channel: "dev", "nightly", "beta", or "stable".
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub struct Channel(Kind);
+
+impl Channel {
+ /// Reads the release channel of the running compiler. If it cannot be
+ /// determined (see the [top-level documentation](crate)), returns `None`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Channel;
+ ///
+ /// match Channel::read() {
+ /// Some(c) => format!("The channel is: {}", c),
+ /// None => format!("Failed to read the release channel.")
+ /// };
+ /// ```
+ pub fn read() -> Option<Channel> {
+ ::get_version_and_date()
+ .and_then(|(version, _)| version)
+ .and_then(|version| Channel::parse(&version))
+ }
+
+ /// Parse a Rust release channel from a Rust release version string (of the
+ /// form `major[.minor[.patch[-channel]]]`). Returns `None` if `version` is
+ /// not a valid Rust version string.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Channel;
+ ///
+ /// let dev = Channel::parse("1.3.0-dev").unwrap();
+ /// assert!(dev.is_dev());
+ ///
+ /// let nightly = Channel::parse("1.42.2-nightly").unwrap();
+ /// assert!(nightly.is_nightly());
+ ///
+ /// let beta = Channel::parse("1.32.0-beta").unwrap();
+ /// assert!(beta.is_beta());
+ ///
+ /// let stable = Channel::parse("1.4.0").unwrap();
+ /// assert!(stable.is_stable());
+ /// ```
+ pub fn parse(version: &str) -> Option<Channel> {
+ let version = version.trim();
+ if version.contains("-dev") || version == "dev" {
+ Some(Channel(Kind::Dev))
+ } else if version.contains("-nightly") || version == "nightly" {
+ Some(Channel(Kind::Nightly))
+ } else if version.contains("-beta") || version == "beta" {
+ Some(Channel(Kind::Beta))
+ } else if !version.contains("-") {
+ Some(Channel(Kind::Stable))
+ } else {
+ None
+ }
+ }
+
+ /// Returns the name of the release channel.
+ fn as_str(&self) -> &'static str {
+ match self.0 {
+ Kind::Dev => "dev",
+ Kind::Beta => "beta",
+ Kind::Nightly => "nightly",
+ Kind::Stable => "stable",
+ }
+ }
+
+ /// Returns `true` if this channel supports feature flags. In other words,
+ /// returns `true` if the channel is either `dev` or `nightly`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Channel;
+ ///
+ /// let dev = Channel::parse("1.3.0-dev").unwrap();
+ /// assert!(dev.supports_features());
+ ///
+ /// let nightly = Channel::parse("1.42.2-nightly").unwrap();
+ /// assert!(nightly.supports_features());
+ ///
+ /// let beta = Channel::parse("1.32.0-beta").unwrap();
+ /// assert!(!beta.supports_features());
+ ///
+ /// let stable = Channel::parse("1.4.0").unwrap();
+ /// assert!(!stable.supports_features());
+ /// ```
+ pub fn supports_features(&self) -> bool {
+ match self.0 {
+ Kind::Dev | Kind::Nightly => true,
+ Kind::Beta | Kind::Stable => false
+ }
+ }
+
+ /// Returns `true` if this channel is `dev` and `false` otherwise.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Channel;
+ ///
+ /// let dev = Channel::parse("1.3.0-dev").unwrap();
+ /// assert!(dev.is_dev());
+ ///
+ /// let stable = Channel::parse("1.0.0").unwrap();
+ /// assert!(!stable.is_dev());
+ /// ```
+ pub fn is_dev(&self) -> bool {
+ match self.0 {
+ Kind::Dev => true,
+ _ => false
+ }
+ }
+
+ /// Returns `true` if this channel is `nightly` and `false` otherwise.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Channel;
+ ///
+ /// let nightly = Channel::parse("1.3.0-nightly").unwrap();
+ /// assert!(nightly.is_nightly());
+ ///
+ /// let stable = Channel::parse("1.0.0").unwrap();
+ /// assert!(!stable.is_nightly());
+ /// ```
+ pub fn is_nightly(&self) -> bool {
+ match self.0 {
+ Kind::Nightly => true,
+ _ => false
+ }
+ }
+
+ /// Returns `true` if this channel is `beta` and `false` otherwise.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Channel;
+ ///
+ /// let beta = Channel::parse("1.3.0-beta").unwrap();
+ /// assert!(beta.is_beta());
+ ///
+ /// let stable = Channel::parse("1.0.0").unwrap();
+ /// assert!(!stable.is_beta());
+ /// ```
+ pub fn is_beta(&self) -> bool {
+ match self.0 {
+ Kind::Beta => true,
+ _ => false
+ }
+ }
+
+ /// Returns `true` if this channel is `stable` and `false` otherwise.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Channel;
+ ///
+ /// let stable = Channel::parse("1.0.0").unwrap();
+ /// assert!(stable.is_stable());
+ ///
+ /// let beta = Channel::parse("1.3.0-beta").unwrap();
+ /// assert!(!beta.is_stable());
+ /// ```
+ pub fn is_stable(&self) -> bool {
+ match self.0 {
+ Kind::Stable => true,
+ _ => false
+ }
+ }
+}
+
+impl fmt::Display for Channel {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.as_str())
+ }
+}
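The `is_dev`/`is_nightly`/`is_beta`/`is_stable` helpers above are written as explicit `match` arms because, per its README, the crate still compiles on Rust 1.0.0. On a modern toolchain the same predicates could be spelled with `matches!` (stable since Rust 1.42); a small self-contained sketch, not part of the vendored source:

```rust
#[allow(dead_code)]
enum Kind {
    Dev,
    Nightly,
    Beta,
    Stable,
}

struct Channel(Kind);

impl Channel {
    // Equivalent to the vendored `is_dev`, using `matches!` instead of an
    // explicit `match`; the crate avoids this to preserve Rust 1.0.0 support.
    fn is_dev(&self) -> bool {
        matches!(self.0, Kind::Dev)
    }
}

fn main() {
    assert!(Channel(Kind::Dev).is_dev());
    assert!(!Channel(Kind::Stable).is_dev());
}
```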
diff --git a/rust/hw/char/pl011/vendor/version_check/src/date.rs b/rust/hw/char/pl011/vendor/version_check/src/date.rs
new file mode 100644
index 0000000000..de0b2d05d7
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/src/date.rs
@@ -0,0 +1,203 @@
+use std::fmt;
+
+/// Release date including year, month, and day.
+// Internal storage is: y[31..9] | m[8..5] | d[4..0].
+#[derive(Debug, PartialEq, Eq, Copy, Clone, PartialOrd, Ord)]
+pub struct Date(u32);
+
+impl Date {
+ /// Reads the release date of the running compiler. If it cannot be
+ /// determined (see the [top-level documentation](crate)), returns `None`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Date;
+ ///
+ /// match Date::read() {
+ /// Some(d) => format!("The release date is: {}", d),
+ /// None => format!("Failed to read the release date.")
+ /// };
+ /// ```
+ pub fn read() -> Option<Date> {
+ ::get_version_and_date()
+ .and_then(|(_, date)| date)
+ .and_then(|date| Date::parse(&date))
+ }
+
+ /// Parse a release date of the form `%Y-%m-%d`. Returns `None` if `date` is
+ /// not in `%Y-%m-%d` format.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Date;
+ ///
+ /// let date = Date::parse("2016-04-20").unwrap();
+ ///
+ /// assert!(date.at_least("2016-01-10"));
+ /// assert!(date.at_most("2016-04-20"));
+ /// assert!(date.exactly("2016-04-20"));
+ ///
+ /// assert!(Date::parse("2021-12-31").unwrap().exactly("2021-12-31"));
+ ///
+ /// assert!(Date::parse("March 13, 2018").is_none());
+ /// assert!(Date::parse("1-2-3-4-5").is_none());
+ /// assert!(Date::parse("2020-300-23120").is_none());
+ /// assert!(Date::parse("2020-12-12 1").is_none());
+ /// assert!(Date::parse("2020-10").is_none());
+ /// assert!(Date::parse("2020").is_none());
+ /// ```
+ pub fn parse(date: &str) -> Option<Date> {
+ let mut ymd = [0u16; 3];
+ for (i, split) in date.split('-').map(|s| s.parse::<u16>()).enumerate() {
+ ymd[i] = match (i, split) {
+ (3, _) | (_, Err(_)) => return None,
+ (_, Ok(v)) => v,
+ };
+ }
+
+ let (year, month, day) = (ymd[0], ymd[1], ymd[2]);
+ if year == 0 || month == 0 || month > 12 || day == 0 || day > 31 {
+ return None;
+ }
+
+ Some(Date::from_ymd(year, month as u8, day as u8))
+ }
+
+ /// Creates a `Date` from `(year, month, day)` date components.
+ ///
+ /// Does not check the validity of `year`, `month`, or `day`, but `year` is
+ /// truncated to 23 bits (% 8,388,608), `month` to 4 bits (% 16), and `day`
+ /// to 5 bits (% 32).
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Date;
+ ///
+ /// assert!(Date::from_ymd(2021, 7, 30).exactly("2021-07-30"));
+ /// assert!(Date::from_ymd(2010, 3, 23).exactly("2010-03-23"));
+ /// assert!(Date::from_ymd(2090, 1, 31).exactly("2090-01-31"));
+ ///
+ /// // Truncation: 33 % 32 == 0x21 & 0x1F == 1.
+ /// assert!(Date::from_ymd(2090, 1, 33).exactly("2090-01-01"));
+ /// ```
+ pub fn from_ymd(year: u16, month: u8, day: u8) -> Date {
+ let year = (year as u32) << 9;
+ let month = ((month as u32) & 0xF) << 5;
+ let day = (day as u32) & 0x1F;
+ Date(year | month | day)
+ }
+
+ /// Return the original (YYYY, MM, DD).
+ fn to_ymd(&self) -> (u16, u8, u8) {
+ let y = self.0 >> 9;
+ let m = (self.0 >> 5) & 0xF;
+ let d = self.0 & 0x1F;
+ (y as u16, m as u8, d as u8)
+ }
+
+ /// Returns `true` if `self` occurs on or after `date`.
+ ///
+ /// If `date` occurs before `self`, or if `date` is not in `%Y-%m-%d`
+ /// format, returns `false`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Date;
+ ///
+ /// let date = Date::parse("2020-01-01").unwrap();
+ ///
+ /// assert!(date.at_least("2019-12-31"));
+ /// assert!(date.at_least("2020-01-01"));
+ /// assert!(date.at_least("2014-04-31"));
+ ///
+ /// assert!(!date.at_least("2020-01-02"));
+ /// assert!(!date.at_least("2024-08-18"));
+ /// ```
+ pub fn at_least(&self, date: &str) -> bool {
+ Date::parse(date)
+ .map(|date| self >= &date)
+ .unwrap_or(false)
+ }
+
+ /// Returns `true` if `self` occurs on or before `date`.
+ ///
+ /// If `date` occurs after `self`, or if `date` is not in `%Y-%m-%d`
+ /// format, returns `false`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Date;
+ ///
+ /// let date = Date::parse("2020-01-01").unwrap();
+ ///
+ /// assert!(date.at_most("2020-01-01"));
+ /// assert!(date.at_most("2020-01-02"));
+ /// assert!(date.at_most("2024-08-18"));
+ ///
+ /// assert!(!date.at_most("2019-12-31"));
+ /// assert!(!date.at_most("2014-04-31"));
+ /// ```
+ pub fn at_most(&self, date: &str) -> bool {
+ Date::parse(date)
+ .map(|date| self <= &date)
+ .unwrap_or(false)
+ }
+
+ /// Returns `true` if `self` occurs exactly on `date`.
+ ///
+ /// If `date` is not exactly `self`, or if `date` is not in `%Y-%m-%d`
+ /// format, returns `false`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Date;
+ ///
+ /// let date = Date::parse("2020-01-01").unwrap();
+ ///
+ /// assert!(date.exactly("2020-01-01"));
+ ///
+ /// assert!(!date.exactly("2019-12-31"));
+ /// assert!(!date.exactly("2014-04-31"));
+ /// assert!(!date.exactly("2020-01-02"));
+ /// assert!(!date.exactly("2024-08-18"));
+ /// ```
+ pub fn exactly(&self, date: &str) -> bool {
+ Date::parse(date)
+ .map(|date| self == &date)
+ .unwrap_or(false)
+ }
+}
+
+impl fmt::Display for Date {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let (y, m, d) = self.to_ymd();
+ write!(f, "{}-{:02}-{:02}", y, m, d)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::Date;
+
+ macro_rules! reflexive_display {
+ ($string:expr) => (
+ assert_eq!(Date::parse($string).unwrap().to_string(), $string);
+ )
+ }
+
+ #[test]
+ fn display() {
+ reflexive_display!("2019-05-08");
+ reflexive_display!("2000-01-01");
+ reflexive_display!("2000-12-31");
+ reflexive_display!("2090-12-31");
+ reflexive_display!("1999-02-19");
+ reflexive_display!("9999-12-31");
+ }
+}
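To make the bit layout behind `from_ymd`/`to_ymd` concrete: the year sits above the month, which sits above the day, so a plain integer comparison of the packed `u32` orders dates chronologically, which is what `at_least`/`at_most` rely on via the derived `Ord`. A small standalone sketch of the same packing, with illustrative names:

```rust
// Standalone illustration of the y/m/d packing used by `Date` above
// (function names are illustrative; the crate packs inside `from_ymd`).
fn pack(year: u16, month: u8, day: u8) -> u32 {
    ((year as u32) << 9) | (((month as u32) & 0xF) << 5) | ((day as u32) & 0x1F)
}

fn unpack(packed: u32) -> (u16, u8, u8) {
    ((packed >> 9) as u16, ((packed >> 5) & 0xF) as u8, (packed & 0x1F) as u8)
}

fn main() {
    let d = pack(2024, 7, 22);
    assert_eq!(unpack(d), (2024, 7, 22));
    // Year is more significant than month, and month than day, so the
    // packed integers compare in chronological order.
    assert!(pack(2024, 7, 22) > pack(2023, 12, 31));
    assert!(pack(2024, 7, 22) < pack(2024, 8, 1));
}
```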
diff --git a/rust/hw/char/pl011/vendor/version_check/src/lib.rs b/rust/hw/char/pl011/vendor/version_check/src/lib.rs
new file mode 100644
index 0000000000..6c16074822
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/src/lib.rs
@@ -0,0 +1,493 @@
+//! This tiny crate checks that the running or installed `rustc` meets some
+//! version requirements. The version is queried by calling the Rust compiler
+//! with `--version`. The path to the compiler is determined first via the
+//! `RUSTC` environment variable. If it is not set, then `rustc` is used. If
+//! that fails, no determination is made, and calls return `None`.
+//!
+//! # Examples
+//!
+//! * Set a `cfg` flag in `build.rs` if the running compiler was determined to
+//! be at least version `1.13.0`:
+//!
+//! ```rust
+//! extern crate version_check as rustc;
+//!
+//! if rustc::is_min_version("1.13.0").unwrap_or(false) {
+//! println!("cargo:rustc-cfg=question_mark_operator");
+//! }
+//! ```
+//!
+//! See [`is_max_version`] or [`is_exact_version`] to check if the compiler
+//! is _at most_ or _exactly_ a certain version.
+//!
+//! * Check that the running compiler was released on or after `2018-12-18`:
+//!
+//! ```rust
+//! extern crate version_check as rustc;
+//!
+//! match rustc::is_min_date("2018-12-18") {
+//! Some(true) => "Yep! It's recent!",
+//! Some(false) => "No, it's older.",
+//! None => "Couldn't determine the rustc version."
+//! };
+//! ```
+//!
+//! See [`is_max_date`] or [`is_exact_date`] to check if the compiler was
+//! released _prior to_ or _exactly on_ a certain date.
+//!
+//! * Check that the running compiler supports feature flags:
+//!
+//! ```rust
+//! extern crate version_check as rustc;
+//!
+//! match rustc::is_feature_flaggable() {
+//! Some(true) => "Yes! It's a dev or nightly release!",
+//! Some(false) => "No, it's stable or beta.",
+//! None => "Couldn't determine the rustc version."
+//! };
+//! ```
+//!
+//! * Check that the running compiler supports a specific feature:
+//!
+//! ```rust
+//! extern crate version_check as rustc;
+//!
+//! if let Some(true) = rustc::supports_feature("doc_cfg") {
+//! println!("cargo:rustc-cfg=has_doc_cfg");
+//! }
+//! ```
+//!
+//! * Check that the running compiler is on the stable channel:
+//!
+//! ```rust
+//! extern crate version_check as rustc;
+//!
+//! match rustc::Channel::read() {
+//! Some(c) if c.is_stable() => format!("Yes! It's stable."),
+//! Some(c) => format!("No, the channel {} is not stable.", c),
+//! None => format!("Couldn't determine the rustc version.")
+//! };
+//! ```
+//!
+//! To interact with the version, release date, and release channel as structs,
+//! use [`Version`], [`Date`], and [`Channel`], respectively. The [`triple()`]
+//! function returns all three values efficiently.
+//!
+//! # Alternatives
+//!
+//! This crate is dead simple with no dependencies. If you need something more
+//! and don't care about panicking if the version cannot be obtained, or if you
+//! don't mind adding dependencies, see
+//! [rustc_version](https://crates.io/crates/rustc_version).
+
+#![allow(deprecated)]
+
+mod version;
+mod channel;
+mod date;
+
+use std::env;
+use std::process::Command;
+
+#[doc(inline)] pub use version::*;
+#[doc(inline)] pub use channel::*;
+#[doc(inline)] pub use date::*;
+
+/// Parses (version, date) as available from rustc version string.
+fn version_and_date_from_rustc_version(s: &str) -> (Option<String>, Option<String>) {
+ let last_line = s.lines().last().unwrap_or(s);
+ let mut components = last_line.trim().split(" ");
+ let version = components.nth(1);
+ let date = components.filter(|c| c.ends_with(')')).next()
+ .map(|s| s.trim_right().trim_right_matches(")").trim_left().trim_left_matches('('));
+ (version.map(|s| s.to_string()), date.map(|s| s.to_string()))
+}
+
+/// Parses (version, date) as available from rustc verbose version output.
+fn version_and_date_from_rustc_verbose_version(s: &str) -> (Option<String>, Option<String>) {
+ let (mut version, mut date) = (None, None);
+ for line in s.lines() {
+ let split = |s: &str| s.splitn(2, ":").nth(1).map(|s| s.trim().to_string());
+ match line.trim().split(" ").nth(0) {
+ Some("rustc") => {
+ let (v, d) = version_and_date_from_rustc_version(line);
+ version = version.or(v);
+ date = date.or(d);
+ },
+ Some("release:") => version = split(line),
+ Some("commit-date:") if line.ends_with("unknown") => date = None,
+ Some("commit-date:") => date = split(line),
+ _ => continue
+ }
+ }
+
+ (version, date)
+}
+
+/// Returns (version, date) as available from `rustc --version`.
+fn get_version_and_date() -> Option<(Option<String>, Option<String>)> {
+ let rustc = env::var("RUSTC").unwrap_or_else(|_| "rustc".to_string());
+ Command::new(rustc).arg("--verbose").arg("--version").output().ok()
+ .and_then(|output| String::from_utf8(output.stdout).ok())
+ .map(|s| version_and_date_from_rustc_verbose_version(&s))
+}
+
+/// Reads the triple of [`Version`], [`Channel`], and [`Date`] of the installed
+/// or running `rustc`.
+///
+/// If any attribute cannot be determined (see the [top-level
+/// documentation](crate)), returns `None`.
+///
+/// To obtain only one of three attributes, use [`Version::read()`],
+/// [`Channel::read()`], or [`Date::read()`].
+pub fn triple() -> Option<(Version, Channel, Date)> {
+ let (version_str, date_str) = match get_version_and_date() {
+ Some((Some(version), Some(date))) => (version, date),
+ _ => return None
+ };
+
+ // Can't use `?` or `try!` for `Option` in 1.0.0.
+ match Version::parse(&version_str) {
+ Some(version) => match Channel::parse(&version_str) {
+ Some(channel) => match Date::parse(&date_str) {
+ Some(date) => Some((version, channel, date)),
+ _ => None,
+ },
+ _ => None,
+ },
+ _ => None
+ }
+}
+
+/// Checks that the running or installed `rustc` was released **on or after**
+/// some date.
+///
+/// The format of `min_date` must be YYYY-MM-DD. For instance: `2016-12-20` or
+/// `2017-01-09`.
+///
+/// If the date cannot be retrieved or parsed, or if `min_date` could not be
+/// parsed, returns `None`. Otherwise returns `true` if the installed `rustc`
+/// was released on or after `min_date` and `false` otherwise.
+pub fn is_min_date(min_date: &str) -> Option<bool> {
+ match (Date::read(), Date::parse(min_date)) {
+ (Some(rustc_date), Some(min_date)) => Some(rustc_date >= min_date),
+ _ => None
+ }
+}
+
+/// Checks that the running or installed `rustc` was released **on or before**
+/// some date.
+///
+/// The format of `max_date` must be YYYY-MM-DD. For instance: `2016-12-20` or
+/// `2017-01-09`.
+///
+/// If the date cannot be retrieved or parsed, or if `max_date` could not be
+/// parsed, returns `None`. Otherwise returns `true` if the installed `rustc`
+/// was released on or before `max_date` and `false` otherwise.
+pub fn is_max_date(max_date: &str) -> Option<bool> {
+ match (Date::read(), Date::parse(max_date)) {
+ (Some(rustc_date), Some(max_date)) => Some(rustc_date <= max_date),
+ _ => None
+ }
+}
+
+/// Checks that the running or installed `rustc` was released **exactly** on
+/// some date.
+///
+/// The format of `date` must be YYYY-MM-DD. For instance: `2016-12-20` or
+/// `2017-01-09`.
+///
+/// If the date cannot be retrieved or parsed, or if `date` could not be parsed,
+/// returns `None`. Otherwise returns `true` if the installed `rustc` was
+/// released on `date` and `false` otherwise.
+pub fn is_exact_date(date: &str) -> Option<bool> {
+ match (Date::read(), Date::parse(date)) {
+ (Some(rustc_date), Some(date)) => Some(rustc_date == date),
+ _ => None
+ }
+}
+
+/// Checks that the running or installed `rustc` is **at least** some minimum
+/// version.
+///
+/// The format of `min_version` is a semantic version: `1.3.0`, `1.15.0-beta`,
+/// `1.14.0`, `1.16.0-nightly`, etc.
+///
+/// If the version cannot be retrieved or parsed, or if `min_version` could not
+/// be parsed, returns `None`. Otherwise returns `true` if the installed `rustc`
+/// is at least `min_version` and `false` otherwise.
+pub fn is_min_version(min_version: &str) -> Option<bool> {
+ match (Version::read(), Version::parse(min_version)) {
+ (Some(rustc_ver), Some(min_ver)) => Some(rustc_ver >= min_ver),
+ _ => None
+ }
+}
+
+/// Checks that the running or installed `rustc` is **at most** some maximum
+/// version.
+///
+/// The format of `max_version` is a semantic version: `1.3.0`, `1.15.0-beta`,
+/// `1.14.0`, `1.16.0-nightly`, etc.
+///
+/// If the version cannot be retrieved or parsed, or if `max_version` could not
+/// be parsed, returns `None`. Otherwise returns `true` if the installed `rustc`
+/// is at most `max_version` and `false` otherwise.
+pub fn is_max_version(max_version: &str) -> Option<bool> {
+ match (Version::read(), Version::parse(max_version)) {
+ (Some(rustc_ver), Some(max_ver)) => Some(rustc_ver <= max_ver),
+ _ => None
+ }
+}
+
+/// Checks that the running or installed `rustc` is **exactly** some version.
+///
+/// The format of `version` is a semantic version: `1.3.0`, `1.15.0-beta`,
+/// `1.14.0`, `1.16.0-nightly`, etc.
+///
+/// If the version cannot be retrieved or parsed, or if `version` could not be
+/// parsed, returns `None`. Otherwise returns `true` if the installed `rustc` is
+/// exactly `version` and `false` otherwise.
+pub fn is_exact_version(version: &str) -> Option<bool> {
+ match (Version::read(), Version::parse(version)) {
+ (Some(rustc_ver), Some(version)) => Some(rustc_ver == version),
+ _ => None
+ }
+}
+
+/// Checks whether the running or installed `rustc` supports feature flags.
+///
+/// In other words, if the channel is either "nightly" or "dev".
+///
+/// Note that support for specific `rustc` features can be enabled or disabled
+/// via the `allow-features` compiler flag, which this function _does not_
+/// check. That is, this function _does not_ check whether a _specific_ feature
+/// is supported, but instead whether features are supported at all. To check
+/// for support for a specific feature, use [`supports_feature()`].
+///
+/// If the version could not be determined, returns `None`. Otherwise returns
+/// `true` if the running version supports feature flags and `false` otherwise.
+pub fn is_feature_flaggable() -> Option<bool> {
+ Channel::read().map(|c| c.supports_features())
+}
+
+/// Checks whether the running or installed `rustc` supports `feature`.
+///
+/// Returns _true_ _iff_ [`is_feature_flaggable()`] returns `true` _and_ the
+/// feature is not disabled via exclusion in `allow-features` via `RUSTFLAGS` or
+/// `CARGO_ENCODED_RUSTFLAGS`. If the version could not be determined, returns
+/// `None`.
+///
+/// # Example
+///
+/// ```rust
+/// use version_check as rustc;
+///
+/// if let Some(true) = rustc::supports_feature("doc_cfg") {
+/// println!("cargo:rustc-cfg=has_doc_cfg");
+/// }
+/// ```
+pub fn supports_feature(feature: &str) -> Option<bool> {
+ match is_feature_flaggable() {
+ Some(true) => { /* continue */ }
+ Some(false) => return Some(false),
+ None => return None,
+ }
+
+ let env_flags = env::var_os("CARGO_ENCODED_RUSTFLAGS")
+ .map(|flags| (flags, '\x1f'))
+ .or_else(|| env::var_os("RUSTFLAGS").map(|flags| (flags, ' ')));
+
+ if let Some((flags, delim)) = env_flags {
+ const ALLOW_FEATURES: &'static str = "allow-features=";
+
+ let rustflags = flags.to_string_lossy();
+ let allow_features = rustflags.split(delim)
+ .map(|flag| flag.trim_left_matches("-Z").trim())
+ .filter(|flag| flag.starts_with(ALLOW_FEATURES))
+ .map(|flag| &flag[ALLOW_FEATURES.len()..]);
+
+ if let Some(allow_features) = allow_features.last() {
+ return Some(allow_features.split(',').any(|f| f.trim() == feature));
+ }
+ }
+
+ // If there are no `RUSTFLAGS` or `CARGO_ENCODED_RUSTFLAGS` or they don't
+ // contain an `allow-features` flag, assume compiler allows all features.
+ Some(true)
+}
+
+#[cfg(test)]
+mod tests {
+ use std::{env, fs};
+
+ use super::version_and_date_from_rustc_version;
+ use super::version_and_date_from_rustc_verbose_version;
+
+ macro_rules! check_parse {
+ (@ $f:expr, $s:expr => $v:expr, $d:expr) => ({
+ if let (Some(v), d) = $f(&$s) {
+ let e_d: Option<&str> = $d.into();
+ assert_eq!((v, d), ($v.to_string(), e_d.map(|s| s.into())));
+ } else {
+ panic!("{:?} didn't parse for version testing.", $s);
+ }
+ });
+ ($f:expr, $s:expr => $v:expr, $d:expr) => ({
+ let warn = "warning: invalid logging spec 'warning', ignoring it";
+ let warn2 = "warning: sorry, something went wrong :(sad)";
+ check_parse!(@ $f, $s => $v, $d);
+ check_parse!(@ $f, &format!("{}\n{}", warn, $s) => $v, $d);
+ check_parse!(@ $f, &format!("{}\n{}", warn2, $s) => $v, $d);
+ check_parse!(@ $f, &format!("{}\n{}\n{}", warn, warn2, $s) => $v, $d);
+ check_parse!(@ $f, &format!("{}\n{}\n{}", warn2, warn, $s) => $v, $d);
+ })
+ }
+
+ macro_rules! check_terse_parse {
+ ($($s:expr => $v:expr, $d:expr,)+) => {$(
+ check_parse!(version_and_date_from_rustc_version, $s => $v, $d);
+ )+}
+ }
+
+ macro_rules! check_verbose_parse {
+ ($($s:expr => $v:expr, $d:expr,)+) => {$(
+ check_parse!(version_and_date_from_rustc_verbose_version, $s => $v, $d);
+ )+}
+ }
+
+ #[test]
+ fn test_version_parse() {
+ check_terse_parse! {
+ "rustc 1.18.0" => "1.18.0", None,
+ "rustc 1.8.0" => "1.8.0", None,
+ "rustc 1.20.0-nightly" => "1.20.0-nightly", None,
+ "rustc 1.20" => "1.20", None,
+ "rustc 1.3" => "1.3", None,
+ "rustc 1" => "1", None,
+ "rustc 1.5.1-beta" => "1.5.1-beta", None,
+ "rustc 1.20.0 (2017-07-09)" => "1.20.0", Some("2017-07-09"),
+ "rustc 1.20.0-dev (2017-07-09)" => "1.20.0-dev",
Some("2017-07-09"),
+ "rustc 1.20.0-nightly (d84693b93 2017-07-09)" => "1.20.0-nightly",
Some("2017-07-09"),
+ "rustc 1.20.0 (d84693b93 2017-07-09)" => "1.20.0",
Some("2017-07-09"),
+ "rustc 1.30.0-nightly (3bc2ca7e4 2018-09-20)" => "1.30.0-nightly",
Some("2018-09-20"),
+ };
+ }
+
+ #[test]
+ fn test_verbose_version_parse() {
+ check_verbose_parse! {
+ "rustc 1.0.0 (a59de37e9 2015-05-13) (built 2015-05-14)\n\
+ binary: rustc\n\
+ commit-hash: a59de37e99060162a2674e3ff45409ac73595c0e\n\
+ commit-date: 2015-05-13\n\
+ build-date: 2015-05-14\n\
+ host: x86_64-unknown-linux-gnu\n\
+ release: 1.0.0" => "1.0.0", Some("2015-05-13"),
+
+ "rustc 1.0.0 (a59de37e9 2015-05-13) (built 2015-05-14)\n\
+ commit-hash: a59de37e99060162a2674e3ff45409ac73595c0e\n\
+ commit-date: 2015-05-13\n\
+ build-date: 2015-05-14\n\
+ host: x86_64-unknown-linux-gnu\n\
+ release: 1.0.0" => "1.0.0", Some("2015-05-13"),
+
+ "rustc 1.50.0 (cb75ad5db 2021-02-10)\n\
+ binary: rustc\n\
+ commit-hash: cb75ad5db02783e8b0222fee363c5f63f7e2cf5b\n\
+ commit-date: 2021-02-10\n\
+ host: x86_64-unknown-linux-gnu\n\
+ release: 1.50.0" => "1.50.0", Some("2021-02-10"),
+
+ "rustc 1.52.0-nightly (234781afe 2021-03-07)\n\
+ binary: rustc\n\
+ commit-hash: 234781afe33d3f339b002f85f948046d8476cfc9\n\
+ commit-date: 2021-03-07\n\
+ host: x86_64-unknown-linux-gnu\n\
+ release: 1.52.0-nightly\n\
+ LLVM version: 12.0.0" => "1.52.0-nightly", Some("2021-03-07"),
+
+ "rustc 1.41.1\n\
+ binary: rustc\n\
+ commit-hash: unknown\n\
+ commit-date: unknown\n\
+ host: x86_64-unknown-linux-gnu\n\
+ release: 1.41.1\n\
+ LLVM version: 7.0" => "1.41.1", None,
+
+ "rustc 1.49.0\n\
+ binary: rustc\n\
+ commit-hash: unknown\n\
+ commit-date: unknown\n\
+ host: x86_64-unknown-linux-gnu\n\
+ release: 1.49.0" => "1.49.0", None,
+
+ "rustc 1.50.0 (Fedora 1.50.0-1.fc33)\n\
+ binary: rustc\n\
+ commit-hash: unknown\n\
+ commit-date: unknown\n\
+ host: x86_64-unknown-linux-gnu\n\
+ release: 1.50.0" => "1.50.0", None,
+ };
+ }
+
+ fn read_static(verbose: bool, channel: &str, minor: usize) -> String {
+ use std::fs::File;
+ use std::path::Path;
+ use std::io::{BufReader, Read};
+
+ let subdir = if verbose { "verbose" } else { "terse" };
+ let path = Path::new(STATIC_PATH)
+ .join(channel)
+ .join(subdir)
+ .join(format!("rustc-1.{}.0", minor));
+
+ let file = File::open(path).unwrap();
+ let mut buf_reader = BufReader::new(file);
+ let mut contents = String::new();
+ buf_reader.read_to_string(&mut contents).unwrap();
+ contents
+ }
+
+ static STATIC_PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/static");
+
+ static DATES: [&'static str; 51] = [
+ "2015-05-13", "2015-06-19", "2015-08-03", "2015-09-15", "2015-10-27",
+ "2015-12-04", "2016-01-19", "2016-02-29", "2016-04-11", "2016-05-18",
+ "2016-07-03", "2016-08-15", "2016-09-23", "2016-11-07", "2016-12-16",
+ "2017-01-19", "2017-03-10", "2017-04-24", "2017-06-06", "2017-07-17",
+ "2017-08-27", "2017-10-09", "2017-11-20", "2018-01-01", "2018-02-12",
+ "2018-03-25", "2018-05-07", "2018-06-19", "2018-07-30", "2018-09-11",
+ "2018-10-24", "2018-12-04", "2019-01-16", "2019-02-28", "2019-04-10",
+ "2019-05-20", "2019-07-03", "2019-08-13", "2019-09-23", "2019-11-04",
+ "2019-12-16", "2020-01-27", "2020-03-09", "2020-04-20", "2020-06-01",
+ "2020-07-13", "2020-08-24", "2020-10-07", "2020-11-16", "2020-12-29",
+ "2021-02-10",
+ ];
+
+ #[test]
+ fn test_stable_compatibility() {
+ if env::var_os("FORCE_STATIC").is_none() &&
fs::metadata(STATIC_PATH).is_err() {
+ // We exclude `/static` when we package `version_check`, so don't
+ // run if static files aren't present unless we know they should be.
+ return;
+ }
+
+ // Ensure we can parse all output from all Linux stable releases.
+ for v in 0..DATES.len() {
+ let (version, date) = (&format!("1.{}.0", v), Some(DATES[v]));
+ check_terse_parse!(read_static(false, "stable", v) => version, date,);
+ check_verbose_parse!(read_static(true, "stable", v) => version, date,);
+ }
+ }
+
+ #[test]
+ fn test_parse_current() {
+ let (version, channel) = (::Version::read(), ::Channel::read());
+ assert!(version.is_some());
+ assert!(channel.is_some());
+
+ if let Ok(known_channel) = env::var("KNOWN_CHANNEL") {
+ assert_eq!(channel, ::Channel::parse(&known_channel));
+ }
+ }
+}
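Putting the public API above together: a typical consumer calls these functions from `build.rs` and emits `cargo:rustc-cfg=...` lines based on the result. A minimal hedged sketch follows; the version threshold and cfg names are illustrative, not taken from any vendored crate:

```rust
// build.rs — illustrative use of the API above; cfg names and the version
// threshold are made up for the example.
extern crate version_check as rustc;

fn main() {
    // Gate a code path on a minimum compiler version.
    if rustc::is_min_version("1.63.0").unwrap_or(false) {
        println!("cargo:rustc-cfg=has_new_enough_rustc");
    }

    // Gate nightly-only code on whether feature flags are usable at all.
    if rustc::is_feature_flaggable().unwrap_or(false) {
        println!("cargo:rustc-cfg=nightly");
    }

    // Or inspect the full triple when more detail is needed.
    if let Some((version, channel, date)) = rustc::triple() {
        println!("cargo:warning=rustc {} ({}) from {}", version, channel, date);
    }
}
```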
diff --git a/rust/hw/char/pl011/vendor/version_check/src/version.rs b/rust/hw/char/pl011/vendor/version_check/src/version.rs
new file mode 100644
index 0000000000..2bc18aae2b
--- /dev/null
+++ b/rust/hw/char/pl011/vendor/version_check/src/version.rs
@@ -0,0 +1,316 @@
+use std::fmt;
+
+/// Version number: `major.minor.patch`, ignoring release channel.
+#[derive(PartialEq, Eq, Copy, Clone, PartialOrd, Ord)]
+pub struct Version(u64);
+
+impl Version {
+ /// Reads the version of the running compiler. If it cannot be determined
+ /// (see the [top-level documentation](crate)), returns `None`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Version;
+ ///
+ /// match Version::read() {
+ /// Some(d) => format!("Version is: {}", d),
+ /// None => format!("Failed to read the version.")
+ /// };
+ /// ```
+ pub fn read() -> Option<Version> {
+ ::get_version_and_date()
+ .and_then(|(version, _)| version)
+ .and_then(|version| Version::parse(&version))
+ }
+
+
+ /// Parse a Rust release version (of the form
+ /// `major[.minor[.patch[-channel]]]`), ignoring the release channel, if
+ /// any. Returns `None` if `version` is not a valid Rust version string.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Version;
+ ///
+ /// let version = Version::parse("1.18.0").unwrap();
+ /// assert!(version.exactly("1.18.0"));
+ ///
+ /// let version = Version::parse("1.20.0-nightly").unwrap();
+ /// assert!(version.exactly("1.20.0"));
+ /// assert!(version.exactly("1.20.0-beta"));
+ ///
+ /// let version = Version::parse("1.3").unwrap();
+ /// assert!(version.exactly("1.3.0"));
+ ///
+ /// let version = Version::parse("1").unwrap();
+ /// assert!(version.exactly("1.0.0"));
+ ///
+ /// assert!(Version::parse("one.two.three").is_none());
+ /// assert!(Version::parse("1.65536.2").is_none());
+ /// assert!(Version::parse("1. 2").is_none());
+ /// assert!(Version::parse("").is_none());
+ /// assert!(Version::parse("1.").is_none());
+ /// assert!(Version::parse("1.2.3.4").is_none());
+ /// ```
+ pub fn parse(version: &str) -> Option<Version> {
+ let splits = version.split('-')
+ .nth(0)
+ .unwrap_or("")
+ .split('.')
+ .map(|s| s.parse::<u16>());
+
+ let mut mmp = [0u16; 3];
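+ // `split('-').nth(0)` above has already dropped any `-channel` suffix;
+ // each remaining dotted component must parse as a `u16`, and a fourth
+ // component (or a non-numeric one) rejects the string with `None`.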
+ for (i, split) in splits.enumerate() {
+ mmp[i] = match (i, split) {
+ (3, _) | (_, Err(_)) => return None,
+ (_, Ok(v)) => v,
+ };
+ }
+
+ let (maj, min, patch) = (mmp[0], mmp[1], mmp[2]);
+ Some(Version::from_mmp(maj, min, patch))
+ }
+
+ /// Creates a `Version` from `(major, minor, patch)` version components.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Version;
+ ///
+ /// assert!(Version::from_mmp(1, 35, 0).exactly("1.35.0"));
+ /// assert!(Version::from_mmp(1, 33, 0).exactly("1.33.0"));
+ /// assert!(Version::from_mmp(1, 35, 1).exactly("1.35.1"));
+ /// assert!(Version::from_mmp(1, 13, 2).exactly("1.13.2"));
+ /// ```
+ pub fn from_mmp(major: u16, minor: u16, patch: u16) -> Version {
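+ // Pack the three 16-bit fields as 0x0000_MMMM_mmmm_pppp so the derived
+ // `Ord` on the inner `u64` matches (major, minor, patch) ordering.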
+ Version(((major as u64) << 32) | ((minor as u64) << 16) | patch as u64)
+ }
+
+ /// Returns the `(major, minor, patch)` version components of `self`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Version;
+ ///
+ /// assert_eq!(Version::parse("1.35.0").unwrap().to_mmp(), (1, 35, 0));
+ /// assert_eq!(Version::parse("1.33.0").unwrap().to_mmp(), (1, 33, 0));
+ /// assert_eq!(Version::parse("1.35.1").unwrap().to_mmp(), (1, 35, 1));
+ /// assert_eq!(Version::parse("1.13.2").unwrap().to_mmp(), (1, 13, 2));
+ /// ```
+ pub fn to_mmp(&self) -> (u16, u16, u16) {
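+ // The `as u16` casts below truncate away the higher-order fields, so no
+ // explicit masking is needed.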
+ let major = self.0 >> 32;
+ let minor = self.0 >> 16;
+ let patch = self.0;
+ (major as u16, minor as u16, patch as u16)
+ }
+
+ /// Returns `true` if `self` is greater than or equal to `version`.
+ ///
+ /// If `version` is greater than `self`, or if `version` is not a valid Rust
+ /// version string, returns `false`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Version;
+ ///
+ /// let version = Version::parse("1.35.0").unwrap();
+ ///
+ /// assert!(version.at_least("1.33.0"));
+ /// assert!(version.at_least("1.35.0"));
+ /// assert!(version.at_least("1.13.2"));
+ ///
+ /// assert!(!version.at_least("1.35.1"));
+ /// assert!(!version.at_least("1.55.0"));
+ ///
+ /// let version = Version::parse("1.12.5").unwrap();
+ ///
+ /// assert!(version.at_least("1.12.0"));
+ /// assert!(!version.at_least("1.35.0"));
+ /// ```
+ pub fn at_least(&self, version: &str) -> bool {
+ Version::parse(version)
+ .map(|version| self >= &version)
+ .unwrap_or(false)
+ }
+
+ /// Returns `true` if `self` is less than or equal to `version`.
+ ///
+ /// If `version` is less than `self`, or if `version` is not a valid Rust
+ /// version string, returns `false`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Version;
+ ///
+ /// let version = Version::parse("1.35.0").unwrap();
+ ///
+ /// assert!(version.at_most("1.35.1"));
+ /// assert!(version.at_most("1.55.0"));
+ /// assert!(version.at_most("1.35.0"));
+ ///
+ /// assert!(!version.at_most("1.33.0"));
+ /// assert!(!version.at_most("1.13.2"));
+ /// ```
+ pub fn at_most(&self, version: &str) -> bool {
+ Version::parse(version)
+ .map(|version| self <= &version)
+ .unwrap_or(false)
+ }
+
+ /// Returns `true` if `self` is exactly equal to `version`.
+ ///
+ /// If `version` is not equal to `self`, or if `version` is not a valid
Rust
+ /// version string, returns `false`.
+ ///
+ /// # Example
+ ///
+ /// ```rust
+ /// use version_check::Version;
+ ///
+ /// let version = Version::parse("1.35.0").unwrap();
+ ///
+ /// assert!(version.exactly("1.35.0"));
+ ///
+ /// assert!(!version.exactly("1.33.0"));
+ /// assert!(!version.exactly("1.35.1"));
+ /// assert!(!version.exactly("1.13.2"));
+ /// ```
+ pub fn exactly(&self, version: &str) -> bool {
+ Version::parse(version)
+ .map(|version| self == &version)
+ .unwrap_or(false)
+ }
+}
+
+impl fmt::Display for Version {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let (major, minor, patch) = self.to_mmp();
+ write!(f, "{}.{}.{}", major, minor, patch)
+ }
+}
+
+impl fmt::Debug for Version {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // We don't use `debug_*` because it's not available in `1.0.0`.
+ write!(f, "Version({:?}, {:?})", self.0, self.to_mmp())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::Version;
+
+ macro_rules! assert_to_mmp {
+ // We don't use `.into::<Option<_>>` because it's not available in 1.0.
+ // We don't use the message part of `assert!` for the same reason.
+ ($s:expr, None) => (
+ assert_eq!(Version::parse($s), None);
+ );
+ ($s:expr, $mmp:expr) => (
+ assert_eq!(Version::parse($s).map(|v| v.to_mmp()), Some($mmp));
+ )
+ }
+
+ macro_rules! assert_from_mmp {
+ (($x:expr, $y:expr, $z:expr) => $s:expr) => {
+ assert_eq!(Some(Version::from_mmp($x, $y, $z)), Version::parse($s));
+ };
+ }
+
+ #[test]
+ fn test_str_to_mmp() {
+ assert_to_mmp!("1", (1, 0, 0));
+ assert_to_mmp!("1.2", (1, 2, 0));
+ assert_to_mmp!("1.18.0", (1, 18, 0));
+ assert_to_mmp!("3.19.0", (3, 19, 0));
+ assert_to_mmp!("1.19.0-nightly", (1, 19, 0));
+ assert_to_mmp!("1.12.2349", (1, 12, 2349));
+ assert_to_mmp!("0.12", (0, 12, 0));
+ assert_to_mmp!("1.12.5", (1, 12, 5));
+ assert_to_mmp!("1.12", (1, 12, 0));
+ assert_to_mmp!("1", (1, 0, 0));
+ assert_to_mmp!("1.4.4-nightly (d84693b93 2017-07-09))", (1, 4, 4));
+ assert_to_mmp!("1.58879.4478-dev", (1, 58879, 4478));
+ assert_to_mmp!("1.58879.4478-dev (d84693b93 2017-07-09))", (1, 58879,
4478));
+ }
+
+ #[test]
+ fn test_malformed() {
+ assert_to_mmp!("1.65536.2", None);
+ assert_to_mmp!("-1.2.3", None);
+ assert_to_mmp!("1. 2", None);
+ assert_to_mmp!("", None);
+ assert_to_mmp!(" ", None);
+ assert_to_mmp!(".", None);
+ assert_to_mmp!("one", None);
+ assert_to_mmp!("1.", None);
+ assert_to_mmp!("1.2.3.4.5.6", None);
+ }
+
+ #[test]
+ fn test_from_mmp() {
+ assert_from_mmp!((1, 18, 0) => "1.18.0");
+ assert_from_mmp!((3, 19, 0) => "3.19.0");
+ assert_from_mmp!((1, 19, 0) => "1.19.0");
+ assert_from_mmp!((1, 12, 2349) => "1.12.2349");
+ assert_from_mmp!((0, 12, 0) => "0.12");
+ assert_from_mmp!((1, 12, 5) => "1.12.5");
+ assert_from_mmp!((1, 12, 0) => "1.12");
+ assert_from_mmp!((1, 0, 0) => "1");
+ assert_from_mmp!((1, 4, 4) => "1.4.4");
+ assert_from_mmp!((1, 58879, 4478) => "1.58879.4478");
+ }
+
+ #[test]
+ fn test_comparisons() {
+ let version = Version::parse("1.18.0").unwrap();
+ assert!(version.exactly("1.18.0"));
+ assert!(version.at_least("1.12.0"));
+ assert!(version.at_least("1.12"));
+ assert!(version.at_least("1"));
+ assert!(version.at_most("1.18.1"));
+ assert!(!version.exactly("1.19.0"));
+ assert!(!version.exactly("1.18.1"));
+
+ let version = Version::parse("1.20.0-nightly").unwrap();
+ assert!(version.exactly("1.20.0-beta"));
+ assert!(version.exactly("1.20.0-nightly"));
+ assert!(version.exactly("1.20.0"));
+ assert!(!version.exactly("1.19"));
+
+ let version = Version::parse("1.3").unwrap();
+ assert!(version.exactly("1.3.0"));
+ assert!(version.exactly("1.3.0-stable"));
+ assert!(version.exactly("1.3"));
+ assert!(!version.exactly("1.5.0-stable"));
+
+ let version = Version::parse("1").unwrap();
+ assert!(version.exactly("1.0.0"));
+ assert!(version.exactly("1.0"));
+ assert!(version.exactly("1"));
+
+ assert!(Version::parse("one.two.three").is_none());
+ }
+
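+ // `Display` should reproduce the canonical `major.minor.patch` string that
+ // was parsed.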
+ macro_rules! reflexive_display {
+ ($s:expr) => (
+ assert_eq!(Version::parse($s).unwrap().to_string(), $s);
+ )
+ }
+
+ #[test]
+ fn display() {
+ reflexive_display!("1.0.0");
+ reflexive_display!("1.2.3");
+ reflexive_display!("1.12.1438");
+ reflexive_display!("1.44.0");
+ reflexive_display!("2.44.0");
+ reflexive_display!("23459.28923.3483");
+ }
+}
--
γαῖα πυρί μιχθήτω