//! Extensions to the parsing API with niche applicability.

use super::*;

/// Extensions to the `ParseStream` API to support speculative parsing.
pub trait Speculative {
    /// Advance this parse stream to the position of a forked parse stream.
    ///
    /// This is the opposite operation to [`ParseStream::fork`]. You can fork a
    /// parse stream, perform some speculative parsing, then join the original
    /// stream to the fork to "commit" the parsing from the fork to the main
    /// stream.
    ///
    /// If you can avoid doing this, you should, as it limits the ability to
    /// generate useful errors. That said, it is often the only way to parse
    /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
    /// is that when the fork fails to parse an `A`, it's impossible to tell
    /// whether that was because of a syntax error and the user meant to provide
    /// an `A`, or that the `A`s are finished and it's time to start parsing
    /// `B`s. Use with care.
    ///
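    /// As a simplified sketch of that `A* B*` situation, consider a toy
    /// grammar of zero or more integer literals followed by zero or more
    /// identifiers. (For tokens this simple, plain peeking would also work;
    /// the point is only to illustrate the fork-then-commit flow.)
    ///
    /// ```
    /// use syn::parse::discouraged::Speculative;
    /// use syn::parse::{Parse, ParseStream};
    /// use syn::{Ident, LitInt, Result};
    ///
    /// // Toy `A* B*` grammar: `A` is an integer literal, `B` is an identifier.
    /// struct IntsThenIdents {
    ///     ints: Vec<LitInt>,
    ///     idents: Vec<Ident>,
    /// }
    ///
    /// impl Parse for IntsThenIdents {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         // Speculatively parse each `A` on a fork and commit it by
    ///         // advancing the real stream to the fork's position; stop at
    ///         // the first token that does not parse as an `A`.
    ///         let mut ints = Vec::new();
    ///         loop {
    ///             let fork = input.fork();
    ///             match fork.parse::<LitInt>() {
    ///                 Ok(lit) => {
    ///                     input.advance_to(&fork);
    ///                     ints.push(lit);
    ///                 }
    ///                 Err(_) => break,
    ///             }
    ///         }
    ///
    ///         // Whatever remains must be `B`s.
    ///         let mut idents = Vec::new();
    ///         while !input.is_empty() {
    ///             idents.push(input.parse()?);
    ///         }
    ///
    ///         Ok(IntsThenIdents { ints, idents })
    ///     }
    /// }
    ///
    /// let parsed: IntsThenIdents = syn::parse_str("1 2 3 a b").unwrap();
    /// assert_eq!(parsed.ints.len(), 3);
    /// assert_eq!(parsed.idents.len(), 2);
    /// ```
    ///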
    /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
    /// parsing `B*` and removing the leading members of `A` from the
    /// repetition, which avoids the downsides of speculative parsing
    /// altogether.
    ///
    /// [`ParseStream::fork`]: ParseBuffer::fork
    ///
    /// # Example
    ///
    /// There has been chatter about the possibility of making the colons in the
    /// turbofish syntax like `path::to::<T>` no longer required by accepting
    /// `path::to<T>` in expression position. Specifically, according to [RFC
    /// 2544], [`PathSegment`] parsing should always try to consume a following
    /// `<` token as the start of generic arguments, and reset to the `<` if
    /// that fails (e.g. the token is acting as a less-than operator).
    ///
    /// This is the exact kind of parsing behavior which requires the "fork,
    /// try, commit" behavior that [`ParseStream::fork`] discourages. With
    /// `advance_to`, we can avoid having to parse the speculatively parsed
    /// content a second time.
    ///
    /// This change in behavior can be implemented in syn by replacing just the
    /// `Parse` implementation for `PathSegment`:
    ///
    /// ```
    /// # use syn::ext::IdentExt;
    /// use syn::parse::discouraged::Speculative;
    /// # use syn::parse::{Parse, ParseStream};
    /// # use syn::{Ident, PathArguments, Result, Token};
    ///
    /// pub struct PathSegment {
    ///     pub ident: Ident,
    ///     pub arguments: PathArguments,
    /// }
    /// #
    /// # impl<T> From<T> for PathSegment
    /// # where
    /// #     T: Into<Ident>,
    /// # {
    /// #     fn from(ident: T) -> Self {
    /// #         PathSegment {
    /// #             ident: ident.into(),
    /// #             arguments: PathArguments::None,
    /// #         }
    /// #     }
    /// # }
    ///
    /// impl Parse for PathSegment {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         if input.peek(Token![super])
    ///             || input.peek(Token![self])
    ///             || input.peek(Token![Self])
    ///             || input.peek(Token![crate])
    ///             || input.peek(Token![extern])
    ///         {
    ///             let ident = input.call(Ident::parse_any)?;
    ///             return Ok(PathSegment::from(ident));
    ///         }
    ///
    ///         let ident = input.parse()?;
    ///         if input.peek(Token![::]) && input.peek3(Token![<]) {
    ///             return Ok(PathSegment {
    ///                 ident,
    ///                 arguments: PathArguments::AngleBracketed(input.parse()?),
    ///             });
    ///         }
    ///         if input.peek(Token![<]) && !input.peek(Token![<=]) {
    ///             let fork = input.fork();
    ///             if let Ok(arguments) = fork.parse() {
    ///                 input.advance_to(&fork);
    ///                 return Ok(PathSegment {
    ///                     ident,
    ///                     arguments: PathArguments::AngleBracketed(arguments),
    ///                 });
    ///             }
    ///         }
    ///         Ok(PathSegment::from(ident))
    ///     }
    /// }
    ///
    /// # syn::parse_str::<PathSegment>("a<b,c>").unwrap();
    /// ```
    ///
    /// # Drawbacks
    ///
    /// The main drawback of this style of speculative parsing is in error
    /// presentation. Even if the lookahead is the "correct" parse, the error
    /// that is shown is that of the "fallback" parse. To use the same example
    /// as the turbofish above, take the following unfinished "turbofish":
    ///
    /// ```text
    /// let _ = f<&'a fn(), for<'a> serde::>();
    /// ```
    ///
    /// If this is parsed as generic arguments, we can provide the error message
    ///
    /// ```text
    /// error: expected identifier
    ///  --> src.rs:L:C
    ///   |
    /// L | let _ = f<&'a fn(), for<'a> serde::>();
    ///   |                                    ^
    /// ```
    ///
    /// but if parsed using the above speculative parsing, it falls back to
    /// assuming that the `<` is a less-than when it fails to parse the generic
    /// arguments, and tries to interpret the `&'a` as the start of a labelled
    /// loop, resulting in the much less helpful error
    ///
    /// ```text
    /// error: expected `:`
    ///  --> src.rs:L:C
    ///   |
    /// L | let _ = f<&'a fn(), for<'a> serde::>();
    ///   |               ^^
    /// ```
    ///
    /// This can be mitigated with various heuristics (two examples: show both
    /// forks' parse errors, or show the one that consumed more tokens), but
    /// when you can control the grammar, sticking to something that can be
    /// parsed LL(3), rather than the LL(*) grammars this kind of speculative
    /// parsing makes possible, makes it much simpler to display reasonable
    /// errors.
    ///
    /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544
    /// [`PathSegment`]: crate::PathSegment
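    ///
    /// As a rough sketch of the first of those heuristics (showing both
    /// forks' errors), the error from the discarded fork can be attached to
    /// the fallback's error with `Error::combine` rather than silently
    /// dropped. The helper below is hypothetical and uses a toy pair of
    /// alternatives (identifier or string literal) purely for illustration.
    ///
    /// ```
    /// use syn::parse::discouraged::Speculative;
    /// use syn::parse::ParseStream;
    /// use syn::{Ident, LitStr, Result};
    ///
    /// fn ident_or_str(input: ParseStream) -> Result<String> {
    ///     let fork = input.fork();
    ///     match fork.parse::<Ident>() {
    ///         Ok(ident) => {
    ///             input.advance_to(&fork);
    ///             Ok(ident.to_string())
    ///         }
    ///         // The speculative parse failed; fall back, but keep its error.
    ///         Err(speculative_err) => match input.parse::<LitStr>() {
    ///             Ok(lit) => Ok(lit.value()),
    ///             Err(mut fallback_err) => {
    ///                 // Surface both diagnoses instead of hiding one of them.
    ///                 fallback_err.combine(speculative_err);
    ///                 Err(fallback_err)
    ///             }
    ///         },
    ///     }
    /// }
    /// #
    /// # use syn::parse::Parser;
    /// # assert_eq!(ident_or_str.parse_str("foo").unwrap(), "foo");
    /// # assert_eq!(ident_or_str.parse_str("\"bar\"").unwrap(), "bar");
    /// # assert!(ident_or_str.parse_str("123").is_err());
    /// ```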
    ///
    /// # Performance
    ///
    /// This method performs a cheap fixed amount of work that does not depend
    /// on how far apart the two streams are positioned.
    ///
    /// # Panics
    ///
    /// The forked stream in the argument of `advance_to` must have been
    /// obtained by forking `self`. Attempting to advance to any other stream
    /// will cause a panic.
    fn advance_to(&self, fork: &Self);
}

impl<'a> Speculative for ParseBuffer<'a> {
    fn advance_to(&self, fork: &Self) {
        if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
            panic!("Fork was not derived from the advancing parse stream");
        }

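        // Resolve each stream's innermost unexpected-token cell (following any
        // `Unexpected::Chain` links) along with the span recorded in it, if any.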
        let (self_unexp, self_sp) = inner_unexpected(self);
        let (fork_unexp, fork_sp) = inner_unexpected(fork);
        if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
            match (fork_sp, self_sp) {
                // Unexpected set on the fork, but not on `self`, copy it over.
                (Some(span), None) => {
                    self_unexp.set(Unexpected::Some(span));
                }
                // Unexpected unset. Use chain to propagate errors from fork.
                (None, None) => {
                    fork_unexp.set(Unexpected::Chain(self_unexp));

                    // Ensure toplevel 'unexpected' tokens from the fork don't
                    // bubble up the chain by replacing the root `unexpected`
                    // pointer; only 'unexpected' tokens from existing group
                    // parsers should bubble.
                    fork.unexpected
                        .set(Some(Rc::new(Cell::new(Unexpected::None))));
                }
                // Unexpected has been set on `self`. No changes needed.
                (_, Some(_)) => {}
            }
        }

        // See comment on `cell` in the struct definition.
        self.cell
            .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
    }
}