Files
aho_corasick
atty
beef
bitflags
bstr
byteorder
cfg_if
clap
clap_derive
codespan
codespan_reporting
crc_any
crypto_hash
csv
csv_core
debug_helper
filepath
fixed
fixed_macro
fixed_macro_impl
fixed_macro_types
fnv
foreign_types
foreign_types_shared
getrandom
glob
hashbrown
heck
hex
indexmap
itoa
lazy_static
libc
linked_hash_map
linked_hash_set
logos
logos_derive
lrl_test_compiler
maplit
memchr
memoffset
once_cell
openssl
openssl_sys
os_str_bytes
paste
pest
pest_derive
pest_generator
pest_meta
phf
phf_generator
phf_macros
phf_shared
ppv_lite86
proc_macro2
proc_macro_error
proc_macro_error_attr
proc_macro_hack
quote
rand
rand_chacha
rand_core
regex
regex_automata
regex_syntax
remove_dir_all
ring
rowan
rustc_hash
ryu
semver
semver_parser
serde
serde_derive
serde_json
siphasher
smallvec
smawk
smol_str
spin
stable_deref_trait
strsim
syn
taplo
tempfile
termcolor
text_size
textwrap
toml
triomphe
typenum
ucd_trie
unicode_linebreak
unicode_segmentation
unicode_width
unicode_xid
untrusted
utf8_ranges
vec_map
  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
use std::cmp::max;

use proc_macro2::TokenStream;
use quote::quote;
use fnv::FnvHashMap as Map;

use crate::graph::{NodeId, Fork, Range};
use crate::generator::{Generator, Context};
use crate::util::ToIdent;

/// Branch targets of a fork, keyed by destination node: each target node
/// maps to the list of byte ranges that jump to it.
type Targets = Map<NodeId, Vec<Range>>;

// Code generation for `Fork` nodes: a multi-way branch on the next input
// byte. Depending on the number of distinct targets, a fork is emitted as a
// plain `match` over byte patterns, a 256-entry jump table, or — when the
// fork loops back onto itself — an unrolled fast loop.
impl<'a> Generator<'a> {
    /// Emits the dispatch code for the fork node `this`.
    ///
    /// Strategy selection:
    /// * exactly 1 target that also loops back into `this` → `generate_fast_loop`;
    /// * up to 2 targets → a plain `match` on the byte (fallthrough below);
    /// * 3 or more targets → `generate_fork_jump_table`.
    pub fn generate_fork(&mut self, this: NodeId, fork: &Fork, mut ctx: Context) -> TokenStream {
        // Group ranges by target node, so several ranges that lead to the
        // same node collapse into a single match arm.
        let mut targets: Targets = Map::default();

        for (range, then) in fork.branches() {
            targets.entry(then).or_default().push(range);
        }
        // True when this node is recorded as a loop entered from itself.
        let loops_to_self = self.meta[this].loop_entry_from.contains(&this);

        match targets.len() {
            1 if loops_to_self => return self.generate_fast_loop(fork, ctx),
            0..=2 => (),
            _ => return self.generate_fork_jump_table(this, fork, targets, ctx),
        }
        let miss = ctx.miss(fork.miss, self);
        let end = self.fork_end(this, &miss);
        let (byte, read) = self.fork_read(this, end, &mut ctx);
        let branches = targets.into_iter().map(|(id, ranges)| {
            let next = self.goto(id, ctx.advance(1));

            // `*ranges` derefs the Vec to a slice so we can match its shape.
            match *ranges {
                // A single range becomes a direct range pattern.
                [range] => {
                    quote!(#range => #next,)
                },
                // Two one-byte ranges become an or-pattern: `a | b`.
                [a, b] if a.is_byte() && b.is_byte() => {
                    quote!(#a | #b => #next,)
                },
                // Anything else dispatches through a generated test function.
                _ => {
                    // NOTE(review): the `.clone()` here looks redundant if
                    // `generate_test` already returns an owned value — verify.
                    let test = self.generate_test(ranges).clone();
                    // NOTE(review): this shadows the identical `next` computed
                    // above the match — presumably redundant; confirm `goto`
                    // is idempotent/memoized before removing either call.
                    let next = self.goto(id, ctx.advance(1));

                    quote!(byte if #test(byte) => #next,)
                },
            }
        });

        quote! {
            #read

            match #byte {
                #(#branches)*
                _ => #miss,
            }
        }
    }

    /// Emits a fork with 3+ targets as a 256-entry lookup table: a generated
    /// `Jump` enum, a `const LUT: [Jump; 256]` mapping every byte value to a
    /// variant, and a `match` over `LUT[byte]`.
    fn generate_fork_jump_table(&mut self, this: NodeId, fork: &Fork, targets: Targets, mut ctx: Context) -> TokenStream {
        let miss = ctx.miss(fork.miss, self);
        let end = self.fork_end(this, &miss);
        let (byte, read) = self.fork_read(this, end, &mut ctx);

        // table[b] holds the 1-based jump index for byte b; 0 means "miss".
        let mut table: [u8; 256] = [0; 256];
        // Variant idents for the Jump enum; slot 0 ("__") is the miss arm.
        let mut jumps = vec!["__".to_ident()];

        let branches = targets.into_iter().enumerate().map(|(idx, (id, ranges))| {
            // Shift by 1 so index 0 stays reserved for the miss sentinel.
            let idx = (idx as u8) + 1;
            let next = self.goto(id, ctx.advance(1));
            jumps.push(format!("J{}", id).to_ident());

            // Mark every byte covered by this target's ranges in the table.
            for byte in ranges.into_iter().flatten() {
                table[byte as usize] = idx;
            }
            let jump = jumps.last().unwrap();

            quote!(Jump::#jump => #next,)
        }).collect::<TokenStream>();

        let jumps = &jumps;
        // Turn each table slot into the ident of its Jump variant, ready for
        // interpolation into the LUT initializer below.
        let table = table.iter().copied().map(|idx| &jumps[idx as usize]);

        quote! {
            enum Jump {
                #(#jumps,)*
            }

            const LUT: [Jump; 256] = {
                use Jump::*;

                [#(#table),*]
            };

            #read

            match LUT[#byte as usize] {
                #branches
                Jump::__ => #miss,
            }
        }
    }

    /// Code to run when the input ends at this fork: at the root node we
    /// finish lexing via `_end(lex)`, anywhere else we replay the miss arm.
    fn fork_end(&self, this: NodeId, miss: &TokenStream) -> TokenStream {
        if this == self.root {
            quote!(_end(lex))
        } else {
            miss.clone()
        }
    }

    /// Builds the byte-read prelude for a fork.
    ///
    /// Returns `(byte_expr, read_stmt)`: `read_stmt` binds the next input
    /// byte(s) (returning `end` on EOF where a check is needed), and
    /// `byte_expr` is the expression the fork then matches on.
    fn fork_read(&self, this: NodeId, end: TokenStream, ctx: &mut Context) -> (TokenStream, TokenStream) {
        // NOTE(review): min_read is presumably the minimum number of bytes
        // guaranteed readable at this node — confirm against graph metadata.
        let min_read = self.meta[this].min_read;

        if ctx.remainder() >= max(min_read, 1) {
            // Enough bytes are already known to be available in this path,
            // so the generated code may skip the bounds check (`unsafe`).
            let read = ctx.read_unchecked(0);

            return (
                quote!(byte),
                quote!(let byte = unsafe { #read };),
            );
        }

        match min_read {
            // Single-byte read with an EOF check.
            0 | 1 => {
                let read = ctx.read(0);

                (
                    quote!(byte),
                    quote! {
                        let byte = match #read {
                            Some(byte) => byte,
                            None => return #end,
                        };
                    },
                )
            },
            // Multi-byte read: fetch a `len`-byte array in one bounds check
            // and match the fork on its first byte.
            len => {
                let read = ctx.read(len);

                (
                    quote!(arr[0]),
                    quote! {
                        let arr = match #read {
                            Some(arr) => arr,
                            None => return #end,
                        };
                    },
                )
            },
        }
    }

    /// Emits a self-looping fork as an invocation of the `_fast_loop!`
    /// macro: bump past bytes while the generated test matches, then run
    /// the miss arm.
    fn generate_fast_loop(&mut self, fork: &Fork, ctx: Context) -> TokenStream {
        let miss = ctx.miss(fork.miss, self);
        // All ranges feed one membership test; the targets are irrelevant
        // here because every branch loops back to the same node.
        let ranges = fork.branches().map(|(range, _)| range).collect::<Vec<_>>();
        let test = self.generate_test(ranges);

        quote! {
            _fast_loop!(lex, #test, #miss);
        }
    }

    /// Emits the definition of the `_fast_loop!` helper macro used by
    /// `generate_fast_loop`: a 16-byte unrolled scan that does a single
    /// bounds check per chunk, followed by a byte-at-a-time tail loop.
    pub fn fast_loop_macro() -> TokenStream {
        quote! {
            macro_rules! _fast_loop {
                ($lex:ident, $test:ident, $miss:expr) => {
                    // Do one bounds check for multiple bytes till EOF
                    while let Some(arr) = $lex.read::<&[u8; 16]>() {
                        if $test(arr[0])  { if $test(arr[1])  { if $test(arr[2])  { if $test(arr[3]) {
                        if $test(arr[4])  { if $test(arr[5])  { if $test(arr[6])  { if $test(arr[7]) {
                        if $test(arr[8])  { if $test(arr[9])  { if $test(arr[10]) { if $test(arr[11]) {
                        if $test(arr[12]) { if $test(arr[13]) { if $test(arr[14]) { if $test(arr[15]) {

                        $lex.bump_unchecked(16); continue;     } $lex.bump_unchecked(15); return $miss; }
                        $lex.bump_unchecked(14); return $miss; } $lex.bump_unchecked(13); return $miss; }
                        $lex.bump_unchecked(12); return $miss; } $lex.bump_unchecked(11); return $miss; }
                        $lex.bump_unchecked(10); return $miss; } $lex.bump_unchecked(9); return $miss;  }
                        $lex.bump_unchecked(8); return $miss;  } $lex.bump_unchecked(7); return $miss;  }
                        $lex.bump_unchecked(6); return $miss;  } $lex.bump_unchecked(5); return $miss;  }
                        $lex.bump_unchecked(4); return $miss;  } $lex.bump_unchecked(3); return $miss;  }
                        $lex.bump_unchecked(2); return $miss;  } $lex.bump_unchecked(1); return $miss;  }

                        return $miss;
                    }

                    // Fewer than 16 bytes remain: finish byte by byte.
                    while $lex.test($test) {
                        $lex.bump_unchecked(1);
                    }

                    $miss
                };
            }
        }
    }
}