Commit 4641184

Don't import Status/State enum variants into the global namespace. (#641)
Signed-off-by: Simon Wülker <[email protected]>
1 parent 6db7bbf commit 4641184
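
The change is mechanical: the two glob imports that pulled the State and Status variants into the enclosing namespace are removed, and every use site now names the enum explicitly. Below is a minimal before/after sketch of that pattern, not code from the crate: the Status variant names come from the diff, while the poll function and its arguments are purely illustrative.

// Stand-in enum; variant names taken from the diff, everything else illustrative.
enum Status {
    Stuck,
    Progress,
    Done,
}

// Before the commit, `use self::Status::*;` would put Stuck/Progress/Done directly
// into the module namespace, so a bare `Done` reads like a free-standing item and
// can collide with or shadow other names. After the commit, the enum is named at
// every use site.
fn poll(ready: bool, need_input: bool) -> Status {
    if need_input {
        Status::Stuck
    } else if ready {
        Status::Done
    } else {
        Status::Progress
    }
}

fn main() {
    let label = match poll(true, false) {
        Status::Done => "done",
        Status::Stuck => "stuck",
        Status::Progress => "progress",
    };
    println!("{label}");
}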

File tree

2 files changed: +42 -48 lines changed


html5ever/src/tokenizer/char_ref/mod.rs

Lines changed: 39 additions & 45 deletions
@@ -16,9 +16,6 @@ use log::debug;
 use std::borrow::Cow::{self, Borrowed};
 use std::char::from_u32;
 
-use self::State::*;
-pub(super) use self::Status::*;
-
 //§ tokenizing-character-references
 pub(super) struct CharRef {
     /// The resulting character(s)
@@ -63,7 +60,7 @@ impl CharRefTokenizer {
     pub(super) fn new(is_consumed_in_attribute: bool) -> CharRefTokenizer {
         CharRefTokenizer {
             is_consumed_in_attribute,
-            state: Begin,
+            state: State::Begin,
             result: None,
             num: 0,
             num_too_big: false,
@@ -98,15 +95,15 @@ impl CharRefTokenizer {
             chars: ['\0', '\0'],
             num_chars: 0,
         });
-        Done
+        Status::Done
    }
 
     fn finish_one(&mut self, c: char) -> Status {
         self.result = Some(CharRef {
             chars: [c, '\0'],
             num_chars: 1,
         });
-        Done
+        Status::Done
     }
 }
 
@@ -117,17 +114,17 @@ impl CharRefTokenizer {
         input: &BufferQueue,
     ) -> Status {
         if self.result.is_some() {
-            return Done;
+            return Status::Done;
         }
 
         debug!("char ref tokenizer stepping in state {:?}", self.state);
         match self.state {
-            Begin => self.do_begin(tokenizer, input),
-            Octothorpe => self.do_octothorpe(tokenizer, input),
-            Numeric(base) => self.do_numeric(tokenizer, input, base),
-            NumericSemicolon => self.do_numeric_semicolon(tokenizer, input),
-            Named => self.do_named(tokenizer, input),
-            BogusName => self.do_bogus_name(tokenizer, input),
+            State::Begin => self.do_begin(tokenizer, input),
+            State::Octothorpe => self.do_octothorpe(tokenizer, input),
+            State::Numeric(base) => self.do_numeric(tokenizer, input, base),
+            State::NumericSemicolon => self.do_numeric_semicolon(tokenizer, input),
+            State::Named => self.do_named(tokenizer, input),
+            State::BogusName => self.do_bogus_name(tokenizer, input),
         }
     }
 
@@ -138,17 +135,17 @@ impl CharRefTokenizer {
     ) -> Status {
         match tokenizer.peek(input) {
             Some('a'..='z' | 'A'..='Z' | '0'..='9') => {
-                self.state = Named;
+                self.state = State::Named;
                 self.name_buf_opt = Some(StrTendril::new());
-                Progress
+                Status::Progress
             },
             Some('#') => {
                 tokenizer.discard_char(input);
-                self.state = Octothorpe;
-                Progress
+                self.state = State::Octothorpe;
+                Status::Progress
             },
             Some(_) => self.finish_none(),
-            None => Stuck,
+            None => Status::Stuck,
         }
     }
 
@@ -161,15 +158,15 @@ impl CharRefTokenizer {
             Some(c @ ('x' | 'X')) => {
                 tokenizer.discard_char(input);
                 self.hex_marker = Some(c);
-                self.state = Numeric(16);
+                self.state = State::Numeric(16);
             },
             Some(_) => {
                 self.hex_marker = None;
-                self.state = Numeric(10);
+                self.state = State::Numeric(10);
             },
-            None => return Stuck,
+            None => return Status::Stuck,
         }
-        Progress
+        Status::Progress
     }
 
     fn do_numeric<Sink: TokenSink>(
@@ -179,7 +176,7 @@ impl CharRefTokenizer {
         base: u32,
     ) -> Status {
         let Some(c) = tokenizer.peek(input) else {
-            return Stuck;
+            return Status::Stuck;
        };
         match c.to_digit(base) {
             Some(n) => {
@@ -192,14 +189,14 @@ impl CharRefTokenizer {
                 }
                 self.num = self.num.wrapping_add(n);
                 self.seen_digit = true;
-                Progress
+                Status::Progress
             },
 
             None if !self.seen_digit => self.unconsume_numeric(tokenizer, input),
 
             None => {
-                self.state = NumericSemicolon;
-                Progress
+                self.state = State::NumericSemicolon;
+                Status::Progress
             },
         }
     }
@@ -214,7 +211,7 @@ impl CharRefTokenizer {
             Some(_) => tokenizer.emit_error(Borrowed(
                 "Semicolon missing after numeric character reference",
             )),
-            None => return Stuck,
+            None => return Status::Stuck,
         };
         self.finish_numeric(tokenizer)
     }
@@ -278,7 +275,7 @@ impl CharRefTokenizer {
         // peek + discard skips over newline normalization, therefore making it easier to
         // un-consume
         let Some(c) = tokenizer.peek(input) else {
-            return Stuck;
+            return Status::Stuck;
         };
         tokenizer.discard_char(input);
         self.name_buf_mut().push_char(c);
@@ -291,7 +288,7 @@ impl CharRefTokenizer {
                     self.name_len = self.name_buf().len();
                 }
                 // Otherwise we just have a prefix match.
-                Progress
+                Status::Progress
             },
 
             // Can't continue the match.
@@ -324,8 +321,8 @@ impl CharRefTokenizer {
             Some(c) if c.is_ascii_alphanumeric() => {
                 // Keep looking for a semicolon, to determine whether
                 // we emit a parse error.
-                self.state = BogusName;
-                return Progress;
+                self.state = State::BogusName;
+                return Status::Progress;
             },
 
             // Check length because &; is not a parse error.
@@ -390,7 +387,7 @@ impl CharRefTokenizer {
                     chars: [from_u32(c1).unwrap(), from_u32(c2).unwrap()],
                     num_chars: if c2 == 0 { 1 } else { 2 },
                 });
-                Done
+                Status::Done
            }
        },
    }
@@ -404,12 +401,12 @@ impl CharRefTokenizer {
        // peek + discard skips over newline normalization, therefore making it easier to
        // un-consume
        let Some(c) = tokenizer.peek(input) else {
-            return Stuck;
+            return Status::Stuck;
        };
        tokenizer.discard_char(input);
        self.name_buf_mut().push_char(c);
        match c {
-            _ if c.is_ascii_alphanumeric() => return Progress,
+            _ if c.is_ascii_alphanumeric() => return Status::Progress,
            ';' => self.emit_name_error(tokenizer),
            _ => (),
        }
@@ -424,23 +421,20 @@ impl CharRefTokenizer {
     ) {
         while self.result.is_none() {
             match self.state {
-                Begin => drop(self.finish_none()),
-
-                Numeric(_) if !self.seen_digit => drop(self.unconsume_numeric(tokenizer, input)),
-
-                Numeric(_) | NumericSemicolon => {
+                State::Begin => drop(self.finish_none()),
+                State::Numeric(_) if !self.seen_digit => {
+                    self.unconsume_numeric(tokenizer, input);
+                },
+                State::Numeric(_) | State::NumericSemicolon => {
                     tokenizer.emit_error(Borrowed("EOF in numeric character reference"));
                     self.finish_numeric(tokenizer);
                 },
-
-                Named => drop(self.finish_named(tokenizer, input, None)),
-
-                BogusName => {
+                State::Named => drop(self.finish_named(tokenizer, input, None)),
+                State::BogusName => {
                     self.unconsume_name(input);
                     self.finish_none();
                 },
-
-                Octothorpe => {
+                State::Octothorpe => {
                     input.push_front(StrTendril::from_slice("#"));
                     tokenizer.emit_error(Borrowed("EOF after '#' in character reference"));
                     self.finish_none();
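
The enum definitions themselves are not part of this diff. The stand-in below is inferred from the hunks above (variant names and the u32 base payload appear in the code); the descriptions and the describe function are illustrative, not the crate's real definitions or API.

// Stand-in State inferred from the hunks above; the real definition in
// char_ref/mod.rs may carry derives, docs, and fields not shown here.
#[derive(Debug)]
enum State {
    Begin,
    Octothorpe,
    Numeric(u32), // base: 10 or 16 in the hunks above
    NumericSemicolon,
    Named,
    BogusName,
}

// With the glob import gone, a dispatch over the state machine names the enum
// on every arm, mirroring the rewritten `match self.state` blocks in the diff.
fn describe(state: &State) -> &'static str {
    match state {
        State::Begin => "start of a character reference",
        State::Octothorpe => "after '#', base not yet known",
        State::Numeric(16) => "hexadecimal numeric reference",
        State::Numeric(_) => "decimal numeric reference",
        State::NumericSemicolon => "expecting ';' after digits",
        State::Named => "matching a named reference",
        State::BogusName => "scanning an invalid name for ';'",
    }
}

fn main() {
    println!("{:?}: {}", State::Numeric(16), describe(&State::Numeric(16)));
}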

html5ever/src/tokenizer/mod.rs

Lines changed: 3 additions & 3 deletions
@@ -1666,13 +1666,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
         let outcome = tok.step(self, input);
 
         let progress = match outcome {
-            char_ref::Done => {
+            char_ref::Status::Done => {
                 self.process_char_ref(tok.get_result());
                 return ProcessResult::Continue;
             },
 
-            char_ref::Stuck => ProcessResult::Suspend,
-            char_ref::Progress => ProcessResult::Continue,
+            char_ref::Status::Stuck => ProcessResult::Suspend,
+            char_ref::Status::Progress => ProcessResult::Continue,
         };
 
         *self.char_ref_tokenizer.borrow_mut() = Some(tok);
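
This second file is affected because the removed `pub(super) use self::Status::*;` re-exported the variants one module up, which is what previously let the tokenizer match bare `char_ref::Done`. A sketch of that module relationship, with stand-in names; only the shape follows the diff, not the crate's actual modules.

mod tokenizer {
    pub mod char_ref {
        pub enum Status {
            Stuck,
            Progress,
            Done,
        }

        // The removed line re-exported the variants into the parent module:
        // pub(super) use self::Status::*;

        pub fn step() -> Status {
            Status::Done
        }
    }

    pub fn drive() -> &'static str {
        // After the change the parent matches fully qualified variants,
        // as in the tokenizer/mod.rs hunk above.
        match char_ref::step() {
            char_ref::Status::Done => "continue: result ready",
            char_ref::Status::Stuck => "suspend: need more input",
            char_ref::Status::Progress => "continue",
        }
    }
}

fn main() {
    println!("{}", tokenizer::drive());
}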
