path: root/vendor/html5ever/examples/noop-tokenize.rs
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit     698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree       173a775858bd501c378080a10dca74132f05bc50 /vendor/html5ever/examples/noop-tokenize.rs
parent     Initial commit. (diff)
download   rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz
           rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip
Adding upstream version 1.64.0+dfsg1. (upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/html5ever/examples/noop-tokenize.rs')
-rw-r--r--  vendor/html5ever/examples/noop-tokenize.rs  43
1 file changed, 43 insertions, 0 deletions
diff --git a/vendor/html5ever/examples/noop-tokenize.rs b/vendor/html5ever/examples/noop-tokenize.rs
new file mode 100644
index 000000000..d6c62f1dd
--- /dev/null
+++ b/vendor/html5ever/examples/noop-tokenize.rs
@@ -0,0 +1,43 @@
+// Copyright 2014-2017 The html5ever Project Developers. See the
+// COPYRIGHT file at the top-level directory of this distribution.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Run a single benchmark once. For use with profiling tools.
+
+extern crate html5ever;
+
+use std::default::Default;
+use std::io;
+
+use html5ever::tendril::*;
+use html5ever::tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};
+
+struct Sink(Vec<Token>);
+
+impl TokenSink for Sink {
+ type Handle = ();
+
+ fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
+ // Don't use the token, but make sure we don't get
+ // optimized out entirely.
+ self.0.push(token);
+ TokenSinkResult::Continue
+ }
+}
+
+fn main() {
+ let mut chunk = ByteTendril::new();
+ io::stdin().read_to_tendril(&mut chunk).unwrap();
+ let mut input = BufferQueue::new();
+ input.push_back(chunk.try_reinterpret().unwrap());
+
+ let mut tok = Tokenizer::new(Sink(Vec::new()), Default::default());
+ let _ = tok.feed(&mut input);
+ assert!(input.is_empty());
+ tok.end();
+}
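
Below is a minimal sketch, not part of the diff above, showing the same TokenSink/Tokenizer pattern driven from an in-memory string instead of stdin. The CountingSink type and the sample markup are illustrative assumptions; the html5ever calls mirror the ones used in noop-tokenize.rs. The vendored example itself would typically be run from an html5ever checkout as "cargo run --example noop-tokenize < some-page.html".

extern crate html5ever;

use html5ever::tendril::StrTendril;
use html5ever::tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};

// A sink that only counts tokens rather than storing them.
struct CountingSink(usize);

impl TokenSink for CountingSink {
    type Handle = ();

    fn process_token(&mut self, _token: Token, _line_number: u64) -> TokenSinkResult<()> {
        self.0 += 1;
        TokenSinkResult::Continue
    }

    // Invoked by Tokenizer::end() once all input has been processed.
    fn end(&mut self) {
        println!("saw {} tokens", self.0);
    }
}

fn main() {
    // Queue a single in-memory chunk instead of reading from stdin.
    let mut input = BufferQueue::new();
    input.push_back(StrTendril::from("<p>Hello, <b>world</b>!</p>"));

    let mut tok = Tokenizer::new(CountingSink(0), Default::default());
    let _ = tok.feed(&mut input);
    tok.end();
}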