path: root/servo/components/style/rule_tree
author     Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 00:47:55 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 00:47:55 +0000
commit     26a029d407be480d791972afb5975cf62c9360a6 (patch)
tree       f435a8308119effd964b339f76abb83a57c29483 /servo/components/style/rule_tree
parent     Initial commit. (diff)
Adding upstream version 124.0.1. (upstream/124.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'servo/components/style/rule_tree')
-rw-r--r--  servo/components/style/rule_tree/core.rs        772
-rw-r--r--  servo/components/style/rule_tree/level.rs       249
-rw-r--r--  servo/components/style/rule_tree/map.rs         201
-rw-r--r--  servo/components/style/rule_tree/mod.rs         403
-rw-r--r--  servo/components/style/rule_tree/source.rs       75
-rw-r--r--  servo/components/style/rule_tree/unsafe_box.rs   74
6 files changed, 1774 insertions, 0 deletions
diff --git a/servo/components/style/rule_tree/core.rs b/servo/components/style/rule_tree/core.rs
new file mode 100644
index 0000000000..85584a0e22
--- /dev/null
+++ b/servo/components/style/rule_tree/core.rs
@@ -0,0 +1,772 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![allow(unsafe_code)]
+
+use crate::applicable_declarations::CascadePriority;
+use crate::shared_lock::StylesheetGuards;
+use crate::stylesheets::layer_rule::LayerOrder;
+use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
+use parking_lot::RwLock;
+use smallvec::SmallVec;
+use std::fmt;
+use std::hash;
+use std::io::Write;
+use std::mem;
+use std::ptr;
+use std::sync::atomic::{self, AtomicPtr, AtomicUsize, Ordering};
+
+use super::map::{Entry, Map};
+use super::unsafe_box::UnsafeBox;
+use super::{CascadeLevel, StyleSource};
+
+/// The rule tree, the structure servo uses to preserve the results of selector
+/// matching.
+///
+/// This is organized as a tree of rules. When an element matches a set of
+/// rules, they're inserted in order in the tree, starting with the least
+/// specific one.
+///
+/// When a rule is inserted in the tree, other elements may share the path up to
+/// a given rule. If that's the case, we don't duplicate child nodes, but share
+/// them.
+///
+/// When a rule node's refcount drops to zero, it doesn't get freed. It gets
+/// put on a free list instead, and is potentially GC'd after a while.
+///
+/// That way, a rule node that represents a likely-to-match-again rule (like a
+/// :hover rule) can be reused if we haven't GC'd it yet.
+#[derive(Debug)]
+pub struct RuleTree {
+ root: StrongRuleNode,
+}
+
+impl Drop for RuleTree {
+ fn drop(&mut self) {
+ unsafe { self.swap_free_list_and_gc(ptr::null_mut()) }
+ }
+}
+
+impl MallocSizeOf for RuleTree {
+ fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ let mut n = 0;
+ let mut stack = SmallVec::<[_; 32]>::new();
+ stack.push(self.root.clone());
+
+ while let Some(node) = stack.pop() {
+ n += unsafe { ops.malloc_size_of(&*node.p) };
+ let children = node.p.children.read();
+ n += children.shallow_size_of(ops);
+ for c in &*children {
+ stack.push(unsafe { c.upgrade() });
+ }
+ }
+
+ n
+ }
+}
+
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+struct ChildKey(CascadePriority, ptr::NonNull<()>);
+unsafe impl Send for ChildKey {}
+unsafe impl Sync for ChildKey {}
+
+impl RuleTree {
+ /// Construct a new rule tree.
+ pub fn new() -> Self {
+ RuleTree {
+ root: StrongRuleNode::new(Box::new(RuleNode::root())),
+ }
+ }
+
+ /// Get the root rule node.
+ pub fn root(&self) -> &StrongRuleNode {
+ &self.root
+ }
+
+ /// This can only be called while no other thread is accessing this tree.
+ pub fn gc(&self) {
+ unsafe { self.swap_free_list_and_gc(RuleNode::DANGLING_PTR) }
+ }
+
+ /// This can only be called while no other thread is accessing this tree.
+ pub fn maybe_gc(&self) {
+ #[cfg(debug_assertions)]
+ self.maybe_dump_stats();
+
+ if self.root.p.approximate_free_count.load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
+ self.gc();
+ }
+ }
+
+ #[cfg(debug_assertions)]
+ fn maybe_dump_stats(&self) {
+ use itertools::Itertools;
+ use std::cell::Cell;
+ use std::time::{Duration, Instant};
+
+ if !log_enabled!(log::Level::Trace) {
+ return;
+ }
+
+ const RULE_TREE_STATS_INTERVAL: Duration = Duration::from_secs(2);
+
+ thread_local! {
+ pub static LAST_STATS: Cell<Instant> = Cell::new(Instant::now());
+ };
+
+ let should_dump = LAST_STATS.with(|s| {
+ let now = Instant::now();
+ if now.duration_since(s.get()) < RULE_TREE_STATS_INTERVAL {
+ return false;
+ }
+ s.set(now);
+ true
+ });
+
+ if !should_dump {
+ return;
+ }
+
+ let mut children_count = fxhash::FxHashMap::default();
+
+ let mut stack = SmallVec::<[_; 32]>::new();
+ stack.push(self.root.clone());
+ while let Some(node) = stack.pop() {
+ let children = node.p.children.read();
+ *children_count.entry(children.len()).or_insert(0) += 1;
+ for c in &*children {
+ stack.push(unsafe { c.upgrade() });
+ }
+ }
+
+ trace!("Rule tree stats:");
+ let counts = children_count.keys().sorted();
+ for count in counts {
+ trace!(" {} - {}", count, children_count[count]);
+ }
+ }
+
+ /// Steals the free list and drops its contents.
+ unsafe fn swap_free_list_and_gc(&self, ptr: *mut RuleNode) {
+ let root = &self.root.p;
+
+ debug_assert!(!root.next_free.load(Ordering::Relaxed).is_null());
+
+ // Reset the approximate free count to zero, as we are going to steal
+ // the free list.
+ root.approximate_free_count.store(0, Ordering::Relaxed);
+
+ // Steal the free list head. Memory loads on nodes while iterating it
+ // must observe any prior changes that occurred, so this requires
+ // acquire ordering, but there are no writes that need to be kept
+ // before this swap so there is no need for release.
+ let mut head = root.next_free.swap(ptr, Ordering::Acquire);
+
+ while head != RuleNode::DANGLING_PTR {
+ debug_assert!(!head.is_null());
+
+ let mut node = UnsafeBox::from_raw(head);
+
+ // The root node cannot go on the free list.
+ debug_assert!(node.root.is_some());
+
+ // The refcount of nodes on the free list never goes below 1.
+ debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
+
+ // No one else is currently writing to that field. Get the address
+ // of the next node in the free list and replace it with null,
+ // other threads will now consider that this node is not on the
+ // free list.
+ head = node.next_free.swap(ptr::null_mut(), Ordering::Relaxed);
+
+ // This release write synchronises with the acquire fence in
+ // `WeakRuleNode::upgrade`, making sure that if `upgrade` observes
+ // the refcount decrement to 0, it will also observe the
+ // `node.next_free` swap to null above.
+ if node.refcount.fetch_sub(1, Ordering::Release) == 1 {
+ // And given it observed the null swap above, it will need
+ // `pretend_to_be_on_free_list` to finish its job, writing
+ // `RuleNode::DANGLING_PTR` in `node.next_free`.
+ RuleNode::pretend_to_be_on_free_list(&node);
+
+ // Drop this node now that we just observed its refcount going
+ // down to zero.
+ RuleNode::drop_without_free_list(&mut node);
+ }
+ }
+ }
+}
+
+/// The number of RuleNodes added to the free list before we will consider
+/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
+/// where it likely did not result from a rigorous performance analysis.)
+const RULE_TREE_GC_INTERVAL: usize = 300;
+
+/// A node in the rule tree.
+struct RuleNode {
+ /// The root node. Only the root has no root pointer, for obvious reasons.
+ root: Option<WeakRuleNode>,
+
+ /// The parent rule node. Only the root has no parent.
+ parent: Option<StrongRuleNode>,
+
+ /// The actual style source, either coming from a selector in a StyleRule,
+ /// or a raw property declaration block (like the style attribute).
+ ///
+ /// None for the root node.
+ source: Option<StyleSource>,
+
+ /// The cascade level + layer order this rule is positioned at.
+ cascade_priority: CascadePriority,
+
+ /// The refcount of this node.
+ ///
+ /// Starts at one. Incremented in `StrongRuleNode::clone` and
+ /// `WeakRuleNode::upgrade`. Decremented in `StrongRuleNode::drop`
+ /// and `RuleTree::swap_free_list_and_gc`.
+ ///
+ /// If a non-root node's refcount reaches zero, it is incremented back to at
+ /// least one in `RuleNode::pretend_to_be_on_free_list` until the caller who
+ /// observed it dropping to zero had a chance to try to remove it from its
+ /// parent's children list.
+ ///
+ /// The refcount should never be decremented to zero if the value in
+ /// `next_free` is not null.
+ refcount: AtomicUsize,
+
+ /// Only used for the root, stores the number of free rule nodes that are
+ /// around.
+ approximate_free_count: AtomicUsize,
+
+ /// The children of a given rule node. Children remove themselves from here
+ /// when they go away.
+ children: RwLock<Map<ChildKey, WeakRuleNode>>,
+
+ /// This field has two different meanings depending on whether this is the
+ /// root node or not.
+ ///
+ /// If it is the root, it represents the head of the free list. It may be
+ /// null, which means the free list is gone because the tree was dropped,
+ /// and it may be `RuleNode::DANGLING_PTR`, which means the free list is
+ /// empty.
+ ///
+ /// If it is not the root node, this field is either null if the node is
+ /// not on the free list, `RuleNode::DANGLING_PTR` if it is the last item
+ /// on the free list or the node is pretending to be on the free list, or
+ /// any valid non-null pointer representing the next item on the free list
+ /// after this one.
+ ///
+ /// See `RuleNode::push_on_free_list`, `swap_free_list_and_gc`, and
+ /// `WeakRuleNode::upgrade`.
+ ///
+ /// Two threads should never attempt to put the same node on the free list
+ /// at the same time.
+ next_free: AtomicPtr<RuleNode>,
+}
+
+// On Gecko builds, hook into the leak checking machinery.
+#[cfg(feature = "gecko_refcount_logging")]
+mod gecko_leak_checking {
+ use super::RuleNode;
+ use std::mem::size_of;
+ use std::os::raw::{c_char, c_void};
+
+ extern "C" {
+ fn NS_LogCtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
+ fn NS_LogDtor(aPtr: *mut c_void, aTypeName: *const c_char, aSize: u32);
+ }
+ static NAME: &'static [u8] = b"RuleNode\0";
+
+ /// Logs the creation of a heap-allocated object to Gecko's leak-checking machinery.
+ pub(super) fn log_ctor(ptr: *const RuleNode) {
+ let s = NAME as *const [u8] as *const u8 as *const c_char;
+ unsafe {
+ NS_LogCtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
+ }
+ }
+
+ /// Logs the destruction of a heap-allocated object to Gecko's leak-checking machinery.
+ pub(super) fn log_dtor(ptr: *const RuleNode) {
+ let s = NAME as *const [u8] as *const u8 as *const c_char;
+ unsafe {
+ NS_LogDtor(ptr as *mut c_void, s, size_of::<RuleNode>() as u32);
+ }
+ }
+}
+
+#[inline(always)]
+fn log_new(_ptr: *const RuleNode) {
+ #[cfg(feature = "gecko_refcount_logging")]
+ gecko_leak_checking::log_ctor(_ptr);
+}
+
+#[inline(always)]
+fn log_drop(_ptr: *const RuleNode) {
+ #[cfg(feature = "gecko_refcount_logging")]
+ gecko_leak_checking::log_dtor(_ptr);
+}
+
+impl RuleNode {
+ const DANGLING_PTR: *mut Self = ptr::NonNull::dangling().as_ptr();
+
+ unsafe fn new(
+ root: WeakRuleNode,
+ parent: StrongRuleNode,
+ source: StyleSource,
+ cascade_priority: CascadePriority,
+ ) -> Self {
+ debug_assert!(root.p.parent.is_none());
+ RuleNode {
+ root: Some(root),
+ parent: Some(parent),
+ source: Some(source),
+ cascade_priority,
+ refcount: AtomicUsize::new(1),
+ children: Default::default(),
+ approximate_free_count: AtomicUsize::new(0),
+ next_free: AtomicPtr::new(ptr::null_mut()),
+ }
+ }
+
+ fn root() -> Self {
+ RuleNode {
+ root: None,
+ parent: None,
+ source: None,
+ cascade_priority: CascadePriority::new(CascadeLevel::UANormal, LayerOrder::root()),
+ refcount: AtomicUsize::new(1),
+ approximate_free_count: AtomicUsize::new(0),
+ children: Default::default(),
+ next_free: AtomicPtr::new(RuleNode::DANGLING_PTR),
+ }
+ }
+
+ fn key(&self) -> ChildKey {
+ ChildKey(
+ self.cascade_priority,
+ self.source
+ .as_ref()
+ .expect("Called key() on the root node")
+ .key(),
+ )
+ }
+
+ /// Drops a node without ever putting it on the free list.
+ ///
+ /// Note that the node may not be dropped if we observe that its refcount
+ /// isn't zero anymore when we write-lock its parent's children map to
+ /// remove it.
+ ///
+ /// This loops over parents of dropped nodes if their own refcount reaches
+ /// zero to avoid recursion when dropping deep hierarchies of nodes.
+ ///
+ /// For non-root nodes, this should always be preceded by a call of
+ /// `RuleNode::pretend_to_be_on_free_list`.
+ unsafe fn drop_without_free_list(this: &mut UnsafeBox<Self>) {
+ // We clone the box and shadow the original one to be able to loop
+ // over its ancestors if they also need to be dropped.
+ let mut this = UnsafeBox::clone(this);
+ loop {
+ // If the node has a parent, we need to remove it from its parent's
+ // children list.
+ if let Some(parent) = this.parent.as_ref() {
+ debug_assert!(!this.next_free.load(Ordering::Relaxed).is_null());
+
+ // We lock the parent's children list, which means no other
+ // thread will have any more opportunity to resurrect the node
+ // anymore.
+ let mut children = parent.p.children.write();
+
+ this.next_free.store(ptr::null_mut(), Ordering::Relaxed);
+
+ // We decrement the counter to remove the "pretend to be
+ // on the free list" reference.
+ let old_refcount = this.refcount.fetch_sub(1, Ordering::Release);
+ debug_assert!(old_refcount != 0);
+ if old_refcount != 1 {
+ // Other threads resurrected this node and those references
+ // are still alive, we have nothing to do anymore.
+ return;
+ }
+
+ // We finally remove the node from its parent's children list,
+ // there are now no other references to it and it cannot
+ // be resurrected anymore even after we unlock the list.
+ debug!(
+ "Remove from child list: {:?}, parent: {:?}",
+ this.as_mut_ptr(),
+ this.parent.as_ref().map(|p| p.p.as_mut_ptr())
+ );
+ let weak = children.remove(&this.key(), |node| node.p.key()).unwrap();
+ assert_eq!(weak.p.as_mut_ptr(), this.as_mut_ptr());
+ } else {
+ debug_assert_eq!(this.next_free.load(Ordering::Relaxed), ptr::null_mut());
+ debug_assert_eq!(this.refcount.load(Ordering::Relaxed), 0);
+ }
+
+ // We are going to drop this node for good this time, as per the
+ // usual refcounting protocol we need an acquire fence here before
+ // we run the destructor.
+ //
+ // See https://github.com/rust-lang/rust/pull/41714#issuecomment-298996916
+ // for why it doesn't matter whether this is a load or a fence.
+ atomic::fence(Ordering::Acquire);
+
+ // Remove the parent reference from the child to avoid
+ // recursively dropping it and putting it on the free list.
+ let parent = UnsafeBox::deref_mut(&mut this).parent.take();
+
+ // We now drop the actual box and its contents, no one should
+ // access the current value in `this` anymore.
+ log_drop(&*this);
+ UnsafeBox::drop(&mut this);
+
+ if let Some(parent) = parent {
+ // We will attempt to drop the node's parent without the free
+ // list, so we clone the inner unsafe box and forget the
+ // original parent to avoid running its `StrongRuleNode`
+ // destructor which would attempt to use the free list if it
+ // still exists.
+ this = UnsafeBox::clone(&parent.p);
+ mem::forget(parent);
+ if this.refcount.fetch_sub(1, Ordering::Release) == 1 {
+ debug_assert_eq!(this.next_free.load(Ordering::Relaxed), ptr::null_mut());
+ if this.root.is_some() {
+ RuleNode::pretend_to_be_on_free_list(&this);
+ }
+ // Parent also reached refcount zero, we loop to drop it.
+ continue;
+ }
+ }
+
+ return;
+ }
+ }
+
+ /// Pushes this node on the tree's free list. Returns false if the free list
+ /// is gone. Should only be called after we decremented a node's refcount
+ /// to zero and pretended to be on the free list.
+ unsafe fn push_on_free_list(this: &UnsafeBox<Self>) -> bool {
+ let root = &this.root.as_ref().unwrap().p;
+
+ debug_assert!(this.refcount.load(Ordering::Relaxed) > 0);
+ debug_assert_eq!(this.next_free.load(Ordering::Relaxed), Self::DANGLING_PTR);
+
+ // Increment the approximate free count by one.
+ root.approximate_free_count.fetch_add(1, Ordering::Relaxed);
+
+ // If the compare-exchange operation fails in the loop, we will retry
+ // with the new head value, so this can be a relaxed load.
+ let mut head = root.next_free.load(Ordering::Relaxed);
+
+ while !head.is_null() {
+ // Two threads can never attempt to push the same node on the free
+ // list at the same time, so whoever else pushed a node on the free
+ // list cannot have done so with this node.
+ debug_assert_ne!(head, this.as_mut_ptr());
+
+ // Store the current head of the free list in this node.
+ this.next_free.store(head, Ordering::Relaxed);
+
+ // Any thread acquiring the free list must observe the previous
+ // next_free changes that occurred, hence the release ordering
+ // on success.
+ match root.next_free.compare_exchange_weak(
+ head,
+ this.as_mut_ptr(),
+ Ordering::Release,
+ Ordering::Relaxed,
+ ) {
+ Ok(_) => {
+ // This node is now on the free list, caller should not use
+ // the node anymore.
+ return true;
+ },
+ Err(new_head) => head = new_head,
+ }
+ }
+
+ // Tree was dropped and free list has been destroyed. We did not push
+ // this node on the free list but we still pretend to be on the free
+ // list to be ready to call `drop_without_free_list`.
+ false
+ }
+
+ /// Makes the node pretend to be on the free list. This will increment the
+ /// refcount by 1 and store `Self::DANGLING_PTR` in `next_free`. This
+ /// method should only be called after the caller decremented the refcount to
+ /// zero, with the null pointer stored in `next_free`.
+ unsafe fn pretend_to_be_on_free_list(this: &UnsafeBox<Self>) {
+ debug_assert_eq!(this.next_free.load(Ordering::Relaxed), ptr::null_mut());
+ this.refcount.fetch_add(1, Ordering::Relaxed);
+ this.next_free.store(Self::DANGLING_PTR, Ordering::Release);
+ }
+
+ fn as_mut_ptr(&self) -> *mut RuleNode {
+ self as *const RuleNode as *mut RuleNode
+ }
+}
+
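Taken together, `pretend_to_be_on_free_list`, `push_on_free_list`, and `swap_free_list_and_gc` implement a lock-free (Treiber-style) stack. The following is a minimal, self-contained sketch of the push side of that protocol, with a stand-in `Node` type in place of `RuleNode`; the atomic orderings mirror the ones used above.

    use std::ptr;
    use std::sync::atomic::{AtomicPtr, Ordering};

    struct Node {
        next_free: AtomicPtr<Node>,
    }

    // Push `node` onto the free list headed by `head`. Returns false if
    // the list is gone (null head), leaving the list untouched.
    fn push(head: &AtomicPtr<Node>, node: *mut Node) -> bool {
        let mut current = head.load(Ordering::Relaxed);
        while !current.is_null() {
            // Store the observed head as our next pointer...
            unsafe { (*node).next_free.store(current, Ordering::Relaxed) };
            // ...and try to become the new head. Release on success, so
            // that whoever steals the list with an acquire swap also
            // observes the `next_free` store above.
            match head.compare_exchange_weak(current, node, Ordering::Release, Ordering::Relaxed) {
                Ok(_) => return true,
                Err(observed) => current = observed,
            }
        }
        false
    }

    fn main() {
        // A dangling-but-aligned sentinel marks the empty list, like
        // RuleNode::DANGLING_PTR; null would mean the list was torn down.
        let sentinel: *mut Node = ptr::NonNull::dangling().as_ptr();
        let head = AtomicPtr::new(sentinel);
        let node = Box::into_raw(Box::new(Node {
            next_free: AtomicPtr::new(ptr::null_mut()),
        }));
        assert!(push(&head, node));
        assert_eq!(head.load(Ordering::Relaxed), node);
        unsafe { drop(Box::from_raw(node)) };
    }

A failed `compare_exchange_weak` hands back the head it actually observed, so the loop retries without locking; a null head means the owner went away, exactly like a dropped `RuleTree`.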
+pub(crate) struct WeakRuleNode {
+ p: UnsafeBox<RuleNode>,
+}
+
+/// A strong reference to a rule node.
+pub struct StrongRuleNode {
+ p: UnsafeBox<RuleNode>,
+}
+
+#[cfg(feature = "servo")]
+malloc_size_of_is_0!(StrongRuleNode);
+
+impl StrongRuleNode {
+ fn new(n: Box<RuleNode>) -> Self {
+ debug_assert_eq!(n.parent.is_none(), n.source.is_none());
+
+ log_new(&*n);
+
+ debug!("Creating rule node: {:p}", &*n);
+
+ Self {
+ p: UnsafeBox::from_box(n),
+ }
+ }
+
+ unsafe fn from_unsafe_box(p: UnsafeBox<RuleNode>) -> Self {
+ Self { p }
+ }
+
+ unsafe fn downgrade(&self) -> WeakRuleNode {
+ WeakRuleNode {
+ p: UnsafeBox::clone(&self.p),
+ }
+ }
+
+ /// Get the parent rule node of this rule node.
+ pub fn parent(&self) -> Option<&StrongRuleNode> {
+ self.p.parent.as_ref()
+ }
+
+ pub(super) fn ensure_child(
+ &self,
+ root: &StrongRuleNode,
+ source: StyleSource,
+ cascade_priority: CascadePriority,
+ ) -> StrongRuleNode {
+ use parking_lot::RwLockUpgradableReadGuard;
+
+ debug_assert!(
+ self.p.cascade_priority <= cascade_priority,
+ "Should be ordered (instead {:?} > {:?}), from {:?} and {:?}",
+ self.p.cascade_priority,
+ cascade_priority,
+ self.p.source,
+ source,
+ );
+
+ let key = ChildKey(cascade_priority, source.key());
+ let children = self.p.children.upgradable_read();
+ if let Some(child) = children.get(&key, |node| node.p.key()) {
+ // Sound to call because we read-locked the parent's children.
+ return unsafe { child.upgrade() };
+ }
+ let mut children = RwLockUpgradableReadGuard::upgrade(children);
+ match children.entry(key, |node| node.p.key()) {
+ Entry::Occupied(child) => {
+ // Sound to call because we write-locked the parent's children.
+ unsafe { child.upgrade() }
+ },
+ Entry::Vacant(entry) => unsafe {
+ let node = StrongRuleNode::new(Box::new(RuleNode::new(
+ root.downgrade(),
+ self.clone(),
+ source,
+ cascade_priority,
+ )));
+ // Sound to call because we still own a strong reference to
+ // this node, through the `node` variable itself that we are
+ // going to return to the caller.
+ entry.insert(node.downgrade());
+ node
+ },
+ }
+ }
+
+ /// Get the style source corresponding to this rule node. May return `None`
+ /// if it's the root node, which means that the node hasn't matched any
+ /// rules.
+ pub fn style_source(&self) -> Option<&StyleSource> {
+ self.p.source.as_ref()
+ }
+
+ /// The cascade priority.
+ #[inline]
+ pub fn cascade_priority(&self) -> CascadePriority {
+ self.p.cascade_priority
+ }
+
+ /// The cascade level.
+ #[inline]
+ pub fn cascade_level(&self) -> CascadeLevel {
+ self.cascade_priority().cascade_level()
+ }
+
+ /// The importance.
+ #[inline]
+ pub fn importance(&self) -> crate::properties::Importance {
+ self.cascade_level().importance()
+ }
+
+ /// Returns whether this node has any child, only intended for testing
+ /// purposes.
+ pub unsafe fn has_children_for_testing(&self) -> bool {
+ !self.p.children.read().is_empty()
+ }
+
+ pub(super) fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
+ const INDENT_INCREMENT: usize = 4;
+
+ for _ in 0..indent {
+ let _ = write!(writer, " ");
+ }
+
+ let _ = writeln!(
+ writer,
+ " - {:p} (ref: {:?}, parent: {:?})",
+ &*self.p,
+ self.p.refcount.load(Ordering::Relaxed),
+ self.parent().map(|p| &*p.p as *const RuleNode)
+ );
+
+ for _ in 0..indent {
+ let _ = write!(writer, " ");
+ }
+
+ if let Some(source) = self.style_source() {
+ source.dump(self.cascade_level().guard(guards), writer);
+ } else {
+ if indent != 0 {
+ warn!("How has this happened?");
+ }
+ let _ = write!(writer, "(root)");
+ }
+
+ let _ = write!(writer, "\n");
+ for child in &*self.p.children.read() {
+ unsafe {
+ child
+ .upgrade()
+ .dump(guards, writer, indent + INDENT_INCREMENT);
+ }
+ }
+ }
+}
+
+impl Clone for StrongRuleNode {
+ fn clone(&self) -> Self {
+ debug!(
+ "{:p}: {:?}+",
+ &*self.p,
+ self.p.refcount.load(Ordering::Relaxed)
+ );
+ debug_assert!(self.p.refcount.load(Ordering::Relaxed) > 0);
+ self.p.refcount.fetch_add(1, Ordering::Relaxed);
+ unsafe { StrongRuleNode::from_unsafe_box(UnsafeBox::clone(&self.p)) }
+ }
+}
+
+impl Drop for StrongRuleNode {
+ #[cfg_attr(feature = "servo", allow(unused_mut))]
+ fn drop(&mut self) {
+ let node = &*self.p;
+ debug!("{:p}: {:?}-", node, node.refcount.load(Ordering::Relaxed));
+ debug!(
+ "Dropping node: {:p}, root: {:?}, parent: {:?}",
+ node,
+ node.root.as_ref().map(|r| &*r.p as *const RuleNode),
+ node.parent.as_ref().map(|p| &*p.p as *const RuleNode)
+ );
+
+ let should_drop = {
+ debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
+ node.refcount.fetch_sub(1, Ordering::Release) == 1
+ };
+
+ if !should_drop {
+ // The refcount didn't even drop zero yet, there is nothing for us
+ // to do anymore.
+ return;
+ }
+
+ unsafe {
+ if node.root.is_some() {
+ // This is a non-root node and we just observed the refcount
+ // dropping to zero, we need to pretend to be on the free list
+ // to unblock any thread that tried to resurrect this node first
+ // through `WeakRuleNode::upgrade`.
+ RuleNode::pretend_to_be_on_free_list(&self.p);
+
+ // Attempt to push the node on the free list. This may fail
+ // if the free list is gone.
+ if RuleNode::push_on_free_list(&self.p) {
+ return;
+ }
+ }
+
+ // Either this was the last reference of the root node, or the
+ // rule tree is gone and there is no free list anymore. Drop the
+ // node.
+ RuleNode::drop_without_free_list(&mut self.p);
+ }
+ }
+}
+
+impl WeakRuleNode {
+ /// Upgrades this weak node reference, returning a strong one.
+ ///
+ /// Must only be called for nodes stored in a parent's children list; that
+ /// list must be at least read-locked when this is called.
+ unsafe fn upgrade(&self) -> StrongRuleNode {
+ debug!("Upgrading weak node: {:p}", &*self.p);
+
+ if self.p.refcount.fetch_add(1, Ordering::Relaxed) == 0 {
+ // We observed a refcount of 0, we need to wait for this node to
+ // be put on the free list. Resetting the `next_free` pointer to
+ // null is only done in `RuleNode::drop_without_free_list`, just
+ // before a release refcount decrement, so this acquire fence here
+ // makes sure that we observed the write to null before we loop
+ // until there is a non-null value.
+ atomic::fence(Ordering::Acquire);
+ while self.p.next_free.load(Ordering::Relaxed).is_null() {}
+ }
+ StrongRuleNode::from_unsafe_box(UnsafeBox::clone(&self.p))
+ }
+}
+
+impl fmt::Debug for StrongRuleNode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ (&*self.p as *const RuleNode).fmt(f)
+ }
+}
+
+impl Eq for StrongRuleNode {}
+impl PartialEq for StrongRuleNode {
+ fn eq(&self, other: &Self) -> bool {
+ &*self.p as *const RuleNode == &*other.p
+ }
+}
+
+impl hash::Hash for StrongRuleNode {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: hash::Hasher,
+ {
+ (&*self.p as *const RuleNode).hash(state)
+ }
+}
+
+// Large pages generate thousands of RuleNode objects.
+size_of_test!(RuleNode, 80);
+// StrongRuleNode should be pointer-sized even inside an option.
+size_of_test!(Option<StrongRuleNode>, 8);
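To make the path sharing described at the top of this file concrete, here is a minimal single-threaded sketch: `Rc` and a plain `HashMap` stand in for the atomic refcounting and the inline children map, and the rule names are made up.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    struct Node {
        rule: String,
        children: RefCell<HashMap<String, Rc<Node>>>,
    }

    fn new_node(rule: &str) -> Rc<Node> {
        Rc::new(Node {
            rule: rule.to_owned(),
            children: RefCell::new(HashMap::new()),
        })
    }

    // Mirrors ensure_child: reuse the existing child for this rule, or
    // create and register a new one.
    fn ensure_child(parent: &Rc<Node>, rule: &str) -> Rc<Node> {
        parent
            .children
            .borrow_mut()
            .entry(rule.to_owned())
            .or_insert_with(|| new_node(rule))
            .clone()
    }

    fn insert_ordered_rules(root: &Rc<Node>, rules: &[&str]) -> Rc<Node> {
        rules.iter().fold(root.clone(), |current, &rule| ensure_child(&current, rule))
    }

    fn main() {
        let root = new_node("(root)");
        let a = insert_ordered_rules(&root, &["ua.css#10", "author.css#3"]);
        let b = insert_ordered_rules(&root, &["ua.css#10", "author.css#3"]);
        // The second insertion walked the exact same path: no new nodes.
        assert!(Rc::ptr_eq(&a, &b));
        println!("deepest rule: {}", b.rule);
    }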
diff --git a/servo/components/style/rule_tree/level.rs b/servo/components/style/rule_tree/level.rs
new file mode 100644
index 0000000000..b8cbe55ed9
--- /dev/null
+++ b/servo/components/style/rule_tree/level.rs
@@ -0,0 +1,249 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![forbid(unsafe_code)]
+
+use crate::properties::Importance;
+use crate::shared_lock::{SharedRwLockReadGuard, StylesheetGuards};
+use crate::stylesheets::Origin;
+
+/// The cascade level these rules are relevant at, as per [1][2][3].
+///
+/// Presentational hints for SVG and HTML are in the "author-level
+/// zero-specificity" level, that is, right after user rules, and before author
+/// rules.
+///
+/// The order of variants declared here is significant, and must be in
+/// _ascending_ order of precedence.
+///
+/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules
+/// from outside the tree the element is in can't affect the element.
+///
+/// The opposite is not true though: rules from an "inner" shadow tree (i.e.,
+/// :host and ::slotted rules) may affect an element connected to the document
+/// or to an "outer" shadow tree.
+///
+/// [1]: https://drafts.csswg.org/css-cascade/#cascade-origin
+/// [2]: https://drafts.csswg.org/css-cascade/#preshint
+/// [3]: https://html.spec.whatwg.org/multipage/#presentational-hints
+/// [4]: https://drafts.csswg.org/css-scoping/#shadow-cascading
+#[repr(u8)]
+#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
+pub enum CascadeLevel {
+ /// Normal User-Agent rules.
+ UANormal,
+ /// User normal rules.
+ UserNormal,
+ /// Presentational hints.
+ PresHints,
+ /// Shadow DOM styles from author styles.
+ AuthorNormal {
+ /// The order in the shadow tree hierarchy. This number is relative to
+ /// the tree of the element, and thus the only invariants that need to
+ /// be preserved are:
+ ///
+ /// * Zero is the same tree as the element that matched the rule. This
+ /// is important so that we can optimize style attribute insertions.
+ ///
+ /// * The levels are ordered in accordance with
+ /// https://drafts.csswg.org/css-scoping/#shadow-cascading
+ shadow_cascade_order: ShadowCascadeOrder,
+ },
+ /// SVG SMIL animations.
+ SMILOverride,
+ /// CSS animations and script-generated animations.
+ Animations,
+ /// Author-supplied important rules.
+ AuthorImportant {
+ /// The order in the shadow tree hierarchy, inverted, so that PartialOrd
+ /// does the right thing.
+ shadow_cascade_order: ShadowCascadeOrder,
+ },
+ /// User important rules.
+ UserImportant,
+ /// User-agent important rules.
+ UAImportant,
+ /// CSS transitions.
+ Transitions,
+}
+
+impl CascadeLevel {
+ /// Convert this level from "unimportant" to "important".
+ pub fn important(&self) -> Self {
+ match *self {
+ Self::UANormal => Self::UAImportant,
+ Self::UserNormal => Self::UserImportant,
+ Self::AuthorNormal {
+ shadow_cascade_order,
+ } => Self::AuthorImportant {
+ shadow_cascade_order: -shadow_cascade_order,
+ },
+ Self::PresHints |
+ Self::SMILOverride |
+ Self::Animations |
+ Self::AuthorImportant { .. } |
+ Self::UserImportant |
+ Self::UAImportant |
+ Self::Transitions => *self,
+ }
+ }
+
+ /// Convert this level from "important" to "non-important".
+ pub fn unimportant(&self) -> Self {
+ match *self {
+ Self::UAImportant => Self::UANormal,
+ Self::UserImportant => Self::UserNormal,
+ Self::AuthorImportant {
+ shadow_cascade_order,
+ } => Self::AuthorNormal {
+ shadow_cascade_order: -shadow_cascade_order,
+ },
+ Self::PresHints |
+ Self::SMILOverride |
+ Self::Animations |
+ Self::AuthorNormal { .. } |
+ Self::UserNormal |
+ Self::UANormal |
+ Self::Transitions => *self,
+ }
+ }
+
+ /// Select a lock guard for this level
+ pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> {
+ match *self {
+ Self::UANormal | Self::UserNormal | Self::UserImportant | Self::UAImportant => {
+ guards.ua_or_user
+ },
+ _ => guards.author,
+ }
+ }
+
+ /// Returns the cascade level for author important declarations from the
+ /// same tree as the element.
+ #[inline]
+ pub fn same_tree_author_important() -> Self {
+ Self::AuthorImportant {
+ shadow_cascade_order: ShadowCascadeOrder::for_same_tree(),
+ }
+ }
+
+ /// Returns the cascade level for author normal declarations from the same
+ /// tree as the element.
+ #[inline]
+ pub fn same_tree_author_normal() -> Self {
+ Self::AuthorNormal {
+ shadow_cascade_order: ShadowCascadeOrder::for_same_tree(),
+ }
+ }
+
+ /// Returns whether this cascade level represents important rules of some
+ /// sort.
+ #[inline]
+ pub fn is_important(&self) -> bool {
+ match *self {
+ Self::AuthorImportant { .. } | Self::UserImportant | Self::UAImportant => true,
+ _ => false,
+ }
+ }
+
+ /// Returns the importance relevant for this rule. Pretty similar to
+ /// `is_important`.
+ #[inline]
+ pub fn importance(&self) -> Importance {
+ if self.is_important() {
+ Importance::Important
+ } else {
+ Importance::Normal
+ }
+ }
+
+ /// Returns the cascade origin of the rule.
+ #[inline]
+ pub fn origin(&self) -> Origin {
+ match *self {
+ Self::UAImportant | Self::UANormal => Origin::UserAgent,
+ Self::UserImportant | Self::UserNormal => Origin::User,
+ Self::PresHints |
+ Self::AuthorNormal { .. } |
+ Self::AuthorImportant { .. } |
+ Self::SMILOverride |
+ Self::Animations |
+ Self::Transitions => Origin::Author,
+ }
+ }
+
+ /// Returns whether this cascade level represents animation rules.
+ #[inline]
+ pub fn is_animation(&self) -> bool {
+ match *self {
+ Self::SMILOverride | Self::Animations | Self::Transitions => true,
+ _ => false,
+ }
+ }
+}
+
+/// A counter to track how many shadow roots deep we are. This is used to
+/// handle:
+///
+/// https://drafts.csswg.org/css-scoping/#shadow-cascading
+///
+/// See the static functions for the meaning of different values.
+#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
+pub struct ShadowCascadeOrder(i8);
+
+impl ShadowCascadeOrder {
+ /// We keep a maximum of 3 bits of order as a limit so that we can pack
+ /// CascadeLevel in one byte by using half of it for the order, if that ends
+ /// up being necessary.
+ const MAX: i8 = 0b111;
+ const MIN: i8 = -Self::MAX;
+
+ /// A level for the outermost shadow tree (the shadow tree we own, and the
+ /// ones from the slots we're slotted in).
+ #[inline]
+ pub fn for_outermost_shadow_tree() -> Self {
+ Self(-1)
+ }
+
+ /// A level for the element's tree.
+ #[inline]
+ fn for_same_tree() -> Self {
+ Self(0)
+ }
+
+ /// A level for the innermost containing tree (the one closest to the
+ /// element).
+ #[inline]
+ pub fn for_innermost_containing_tree() -> Self {
+ Self(1)
+ }
+
+ /// Decrement the level, moving inwards. We should only move inwards if
+ /// we're traversing slots.
+ #[inline]
+ pub fn dec(&mut self) {
+ debug_assert!(self.0 < 0);
+ if self.0 != Self::MIN {
+ self.0 -= 1;
+ }
+ }
+
+ /// Increment the level, moving inwards. We should only move inwards if
+ /// we're traversing slots.
+ #[inline]
+ pub fn inc(&mut self) {
+ debug_assert_ne!(self.0, -1);
+ if self.0 != Self::MAX {
+ self.0 += 1;
+ }
+ }
+}
+
+impl std::ops::Neg for ShadowCascadeOrder {
+ type Output = Self;
+ #[inline]
+ fn neg(self) -> Self {
+ Self(self.0.neg())
+ }
+}
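The precedence scheme above leans on two facts: a derived `Ord` on an enum follows declaration order, and negating `shadow_cascade_order` on the normal-to-important conversion flips which tree wins. A small sketch with a stand-in enum (a bare `i8` in place of `ShadowCascadeOrder`):

    #[derive(Debug, Eq, Ord, PartialEq, PartialOrd)]
    enum Level {
        UANormal,
        UserNormal,
        AuthorNormal { shadow_cascade_order: i8 },
        AuthorImportant { shadow_cascade_order: i8 },
        UserImportant,
        UAImportant,
    }

    fn main() {
        // Derived Ord follows declaration order, so later variants take
        // precedence; hence the ascending declaration order above.
        assert!(Level::UserNormal < Level::AuthorNormal { shadow_cascade_order: 0 });
        assert!(Level::AuthorImportant { shadow_cascade_order: 0 } < Level::UAImportant);

        // Normal author rules: a containing (outer) tree wins over the
        // element's own tree, so it carries the greater order (+1 vs. 0).
        assert!(
            Level::AuthorNormal { shadow_cascade_order: 0 } <
                Level::AuthorNormal { shadow_cascade_order: 1 }
        );

        // Important author rules flip that relationship. Negating the
        // order on conversion (as `CascadeLevel::important` does) keeps
        // the derived comparison correct: the element's own tree (order
        // 0, negated to 0) now beats the containing tree (1, negated to -1).
        assert!(
            Level::AuthorImportant { shadow_cascade_order: -1 } <
                Level::AuthorImportant { shadow_cascade_order: 0 }
        );
    }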
diff --git a/servo/components/style/rule_tree/map.rs b/servo/components/style/rule_tree/map.rs
new file mode 100644
index 0000000000..33c470e9c1
--- /dev/null
+++ b/servo/components/style/rule_tree/map.rs
@@ -0,0 +1,201 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![forbid(unsafe_code)]
+
+use fxhash::FxHashMap;
+use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};
+use std::collections::hash_map;
+use std::hash::Hash;
+use std::mem;
+
+pub(super) struct Map<K, V> {
+ inner: MapInner<K, V>,
+}
+
+enum MapInner<K, V> {
+ Empty,
+ One(V),
+ Map(Box<FxHashMap<K, V>>),
+}
+
+pub(super) struct MapIter<'a, K, V> {
+ inner: MapIterInner<'a, K, V>,
+}
+
+enum MapIterInner<'a, K, V> {
+ One(std::option::IntoIter<&'a V>),
+ Map(std::collections::hash_map::Values<'a, K, V>),
+}
+
+pub(super) enum Entry<'a, K, V> {
+ Occupied(&'a mut V),
+ Vacant(VacantEntry<'a, K, V>),
+}
+
+pub(super) struct VacantEntry<'a, K, V> {
+ inner: VacantEntryInner<'a, K, V>,
+}
+
+enum VacantEntryInner<'a, K, V> {
+ One(&'a mut MapInner<K, V>),
+ Map(hash_map::VacantEntry<'a, K, V>),
+}
+
+impl<K, V> Default for Map<K, V> {
+ fn default() -> Self {
+ Map {
+ inner: MapInner::Empty,
+ }
+ }
+}
+
+impl<'a, K, V> IntoIterator for &'a Map<K, V> {
+ type Item = &'a V;
+ type IntoIter = MapIter<'a, K, V>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ MapIter {
+ inner: match &self.inner {
+ MapInner::Empty => MapIterInner::One(None.into_iter()),
+ MapInner::One(one) => MapIterInner::One(Some(one).into_iter()),
+ MapInner::Map(map) => MapIterInner::Map(map.values()),
+ },
+ }
+ }
+}
+
+impl<'a, K, V> Iterator for MapIter<'a, K, V> {
+ type Item = &'a V;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match &mut self.inner {
+ MapIterInner::One(one_iter) => one_iter.next(),
+ MapIterInner::Map(map_iter) => map_iter.next(),
+ }
+ }
+}
+
+impl<K, V> Map<K, V>
+where
+ K: Eq + Hash,
+{
+ pub(super) fn is_empty(&self) -> bool {
+ match &self.inner {
+ MapInner::Empty => true,
+ MapInner::One(_) => false,
+ MapInner::Map(map) => map.is_empty(),
+ }
+ }
+
+ #[cfg(debug_assertions)]
+ pub(super) fn len(&self) -> usize {
+ match &self.inner {
+ MapInner::Empty => 0,
+ MapInner::One(_) => 1,
+ MapInner::Map(map) => map.len(),
+ }
+ }
+
+ pub(super) fn get(&self, key: &K, key_from_value: impl FnOnce(&V) -> K) -> Option<&V> {
+ match &self.inner {
+ MapInner::One(one) if *key == key_from_value(one) => Some(one),
+ MapInner::Map(map) => map.get(key),
+ MapInner::Empty | MapInner::One(_) => None,
+ }
+ }
+
+ pub(super) fn entry(
+ &mut self,
+ key: K,
+ key_from_value: impl FnOnce(&V) -> K,
+ ) -> Entry<'_, K, V> {
+ match self.inner {
+ ref mut inner @ MapInner::Empty => Entry::Vacant(VacantEntry {
+ inner: VacantEntryInner::One(inner),
+ }),
+ MapInner::One(_) => {
+ let one = match mem::replace(&mut self.inner, MapInner::Empty) {
+ MapInner::One(one) => one,
+ _ => unreachable!(),
+ };
+ // If this panics, the child `one` will be lost.
+ let one_key = key_from_value(&one);
+ // Same for the equality test.
+ if key == one_key {
+ self.inner = MapInner::One(one);
+ let one = match &mut self.inner {
+ MapInner::One(one) => one,
+ _ => unreachable!(),
+ };
+ return Entry::Occupied(one);
+ }
+ self.inner = MapInner::Map(Box::new(FxHashMap::with_capacity_and_hasher(
+ 2,
+ Default::default(),
+ )));
+ let map = match &mut self.inner {
+ MapInner::Map(map) => map,
+ _ => unreachable!(),
+ };
+ map.insert(one_key, one);
+ match map.entry(key) {
+ hash_map::Entry::Vacant(entry) => Entry::Vacant(VacantEntry {
+ inner: VacantEntryInner::Map(entry),
+ }),
+ _ => unreachable!(),
+ }
+ },
+ MapInner::Map(ref mut map) => match map.entry(key) {
+ hash_map::Entry::Occupied(entry) => Entry::Occupied(entry.into_mut()),
+ hash_map::Entry::Vacant(entry) => Entry::Vacant(VacantEntry {
+ inner: VacantEntryInner::Map(entry),
+ }),
+ },
+ }
+ }
+
+ pub(super) fn remove(&mut self, key: &K, key_from_value: impl FnOnce(&V) -> K) -> Option<V> {
+ match &mut self.inner {
+ MapInner::One(one) if *key == key_from_value(one) => {
+ match mem::replace(&mut self.inner, MapInner::Empty) {
+ MapInner::One(one) => Some(one),
+ _ => unreachable!(),
+ }
+ },
+ MapInner::Map(map) => map.remove(key),
+ MapInner::Empty | MapInner::One(_) => None,
+ }
+ }
+}
+
+impl<'a, K, V> VacantEntry<'a, K, V> {
+ pub(super) fn insert(self, value: V) -> &'a mut V {
+ match self.inner {
+ VacantEntryInner::One(map) => {
+ *map = MapInner::One(value);
+ match map {
+ MapInner::One(one) => one,
+ _ => unreachable!(),
+ }
+ },
+ VacantEntryInner::Map(entry) => entry.insert(value),
+ }
+ }
+}
+
+impl<K, V> MallocShallowSizeOf for Map<K, V>
+where
+ K: Eq + Hash,
+{
+ fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ match &self.inner {
+ MapInner::Map(m) => {
+ // We want to account for both the box and the hashmap.
+ m.shallow_size_of(ops) + (**m).shallow_size_of(ops)
+ },
+ MapInner::One(_) | MapInner::Empty => 0,
+ }
+ }
+}
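The point of this map is to avoid any heap allocation in the overwhelmingly common cases of zero or one child. A minimal sketch of the same three-state idea follows; unlike the real `Map`, it stores the key next to the value instead of recomputing it through `key_from_value`.

    use std::collections::HashMap;
    use std::hash::Hash;
    use std::mem;

    // Zero and one entries live inline; a boxed HashMap is allocated
    // only once a second entry shows up.
    enum TinyMap<K, V> {
        Empty,
        One(K, V),
        Map(Box<HashMap<K, V>>),
    }

    impl<K: Hash + Eq, V> TinyMap<K, V> {
        fn insert(&mut self, key: K, value: V) {
            match mem::replace(self, TinyMap::Empty) {
                TinyMap::Empty => *self = TinyMap::One(key, value),
                TinyMap::One(k, v) => {
                    // Second entry: promote to a real map.
                    let mut map = Box::new(HashMap::with_capacity(2));
                    map.insert(k, v);
                    map.insert(key, value);
                    *self = TinyMap::Map(map);
                },
                TinyMap::Map(mut map) => {
                    map.insert(key, value);
                    *self = TinyMap::Map(map);
                },
            }
        }

        fn get(&self, key: &K) -> Option<&V> {
            match self {
                TinyMap::Empty => None,
                TinyMap::One(k, v) if k == key => Some(v),
                TinyMap::One(..) => None,
                TinyMap::Map(map) => map.get(key),
            }
        }
    }

    fn main() {
        let mut m = TinyMap::Empty;
        m.insert("a", 1); // Empty -> One: still no heap map
        assert_eq!(m.get(&"a"), Some(&1));
        m.insert("b", 2); // One -> Map: the first real allocation
        assert_eq!(m.get(&"a"), Some(&1));
        assert_eq!(m.get(&"b"), Some(&2));
    }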
diff --git a/servo/components/style/rule_tree/mod.rs b/servo/components/style/rule_tree/mod.rs
new file mode 100644
index 0000000000..18ee018d64
--- /dev/null
+++ b/servo/components/style/rule_tree/mod.rs
@@ -0,0 +1,403 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![deny(unsafe_code)]
+
+//! The rule tree.
+
+use crate::applicable_declarations::{ApplicableDeclarationList, CascadePriority};
+use crate::properties::{LonghandIdSet, PropertyDeclarationBlock};
+use crate::shared_lock::{Locked, StylesheetGuards};
+use crate::stylesheets::layer_rule::LayerOrder;
+use servo_arc::ArcBorrow;
+use smallvec::SmallVec;
+use std::io::{self, Write};
+
+mod core;
+mod level;
+mod map;
+mod source;
+mod unsafe_box;
+
+pub use self::core::{RuleTree, StrongRuleNode};
+pub use self::level::{CascadeLevel, ShadowCascadeOrder};
+pub use self::source::StyleSource;
+
+impl RuleTree {
+ fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W) {
+ let _ = writeln!(writer, " + RuleTree");
+ self.root().dump(guards, writer, 0);
+ }
+
+ /// Dump the rule tree to stdout.
+ pub fn dump_stdout(&self, guards: &StylesheetGuards) {
+ let mut stdout = io::stdout();
+ self.dump(guards, &mut stdout);
+ }
+
+ /// Inserts the given rules, which must be in proper order by specificity, and
+ /// returns the corresponding rule node representing the last inserted one.
+ ///
+ /// !important rules are detected and inserted into the appropriate position
+ /// in the rule tree. This allows selector matching to ignore importance,
+ /// while still maintaining the appropriate cascade order in the rule tree.
+ pub fn insert_ordered_rules_with_important<'a, I>(
+ &self,
+ iter: I,
+ guards: &StylesheetGuards,
+ ) -> StrongRuleNode
+ where
+ I: Iterator<Item = (StyleSource, CascadePriority)>,
+ {
+ use self::CascadeLevel::*;
+ let mut current = self.root().clone();
+
+ let mut found_important = false;
+
+ let mut important_author = SmallVec::<[(StyleSource, CascadePriority); 4]>::new();
+ let mut important_user = SmallVec::<[(StyleSource, CascadePriority); 4]>::new();
+ let mut important_ua = SmallVec::<[(StyleSource, CascadePriority); 4]>::new();
+ let mut transition = None;
+
+ for (source, priority) in iter {
+ let level = priority.cascade_level();
+ debug_assert!(!level.is_important(), "Important levels handled internally");
+
+ let any_important = {
+ let pdb = source.read(level.guard(guards));
+ pdb.any_important()
+ };
+
+ if any_important {
+ found_important = true;
+ match level {
+ AuthorNormal { .. } => {
+ important_author.push((source.clone(), priority.important()))
+ },
+ UANormal => important_ua.push((source.clone(), priority.important())),
+ UserNormal => important_user.push((source.clone(), priority.important())),
+ _ => {},
+ };
+ }
+
+ // We don't optimize out empty rules, even though we could.
+ //
+ // Inspector relies on every rule being inserted in the normal level
+ // at least once, in order to return the rules with the correct
+ // specificity order.
+ //
+ // TODO(emilio): If we want to apply these optimizations without
+ // breaking inspector's expectations, we'd need to run
+ // selector-matching again at the inspector's request. That may or
+ // may not be a better trade-off.
+ if matches!(level, Transitions) && found_important {
+ // There can be at most one transition, and it will come at
+ // the end of the iterator. Stash it and apply it after
+ // !important rules.
+ debug_assert!(transition.is_none());
+ transition = Some(source);
+ } else {
+ current = current.ensure_child(self.root(), source, priority);
+ }
+ }
+
+ // Early-return in the common case of no !important declarations.
+ if !found_important {
+ return current;
+ }
+
+ // Insert important declarations, in order of increasing importance,
+ // followed by any transition rule.
+ //
+ // Important rules are sorted differently from unimportant ones by
+ // shadow order and cascade order.
+ if !important_author.is_empty() &&
+ important_author.first().unwrap().1 != important_author.last().unwrap().1
+ {
+ // We only need to sort if the important rules come from
+ // different trees, but we need this sort to be stable.
+ //
+ // FIXME(emilio): This could maybe be smarter, probably by chunking
+ // the important rules while inserting, and iterating the outer
+ // chunks in reverse order.
+ //
+ // That is, if we have rules with levels like: -1 -1 -1 0 0 0 1 1 1,
+ // we're really only sorting the chunks, while keeping elements
+ // inside the same chunk already sorted. Seems like we could try to
+ // keep a SmallVec-of-SmallVecs with the chunks and just iterate the
+ // outer in reverse.
+ important_author.sort_by_key(|&(_, priority)| priority);
+ }
+
+ for (source, priority) in important_author.drain(..) {
+ current = current.ensure_child(self.root(), source, priority);
+ }
+
+ for (source, priority) in important_user.drain(..) {
+ current = current.ensure_child(self.root(), source, priority);
+ }
+
+ for (source, priority) in important_ua.drain(..) {
+ current = current.ensure_child(self.root(), source, priority);
+ }
+
+ if let Some(source) = transition {
+ current = current.ensure_child(
+ self.root(),
+ source,
+ CascadePriority::new(Transitions, LayerOrder::root()),
+ );
+ }
+
+ current
+ }
+
+ /// Given a list of applicable declarations, insert the rules and return the
+ /// corresponding rule node.
+ pub fn compute_rule_node(
+ &self,
+ applicable_declarations: &mut ApplicableDeclarationList,
+ guards: &StylesheetGuards,
+ ) -> StrongRuleNode {
+ self.insert_ordered_rules_with_important(
+ applicable_declarations.drain(..).map(|d| d.for_rule_tree()),
+ guards,
+ )
+ }
+
+ /// Insert the given rules, which must be in proper order by specificity, and
+ /// return the corresponding rule node representing the last inserted one.
+ pub fn insert_ordered_rules<'a, I>(&self, iter: I) -> StrongRuleNode
+ where
+ I: Iterator<Item = (StyleSource, CascadePriority)>,
+ {
+ self.insert_ordered_rules_from(self.root().clone(), iter)
+ }
+
+ fn insert_ordered_rules_from<'a, I>(&self, from: StrongRuleNode, iter: I) -> StrongRuleNode
+ where
+ I: Iterator<Item = (StyleSource, CascadePriority)>,
+ {
+ let mut current = from;
+ for (source, priority) in iter {
+ current = current.ensure_child(self.root(), source, priority);
+ }
+ current
+ }
+
+ /// Replaces a rule at a given level (if present) with another rule.
+ ///
+ /// Returns the resulting node that represents the new path, or None if
+ /// the old path is still valid.
+ pub fn update_rule_at_level(
+ &self,
+ level: CascadeLevel,
+ layer_order: LayerOrder,
+ pdb: Option<ArcBorrow<Locked<PropertyDeclarationBlock>>>,
+ path: &StrongRuleNode,
+ guards: &StylesheetGuards,
+ important_rules_changed: &mut bool,
+ ) -> Option<StrongRuleNode> {
+ // TODO(emilio): Being smarter with lifetimes we could avoid a bit of
+ // the refcount churn.
+ let mut current = path.clone();
+ *important_rules_changed = false;
+
+ // First walk up until the first less-or-equally specific rule.
+ let mut children = SmallVec::<[_; 10]>::new();
+ while current.cascade_priority().cascade_level() > level {
+ children.push((
+ current.style_source().unwrap().clone(),
+ current.cascade_priority(),
+ ));
+ current = current.parent().unwrap().clone();
+ }
+
+ // Then remove the one at the level we want to replace, if any.
+ //
+ // NOTE: Here we assume that only one rule can be at the level we're
+ // replacing.
+ //
+ // This is certainly true for HTML style attribute rules, animations and
+ // transitions, but might not be so for SMIL animations, which we'd need
+ // to special-case (it isn't hard: it's just a matter of removing the `if`
+ // and the special cases, and replacing them with a `while` loop, at the
+ // cost of those optimizations).
+ if current.cascade_priority().cascade_level() == level {
+ *important_rules_changed |= level.is_important();
+
+ let current_decls = current.style_source().unwrap().as_declarations();
+
+ if let (Some(ref pdb), Some(ref current_decls)) = (pdb, current_decls) {
+ // If the only rule at the level we're replacing is exactly the
+ // same as `pdb`, we're done, and `path` is still valid.
+ //
+ // TODO(emilio): Another potential optimization is the one where
+ // we can just replace the rule at that level for `pdb`, and
+ // then we don't need to re-create the children, and `path` is
+ // also equally valid. This is less likely, and would require an
+ // in-place mutation of the source, which is, at best, fiddly,
+ // so let's skip it for now.
+ let is_here_already = ArcBorrow::ptr_eq(pdb, current_decls);
+ if is_here_already {
+ debug!("Picking the fast path in rule replacement");
+ return None;
+ }
+ }
+
+ if current_decls.is_some() {
+ current = current.parent().unwrap().clone();
+ }
+ }
+
+ // Insert the rule if it's relevant at this level in the cascade.
+ //
+ // These optimizations are likely to be important, because the levels
+ // where replacements apply (style and animations) tend to trigger
+ // pretty bad styling cases already.
+ if let Some(pdb) = pdb {
+ if level.is_important() {
+ if pdb.read_with(level.guard(guards)).any_important() {
+ current = current.ensure_child(
+ self.root(),
+ StyleSource::from_declarations(pdb.clone_arc()),
+ CascadePriority::new(level, layer_order),
+ );
+ *important_rules_changed = true;
+ }
+ } else {
+ if pdb.read_with(level.guard(guards)).any_normal() {
+ current = current.ensure_child(
+ self.root(),
+ StyleSource::from_declarations(pdb.clone_arc()),
+ CascadePriority::new(level, layer_order),
+ );
+ }
+ }
+ }
+
+ // Now the rule is in the relevant place, push the children as
+ // necessary.
+ let rule = self.insert_ordered_rules_from(current, children.drain(..).rev());
+ Some(rule)
+ }
+
+ /// Returns a new rule node without the Transitions-level rule.
+ pub fn remove_transition_rule_if_applicable(&self, path: &StrongRuleNode) -> StrongRuleNode {
+ // Return a clone if there is no transition level.
+ if path.cascade_level() != CascadeLevel::Transitions {
+ return path.clone();
+ }
+
+ path.parent().unwrap().clone()
+ }
+
+ /// Returns a new rule node without rules from declarative animations.
+ pub fn remove_animation_rules(&self, path: &StrongRuleNode) -> StrongRuleNode {
+ // Return a clone if there are no animation rules.
+ if !path.has_animation_or_transition_rules() {
+ return path.clone();
+ }
+
+ let iter = path
+ .self_and_ancestors()
+ .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride);
+ let mut last = path;
+ let mut children = SmallVec::<[_; 10]>::new();
+ for node in iter {
+ if !node.cascade_level().is_animation() {
+ children.push((
+ node.style_source().unwrap().clone(),
+ node.cascade_priority(),
+ ));
+ }
+ last = node;
+ }
+
+ self.insert_ordered_rules_from(last.parent().unwrap().clone(), children.drain(..).rev())
+ }
+}
+
+impl StrongRuleNode {
+ /// Get an iterator for this rule node and its ancestors.
+ pub fn self_and_ancestors(&self) -> SelfAndAncestors {
+ SelfAndAncestors {
+ current: Some(self),
+ }
+ }
+
+ /// Returns true if there is either an animation-level or transition-level rule.
+ pub fn has_animation_or_transition_rules(&self) -> bool {
+ self.self_and_ancestors()
+ .take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride)
+ .any(|node| node.cascade_level().is_animation())
+ }
+
+ /// Get a set of properties whose CascadeLevel is higher than Animations
+ /// but not equal to Transitions.
+ ///
+ /// If there are any custom properties, we set the boolean value of the
+ /// returned tuple to true.
+ pub fn get_properties_overriding_animations(
+ &self,
+ guards: &StylesheetGuards,
+ ) -> (LonghandIdSet, bool) {
+ use crate::properties::PropertyDeclarationId;
+
+ // We want to iterate over cascade levels that override the animations
+ // level, i.e. !important levels and the transitions level.
+ //
+ // However, we actually want to skip the transitions level because
+ // although it is higher in the cascade than animations, when both
+ // transitions and animations are present for a given element and
+ // property, transitions are suppressed so that they don't actually
+ // override animations.
+ let iter = self
+ .self_and_ancestors()
+ .skip_while(|node| node.cascade_level() == CascadeLevel::Transitions)
+ .take_while(|node| node.cascade_level() > CascadeLevel::Animations);
+ let mut result = (LonghandIdSet::new(), false);
+ for node in iter {
+ let style = node.style_source().unwrap();
+ for (decl, important) in style
+ .read(node.cascade_level().guard(guards))
+ .declaration_importance_iter()
+ {
+ // Although we are only iterating over cascade levels that
+ // override animations, in a given property declaration block we
+ // can have a mixture of !important and non-!important
+ // declarations but only the !important declarations actually
+ // override animations.
+ if important.important() {
+ match decl.id() {
+ PropertyDeclarationId::Longhand(id) => result.0.insert(id),
+ PropertyDeclarationId::Custom(_) => result.1 = true,
+ }
+ }
+ }
+ }
+ result
+ }
+}
+
+/// An iterator over a rule node and its ancestors.
+#[derive(Clone)]
+pub struct SelfAndAncestors<'a> {
+ current: Option<&'a StrongRuleNode>,
+}
+
+impl<'a> Iterator for SelfAndAncestors<'a> {
+ type Item = &'a StrongRuleNode;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.current.map(|node| {
+ self.current = node.parent();
+ node
+ })
+ }
+}
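To see the order `insert_ordered_rules_with_important` actually produces, here is a tiny sketch with origins reduced to an enum and each rule reduced to an `(origin, has_important)` pair; layers, shadow order, and the stashed transition rule are left out.

    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    enum Origin {
        UA,
        User,
        Author,
    }

    // Normal declarations are inserted as they come; rules that also
    // carry !important declarations are stashed per origin and appended
    // afterwards in inverted origin order, as the cascade demands.
    fn insertion_order(rules: &[(Origin, bool)]) -> Vec<(Origin, bool)> {
        let (mut author, mut user, mut ua) = (Vec::new(), Vec::new(), Vec::new());
        let mut out = Vec::new();
        for &(origin, any_important) in rules {
            out.push((origin, false));
            if any_important {
                match origin {
                    Origin::Author => author.push((origin, true)),
                    Origin::User => user.push((origin, true)),
                    Origin::UA => ua.push((origin, true)),
                }
            }
        }
        out.extend(author);
        out.extend(user);
        out.extend(ua);
        out
    }

    fn main() {
        let order = insertion_order(&[(Origin::UA, true), (Origin::Author, true)]);
        assert_eq!(
            order,
            vec![
                (Origin::UA, false),
                (Origin::Author, false),
                (Origin::Author, true), // author !important before user/UA
                (Origin::UA, true),     // UA !important wins overall
            ]
        );
    }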
diff --git a/servo/components/style/rule_tree/source.rs b/servo/components/style/rule_tree/source.rs
new file mode 100644
index 0000000000..76443692d7
--- /dev/null
+++ b/servo/components/style/rule_tree/source.rs
@@ -0,0 +1,75 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![forbid(unsafe_code)]
+
+use crate::properties::PropertyDeclarationBlock;
+use crate::shared_lock::{Locked, SharedRwLockReadGuard};
+use crate::stylesheets::StyleRule;
+use servo_arc::{Arc, ArcBorrow, ArcUnion, ArcUnionBorrow};
+use std::io::Write;
+use std::ptr;
+
+/// A style source for the rule node. It can either be a CSS style rule or a
+/// declaration block.
+///
+/// Note that, even though the declaration block from inside the style rule
+/// could be enough to implement the rule tree, keeping the whole rule provides
+/// more debuggability, and also the ability to show those selectors to
+/// devtools.
+#[derive(Clone, Debug)]
+pub struct StyleSource(ArcUnion<Locked<StyleRule>, Locked<PropertyDeclarationBlock>>);
+
+impl PartialEq for StyleSource {
+ fn eq(&self, other: &Self) -> bool {
+ ArcUnion::ptr_eq(&self.0, &other.0)
+ }
+}
+
+impl StyleSource {
+ /// Creates a StyleSource from a StyleRule.
+ pub fn from_rule(rule: Arc<Locked<StyleRule>>) -> Self {
+ StyleSource(ArcUnion::from_first(rule))
+ }
+
+ #[inline]
+ pub(super) fn key(&self) -> ptr::NonNull<()> {
+ self.0.ptr()
+ }
+
+ /// Creates a StyleSource from a PropertyDeclarationBlock.
+ pub fn from_declarations(decls: Arc<Locked<PropertyDeclarationBlock>>) -> Self {
+ StyleSource(ArcUnion::from_second(decls))
+ }
+
+ pub(super) fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
+ if let Some(ref rule) = self.0.as_first() {
+ let rule = rule.read_with(guard);
+ let _ = write!(writer, "{:?}", rule.selectors);
+ }
+
+ let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
+ }
+
+ /// Read the style source with the given guard, thus obtaining read access
+ /// to the underlying property declaration block.
+ #[inline]
+ pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
+ let block: &Locked<PropertyDeclarationBlock> = match self.0.borrow() {
+ ArcUnionBorrow::First(ref rule) => &rule.get().read_with(guard).block,
+ ArcUnionBorrow::Second(ref block) => block.get(),
+ };
+ block.read_with(guard)
+ }
+
+ /// Returns the style rule if applicable, otherwise None.
+ pub fn as_rule(&self) -> Option<ArcBorrow<Locked<StyleRule>>> {
+ self.0.as_first()
+ }
+
+ /// Returns the declaration block if applicable, otherwise None.
+ pub fn as_declarations(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
+ self.0.as_second()
+ }
+}
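Note that `StyleSource`'s `PartialEq` compares allocations, not contents. A minimal sketch of that identity-based equality, with a plain enum of `Arc`s standing in for `ArcUnion`:

    use std::sync::Arc;

    #[derive(Debug)]
    enum Source {
        Rule(Arc<String>),
        Declarations(Arc<Vec<u8>>),
    }

    // Identity, not structural, equality: two sources are equal only if
    // they point at the very same allocation, as ArcUnion::ptr_eq does.
    impl PartialEq for Source {
        fn eq(&self, other: &Self) -> bool {
            match (self, other) {
                (Source::Rule(a), Source::Rule(b)) => Arc::ptr_eq(a, b),
                (Source::Declarations(a), Source::Declarations(b)) => Arc::ptr_eq(a, b),
                _ => false,
            }
        }
    }

    fn main() {
        let rule = Arc::new(String::from(".foo { color: red }"));
        let a = Source::Rule(rule.clone());
        let b = Source::Rule(rule);
        let c = Source::Rule(Arc::new(String::from(".foo { color: red }")));
        assert!(a == b); // same allocation
        assert!(a != c); // equal contents, different allocation
    }

The real `ArcUnion` additionally packs the discriminant into the pointer's low bit so the whole union stays one word wide; this sketch keeps the ordinary enum tag for simplicity.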
diff --git a/servo/components/style/rule_tree/unsafe_box.rs b/servo/components/style/rule_tree/unsafe_box.rs
new file mode 100644
index 0000000000..eaa441d7b2
--- /dev/null
+++ b/servo/components/style/rule_tree/unsafe_box.rs
@@ -0,0 +1,74 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#![allow(unsafe_code)]
+
+use std::mem::ManuallyDrop;
+use std::ops::Deref;
+use std::ptr;
+
+/// An unsafe box that derefs to `T`.
+pub(super) struct UnsafeBox<T> {
+ inner: ManuallyDrop<Box<T>>,
+}
+
+impl<T> UnsafeBox<T> {
+ /// Creates a new unsafe box.
+ pub(super) fn from_box(value: Box<T>) -> Self {
+ Self {
+ inner: ManuallyDrop::new(value),
+ }
+ }
+
+ /// Creates a new box from a pointer.
+ ///
+ /// # Safety
+ ///
+ /// The input must point to a valid `T` originally allocated by `Box`.
+ pub(super) unsafe fn from_raw(ptr: *mut T) -> Self {
+ Self {
+ inner: ManuallyDrop::new(Box::from_raw(ptr)),
+ }
+ }
+
+ /// Creates a new unsafe box from an existing one.
+ ///
+ /// # Safety
+ ///
+ /// There is no refcounting or whatever else in an unsafe box, so this
+ /// operation can lead to double frees.
+ pub(super) unsafe fn clone(this: &Self) -> Self {
+ Self {
+ inner: ptr::read(&this.inner),
+ }
+ }
+
+ /// Returns a mutable reference to the inner value of this unsafe box.
+ ///
+ /// # Safety
+ ///
+ /// Given `Self::clone`, nothing prevents anyone from creating
+ /// multiple mutable references to the inner value, which is completely UB.
+ pub(super) unsafe fn deref_mut(this: &mut Self) -> &mut T {
+ &mut this.inner
+ }
+
+ /// Drops the inner value of this unsafe box.
+ ///
+ /// # Safety
+ ///
+ /// Given this doesn't consume the unsafe box itself, this has the same
+ /// safety caveats as `ManuallyDrop::drop`.
+ pub(super) unsafe fn drop(this: &mut Self) {
+ ManuallyDrop::drop(&mut this.inner)
+ }
+}
+
+impl<T> Deref for UnsafeBox<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
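The core trick in this file is that `ManuallyDrop` suppresses the automatic destructor, so a bitwise `ptr::read` can alias the box while the destructor runs only on request. A minimal sketch with stand-in names (`alias` and `destroy` mirror `clone` and `drop` above):

    use std::mem::ManuallyDrop;
    use std::ops::Deref;
    use std::ptr;

    // An aliasable box whose destructor only runs when explicitly asked.
    struct RawBox<T> {
        inner: ManuallyDrop<Box<T>>,
    }

    impl<T> RawBox<T> {
        fn new(value: T) -> Self {
            Self {
                inner: ManuallyDrop::new(Box::new(value)),
            }
        }

        // Safety: creates an alias of the same allocation; the caller
        // must ensure `destroy` is called exactly once across all
        // aliases, and that no alias is used after that.
        unsafe fn alias(this: &Self) -> Self {
            Self {
                inner: ptr::read(&this.inner),
            }
        }

        // Safety: must be called exactly once across all aliases.
        unsafe fn destroy(this: &mut Self) {
            ManuallyDrop::drop(&mut this.inner)
        }
    }

    impl<T> Deref for RawBox<T> {
        type Target = T;

        fn deref(&self) -> &T {
            &self.inner
        }
    }

    fn main() {
        let mut a = RawBox::new(42);
        let b = unsafe { RawBox::alias(&a) };
        assert_eq!(*a + *b, 84); // both aliases read the same value
        unsafe { RawBox::destroy(&mut a) }; // `b` must not be used now
    }

In the rule tree this is what lets `StrongRuleNode` and `WeakRuleNode` share one allocation while the refcount, rather than the type system, decides when `UnsafeBox::drop` finally runs.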