// compiler/rustc_query_system/src/ich/impls_syntax.rs

//! This module contains `HashStable` implementations for various data types
//! from `rustc_ast`, `rustc_span`, and `rustc_feature`, in no particular order.

use crate::ich::StableHashingContext;

use rustc_ast as ast;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_span::{BytePos, NormalizedPos, SourceFile};
use std::assert_matches::assert_matches;

use smallvec::SmallVec;

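// Mark `StableHashingContext` as a valid hashing context for `rustc_target` types.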
impl<'ctx> rustc_target::HashStableContext for StableHashingContext<'ctx> {}

impl<'a> HashStable<StableHashingContext<'a>> for [ast::Attribute] {
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
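        // Fast path: an empty attribute list only needs its length (zero) hashed.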
        if self.is_empty() {
            self.len().hash_stable(hcx, hasher);
            return;
        }

        // Some attributes are always ignored during hashing.
        let filtered: SmallVec<[&ast::Attribute; 8]> = self
            .iter()
            .filter(|attr| {
                !attr.is_doc_comment()
                    && !attr.ident().map_or(false, |ident| hcx.is_ignored_attr(ident.name))
            })
            .collect();

        filtered.len().hash_stable(hcx, hasher);
        for attr in filtered {
            attr.hash_stable(hcx, hasher);
        }
    }
}

impl<'ctx> rustc_ast::HashStableContext for StableHashingContext<'ctx> {
    fn hash_attr(&mut self, attr: &ast::Attribute, hasher: &mut StableHasher) {
        // Make sure that these have been filtered out.
        debug_assert!(!attr.ident().map_or(false, |ident| self.is_ignored_attr(ident.name)));
        debug_assert!(!attr.is_doc_comment());

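        // `id` is deliberately not hashed below: `AttrId`s are not stable
        // across compilation sessions.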
        let ast::Attribute { kind, id: _, style, span } = attr;
        if let ast::AttrKind::Normal(normal) = kind {
            normal.item.hash_stable(self, hasher);
            style.hash_stable(self, hasher);
            span.hash_stable(self, hasher);
            assert_matches!(
                normal.tokens.as_ref(),
                None,
                "Tokens should have been removed during lowering!"
            );
        } else {
            unreachable!();
        }
    }
}

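// `SourceFile` is hashed field by field: absolute positions are reduced to
// file-relative offsets via the helpers below, the file name is represented
// by its `name_hash`, and the raw source text is skipped in favor of `src_hash`.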
impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        let SourceFile {
            name: _, // We hash the smaller name_hash instead of this
            name_hash,
            cnum,
            // Do not hash the source as it is not encoded
            src: _,
            ref src_hash,
            external_src: _,
            start_pos,
            end_pos: _,
            lines: _,
            ref multibyte_chars,
            ref non_narrow_chars,
            ref normalized_pos,
        } = *self;

        (name_hash as u64).hash_stable(hcx, hasher);

        src_hash.hash_stable(hcx, hasher);

        // We are always in `Lines` form by the time we reach here.
        assert!(self.lines.borrow().is_lines());
        self.lines(|lines| {
            // We only hash the relative position within this source_file
            lines.len().hash_stable(hcx, hasher);
            for &line in lines.iter() {
                stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
            }
        });

        // We only hash the relative position within this source_file
        multibyte_chars.len().hash_stable(hcx, hasher);
        for &char_pos in multibyte_chars.iter() {
            stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
        }

        non_narrow_chars.len().hash_stable(hcx, hasher);
        for &char_pos in non_narrow_chars.iter() {
            stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
        }

        normalized_pos.len().hash_stable(hcx, hasher);
        for &char_pos in normalized_pos.iter() {
            stable_normalized_pos(char_pos, start_pos).hash_stable(hcx, hasher);
        }

        cnum.hash_stable(hcx, hasher);
    }
}

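// The helpers below reduce absolute positions to offsets relative to the start
// of the source file, so the hash is unaffected by where the file happens to
// sit in the global source map.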
fn stable_byte_pos(pos: BytePos, source_file_start: BytePos) -> u32 {
    pos.0 - source_file_start.0
}

fn stable_multibyte_char(mbc: rustc_span::MultiByteChar, source_file_start: BytePos) -> (u32, u32) {
    let rustc_span::MultiByteChar { pos, bytes } = mbc;

    (pos.0 - source_file_start.0, bytes as u32)
}

fn stable_non_narrow_char(
    swc: rustc_span::NonNarrowChar,
    source_file_start: BytePos,
) -> (u32, u32) {
    let pos = swc.pos();
    let width = swc.width();

    (pos.0 - source_file_start.0, width as u32)
}

fn stable_normalized_pos(np: NormalizedPos, source_file_start: BytePos) -> (u32, u32) {
    let NormalizedPos { pos, diff } = np;

    (pos.0 - source_file_start.0, diff)
}
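
// A minimal illustrative check (hypothetical, not part of the upstream file):
// it only demonstrates that the helpers above produce file-relative offsets.
#[cfg(test)]
mod stable_offset_sketch {
    use super::*;

    #[test]
    fn positions_are_relative_to_file_start() {
        // A byte at absolute position 110 in a file starting at position 100
        // contributes the offset 10 to the hash.
        assert_eq!(stable_byte_pos(BytePos(110), BytePos(100)), 10);
    }
}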

impl<'tcx> HashStable<StableHashingContext<'tcx>> for rustc_feature::Features {
    fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) {
        // Unfortunately we cannot exhaustively list fields here, since the
        // struct is macro generated.
        self.declared_lang_features.hash_stable(hcx, hasher);
        self.declared_lib_features.hash_stable(hcx, hasher);

        self.walk_feature_fields(|feature_name, value| {
            feature_name.hash_stable(hcx, hasher);
            value.hash_stable(hcx, hasher);
        });
    }
}

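// Likewise, mark `StableHashingContext` as a valid hashing context for `rustc_type_ir` types.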
impl<'ctx> rustc_type_ir::HashStableContext for StableHashingContext<'ctx> {}