summaryrefslogtreecommitdiffstats
path: root/third_party/rust/icu_segmenter/tests/complex_word.rs
blob: c518f91ba66c21a29991217c913b6da2f382b26f (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).

use icu_segmenter::WordSegmenter;

// Additional word segmenter tests with complex string.

#[test]
fn word_break_th() {
    // Exercise both the auto backend and the LSTM backend; they should agree
    // on these inputs.
    for segmenter in [WordSegmenter::new_auto(), WordSegmenter::new_lstm()] {
        // http://wpt.live/css/css-text/word-break/word-break-normal-th-000.html
        let text = "ภาษาไทยภาษาไทย";

        // UTF-16 offsets: every Thai character here is a single code unit.
        let units: Vec<u16> = text.encode_utf16().collect();
        let breakpoints: Vec<usize> = segmenter.segment_utf16(&units).collect();
        assert_eq!(breakpoints, [0, 4, 7, 11, 14], "word segmenter with Thai");

        // UTF-8 offsets: every Thai character here occupies three bytes, so
        // these are the same break positions scaled by the byte widths.
        let breakpoints: Vec<usize> = segmenter.segment_str(text).collect();
        assert_eq!(breakpoints, [0, 12, 21, 33, 42], "word segmenter with Thai");

        // Combine non-Thai and Thai.
        let text = "aภาษาไทยภาษาไทยb";
        let units: Vec<u16> = text.encode_utf16().collect();
        let breakpoints: Vec<usize> = segmenter.segment_utf16(&units).collect();
        assert_eq!(
            breakpoints,
            [0, 1, 5, 8, 12, 15, 16],
            "word segmenter with Thai and ascii"
        );
    }
}

#[test]
fn word_break_my() {
    // Burmese word segmentation over UTF-16 input with the auto backend.
    let segmenter = WordSegmenter::new_auto();

    let text = "မြန်မာစာမြန်မာစာမြန်မာစာ";
    let units: Vec<u16> = text.encode_utf16().collect();
    let breakpoints: Vec<usize> = segmenter.segment_utf16(&units).collect();
    assert_eq!(
        breakpoints,
        [0, 8, 16, 22, 24],
        "word segmenter with Burmese"
    );
}

#[test]
fn word_break_hiragana() {
    // Hiragana segmentation; the auto and dictionary backends must agree.
    for segmenter in [WordSegmenter::new_auto(), WordSegmenter::new_dictionary()] {
        let text = "うなぎうなじ";
        // Each kana is three UTF-8 bytes, so the two words break at 9 and 18.
        let breakpoints: Vec<usize> = segmenter.segment_str(text).collect();
        assert_eq!(breakpoints, [0, 9, 18], "word segmenter with Hiragana");
    }
}

#[test]
fn word_break_mixed_han() {
    // Han text sandwiched between Latin words; auto and dictionary backends
    // must produce the same boundaries.
    for segmenter in [WordSegmenter::new_auto(), WordSegmenter::new_dictionary()] {
        let text = "Welcome龟山岛龟山岛Welcome";
        let breakpoints: Vec<usize> = segmenter.segment_str(text).collect();
        assert_eq!(
            breakpoints,
            [0, 7, 16, 25, 32],
            "word segmenter with Chinese and letter"
        );
    }
}

#[test]
fn word_line_th_wikipedia_auto() {
    use icu_segmenter::LineSegmenter;

    // Thai prose mixed with Latin, Han, and accented-Latin (pinyin) runs.
    // The same text is segmented four ways: {word, line} x {UTF-8, UTF-16}.
    let text = "แพนด้าแดง (อังกฤษ: Red panda, Shining cat; จีน: 小熊貓; พินอิน: Xiǎo xióngmāo) สัตว์เลี้ยงลูกด้วยนมชนิดหนึ่ง มีชื่อวิทยาศาสตร์ว่า Ailurus fulgens";
    assert_eq!(text.len(), 297);
    let utf16: Vec<u16> = text.encode_utf16().collect();
    assert_eq!(utf16.len(), 142);

    let segmenter_word_auto = WordSegmenter::new_auto();
    let segmenter_line_auto = LineSegmenter::new_auto();

    // Word boundaries, UTF-8 byte offsets.
    let breakpoints_word_utf8 = segmenter_word_auto.segment_str(text).collect::<Vec<_>>();
    assert_eq!(
        breakpoints_word_utf8,
        [
            0, 9, 18, 27, 28, 29, 38, 47, 48, 49, 52, 53, 58, 59, 60, 67, 68, 71, 72, 73, 82, 83,
            84, 90, 93, 94, 95, 104, 113, 114, 115, 120, 121, 131, 132, 133, 148, 166, 175, 187,
            193, 205, 220, 221, 227, 239, 272, 281, 282, 289, 290, 297
        ]
    );

    // Line-break opportunities, UTF-8 byte offsets.
    let breakpoints_line_utf8 = segmenter_line_auto.segment_str(text).collect::<Vec<_>>();
    assert_eq!(
        breakpoints_line_utf8,
        [
            0, 9, 18, 27, 28, 38, 47, 49, 53, 60, 68, 73, 82, 84, 87, 90, 95, 104, 113, 115, 121,
            133, 148, 166, 175, 187, 193, 205, 220, 221, 227, 239, 272, 281, 282, 290, 297
        ]
    );

    // Word boundaries, UTF-16 code-unit offsets.
    let breakpoints_word_utf16 = segmenter_word_auto
        .segment_utf16(&utf16)
        .collect::<Vec<_>>();
    assert_eq!(
        breakpoints_word_utf16,
        [
            0, 3, 6, 9, 10, 11, 14, 17, 18, 19, 22, 23, 28, 29, 30, 37, 38, 41, 42, 43, 46, 47, 48,
            50, 51, 52, 53, 56, 59, 60, 61, 65, 66, 74, 75, 76, 81, 87, 90, 94, 96, 100, 105, 106,
            108, 112, 123, 126, 127, 134, 135, 142
        ]
    );

    // Line-break opportunities, UTF-16 code-unit offsets.
    // NOTE(review): the original test repeated the word/UTF-16 block verbatim
    // here (a copy-paste slip), leaving the line segmenter's UTF-16 path
    // untested. The expected offsets below are the UTF-8 line breakpoints
    // above converted to UTF-16 code-unit positions (every non-ASCII
    // character in this text is a BMP code point).
    let breakpoints_line_utf16 = segmenter_line_auto
        .segment_utf16(&utf16)
        .collect::<Vec<_>>();
    assert_eq!(
        breakpoints_line_utf16,
        [
            0, 3, 6, 9, 10, 14, 17, 19, 23, 30, 38, 43, 46, 48, 49, 50, 53, 56, 59, 61, 66, 76,
            81, 87, 90, 94, 96, 100, 105, 106, 108, 112, 123, 126, 127, 135, 142
        ]
    );
}