Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions components/segmenter/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ default = ["compiled_data", "auto"]
serde = ["dep:serde", "potential_utf/serde", "zerovec/serde", "icu_collections/serde", "icu_provider/serde"]
datagen = ["serde", "dep:databake", "potential_utf/databake", "zerovec/databake", "icu_collections/databake", "icu_provider/export"]
lstm = ["dep:core_maths"]
unstable = []
auto = ["lstm"] # Enables [try_]new_auto constructors
compiled_data = ["dep:icu_segmenter_data", "dep:icu_locale", "icu_locale?/compiled_data", "icu_provider/baked"]

Expand Down
48 changes: 38 additions & 10 deletions components/segmenter/examples/experimental_segmenter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,25 +13,52 @@ mod cnn;

use adaboost::Predictor;
use cnn::{CnnSegmenter, RawCnnData};
use icu_segmenter::provider::Baked;
use icu_segmenter::{options::WordBreakOptions, WordSegmenter, WordSegmenterBorrowed};
use std::time::SystemTime;
use std::time::Instant;

const REPETITIONS: usize = 1000;

fn main_adaboost(args: &[String]) {
let segmenter = Predictor::for_test();
fn main_radaboost(args: &[String]) {
let irg = Baked::SINGLETON_SEGMENTER_UNIHAN_RADICAL_V1;
let segmenter = Predictor::for_test(irg);
let s = &args[0];
let start_time = SystemTime::now();
let start_time = Instant::now();
for _ in 0..REPETITIONS {
segmenter.predict(s);
}
let elapsed = start_time.elapsed().unwrap();
let elapsed = start_time.elapsed();
println!("Output:");
let mut prev = 0;
for breakpoint in segmenter.predict_breakpoints(s) {
print!("{}|", &s[prev..breakpoint]);
prev = breakpoint;
}
if prev < s.len() {
print!("{}", &s[prev..]);
}
println!();
println!("{} repetitions done in: {:?}", REPETITIONS, elapsed);
}

/// Benchmarks the Thai adaboost predictor on `args[0]`, then prints the
/// segmented text (`|`-separated) and the elapsed time.
fn main_thadaboost(args: &[String]) {
    let radical_data = Baked::SINGLETON_SEGMENTER_UNIHAN_RADICAL_V1;
    let predictor = Predictor::for_test_thai(radical_data);
    let input = &args[0];
    // Only the repeated prediction passes are timed.
    let clock = Instant::now();
    (0..REPETITIONS).for_each(|_| {
        predictor.predict_thai(input);
    });
    let elapsed = clock.elapsed();
    println!("Output:");
    let mut cursor = 0;
    for stop in predictor.predict_thai_breakpoints(input) {
        print!("{}|", &input[cursor..stop]);
        cursor = stop;
    }
    if cursor < input.len() {
        print!("{}", &input[cursor..]);
    }
    println!();
    println!("{} repetitions done in: {:?}", REPETITIONS, elapsed);
}
Expand All @@ -55,11 +82,11 @@ fn main_cnn(args: &[String]) {
.unwrap();
let segmenter = CnnSegmenter::new(&cnndata);
let s = &args[0];
let start_time = SystemTime::now();
let start_time = Instant::now();
for _ in 0..REPETITIONS {
segmenter.segment_str(s);
}
let elapsed = start_time.elapsed().unwrap();
let elapsed = start_time.elapsed();
println!("Output:");
let mut prev = 0;
for breakpoint in segmenter.segment_str(s).to_breakpoints() {
Expand All @@ -82,11 +109,11 @@ fn main_lstm(mut args: &[String]) {
}

fn run_word_segmenter(segmenter: WordSegmenterBorrowed, s: &str) {
let start_time = SystemTime::now();
let start_time = Instant::now();
for _ in 0..REPETITIONS {
segmenter.segment_str(s).count(); // consume the iterator
}
let elapsed = start_time.elapsed().unwrap();
let elapsed = start_time.elapsed();
println!("Output:");
let mut prev = 0;
for breakpoint in segmenter.segment_str(s) {
Expand All @@ -104,7 +131,8 @@ fn main() {
return;
}
match args[1].as_str() {
"adaboost" => main_adaboost(&args[2..]),
"radaboost" => main_radaboost(&args[2..]),
"thadaboost" => main_thadaboost(&args[2..]),
"dict" | "dictionary" => main_dict(&args[2..]),
"cnn" => main_cnn(&args[2..]),
"lstm" => main_lstm(&args[2..]),
Expand Down
5 changes: 5 additions & 0 deletions components/segmenter/src/provider/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
#![allow(clippy::exhaustive_structs, clippy::exhaustive_enums)]

mod lstm;
mod radical;
pub use lstm::*;
pub use radical::*;
Comment on lines 18 to +21
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
mod lstm;
mod radical;
pub use lstm::*;
pub use radical::*;
mod lstm;
#[cfg(feature = "unstable")]
pub mod radical;
pub use lstm::*;


use crate::options::WordType;
use icu_collections::codepointtrie::CodePointTrie;
Expand Down Expand Up @@ -51,6 +53,8 @@ const _: () = {
impl_segmenter_break_line_v1!(Baked);
#[cfg(feature = "lstm")]
impl_segmenter_lstm_auto_v1!(Baked);
#[cfg(feature = "unstable")]
impl_segmenter_unihan_radical_v1!(Baked);
impl_segmenter_break_word_v1!(Baked);
impl_segmenter_break_word_override_v1!(Baked);
impl_segmenter_break_sentence_override_v1!(Baked);
Expand Down Expand Up @@ -135,6 +139,7 @@ pub const MARKERS: &[DataMarkerInfo] = &[
SegmenterDictionaryAutoV1::INFO,
SegmenterDictionaryExtendedV1::INFO,
SegmenterLstmAutoV1::INFO,
SegmenterUnihanRadicalV1::INFO,
];

/// Pre-processed Unicode data in the form of tables to be used for rule-based breaking.
Expand Down
32 changes: 32 additions & 0 deletions components/segmenter/src/provider/radical.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).

//! Data provider struct definitions for radicals.

use icu_collections::codepointtrie::CodePointTrie;
use icu_provider::prelude::*;

/// Data for Unihan IRG sources (Radicals).
///
/// Wraps a code point trie that maps each code point to the numeric ID of
/// its IRG-source radical (0 where no radical is recorded). Used as a
/// feature source by the experimental radical-based segmenters.
#[derive(Debug, PartialEq, Clone, yoke::Yokeable, zerofrom::ZeroFrom)]
#[cfg_attr(feature = "datagen", derive(serde::Serialize, databake::Bake))]
#[cfg_attr(feature = "datagen", databake(path = icu_segmenter::provider))]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
pub struct UnihanIrgData<'data> {
    /// Trie mapping code points to their IRG source radical ID (u8).
    #[cfg_attr(feature = "serde", serde(borrow))]
    pub trie: CodePointTrie<'data, u8>,
}

icu_provider::data_struct!(
UnihanIrgData<'_>,
#[cfg(feature = "datagen")]
);

icu_provider::data_marker!(
    /// Marker for the singleton Unihan IRG radical data: a code point trie
    /// mapping Han characters to their IRG-source radical IDs, consumed by
    /// the experimental radical-feature segmentation models.
    SegmenterUnihanRadicalV1,
    "segmenter/unihan/radical/v1",
    UnihanIrgData<'static>,
    is_singleton = true
);
77 changes: 35 additions & 42 deletions components/segmenter/tests/adaboost/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,59 +4,38 @@

#![allow(dead_code)]

use icu_segmenter::provider::{Baked, UnihanIrgData};
use std::collections::HashMap;

fn load_irg_from_baked() -> &'static UnihanIrgData<'static> {
Baked::SINGLETON_SEGMENTER_UNIHAN_RADICAL_V1
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

inline. you can even inline this all the way into Predictor::for_test

}

static MODEL_FOR_TEST: &str = include_str!("model.json");
static MODEL_FOR_TEST_THAI: &str = include_str!("model_thai.json");

static CODEPOINTS: &[u16] = &[
20008, 20022, 20031, 20057, 20101, 20108, 20128, 20154, 20799, 20837, 20843, 20866, 20886,
20907, 20960, 20981, 20992, 21147, 21241, 21269, 21274, 21304, 21313, 21340, 21353, 21378,
21430, 21448, 21475, 22231, 22303, 22763, 22786, 22794, 22805, 22823, 22899, 23376, 23424,
23544, 23567, 23586, 23608, 23662, 23665, 24027, 24037, 24049, 24062, 24178, 24186, 24191,
24308, 24318, 24331, 24339, 24400, 24417, 24435, 24515, 25096, 25142, 25163, 25903, 25908,
25991, 26007, 26020, 26041, 26080, 26085, 26352, 26376, 26408, 27424, 27490, 27513, 27571,
27595, 27604, 27611, 27663, 27668, 27700, 28779, 29226, 29238, 29243, 29247, 29255, 29273,
29275, 29356, 29572, 29577, 29916, 29926, 29976, 29983, 29992, 30000, 30091, 30098, 30326,
30333, 30382, 30399, 30446, 30683, 30690, 30707, 31034, 31160, 31166, 31348, 31435, 31481,
31859, 31992, 32566, 32593, 32650, 32701, 32769, 32780, 32786, 32819, 32895, 32905, 33251,
33258, 33267, 33276, 33292, 33307, 33311, 33390, 33394, 33400, 34381, 34411, 34880, 34892,
34915, 35198, 35211, 35282, 35328, 35895, 35910, 35925, 35960, 35997, 36196, 36208, 36275,
36523, 36554, 36763, 36784, 36789, 37009, 37193, 37318, 37324, 37329, 38263, 38272, 38428,
38582, 38585, 38632, 38737, 38750, 38754, 38761, 38859, 38893, 38899, 38913, 39080, 39131,
39135, 39318, 39321, 39340, 39592, 39640, 39647, 39717, 39727, 39730, 39740, 39770, 40165,
40565, 40575, 40613, 40635, 40643, 40653, 40657, 40697, 40701, 40718, 40723, 40736, 40763,
40778, 40786, 40845, 40860, 40864,
];

pub(crate) fn get_radical(ch: char) -> u8 {
let id = ch as u32;

if !(19968..=40869).contains(&id) {
return 0;
}

let idx = CODEPOINTS.partition_point(|&b| (b as u32) <= id);
(idx as u8) + 1
pub(crate) fn get_radical(irg: &UnihanIrgData<'_>, ch: char) -> u8 {
irg.trie.get(ch)
}

pub(crate) struct Predictor {
pub(crate) struct Predictor<'a> {
pub(crate) model: HashMap<String, HashMap<String, i16>>,
irg: &'a UnihanIrgData<'a>,
}

impl Predictor {
pub(crate) fn from_json(json: &str) -> Self {
impl<'a> Predictor<'a> {
/// Builds a predictor from a JSON-encoded score table.
///
/// NOTE(review): a malformed model silently degrades to an empty table
/// (everything scores 0) via `unwrap_or_default` — confirm this
/// best-effort fallback is intentional for the test harness.
pub(crate) fn from_json(json: &str, irg: &'a UnihanIrgData<'a>) -> Self {
    let model: HashMap<String, HashMap<String, i16>> =
        serde_json::from_str(json).unwrap_or_default();
    Self { model, irg }
}

pub(crate) fn for_test() -> Self {
Self::from_json(MODEL_FOR_TEST)
pub(crate) fn for_test(irg: &'a UnihanIrgData<'a>) -> Self {
Self::from_json(MODEL_FOR_TEST, irg)
}

pub(crate) fn for_test_thai() -> Self {
Self::from_json(MODEL_FOR_TEST_THAI)
pub(crate) fn for_test_thai(irg: &'a UnihanIrgData<'a>) -> Self {
Self::from_json(MODEL_FOR_TEST_THAI, irg)
}

pub(crate) fn predict(&self, sentence: &str) -> Vec<i16> {
Expand All @@ -73,15 +52,15 @@ impl Predictor {

let mut score: i16 = 4;

let rad4 = get_radical(c);
let rad4 = get_radical(self.irg, c);
if rad4 != 0 {
if let Some(map) = self.model.get("RSRID") {
let key = format!("{}:{}", c_prev, rad4);
score += map.get(&key).copied().unwrap_or(0);
}
}

let rad3 = get_radical(c_prev);
let rad3 = get_radical(self.irg, c_prev);
if rad3 != 0 {
if let Some(map) = self.model.get("LSRID") {
let key = format!("{}:{}", rad3, c);
Expand Down Expand Up @@ -247,6 +226,18 @@ impl Predictor {
}
breakpoints
}

/// Converts per-character Thai scores into byte-offset breakpoints.
///
/// The returned list always starts with 0; a positive score after a
/// character marks a boundary at the byte offset just past it.
pub(crate) fn predict_thai_breakpoints(&self, sentence: &str) -> Vec<usize> {
    let scores = self.predict_thai(sentence);
    let mut breakpoints = vec![0];
    let mut offset = 0;
    for (score, ch) in scores.iter().copied().zip(sentence.chars()) {
        offset += ch.len_utf8();
        if score > 0 {
            breakpoints.push(offset);
        }
    }
    breakpoints
}
}

#[cfg(test)]
Expand All @@ -271,7 +262,8 @@ fn python_test_output_thai() -> Vec<i16> {

#[test]
fn main() {
let predictor = Predictor::for_test();
let irg = load_irg_from_baked();
let predictor = Predictor::for_test(irg);

let sentence =
"根据最新的财报数据显示,该公司的市盈率已经达到了历史最低点,但是其核心竞争力依然保持稳定增长的态势。"
Expand All @@ -286,8 +278,9 @@ fn main() {
fn rust_matches_python_probs() {
let python = python_test_output();
let python_thai = python_test_output_thai();
let predictor = Predictor::for_test();
let predictor_thai = Predictor::for_test_thai();
let irg = load_irg_from_baked();
let predictor = Predictor::for_test(irg);
let predictor_thai = Predictor::for_test_thai(irg);

let sentence =
"根据最新的财报数据显示,该公司的市盈率已经达到了历史最低点,但是其核心竞争力依然保持稳定增长的态势。"
Expand Down
4 changes: 2 additions & 2 deletions components/segmenter/tests/adaboost/python_test_output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
2404
-1604
2824
440
320
-525
5881
4892
Expand All @@ -46,4 +46,4 @@
4605
4324
-2139
5215
5215
2 changes: 2 additions & 0 deletions provider/data/segmenter/data/mod.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions provider/data/segmenter/fingerprints.csv
Original file line number Diff line number Diff line change
Expand Up @@ -24,3 +24,4 @@ segmenter/lstm/auto/v1, und/Burmese_codepoints_exclusive_model4_heavy, 91365B, 9
segmenter/lstm/auto/v1, und/Khmer_codepoints_exclusive_model4_heavy, 74665B, 74368B, cc6fe9f66fed196d
segmenter/lstm/auto/v1, und/Lao_codepoints_exclusive_model4_heavy, 72160B, 71863B, 3f52a4025c7d618f
segmenter/lstm/auto/v1, und/Thai_codepoints_exclusive_model4_heavy, 72327B, 72030B, 4486b38238d7c651
segmenter/unihan/radical/v1, <singleton>, 13080B, 13020B, d875e702fbedc249
2 changes: 2 additions & 0 deletions provider/data/segmenter/stubdata/mod.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions provider/registry/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -341,6 +341,7 @@ macro_rules! registry(
icu::experimental::transliterate::provider::TransliteratorRulesV1: TransliteratorRulesV1,
icu::experimental::units::provider::UnitsInfoV1: UnitsInfoV1,
icu::plurals::provider::PluralsRangesV1: PluralsRangesV1,
icu::segmenter::provider::SegmenterUnihanRadicalV1: SegmenterUnihanRadicalV1,
);
}
);
Expand Down
2 changes: 1 addition & 1 deletion provider/source/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ num-traits = { workspace = true, optional = true }
postcard = { workspace = true, features = ["alloc"] }
icu_provider_export = { workspace = true, features = ["fs_exporter", "baked_exporter", "rayon"] }
icu_provider = { workspace = true, features = ["deserialize_postcard_1"] }
icu_segmenter = { path = "../../components/segmenter", features = ["lstm"] }
icu_segmenter = { path = "../../components/segmenter", features = ["lstm", "unstable"] }
simple_logger = { workspace = true }
icu = { path = "../../components/icu", default-features = false, features = ["unstable"] }
num-bigint = { workspace = true }
Expand Down
Loading
Loading