Commit

fix build

AlongWY committed Jun 27, 2023
1 parent ee4701f commit 54f36cd
Showing 3 changed files with 36 additions and 68 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ltp-extension-publish.yml
@@ -41,7 +41,7 @@ jobs:
       - name: Build wheels - universal2
         uses: messense/maturin-action@v1
         with:
-          args: --release --universal2 --out dist -m python/extension/Cargo.toml --features="malloc"
+          args: --release --target universal2-apple-darwin --out dist -m python/extension/Cargo.toml --features="malloc"
       - name: Install built wheel - universal2
         run: |
           pip3 install ltp_extension --no-index --find-links dist --force-reinstall
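Note: this likely tracks maturin's CLI. Recent maturin releases dropped the dedicated `--universal2` flag in favor of the standard Rust target triple passed as `--target universal2-apple-darwin`, and `messense/maturin-action@v1` installs the latest maturin unless a version is pinned, which would explain why the workflow started failing. This reading is an inference from the flag rename; the commit message itself only says "fix build".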
35 changes: 0 additions & 35 deletions rust/ltp/src/perceptron/feature.rs
@@ -1,6 +1,5 @@
 use std::collections::HashMap;
 use std::ops::Deref;
-use std::sync::Arc;
 
 pub trait TraitFeature {
     fn get_with_key(&self, key: &str) -> Option<usize>;
@@ -49,15 +48,6 @@ where
     }
 }
 
-impl<T> TraitFeature for Arc<T>
-where
-    T: TraitFeature,
-{
-    fn get_with_key(&self, key: &str) -> Option<usize> {
-        self.deref().get_with_key(key)
-    }
-}
-
 impl<T> TraitFeaturesTrainUtils for &T
 where
     T: TraitFeaturesTrainUtils,
@@ -83,31 +73,6 @@ where
     }
 }
 
-impl<T> TraitFeaturesTrainUtils for Arc<T>
-where
-    T: TraitFeaturesTrainUtils,
-{
-    fn feature_num(&self) -> usize {
-        self.deref().feature_num()
-    }
-
-    fn insert_feature(&mut self, key: String, value: usize) {
-        self.deref().put_feature(key, value)
-    }
-
-    fn remove_feature(&mut self, key: &str) -> Option<usize> {
-        self.deref().del_feature(key)
-    }
-
-    fn put_feature(&mut self, key: String, value: usize) {
-        self.deref().insert_feature(key, value)
-    }
-
-    fn del_feature(&mut self, key: &str) -> Option<usize> {
-        self.deref().remove_feature(key)
-    }
-}
-
 // HashMap
 
 impl TraitFeature for HashMap<String, usize> {
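Context for the two deletions: `TraitFeature::get_with_key` takes `&self`, so the first `Arc<T>` impl was legal on its own, but `TraitFeaturesTrainUtils` has `&mut self` methods, and `Arc::deref` only ever yields a shared `&T`, so delegating the mutating methods through an `Arc` cannot compile (the deleted bodies also delegate with swapped names: `insert_feature` forwards to `put_feature` and vice versa). Below is a minimal sketch of the failure and the nearest compiling alternative; the `TrainUtils` trait here is a pared-down stand-in, not the crate's real trait:

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Stand-in for the crate's TraitFeaturesTrainUtils; only the
// `&mut self` receiver shape matters here.
trait TrainUtils {
    fn put_feature(&mut self, key: String, value: usize);
}

impl TrainUtils for HashMap<String, usize> {
    fn put_feature(&mut self, key: String, value: usize) {
        self.insert(key, value);
    }
}

impl<T: TrainUtils> TrainUtils for Arc<T> {
    fn put_feature(&mut self, key: String, value: usize) {
        // What the deleted impl tried:
        //     self.deref().put_feature(key, value)
        // fails with error[E0596]: `Arc::deref` yields `&T`, and a
        // shared reference cannot provide a `&mut self` receiver.
        // The nearest compiling form needs `Arc::get_mut`, which only
        // succeeds while the Arc has no other owners:
        Arc::get_mut(self)
            .expect("feature map must not be shared during mutation")
            .put_feature(key, value);
    }
}

fn main() {
    let mut features: Arc<HashMap<String, usize>> = Arc::new(HashMap::new());
    features.put_feature("w0=the".to_string(), 0);
    assert_eq!(features.get("w0=the"), Some(&0));
}
```

Dropping the impls sidesteps the issue entirely; as the trainer.rs change below shows, callers now pass a plain `&T` obtained with `deref()` instead.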
67 changes: 35 additions & 32 deletions rust/ltp/src/perceptron/trainer.rs
@@ -54,9 +54,9 @@ impl<Param: TraitParameter> Default for Algorithm<Param> {
 #[cfg_attr(feature = "serialization", derive(Serialize, Deserialize))]
 #[derive(Default, Debug, Clone)]
 pub struct Trainer<Define, Param = f64>
-    where
-        Define: Definition,
-        Param: TraitParameter + Display,
+where
+    Define: Definition,
+    Param: TraitParameter + Display,
 {
     pub definition: Define,
     pub epoch: usize,
@@ -86,9 +86,9 @@ macro_rules! impl_set_param {
 }
 
 impl<Define, Param> Trainer<Define, Param>
-    where
-        Param: TraitParameter + Display + Sync + Send + 'static,
-        Define: Definition + Sync + Send + 'static,
+where
+    Param: TraitParameter + Display + Sync + Send + 'static,
+    Define: Definition + Sync + Send + 'static,
 {
     pub fn new() -> Self {
         Self {
@@ -154,15 +154,15 @@ impl<Define, Param> Trainer<Define, Param>
         &self,
         model: &Perceptron<Define, Feature, ParamStorage, Param>,
     ) -> Result<(f64, f64, f64)>
-        where
-            Feature: TraitFeature,
-            Param: TraitParameter,
-            ParamStorage: TraitParameterStorage<Param> + TraitParameterStorageTrainUtils<Param>,
-            Define: Definition,
+    where
+        Feature: TraitFeature,
+        Param: TraitParameter,
+        ParamStorage: TraitParameterStorage<Param> + TraitParameterStorageTrainUtils<Param>,
+        Define: Definition,
     {
         if let Some(eval_set) = &self.eval_set {
             #[cfg(feature = "parallel")]
-                let result = {
+            let result = {
                 let pool = rayon::ThreadPoolBuilder::new()
                     .num_threads(self.eval_threads)
                     .build()
@@ -183,7 +183,7 @@ impl<Define, Param> Trainer<Define, Param>
                 })
             };
             #[cfg(not(feature = "parallel"))]
-                let result = eval_set
+            let result = eval_set
                 .iter()
                 .map(|(feature, labels)| model.evaluate(feature, labels))
                 .reduce(
@@ -209,17 +209,17 @@ impl<Define, Param> Trainer<Define, Param>
     pub fn build<Feature, ParamStorage>(
         &self,
     ) -> Result<Perceptron<Define, Feature, ParamStorage, Param>>
-        where
-            ParamStorage: TraitParameterStorage<Param>
+    where
+        ParamStorage: TraitParameterStorage<Param>
             + TraitParameterStorageTrainUtils<Param>
             + TraitParameterStorageCompressUtils<Param>
             + Send
             + Sync
             + 'static,
-            Feature: TraitFeature
+        Feature: TraitFeature
             + TraitFeaturesTrainUtils
             + TraitFeatureCompressUtils
-            + ToOwned<Owned=Feature>
+            + ToOwned<Owned = Feature>
             + Send
             + Sync
             + 'static,
@@ -280,9 +280,9 @@ impl<Define, Param> Trainer<Define, Param>
         &self,
         features: Feature,
     ) -> Result<Perceptron<Define, Feature, ParamStorage, Param>>
-        where
-            ParamStorage: TraitParameterStorage<Param> + TraitParameterStorageTrainUtils<Param>,
-            Feature: TraitFeature + TraitFeaturesTrainUtils,
+    where
+        ParamStorage: TraitParameterStorage<Param> + TraitParameterStorageTrainUtils<Param>,
+        Feature: TraitFeature + TraitFeaturesTrainUtils,
     {
         let label_num = self.definition.label_num();
         let bias = if self.definition.use_viterbi() {
@@ -343,9 +343,9 @@ impl<Define, Param> Trainer<Define, Param>
         features: Feature,
         pa_mode: &PaMode<Param>,
     ) -> Result<Perceptron<Define, Feature, ParamStorage, Param>>
-        where
-            ParamStorage: TraitParameterStorage<Param> + TraitParameterStorageTrainUtils<Param>,
-            Feature: TraitFeature + TraitFeaturesTrainUtils,
+    where
+        ParamStorage: TraitParameterStorage<Param> + TraitParameterStorageTrainUtils<Param>,
+        Feature: TraitFeature + TraitFeaturesTrainUtils,
     {
         let label_num = self.definition.label_num();
         let bias = if self.definition.use_viterbi() {
@@ -412,16 +412,16 @@ impl<Define, Param> Trainer<Define, Param>
         features: Feature,
         threads: usize,
     ) -> Result<Perceptron<Define, Feature, ParamStorage, Param>>
-        where
-            ParamStorage: TraitParameterStorage<Param>
+    where
+        ParamStorage: TraitParameterStorage<Param>
             + TraitParameterStorageTrainUtils<Param>
             + Send
             + Sync
             + 'static,
-            Feature: TraitFeature
+        Feature: TraitFeature
             + TraitFeaturesTrainUtils
             + TraitFeatureCompressUtils
-            + ToOwned<Owned=Feature>
+            + ToOwned<Owned = Feature>
             + Send
             + Sync
             + 'static,
@@ -458,8 +458,11 @@ impl<Define, Param> Trainer<Define, Param>
             let definition = self.definition.clone();
             let parameters = parameters.pop().unwrap();
             children.push(thread::spawn(move || -> ParamStorage {
-                let mut perceptron =
-                    Perceptron::new_with_parameters(definition, clone_feature, parameters);
+                let mut perceptron = Perceptron::new_with_parameters(
+                    definition,
+                    clone_feature.deref(),
+                    parameters,
+                );
                 let shared = train_set_clone.read().unwrap();
                 if let Some(chunk) = shared.chunks(chunk_size).nth(thread) {
                     for (feature, labels) in chunk {
@@ -516,9 +519,9 @@ impl<Define, Param> Trainer<Define, Param>
 }
 
 impl<Define, Param> Display for Trainer<Define, Param>
-    where
-        Define: Definition,
-        Param: TraitParameter + Display,
+where
+    Define: Definition,
+    Param: TraitParameter + Display,
 {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         writeln!(f, "Trainer {{")?;
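Most of the hunks in trainer.rs appear to be rustfmt normalization: where clauses moved back to the item's indentation level, and `ToOwned<Owned=Feature>` respaced to `ToOwned<Owned = Feature>`. The one substantive change is in the threaded training loop at `@@ -458,8 +458,11 @@`: with the `Arc<T>` impls gone from feature.rs, each worker thread now hands `clone_feature.deref()`, a plain `&T`, to `Perceptron::new_with_parameters`, which the retained reference impls (such as `impl<T> TraitFeaturesTrainUtils for &T`) can satisfy. A compact sketch of that pattern, with a simplified `TraitFeature` and a hypothetical `lookup` helper standing in for the perceptron API:

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Pared down from rust/ltp/src/perceptron/feature.rs.
trait TraitFeature {
    fn get_with_key(&self, key: &str) -> Option<usize>;
}

impl TraitFeature for HashMap<String, usize> {
    fn get_with_key(&self, key: &str) -> Option<usize> {
        self.get(key).copied()
    }
}

// Read-only delegation through a shared reference is fine; this is
// the kind of impl feature.rs keeps while the Arc ones were dropped.
impl<T: TraitFeature> TraitFeature for &T {
    fn get_with_key(&self, key: &str) -> Option<usize> {
        (**self).get_with_key(key)
    }
}

// Hypothetical consumer, generic over the feature storage in the same
// way Perceptron::new_with_parameters is.
fn lookup<F: TraitFeature>(features: F, key: &str) -> Option<usize> {
    features.get_with_key(key)
}

fn main() {
    let shared = Arc::new(HashMap::from([("w0=the".to_string(), 7)]));
    // Mirrors `clone_feature.deref()` in the hunk above: borrow a
    // `&HashMap` out of the Arc and pass that to the generic API.
    assert_eq!(lookup(&*shared, "w0=the"), Some(7));
}
```

Since every worker only reads the shared feature map, borrowing `&T` out of the `Arc` at each call site is both sufficient and cheaper to reason about than blanket `Arc` impls.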
