Commit 6ab6269 — committed by qinwf on Sep 27, 2016 (1 parent: 3db45e6).
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Showing 33 changed files with 1,666 additions and 1,666 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,57 +1,57 @@
# Generated by roxygen2: do not edit by hand
#
# NOTE(review): pre-commit copy of the package NAMESPACE as quoted in the diff.
# The scraped page appended diff-table residue (" | ||") to every line, which
# makes the file invalid; this reconstruction restores the directive content
# exactly as the tokens show it.

S3method("<=",keywords)
S3method("<=",qseg)
S3method("<=",segment)
S3method("<=",simhash)
S3method("<=",tagger)
S3method("[",keywords)
S3method("[",qseg)
S3method("[",segment)
S3method("[",simhash)
S3method("[",tagger)
S3method(print,inv)
S3method(print,jieba)
S3method(print,keywords)
S3method(print,qseg)
S3method(print,simhash)
export(DICTPATH)
export(HMMPATH)
export(IDFPATH)
export(STOPPATH)
export(USERPATH)
export(apply_list)
export(distance)
export(edit_dict)
export(file_coding)
export(filecoding)
export(filter_segment)
export(freq)
export(get_idf)
export(get_qsegmodel)
export(get_tuple)
export(keywords)
export(new_user_word)
export(qseg)
export(query_threshold)
export(reset_qsegmodel)
export(segment)
export(set_qsegmodel)
export(show_dictpath)
export(simhash)
export(simhash_dist)
export(simhash_dist_mat)
export(tagging)
export(tobin)
export(vector_distance)
export(vector_keywords)
export(vector_simhash)
export(vector_tag)
export(words_locate)
export(worker)
import(Rcpp)
import(jiebaRD)
importFrom(utils,file.edit)
importFrom(utils,unzip)
importFrom(utils,write.table)
useDynLib(jiebaR)
# Generated by roxygen2: do not edit by hand
#
# NOTE(review): post-commit copy of the package NAMESPACE from the diff. The
# directive set is identical to the pre-commit copy (the commit's 1,666
# additions / 1,666 deletions are consistent with a line-ending-only
# regeneration — TODO confirm against the raw commit). Diff-table residue
# (" | ||" suffixes) has been stripped to restore a valid file.

S3method("<=",keywords)
S3method("<=",qseg)
S3method("<=",segment)
S3method("<=",simhash)
S3method("<=",tagger)
S3method("[",keywords)
S3method("[",qseg)
S3method("[",segment)
S3method("[",simhash)
S3method("[",tagger)
S3method(print,inv)
S3method(print,jieba)
S3method(print,keywords)
S3method(print,qseg)
S3method(print,simhash)
export(DICTPATH)
export(HMMPATH)
export(IDFPATH)
export(STOPPATH)
export(USERPATH)
export(apply_list)
export(distance)
export(edit_dict)
export(file_coding)
export(filecoding)
export(filter_segment)
export(freq)
export(get_idf)
export(get_qsegmodel)
export(get_tuple)
export(keywords)
export(new_user_word)
export(qseg)
export(query_threshold)
export(reset_qsegmodel)
export(segment)
export(set_qsegmodel)
export(show_dictpath)
export(simhash)
export(simhash_dist)
export(simhash_dist_mat)
export(tagging)
export(tobin)
export(vector_distance)
export(vector_keywords)
export(vector_simhash)
export(vector_tag)
export(words_locate)
export(worker)
import(Rcpp)
import(jiebaRD)
importFrom(utils,file.edit)
importFrom(utils,unzip)
importFrom(utils,write.table)
useDynLib(jiebaR)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
@@ -1,141 +1,141 @@
# This file was generated by Rcpp::compileAttributes
# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393
#
# NOTE(review): pre-commit copy of R/RcppExports.R as quoted in the diff; the
# scraped diff-table residue (" | ||" suffixes, stray "|" lines) has been
# stripped to restore valid R. Every function below is an auto-generated thin
# wrapper that forwards its arguments unchanged, via .Call(), to a native
# routine registered by the jiebaR shared library; argument semantics live in
# the corresponding C++ source, not here.

#' Files encoding detection
#'
#' This function detects the encoding of input files.
#' You can also check encoding with checkenc package which is on GitHub.
#' @param file A file path.
#' @return The encoding of file
#' @author Wu Yongwei, Qin wenfeng
#' @references \url{https://github.com/adah1972/tellenc}
#' @seealso \url{https://github.com/qinwf/checkenc}
#' @export
file_coding <- function(file) {
    .Call('jiebaR_file_coding', PACKAGE = 'jiebaR', file)
}

get_idf_cpp <- function(x, stop_) {
    .Call('jiebaR_get_idf_cpp', PACKAGE = 'jiebaR', x, stop_)
}

get_tuple_list <- function(x, step) {
    .Call('jiebaR_get_tuple_list', PACKAGE = 'jiebaR', x, step)
}

get_tuple_vector <- function(x, step) {
    .Call('jiebaR_get_tuple_vector', PACKAGE = 'jiebaR', x, step)
}

jiebaclass_ptr <- function(dict, model, user, stop) {
    .Call('jiebaR_jiebaclass_ptr', PACKAGE = 'jiebaR', dict, model, user, stop)
}

jiebaclass_ptr_v2 <- function(dict, model, user, stop, uw) {
    .Call('jiebaR_jiebaclass_ptr_v2', PACKAGE = 'jiebaR', dict, model, user, stop, uw)
}

jiebaclass_mix_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_mix_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_mp_cut <- function(x, num, cutter) {
    .Call('jiebaR_jiebaclass_mp_cut', PACKAGE = 'jiebaR', x, num, cutter)
}

jiebaclass_hmm_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_hmm_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_full_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_full_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_query_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_query_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_level_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_level_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_level_cut_pair <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_level_cut_pair', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_tag_tag <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_tag_tag', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_tag_file <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_tag_file', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_tag_vec <- function(code, cutter) {
    .Call('jiebaR_jiebaclass_tag_vec', PACKAGE = 'jiebaR', code, cutter)
}

set_query_threshold <- function(num, cutter) {
    .Call('jiebaR_set_query_threshold', PACKAGE = 'jiebaR', num, cutter)
}

add_user_word <- function(x, tag, cutter) {
    .Call('jiebaR_add_user_word', PACKAGE = 'jiebaR', x, tag, cutter)
}

key_ptr <- function(n, dict, model, idf, stop, user) {
    .Call('jiebaR_key_ptr', PACKAGE = 'jiebaR', n, dict, model, idf, stop, user)
}

key_tag <- function(x, cutter) {
    .Call('jiebaR_key_tag', PACKAGE = 'jiebaR', x, cutter)
}

key_cut <- function(x, cutter) {
    .Call('jiebaR_key_cut', PACKAGE = 'jiebaR', x, cutter)
}

key_keys <- function(x, cutter) {
    .Call('jiebaR_key_keys', PACKAGE = 'jiebaR', x, cutter)
}

sim_ptr <- function(dict, model, idf, stop, user) {
    .Call('jiebaR_sim_ptr', PACKAGE = 'jiebaR', dict, model, idf, stop, user)
}

sim_sim <- function(code, topn, cutter) {
    .Call('jiebaR_sim_sim', PACKAGE = 'jiebaR', code, topn, cutter)
}

sim_vec <- function(code, topn, cutter) {
    .Call('jiebaR_sim_vec', PACKAGE = 'jiebaR', code, topn, cutter)
}

sim_distance <- function(lhs, rhs, topn, cutter) {
    .Call('jiebaR_sim_distance', PACKAGE = 'jiebaR', lhs, rhs, topn, cutter)
}

sim_distance_vec <- function(lcode, rcode, topn, cutter) {
    .Call('jiebaR_sim_distance_vec', PACKAGE = 'jiebaR', lcode, rcode, topn, cutter)
}

u64tobin <- function(x) {
    .Call('jiebaR_u64tobin', PACKAGE = 'jiebaR', x)
}

cpp_ham_dist <- function(x, y) {
    .Call('jiebaR_cpp_ham_dist', PACKAGE = 'jiebaR', x, y)
}

cpp_ham_dist_mat <- function(x, y) {
    .Call('jiebaR_cpp_ham_dist_mat', PACKAGE = 'jiebaR', x, y)
}

get_loc <- function(word) {
    .Call('jiebaR_get_loc', PACKAGE = 'jiebaR', word)
}

words_freq <- function(x) {
    .Call('jiebaR_words_freq', PACKAGE = 'jiebaR', x)
}
# Generated by using Rcpp::compileAttributes() -> do not edit by hand
# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393
#
# NOTE(review): post-commit copy of R/RcppExports.R from the diff. Only the
# generator's header comment differs from the pre-commit copy (regenerated by
# a newer Rcpp); every wrapper body is token-identical. Diff-table residue
# (" | ||" suffixes, stray "|" lines) from the page scrape has been stripped
# to restore valid R. Each function forwards its arguments unchanged, via
# .Call(), to a native routine in the jiebaR shared library.

#' Files encoding detection
#'
#' This function detects the encoding of input files.
#' You can also check encoding with checkenc package which is on GitHub.
#' @param file A file path.
#' @return The encoding of file
#' @author Wu Yongwei, Qin wenfeng
#' @references \url{https://github.com/adah1972/tellenc}
#' @seealso \url{https://github.com/qinwf/checkenc}
#' @export
file_coding <- function(file) {
    .Call('jiebaR_file_coding', PACKAGE = 'jiebaR', file)
}

get_idf_cpp <- function(x, stop_) {
    .Call('jiebaR_get_idf_cpp', PACKAGE = 'jiebaR', x, stop_)
}

get_tuple_list <- function(x, step) {
    .Call('jiebaR_get_tuple_list', PACKAGE = 'jiebaR', x, step)
}

get_tuple_vector <- function(x, step) {
    .Call('jiebaR_get_tuple_vector', PACKAGE = 'jiebaR', x, step)
}

jiebaclass_ptr <- function(dict, model, user, stop) {
    .Call('jiebaR_jiebaclass_ptr', PACKAGE = 'jiebaR', dict, model, user, stop)
}

jiebaclass_ptr_v2 <- function(dict, model, user, stop, uw) {
    .Call('jiebaR_jiebaclass_ptr_v2', PACKAGE = 'jiebaR', dict, model, user, stop, uw)
}

jiebaclass_mix_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_mix_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_mp_cut <- function(x, num, cutter) {
    .Call('jiebaR_jiebaclass_mp_cut', PACKAGE = 'jiebaR', x, num, cutter)
}

jiebaclass_hmm_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_hmm_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_full_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_full_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_query_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_query_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_level_cut <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_level_cut', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_level_cut_pair <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_level_cut_pair', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_tag_tag <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_tag_tag', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_tag_file <- function(x, cutter) {
    .Call('jiebaR_jiebaclass_tag_file', PACKAGE = 'jiebaR', x, cutter)
}

jiebaclass_tag_vec <- function(code, cutter) {
    .Call('jiebaR_jiebaclass_tag_vec', PACKAGE = 'jiebaR', code, cutter)
}

set_query_threshold <- function(num, cutter) {
    .Call('jiebaR_set_query_threshold', PACKAGE = 'jiebaR', num, cutter)
}

add_user_word <- function(x, tag, cutter) {
    .Call('jiebaR_add_user_word', PACKAGE = 'jiebaR', x, tag, cutter)
}

key_ptr <- function(n, dict, model, idf, stop, user) {
    .Call('jiebaR_key_ptr', PACKAGE = 'jiebaR', n, dict, model, idf, stop, user)
}

key_tag <- function(x, cutter) {
    .Call('jiebaR_key_tag', PACKAGE = 'jiebaR', x, cutter)
}

key_cut <- function(x, cutter) {
    .Call('jiebaR_key_cut', PACKAGE = 'jiebaR', x, cutter)
}

key_keys <- function(x, cutter) {
    .Call('jiebaR_key_keys', PACKAGE = 'jiebaR', x, cutter)
}

sim_ptr <- function(dict, model, idf, stop, user) {
    .Call('jiebaR_sim_ptr', PACKAGE = 'jiebaR', dict, model, idf, stop, user)
}

sim_sim <- function(code, topn, cutter) {
    .Call('jiebaR_sim_sim', PACKAGE = 'jiebaR', code, topn, cutter)
}

sim_vec <- function(code, topn, cutter) {
    .Call('jiebaR_sim_vec', PACKAGE = 'jiebaR', code, topn, cutter)
}

sim_distance <- function(lhs, rhs, topn, cutter) {
    .Call('jiebaR_sim_distance', PACKAGE = 'jiebaR', lhs, rhs, topn, cutter)
}

sim_distance_vec <- function(lcode, rcode, topn, cutter) {
    .Call('jiebaR_sim_distance_vec', PACKAGE = 'jiebaR', lcode, rcode, topn, cutter)
}

u64tobin <- function(x) {
    .Call('jiebaR_u64tobin', PACKAGE = 'jiebaR', x)
}

cpp_ham_dist <- function(x, y) {
    .Call('jiebaR_cpp_ham_dist', PACKAGE = 'jiebaR', x, y)
}

cpp_ham_dist_mat <- function(x, y) {
    .Call('jiebaR_cpp_ham_dist_mat', PACKAGE = 'jiebaR', x, y)
}

get_loc <- function(word) {
    .Call('jiebaR_get_loc', PACKAGE = 'jiebaR', word)
}

words_freq <- function(x) {
    .Call('jiebaR_words_freq', PACKAGE = 'jiebaR', x)
}
[Page truncated here: GitHub reported "Oops, something went wrong." — the remaining 31 changed files of this commit were not captured in this scrape.]