Contents:
TokenText
calc_cosine_similarity_opt()
calc_keywords_rating()
get_tokens()
sort_search_list()
LanguageNotFoundException
TokenizeException
find_similar()
HashebleSet
add_nltk_stopwords()
get_normal_form()
get_parsed_text()
get_stopwords_from_nltk()
prepare_dictionary()
remove_part_speech()
replace_yio()
replacing()
spacing()
split_text_and_digits()
tokenize()
use_dictionary_multiple()
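
The names above suggest a pipeline of tokenizing text (with "ё"→"е" normalization and stopword removal) and then ranking documents by cosine similarity of their token counts. As a rough, generic illustration only — not the documented API; `tokenize`, `cosine_similarity`, and `STOPWORDS` here are my own placeholder names — such a pipeline might look like:

import math
import re
from collections import Counter

# Placeholder stopword list; the library appears to pull real stopwords from NLTK.
STOPWORDS = {"и", "в", "на", "the", "a", "of"}


def tokenize(text: str) -> list[str]:
    """Lowercase, replace 'ё' with 'е', split into word/number tokens, drop stopwords."""
    text = text.lower().replace("ё", "е")
    tokens = re.findall(r"[a-zа-я0-9]+", text)
    return [t for t in tokens if t not in STOPWORDS]


def cosine_similarity(a: Counter, b: Counter) -> float:
    """Cosine similarity between two sparse token-count vectors."""
    common = set(a) & set(b)
    dot = sum(a[t] * b[t] for t in common)
    norm_a = math.sqrt(sum(v * v for v in a.values()))
    norm_b = math.sqrt(sum(v * v for v in b.values()))
    return dot / (norm_a * norm_b) if norm_a and norm_b else 0.0


if __name__ == "__main__":
    doc1 = Counter(tokenize("Поиск похожих текстов и ключевых слов"))
    doc2 = Counter(tokenize("Поиск ключевых слов в тексте"))
    print(f"similarity: {cosine_similarity(doc1, doc2):.3f}")

The real functions listed above (e.g. get_normal_form(), remove_part_speech()) also appear to lemmatize and filter by part of speech before comparison; this sketch omits those steps.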