Skip to content

Commit 6081bdf

Browse files
Alexandre Landeau
authored and committed
fixed unit tests
1 parent 97bb661 commit 6081bdf

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

tests/python/unit/test_spacy_tokenizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def test_tokenize_df_multilingual():
4545
output_df = tokenizer.tokenize_df(df=input_df, text_column="input_text", language_column="language")
4646
tokenized_documents = output_df[tokenizer.tokenized_column]
4747
tokenized_documents_length = [len(doc) for doc in tokenized_documents]
48-
assert tokenized_documents_length == [12, 8, 13, 9]
48+
assert tokenized_documents_length == [12, 8, 19, 9]
4949

5050

5151
def test_tokenize_df_long_text():

tests/python/unit/test_wordcloud_visualizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def test_tokenize_and_count_multilingual():
5454
assert frequencies == [
5555
("en", Counter({"hope": 1, "Nothing": 3, "fear": 1, "free": 1})),
5656
("fr", Counter({"sanglots": 1, "longs": 1, "violons": 1, "automne": 1})),
57-
("zh", Counter({"子": 1, "曰": 1, "學而": 1, "不思則": 1, "罔": 1, "思而": 1, "不學則": 1}),),
57+
('zh', Counter({'則': 2, '學': 2, '思': 2, '子': 1, '曰': 1, '罔': 1}))
5858
]
5959

6060

0 commit comments

Comments (0)