1 parent f94caf2 commit bff3f6a
tests/models/test_llamacpp.py
@@ -182,7 +182,7 @@ class Foo(BaseModel):
     generator = model.stream("foo?", Foo)
 
     # NOTE: The first few chunks may be empty (role info, control tokens, finish chunks)
-    # Relavant issue: https://github.com/abetlen/llama-cpp-python/issues/372
+    # Relevant issue: https://github.com/abetlen/llama-cpp-python/issues/372
     first_non_empty_token = next(x for x in generator if x)
     assert first_non_empty_token == "{"
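
The corrected comment documents a real quirk of streaming backends: llama-cpp-python can emit chunks with no text (role info, control tokens, finish chunks) before the first real token arrives, so the test skips falsy chunks. Below is a minimal, self-contained sketch of that skip-empty-chunks pattern; fake_stream is a hypothetical stand-in for model.stream("foo?", Foo) and is not part of the repository.

# Minimal sketch of the skip-empty-chunks pattern from the test above.
# `fake_stream` is a hypothetical stand-in for model.stream(...); real
# llama-cpp-python streams may begin with several empty-text chunks.
def fake_stream():
    yield ""                  # e.g. a role/control chunk with no text
    yield ""                  # e.g. a finish/metadata chunk
    yield "{"                 # first real token of the structured JSON output
    yield '"name": "foo"}'

# Skip falsy (empty) chunks and take the first one that carries text.
first_non_empty_token = next(x for x in fake_stream() if x)
assert first_non_empty_token == "{"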