-
Notifications
You must be signed in to change notification settings - Fork 66
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: add build_model function #584
Changes from all commits
b0cb260
af0fa12
65b4f46
e736eff
cbb4e03
4819826
c861967
a2d1f3e
652306f
97ea276
770bfb1
f35f98f
0966215
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
import numpy as np | ||
import pytest | ||
from _finetuner.excepts import SelectModelRequired | ||
from _finetuner.models.inference import ONNXRuntimeInferenceEngine, TorchInferenceEngine | ||
from docarray import Document, DocumentArray | ||
|
||
import finetuner | ||
|
||
|
||
@pytest.mark.parametrize(
    'descriptor, select_model, is_onnx, expect_error',
    [
        ('bert-base-cased', None, False, None),
        ('bert-base-cased', None, True, None),
        ('openai/clip-vit-base-patch16', 'clip-text', False, None),
        ('openai/clip-vit-base-patch16', 'clip-vision', False, None),
        ('openai/clip-vit-base-patch16', None, False, SelectModelRequired),
        ('MADE UP MODEL', None, False, ValueError),
    ],
)
def test_build_model(descriptor, select_model, is_onnx, expect_error):
    """Build a model from a descriptor and check the returned engine type.

    When ``expect_error`` is an exception type, ``build_model`` must raise it;
    otherwise the returned object must be the inference engine matching the
    ``is_onnx`` flag.
    """
    if expect_error:
        # The call raises before any assignment, so `model` would never be
        # bound on this path. Return early instead of falling through to the
        # isinstance asserts (which would hit an unbound name).
        with pytest.raises(expect_error):
            finetuner.build_model(
                name=descriptor,
                select_model=select_model,
                is_onnx=is_onnx,
            )
        return

    model = finetuner.build_model(
        name=descriptor, select_model=select_model, is_onnx=is_onnx
    )

    # `is_onnx` selects the runtime backend, so the engine type must match it.
    if is_onnx:
        assert isinstance(model, ONNXRuntimeInferenceEngine)
    else:
        assert isinstance(model, TorchInferenceEngine)
|
||
|
||
@pytest.mark.parametrize('is_onnx', [True, False])
def test_build_model_embedding(is_onnx):
    """A built model should embed a DocumentArray in place via `finetuner.encode`."""
    engine = finetuner.build_model(name="bert-base-cased", is_onnx=is_onnx)

    docs = DocumentArray(Document(text="TEST TEXT"))
    finetuner.encode(model=engine, data=docs)

    # encode() writes embeddings onto the documents; they must be a numpy array.
    embeddings = docs.embeddings
    assert embeddings is not None
    assert isinstance(embeddings, np.ndarray)
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
no need to change, but a better way is
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The more you know!