-
Notifications
You must be signed in to change notification settings - Fork 2.1k
/
clip_model.py
41 lines (32 loc) 路 1.17 KB
/
clip_model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
from clip_server.model.pretrained_models import (
_OPENCLIP_MODELS,
_MULTILINGUALCLIP_MODELS,
_VISUAL_MODEL_IMAGE_SIZE,
)
class BaseCLIPModel:
    """Common base for CLIP model wrappers.

    Stores the requested model name and exposes it (plus the model's
    visual input size, when known) through read-only properties.
    """

    def __init__(self, name: str, **kwargs):
        super().__init__()
        # Raw name as given by the caller; normalization happens lazily
        # through ``get_model_name`` so subclasses can alias/rewrite it.
        self._name = name

    @staticmethod
    def get_model_name(name: str):
        # Identity mapping by default; subclasses may override to map a
        # user-facing name onto the backend's canonical model name.
        return name

    @property
    def model_name(self):
        # Dispatch through the concrete class so a subclass override of
        # ``get_model_name`` takes effect even on inherited instances.
        return type(self).get_model_name(self._name)

    @property
    def image_size(self):
        # None when the model has no registered visual input size.
        return _VISUAL_MODEL_IMAGE_SIZE.get(self.model_name)
class CLIPModel(BaseCLIPModel):
    """Factory class: instantiating ``CLIPModel(name)`` returns the concrete
    implementation (OpenCLIP or Multilingual-CLIP) registered for *name*.

    Subclasses that are instantiated directly bypass the dispatch and are
    constructed normally.
    """

    def __new__(cls, name: str, **kwargs):
        # Guard clause: only the factory class itself dispatches; concrete
        # subclasses fall straight through to normal construction.
        if cls is not CLIPModel:
            return super().__new__(cls)

        if name in _OPENCLIP_MODELS:
            # Imported lazily so unused backends are never loaded.
            from clip_server.model.openclip_model import OpenCLIPModel

            return super().__new__(OpenCLIPModel)

        if name in _MULTILINGUALCLIP_MODELS:
            from clip_server.model.mclip_model import MultilingualCLIPModel

            return super().__new__(MultilingualCLIPModel)

        raise ValueError(f'The CLIP model name=`{name}` is not supported.')