Fix the installation requirements
parent e1b7bac3ec
commit 50d5659d73
@@ -9,8 +9,8 @@ from PIL import Image
 from torchvision.transforms import Compose, Resize, CenterCrop, ToTensor, Normalize
 from tqdm import tqdm

-from model import build_model
-from simple_tokenizer import SimpleTokenizer as _Tokenizer
+from .model import build_model
+from .simple_tokenizer import SimpleTokenizer as _Tokenizer

 __all__ = ["available_models", "load", "tokenize"]
 _tokenizer = _Tokenizer()
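Switching to relative imports is what lets these modules resolve once the code is installed as the clip package, instead of only working when run from the source directory. A minimal usage sketch against the installed package, using only the clip.load and clip.tokenize helpers that the test below also exercises:

import clip
import torch

# With the relative imports above, "import clip" resolves clip.model and
# clip.simple_tokenizer from inside the installed package, so downstream
# code only needs the top-level import.
model, preprocess = clip.load("ViT-B/32", device="cpu")
tokens = clip.tokenize(["a diagram", "a dog"])
with torch.no_grad():
    text_features = model.encode_text(tokens)
print(text_features.shape)  # expected: torch.Size([2, 512]) for ViT-B/32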
setup.py (5 lines changed)
@@ -5,7 +5,8 @@ import os
 from setuptools import setup, find_packages

 core_req = ['ftfy', 'regex', 'tqdm', 'torch==1.7.1', 'torchvision']
-extra_requires={'cuda': ['cudatoolkit==11.0']}
+extras_require={'cuda': ['cudatoolkit==11.0'],
+                'dev': ['pytest']}

 setup(
     name='clip_by_openai',
@@ -20,7 +21,7 @@ setup(
     packages=find_packages(exclude=["tests*"]),
     python_requires=">=3",
     install_requires=core_req,
-    extra_requires=extra_requires,
+    extras_require=extras_require,
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Intended Audience :: Developers",
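The rename matters because setuptools only recognizes the extras_require keyword; the misspelled extra_requires is treated as an unknown option and effectively ignored, so the optional dependencies were never registered. A condensed sketch of the corrected pattern, using only names already present in the diff above:

from setuptools import setup, find_packages

core_req = ['ftfy', 'regex', 'tqdm', 'torch==1.7.1', 'torchvision']
extras_require = {'cuda': ['cudatoolkit==11.0'],
                  'dev': ['pytest']}

setup(
    name='clip_by_openai',
    packages=find_packages(exclude=["tests*"]),
    python_requires=">=3",
    install_requires=core_req,       # always installed
    extras_require=extras_require,   # opt-in groups, e.g. pip install clip_by_openai[dev]
)

With that in place, pip install clip_by_openai[dev] (or pip install .[dev] from a checkout) pulls in pytest on top of the core requirements, which the test added below relies on.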
@@ -1,4 +1,16 @@
 import clip
 import torch
 from PIL import Image

+def test_simple_cpu():
+    device = 'cpu'
+    model, preprocess = clip.load("ViT-B/32", device=device)
+    image = preprocess(Image.open('CLIP.png')).unsqueeze(0).to(device)
+    text = clip.tokenize(["a diagram", "a dog", "a cat"]).to(device)
+
+    with torch.no_grad():
+        assert model.encode_image(image) is not None, "Encoding an image does not work"
+        assert model.encode_text(text) is not None, "Encoding text does not work"
+        logits_per_image, logits_per_text = model(image, text)
+        probs = logits_per_image.softmax(dim=-1).cpu().numpy()
+        print("Label probs:", probs)
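The new test only checks that encoding and the forward pass run on CPU. A stricter companion check (hypothetical, not part of this commit) could also verify the output shape and that the probabilities form a distribution over the three prompts:

import clip
import torch
from PIL import Image

def test_simple_cpu_shapes():  # hypothetical stricter variant, not in the commit
    model, preprocess = clip.load("ViT-B/32", device='cpu')
    image = preprocess(Image.open('CLIP.png')).unsqueeze(0)
    text = clip.tokenize(["a diagram", "a dog", "a cat"])
    with torch.no_grad():
        logits_per_image, logits_per_text = model(image, text)
    probs = logits_per_image.softmax(dim=-1)
    assert probs.shape == (1, 3)                 # one image scored against three prompts
    assert abs(float(probs.sum()) - 1.0) < 1e-3  # softmax over the prompts sums to 1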