There is an example of how to use the RoBERTa model on this page: https://github.com/pytorch/fairseq/tree/master/examples/roberta
import torch
roberta = torch.hub.load('pytorch/fairseq', 'roberta.large')
roberta.eval() # disable dropout (or leave in train mode to finetune)
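For context, when loading works, the README's example continues roughly like this (a sketch; the input sentence is arbitrary, and the 1024 feature dimension assumes roberta.large):
# encode a sentence into BPE token ids and run a forward pass
tokens = roberta.encode('Hello world!')        # LongTensor of token ids
features = roberta.extract_features(tokens)    # shape (1, num_tokens, 1024) for roberta.large
print(tokens.tolist(), features.shape)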
The example above stopped working after commit 86857a58bf2919c7bec3c29c58234aa4c434d566, failing with this error:
ImportError Traceback (most recent call last)
<ipython-input-1-9914d0fa65af> in <module>
1 import torch
----> 2 roberta = torch.hub.load('pytorch/fairseq', 'roberta.large')
3 roberta.eval() # disable dropout (or leave in train mode to finetune)
/usr/local/lib/python3.6/dist-packages/torch/hub.py in load(github, model, *args, **kwargs)
334 sys.path.insert(0, repo_dir)
335
--> 336 hub_module = import_module(MODULE_HUBCONF, repo_dir + '/' + MODULE_HUBCONF)
337
338 entry = _load_entry_from_hubconf(hub_module, model)
/usr/local/lib/python3.6/dist-packages/torch/hub.py in import_module(name, path)
68 spec = importlib.util.spec_from_file_location(name, path)
69 module = importlib.util.module_from_spec(spec)
---> 70 spec.loader.exec_module(module)
71 return module
72 elif sys.version_info >= (3, 0):
/usr/lib/python3.6/importlib/_bootstrap_external.py in exec_module(self, module)
/usr/lib/python3.6/importlib/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)
~/.cache/torch/hub/pytorch_fairseq_master/hubconf.py in <module>
6 import functools
7
----> 8 from fairseq.hub_utils import BPEHubInterface as bpe # noqa
9 from fairseq.hub_utils import TokenizerHubInterface as tokenizer # noqa
10 from fairseq.models import MODEL_REGISTRY
~/.cache/torch/hub/pytorch_fairseq_master/fairseq/__init__.py in <module>
8
9 import fairseq.criterions # noqa
---> 10 import fairseq.models # noqa
11 import fairseq.modules # noqa
12 import fairseq.optim # noqa
~/.cache/torch/hub/pytorch_fairseq_master/fairseq/models/__init__.py in <module>
126 if not file.startswith('_') and not file.startswith('.') and (file.endswith('.py') or os.path.isdir(path)):
127 model_name = file[:file.find('.py')] if file.endswith('.py') else file
--> 128 module = importlib.import_module('fairseq.models.' + model_name)
129
130 # extra `model_parser` for sphinx
/usr/lib/python3.6/importlib/__init__.py in import_module(name, package)
124 break
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)
127
128
~/.cache/torch/hub/pytorch_fairseq_master/fairseq/models/insertion_transformer.py in <module>
7 import torch
8 import torch.nn.functional as F
----> 9 from fairseq import libnat
10 from fairseq.models import register_model, register_model_architecture
11 from fairseq.models.levenshtein_transformer import (
ImportError: cannot import name 'libnat'
Kinda getting the same thing:
en2de = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.en-de', checkpoint_file='model1.pt:model2.pt:model3.pt:model4.pt', tokenizer='moses', bpe='fastbpe')
Using cache found in /home/tuhinusc577/.cache/torch/hub/pytorch_fairseq_master
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/home/tuhinusc577/miniconda3/lib/python3.7/site-packages/torch/hub.py", line 359, in load
    hub_module = import_module(MODULE_HUBCONF, repo_dir + '/' + MODULE_HUBCONF)
  File "/home/tuhinusc577/miniconda3/lib/python3.7/site-packages/torch/hub.py", line 72, in import_module
    spec.loader.exec_module(module)
  File "<frozen importlib._bootstrap_external>", line 728, in exec_module
  File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
  File "/home/tuhinusc577/.cache/torch/hub/pytorch_fairseq_master/hubconf.py", line 8, in <module>
    from fairseq.hub_utils import BPEHubInterface as bpe  # noqa
  File "/home/tuhinusc577/.cache/torch/hub/pytorch_fairseq_master/fairseq/__init__.py", line 10, in <module>
    import fairseq.models  # noqa
  File "/home/tuhinusc577/.cache/torch/hub/pytorch_fairseq_master/fairseq/models/__init__.py", line 128, in <module>
    module = importlib.import_module('fairseq.models.' + model_name)
  File "/home/tuhinusc577/miniconda3/lib/python3.7/importlib/__init__.py", line 127, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "/home/tuhinusc577/.cache/torch/hub/pytorch_fairseq_master/fairseq/models/insertion_transformer.py", line 9, in <module>
    from fairseq import libnat
ImportError: cannot import name 'libnat' from 'fairseq' (/home/tuhinusc577/.cache/torch/hub/pytorch_fairseq_master/fairseq/__init__.py)
Same for me. It's only been happening for the last 2-3 days:
7 import torch
8 import torch.nn.functional as F
----> 9 from fairseq import libnat
10 from fairseq.models import register_model, register_model_architecture
11 from fairseq.models.levenshtein_transformer import (
ImportError: cannot import name 'libnat'
Hmm, I'm unable to repro this. How are you installing fairseq? I did:
$ git pull
$ pip install --editable .
$ python
Python 3.6.6 |Anaconda, Inc.| (default, Jun 28 2018, 17:14:51)
[GCC 7.2.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> from fairseq import libnat
>>> from fairseq import models
>>>
I just cleaned the cache at ~/.cache/torch/hub/pytorch_fairseq_master and installed the upstream version.
Same for me; it's crashing on the libnat import.
The problem is that torch.hub doesn't build the extensions, causing the import to fail.
This should be fixed now: https://github.com/pytorch/fairseq/commit/acb6fba005f45e363a6da98d7ce79c36c011d473
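If you fetched the repo before that commit, torch.hub will keep using the stale checkout. A minimal refresh sketch (the path below is torch.hub's default cache location; adjust it if you've changed TORCH_HOME):
$ rm -rf ~/.cache/torch/hub/pytorch_fairseq_master
$ python -c "import torch; torch.hub.load('pytorch/fairseq', 'roberta.large')"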
I can confirm that the example works correctly after clearing ~/.cache/torch.
This issue has returned.
Facing the same issue ... removing the cache folder doesn't help.
Same issue here. It happened after I reinstalled fairseq with some changes made to the source code.
I found the solution. Just run python setup.py build_ext --inplace in the fairseq source folder and all the extensions will be built. Also, if you cannot use libnat_cuda, you need to set a CUDA_HOME path.
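For anyone following along, the full sequence as I understand it (a sketch; /path/to/fairseq and /usr/local/cuda are placeholders for your actual source checkout and CUDA install):
$ cd /path/to/fairseq                # your fairseq source checkout
$ export CUDA_HOME=/usr/local/cuda   # only needed to build libnat_cuda
$ python setup.py build_ext --inplace
$ pip install --editable .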
Thanks for your help; this had taken hours of my time.
First, run python setup.py build_ext --inplace and pip install --editable . in the fairseq source folder.
Second, you must set a CUDA_HOME path. The order is: put
export PATH=/usr/lib/cuda/bin:$PATH
export LD_LIBRARY_PATH=/usr/lib/cuda/lib64:$LD_LIBRARY_PATH
export CUDA_HOME=/usr/lib/cuda
in ~/.bashrc (/usr/lib/cuda is the NVIDIA CUDA path on this machine).
Finally, run source ~/.bashrc.
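After those steps, a quick sanity check that the extensions actually import (a minimal sketch; both imports should succeed silently):
# libnat is the C++ extension built by `build_ext --inplace`
from fairseq import libnat
# importing the models package is what failed in the original traceback
import fairseq.models
print('libnat OK')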