3434_BIOBERT_PATH = "https://github.com/naver/biobert-pretrained/releases/download/"
3535_SCIBERT_PATH = "https://s3-us-west-2.amazonaws.com/ai2-s2-research/" \
3636 "scibert/tensorflow_models/"
37- _BERT_MSMARCO_PATH = "https://drive.google.com/file/d/"
37+ _BERT_MSMARCO_NOGUEIRA19_PATH = "https://drive.google.com/file/d/"
3838
3939
4040class PretrainedBERTMixin (PretrainedMixin , ABC ):
@@ -103,9 +103,9 @@ class PretrainedBERTMixin(PretrainedMixin, ABC):
103103 (`Nguyen et al`., 2016) dataset. It's the best performing model (on Jan
104104 8th 2019) on MS-MARCO Passage re-ranking task. Two models are included:
105105
106- * ``bert-msmarco-base``: Original BERT base model fine-tuned on
106+    * ``bert-msmarco-nogueira19-base``: Original BERT base model fine-tuned on
107107 MS-MARCO.
108- * ``bert-msmarco-large``: Original BERT large model fine-tuned on
108+    * ``bert-msmarco-nogueira19-large``: Original BERT large model fine-tuned on
109109 MS-MARCO.
110110
111111 We provide the following BERT classes:
@@ -167,9 +167,9 @@ class PretrainedBERTMixin(PretrainedMixin, ABC):
167167
168168 # BERT for MS-MARCO
169169 'bert-msmarco-base' :
170- _BERT_MSMARCO_PATH + '1cyUrhs7JaCJTTu-DjFUqP6Bs4f8a6JTX/' ,
170+ _BERT_MSMARCO_NOGUEIRA19_PATH + '1cyUrhs7JaCJTTu-DjFUqP6Bs4f8a6JTX/' ,
171171 'bert-msmarco-large' :
172- _BERT_MSMARCO_PATH + '1crlASTMlsihALlkabAQP6JTYIZwC1Wm8/'
172+ _BERT_MSMARCO_NOGUEIRA19_PATH + '1crlASTMlsihALlkabAQP6JTYIZwC1Wm8/'
173173 }
174174 _MODEL2CKPT = {
175175 # Standard BERT
@@ -194,8 +194,8 @@ class PretrainedBERTMixin(PretrainedMixin, ABC):
194194 'scibert-basevocab-cased' : 'bert_model.ckpt' ,
195195
196196 # BERT for MSMARCO
197- 'bert-msmarco-base' : 'model.ckpt-100000' ,
198- 'bert-msmarco-large' : 'model.ckpt-100000' ,
197+        'bert-msmarco-nogueira19-base': 'model.ckpt-100000',
198+        'bert-msmarco-nogueira19-large': 'model.ckpt-100000',
199199 }
200200
201201 @classmethod
0 commit comments