@@ -747,7 +747,7 @@ def bert_12_768_12(dataset_name=None, vocab=None, pretrained=True, ctx=mx.cpu(),
'openwebtext_book_corpus_wiki_en_uncased',
'wiki_multilingual_uncased', 'wiki_multilingual_cased',
'scibert_scivocab_uncased', 'scibert_scivocab_cased',
- 'scibert_basevocab_uncased','scibert_basevocab_cased',
+ 'scibert_basevocab_uncased', 'scibert_basevocab_cased',
'biobert_v1.0_pmc', 'biobert_v1.0_pubmed', 'biobert_v1.0_pubmed_pmc',
'biobert_v1.1_pubmed',
'clinicalbert'
@@ -789,6 +789,30 @@ def bert_12_768_12(dataset_name=None, vocab=None, pretrained=True, ctx=mx.cpu(),
parameters will be left uninitialized. Otherwise AssertionError is
raised.

+ The pretrained parameters for dataset_name
+ 'openwebtext_book_corpus_wiki_en_uncased' were obtained by running the
+ GluonNLP BERT pre-training script on OpenWebText.
+
+ The pretrained parameters for dataset_name 'scibert_scivocab_uncased',
+ 'scibert_scivocab_cased', 'scibert_basevocab_uncased',
+ 'scibert_basevocab_cased' were obtained by converting the parameters
+ published by "Beltagy, I., Cohan, A., & Lo, K. (2019). Scibert: Pretrained
+ contextualized embeddings for scientific text. arXiv preprint
+ arXiv:1903.10676."
+
+ The pretrained parameters for dataset_name 'biobert_v1.0_pmc',
+ 'biobert_v1.0_pubmed', 'biobert_v1.0_pubmed_pmc', 'biobert_v1.1_pubmed'
+ were obtained by converting the parameters published by "Lee, J., Yoon, W.,
+ Kim, S., Kim, D., Kim, S., So, C. H., & Kang, J. (2019). Biobert:
+ pre-trained biomedical language representation model for biomedical text
+ mining. arXiv preprint arXiv:1901.08746."
+
+ The pretrained parameters for dataset_name 'clinicalbert' were obtained by
+ converting the parameters published by "Huang, K., Altosaar, J., &
+ Ranganath, R. (2019). ClinicalBERT: Modeling Clinical Notes and Predicting
+ Hospital Readmission. arXiv preprint arXiv:1904.05342."
+
+
Returns
-------
BERTModel, gluonnlp.vocab.BERTVocab
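A minimal usage sketch (not part of the diff) of loading one of the checkpoints documented above through the function shown in the hunk header. Only the arguments visible in that signature are used; the printed attributes are assumptions about what a caller might inspect rather than output taken from the source.

    import mxnet as mx
    import gluonnlp as nlp

    # Pick one of the dataset_name values listed in the docstring above;
    # with pretrained=True the converted parameters and the matching
    # vocabulary are downloaded and returned together.
    model, vocab = nlp.model.bert_12_768_12(
        dataset_name='scibert_scivocab_uncased',
        pretrained=True,
        ctx=mx.cpu())

    print(type(model).__name__)   # expected: BERTModel
    print(len(vocab))             # size of the SciBERT vocabulary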