@inproceedings{cui-etal-2020-revisiting,
  title     = {Revisiting Pre-Trained Models for {Chinese} Natural Language Processing},
  author    = {Cui, Yiming and
               Che, Wanxiang and
               Liu, Ting and
               Qin, Bing and
               Wang, Shijin and
               Hu, Guoping},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2020},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.findings-emnlp.58},
  doi       = {10.18653/v1/2020.findings-emnlp.58},
  pages     = {657--668},
}
@misc{chinese-bert-wwm,
  title         = {Pre-Training with Whole Word Masking for {Chinese} {BERT}},
  author        = {Cui, Yiming and Che, Wanxiang and Liu, Ting and Qin, Bing and Yang, Ziqing and Wang, Shijin and Hu, Guoping},
  year          = {2019},
  eprint        = {1906.08101},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL},
  url           = {https://arxiv.org/abs/1906.08101},
}
rbt6 is an AI model hosted on huggingface.co (under the hfl organization), where its output can be tried instantly. huggingface.co supports a free trial of the rbt6 model and also offers paid usage. The rbt6 model can be called through an API from Node.js, Python, or plain HTTP.
huggingface.co is an online trial and API platform that integrates rbt6's modeling capabilities, including API services, and provides a free online trial of rbt6; you can try rbt6 online for free by clicking the link below.
rbt6 is an open-source model available on GitHub, where any user can find and install it for free. In addition, huggingface.co hosts a ready-to-use deployment of rbt6, so users can debug and trial the installed model directly on huggingface.co; free installation via the API is also supported.