Our adapters from the MultiCQA paper (https://arxiv.org/abs/2010.00980), trained on different StackExchange forums (see "version") with self-supervised training signals derived from unlabeled questions.
# Requires the adapter-transformers library (https://adapterhub.ml), which extends
# transformers with adapter support.
from transformers import AdapterConfig, BertForSequenceClassification

model = BertForSequenceClassification.from_pretrained("bert-base-uncased")
config = AdapterConfig.load("pfeiffer", reduction_factor=12)
model.load_adapter("sts/stackexchange@ukp", "text_task", config=config)
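Loading an adapter does not activate it by itself. Below is a minimal end-to-end inference sketch, assuming the adapter-transformers library is installed in place of plain transformers and a recent release in which load_adapter returns the adapter's name and set_active_adapters switches it on; the example sentences are made up.

import torch
from transformers import AdapterConfig, BertForSequenceClassification, BertTokenizer

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = BertForSequenceClassification.from_pretrained("bert-base-uncased")

# Load the adapter from AdapterHub and activate it so it is used in the forward pass
# (assumes load_adapter returns the adapter's name, as in recent adapter-transformers releases).
config = AdapterConfig.load("pfeiffer", reduction_factor=12)
adapter_name = model.load_adapter("sts/stackexchange@ukp", config=config)
model.set_active_adapters(adapter_name)

# Score an illustrative (question, candidate answer) pair.
inputs = tokenizer(
    "How do I recover a deleted git branch?",
    "Restore it from the reflog with git checkout -b <branch> <sha>.",
    return_tensors="pt",
)
with torch.no_grad():
    logits = model(**inputs).logits
print(logits)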
{ "ln_after": false, "ln_before": false, "mh_adapter": false, "output_adapter": true, "adapter_residual_before_ln": false, "non_linearity": "relu", "original_ln_after": true, "original_ln_before": true, "reduction_factor": 16, "residual_before_ln": true, "invertible_adapter": { "block_type": "nice", "non_linearity": "relu", "reduction_factor": 2 } }
@inproceedings{rueckle-etal-2020-multicqa,
    title = "{MultiCQA}: Zero-Shot Transfer of Self-Supervised Text Matching Models on a Massive Scale",
    author = {R{\"u}ckl{\'e}, Andreas and Pfeiffer, Jonas and Gurevych, Iryna},
    booktitle = "Proceedings of The 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP-2020)",
    year = "2020",
    address = "Virtual Conference",
    url = "https://arxiv.org/abs/2010.00980",
}
@misc{pfeiffer2020adapterfusion,
    title = {AdapterFusion: Non-Destructive Task Composition for Transfer Learning},
    author = {Jonas Pfeiffer and Aishwarya Kamath and Andreas Rücklé and Kyunghyun Cho and Iryna Gurevych},
    year = {2020},
    eprint = {2005.00247},
    archivePrefix = {arXiv},
    primaryClass = {cs.CL}
}