model = AutoAdapterModel.from_pretrained("distilbert-base-uncased")
config = AdapterConfig.load("pfeiffer")
model.load_adapter("rc/multirc@ukp", config=config)
{
"ln_after": false,
"ln_before": false,
"mh_adapter": false,
"output_adapter": true,
"adapter_residual_before_ln": false,
"non_linearity": null,
"original_ln_after": true,
"original_ln_before": true,
"reduction_factor": null,
"residual_before_ln": true
}

| Identifier | Comment | Score | Download |
|---|---|---|---|
| 1 | DEFAULT | | |
@misc{pfeiffer2020adapterfusion,
title={AdapterFusion: Non-Destructive Task Composition for Transfer Learning},
author={Jonas Pfeiffer and Aishwarya Kamath and Andreas Rücklé and Kyunghyun Cho and Iryna Gurevych},
year={2020},
eprint={2005.00247},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
@inproceedings{MultiRC2018,
author = {Daniel Khashabi and Snigdha Chaturvedi and Michael Roth and Shyam Upadhyay and Dan Roth},
title = {Looking Beyond the Surface: A Challenge Set for Reading Comprehension over Multiple Sentences},
booktitle = {Proceedings of North American Chapter of the Association for Computational Linguistics (NAACL)},
year = {2018}
}