# Load a pre-trained BART-base checkpoint with adapter support (adapter-transformers API).
model = AutoAdapterModel.from_pretrained("facebook/bart-base")
# Pfeiffer-style bottleneck adapter configuration: ReLU non-linearity and
# reduction_factor=16 (these values match the JSON config shown below).
config = AdapterConfig.load("pfeiffer", non_linearity="relu", reduction_factor=16)
# Fetch and load the UKP-trained QNLI adapter from AdapterHub into the model.
# NOTE(review): loading alone presumably does not activate the adapter — callers
# likely still need set_active_adapters() before inference; confirm against the
# adapter-transformers docs for the installed version.
model.load_adapter("nli/qnli@ukp", config=config)
{
"ln_after": false,
"ln_before": false,
"mh_adapter": false,
"output_adapter": true,
"adapter_residual_before_ln": false,
"non_linearity": "relu",
"original_ln_after": true,
"original_ln_before": true,
"reduction_factor": 16,
"residual_before_ln": true
}
| Identifier | Comment | Score | Download |
|---|---|---|---|
| 1 | DEFAULT | | |
@misc{pfeiffer2020adapterfusion,
title={AdapterFusion: Non-Destructive Task Composition for Transfer Learning},
author={Jonas Pfeiffer and Aishwarya Kamath and Andreas Rücklé and Kyunghyun Cho and Iryna Gurevych},
year={2020},
eprint={2005.00247},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
@article{wang2018glue,
title={{GLUE}: A multi-task benchmark and analysis platform for natural language understanding},
author={Wang, Alex and Singh, Amanpreet and Michael, Julian and Hill, Felix and Levy, Omer and Bowman, Samuel R},
journal={arXiv preprint arXiv:1804.07461},
year={2018}
}