# Build the Pfeiffer adapter configuration (bottleneck with relu non-linearity,
# hidden size reduced by a factor of 16), then attach the pre-trained
# "sts/qqp@ukp" adapter from the Hub to a GPT-2 base model.
adapter_config = AdapterConfig.load("pfeiffer", non_linearity="relu", reduction_factor=16)
model = AutoAdapterModel.from_pretrained("gpt2")
model.load_adapter("sts/qqp@ukp", config=adapter_config)
{
  "adapter_residual_before_ln": false,
  "ln_after": false,
  "ln_before": false,
  "mh_adapter": false,
  "non_linearity": "relu",
  "original_ln_after": true,
  "original_ln_before": true,
  "output_adapter": true,
  "reduction_factor": 16,
  "residual_before_ln": true
}
| Identifier | Comment | Score | Download |
|---|---|---|---|
| 1 | DEFAULT | | |
@misc{pfeiffer2020adapterfusion,
title={AdapterFusion: Non-Destructive Task Composition for Transfer Learning},
author={Jonas Pfeiffer and Aishwarya Kamath and Andreas Rücklé and Kyunghyun Cho and Iryna Gurevych},
year={2020},
eprint={2005.00247},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
@misc{chen2018quora,
title={Quora question pairs},
author={Chen, Zihan and Zhang, Hongbo and Zhang, Xiaoji and Zhao, Leqi},
year={2018},
publisher={Quora}
}