{
"backbone_checkpoint_name": "t5-large",
"backbone_class": "T5ForConditionalGeneration",
"backbone_hash": "ceb8394f4ffa0d804ed6b65e02530858",
"common_structure": null,
"delta_type": "lora",
"lora_alpha": 16,
"lora_dropout": 0.0,
"lora_r": 8,
"modified_modules": [
"SelfAttention.q",
"SelfAttention.v",
"DenseReluDense.wi"
],
"opendelta_version": "0.0.1",
"transformers_version": "4.17.0"
}