# bert-base-uncased_sentiment_sst-2_pfeiffer.yaml
# Adapter-Hub adapter entry
# Defines a single adapter entry in Adapter-Hub
# --------------------
# The type of adapter (one of the options available in `adapter_type`).
type: text_task
# The string identifier of the task this adapter belongs to.
task: sentiment
# The string identifier of the subtask this adapter belongs to.
subtask: sst-2
# The model type.
# Example: bert
model_type: bert
# The string identifier of the pre-trained model (by which it is identified on Hugging Face).
# Example: bert-base-uncased
model_name: bert-base-uncased
# The name of the author(s) of this adapter.
author: Clifton Poth
# Describes the adapter architecture used by this adapter.
config:
  # The name of the adapter config used by this adapter (a short name available in the `architectures` folder).
  # Example: pfeiffer
  using: pfeiffer
# The version of this adapter to be downloaded by default.
default_version: '1'
# A list of different versions of this adapter available for download.
files:
  - version: '1'
    url: "https://public.ukp.informatik.tu-darmstadt.de/AdapterHub/text_task/sst/bert-base-uncased/pfeiffer/bert-base-uncased_sentiment_sst-2_pfeiffer.zip"
    sha1: fcf8d1a39235e6ea3bbb40f186ef7cd81712cee2
    sha256: 7c1009209ec99f58efd2c96eb8a211e48c164e4ea9b31fe5d93e11ddb17fd7e7
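# The checksums above can be verified after download; a minimal Python sketch
# (not part of the entry schema; the local file name is an assumption):
#
#   import hashlib
#
#   with open("bert-base-uncased_sentiment_sst-2_pfeiffer.zip", "rb") as f:
#       data = f.read()
#   # Compare both digests against the values listed in this entry.
#   assert hashlib.sha1(data).hexdigest() == "fcf8d1a39235e6ea3bbb40f186ef7cd81712cee2"
#   assert hashlib.sha256(data).hexdigest() == "7c1009209ec99f58efd2c96eb8a211e48c164e4ea9b31fe5d93e11ddb17fd7e7"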
citation: |
  @article{pfeiffer2020AdapterHub,
    title={AdapterHub: A Framework for Adapting Transformers},
    author={Jonas Pfeiffer and
            Andreas R\"uckl\'{e} and
            Clifton Poth and
            Aishwarya Kamath and
            Ivan Vuli\'{c} and
            Sebastian Ruder and
            Kyunghyun Cho and
            Iryna Gurevych},
    journal={arXiv preprint},
    year={2020},
    url={https://arxiv.org/abs/2007.07779}
  }
# A short description of this adapter.
description: |
  Adapter in Pfeiffer architecture trained on the binary SST task for 20 epochs with early stopping and a learning rate of 1e-4.
  See https://arxiv.org/pdf/2007.07779.pdf.
# A contact email of the author(s).
email: poth@ukp.informatik.tu-darmstadt.de
# A GitHub handle associated with the author(s).
github: calpt
# The name of the model class from which this adapter was extracted. This field is mainly intended for adapters with prediction heads.
# Example: BertModelWithHeads
model_class: BertForSequenceClassification
# A Twitter handle associated with the author(s).
twitter: clifapt
# A URL providing more information on this adapter, the author(s), or the organization.
url: https://www.informatik.tu-darmstadt.de/ukp
# Whether the adapter archive includes a prediction head.
prediction_head: true
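# Usage sketch (not part of the entry schema): loading this adapter via the
# `adapter-transformers` library, which this Hub targets. The Hub identifier
# "sentiment/sst-2@ukp" is an assumption derived from this entry's task and
# subtask fields.
#
#   from transformers import BertForSequenceClassification
#
#   model = BertForSequenceClassification.from_pretrained("bert-base-uncased")
#   # Resolves the entry on AdapterHub and downloads the archive listed under `files`.
#   name = model.load_adapter("sentiment/sst-2@ukp", config="pfeiffer")
#   model.set_active_adapters(name)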