# xlm-roberta-base_mlki_ts_pfeiffer.yaml
---
# Adapter-Hub adapter entry
# Defines a single adapter entry in Adapter-Hub
# --------------------
# The type of adapter (one of the options available in `adapter_type`).
type: text_task
# The string identifier of the task this adapter belongs to.
task: mlki
# The string identifier of the subtask this adapter belongs to.
subtask: ts
# The model type.
# Example: bert
model_type: xlm-roberta
# The string identifier of the pre-trained model (by which it is identified at Huggingface).
# Example: bert-base-uncased
model_name: xlm-roberta-base
# The name of the author(s) of this adapter.
author: Yifan Hou
# Describes the adapter architecture used by this adapter
config:
  # The name of the adapter config used by this adapter (a short name available in the `architectures` folder).
  # Example: pfeiffer
  using: pfeiffer
  non_linearity: relu
  reduction_factor: 16
# The version served by default when a caller requests none (quoted so it stays a string).
default_version: '1'
# A list of different versions of this adapter available for download.
files:
  - version: '1'
    url: https://huggingface.co/yyyyifan/mlkiadapter/resolve/main/xlm-roberta-base_mlki_ts_pfeiffer.zip
    # Checksums are quoted so hex digests can never be re-typed as numbers by a YAML parser.
    sha1: 'b8372ef1ba9ef4ba19290914bcb714b7692dd127'
    sha256: '2b0bb5dcd1ec4b3976764b6f8eddeae9fbcd75b86eb5fe8f8d31d76d18c8ac2f'
# BibTeX entry for citing this adapter; a literal block scalar keeps the
# entry's line breaks intact (a multi-line quoted scalar would fold them to spaces).
citation: |-
  @article{hou2022adapters,
    title={Adapters for Enhanced Modeling of Multilingual Knowledge and Text},
    author={Hou, Yifan and Jiao, Wenxiang and Liu, Meizhen and Allen, Carl and Tu, Zhaopeng and Sachan, Mrinmaya},
    journal={arXiv preprint arXiv:2210.13617},
    year={2022}
  }
# (optional) A short description of this adapter. Folded scalar: reads as one line at runtime.
description: >-
  Knowledge adapter set for multilingual knowledge graph integration. This
  adapter is for factual triple enhancement (sentence-level). We trained it
  with triples from T-REx across 84 languages.
# (optional) A contact email of the author(s).
email: yifan.hou@inf.ethz.ch
# (optional) A GitHub handle associated with the author(s).
github: eth-nlped
# (optional) The name of the model class from which this adapter was extracted. This field is mainly intended for adapters with prediction heads.
# Example: BertModelWithHeads
model_class: XLMRobertaModel
# (optional) If the adapter has a pre-trained prediction head included.
prediction_head: false
# (optional) A Twitter handle associated with the author(s).
# NOTE(review): this is a full URL rather than a bare handle — confirm which form the hub expects.
twitter: https://twitter.com/yyyyyyyyifan
# (optional) A URL providing more information on this adapter/ the authors/ the organization.
url: https://yifan-h.github.io/