toind committed on
Commit 2bf5d65 · verified · 1 Parent(s): b8836f1
Files changed (1)
  1. __init__.py +39 -0
__init__.py ADDED
@@ -0,0 +1,39 @@
+ from transformers import AutoConfig, AutoModel
+ from transformers import PreTrainedModel, PretrainedConfig
+ import torch
+ import torch.nn as nn
+
+ class CustomConfig(PretrainedConfig):
+     model_type = "custom_model"
+
+     def __init__(self, vocab_size=30522, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, num_labels=2, **kwargs):
+         super().__init__(**kwargs)
+         self.vocab_size = vocab_size
+         self.hidden_size = hidden_size
+         self.num_hidden_layers = num_hidden_layers
+         self.num_attention_heads = num_attention_heads
+         self.num_labels = num_labels
+
+ class CustomModel(PreTrainedModel):
+     config_class = CustomConfig
+
+     def __init__(self, config):
+         super().__init__(config)
+         self.embedding = nn.Embedding(config.vocab_size, config.hidden_size)
+         self.layers = nn.ModuleList([nn.TransformerEncoderLayer(d_model=config.hidden_size, nhead=config.num_attention_heads, batch_first=True) for _ in range(config.num_hidden_layers)])  # batch_first=True so the layers accept the (batch, seq, hidden) tensors produced by the embedding
+         self.classifier = nn.Linear(config.hidden_size, config.num_labels)
+
+         self.init_weights()
+
+     def forward(self, input_ids):
+         embeddings = self.embedding(input_ids)
+         x = embeddings
+         for layer in self.layers:
+             x = layer(x)
+         logits = self.classifier(x.mean(dim=1))  # mean-pool over the sequence dimension as input to the classifier
+         return logits
+
+
+ # Register the custom classes so AutoConfig/AutoModel resolve the "custom_model" type
+ AutoConfig.register("custom_model", CustomConfig)
+ AutoModel.register(CustomConfig, CustomModel)
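
A minimal usage sketch, not part of the commit: it assumes transformers and torch are installed and that it runs after the registration above. The hyperparameter override and tensor shapes are illustrative only.

    import torch
    from transformers import AutoConfig, AutoModel

    config = AutoConfig.for_model("custom_model", num_hidden_layers=2)  # resolves CustomConfig via the registration
    model = AutoModel.from_config(config)                               # builds a CustomModel from that config
    input_ids = torch.randint(0, config.vocab_size, (1, 16))            # hypothetical batch: one sequence of 16 token ids
    logits = model(input_ids)                                           # tensor of shape (1, config.num_labels)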