
Commit f809f9d

Merge pull request #369 from ASUS-AICS/simplify_model

Remove `embed_vecs` from Model and PLTModel

2 parents: f33a366 + 5e7148b

3 files changed: +0 additions, −7 deletions

libmultilabel/nn/attentionxml.py (0 additions, 3 deletions)

@@ -381,7 +381,6 @@ def fit(self, datasets):
         model_1 = PLTModel(
             classes=self.classes,
             word_dict=self.word_dict,
-            embed_vecs=self.embed_vecs,
             network=network,
             log_path=self.log_path,
             learning_rate=self.learning_rate,
@@ -521,7 +520,6 @@ def __init__(
         self,
         classes,
         word_dict,
-        embed_vecs,
         network,
         loss_function="binary_cross_entropy_with_logits",
         log_path=None,
@@ -530,7 +528,6 @@ def __init__(
         super().__init__(
             classes=classes,
             word_dict=word_dict,
-            embed_vecs=embed_vecs,
             network=network,
             loss_function=loss_function,
             log_path=log_path,

libmultilabel/nn/model.py (0 additions, 3 deletions)

@@ -182,7 +182,6 @@ class Model(MultiLabelModel):
     Args:
         classes (list): List of class names.
         word_dict (torchtext.vocab.Vocab): A vocab object which maps tokens to indices.
-        embed_vecs (torch.Tensor): The pre-trained word vectors of shape (vocab_size, embed_dim).
         network (nn.Module): Network (i.e., CAML, KimCNN, or XMLCNN).
         loss_function (str, optional): Loss function name (i.e., binary_cross_entropy_with_logits,
             cross_entropy). Defaults to 'binary_cross_entropy_with_logits'.
@@ -193,7 +192,6 @@ def __init__(
         self,
         classes,
         word_dict,
-        embed_vecs,
         network,
         loss_function="binary_cross_entropy_with_logits",
         log_path=None,
@@ -204,7 +202,6 @@ def __init__(
             ignore=["log_path"]
         )  # If log_path is saved, loading the checkpoint will cause an error since each experiment has unique log_path (result_dir).
         self.word_dict = word_dict
-        self.embed_vecs = embed_vecs
         self.classes = classes
         self.network = network
         self.configure_loss_function(loss_function)

libmultilabel/nn/nn_utils.py (0 additions, 1 deletion)

@@ -100,7 +100,6 @@ def init_model(
     model = Model(
         classes=classes,
         word_dict=word_dict,
-        embed_vecs=embed_vecs,
         network=network,
         log_path=log_path,
         learning_rate=learning_rate,
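
For orientation, a minimal sketch of a call site after this change, based only on the constructor arguments visible in the diffs above. The `make_model` wrapper is a hypothetical illustration, not part of the repository; the assumption is that embedding weights now live inside the network object rather than being passed to Model.

# Hypothetical sketch: constructing Model after this commit.
# embed_vecs is no longer a Model argument; the network is assumed to
# carry its own embedding weights.
from libmultilabel.nn.model import Model

def make_model(classes, word_dict, network, log_path=None):
    """Wrap Model construction with the post-commit signature."""
    return Model(
        classes=classes,
        word_dict=word_dict,
        network=network,  # embed_vecs is not passed here anymore
        loss_function="binary_cross_entropy_with_logits",
        log_path=log_path,
    )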
