
Commit 15e0be6

sharannarang authored and copybara-github committed
Update hparams for neural assistant base model.
PiperOrigin-RevId: 289154271
1 parent e908bcf · commit 15e0be6

File tree

1 file changed (+7, -1 lines)


tensor2tensor/models/neural_assistant.py

Lines changed: 7 additions & 1 deletion
@@ -508,7 +508,7 @@ def compute_summary_embedding(input_embeddings, input_lengths, hparams):
 @registry.register_hparams
 def neural_assistant_base():
   """HParams for a base neural_assistant model."""
-  hparams = transformer.transformer_base()
+  hparams = transformer.transformer_tpu()
   hparams.add_hparam("pos_weight", 1.0)  # weight for positive triples
   hparams.add_hparam("similarity_fuction",
                      "bilinear")  # dot_product or bilinear
@@ -521,6 +521,12 @@ def neural_assistant_base():
   hparams.add_hparam("kb_loss_weight", 0.0)  # weight for distant supervision
   hparams.add_hparam("test_triple_num",
                      28483)  # max triples of KB
+  hparams.add_hparam("margin", 0.0)  # KB training max-margin loss
+  hparams.add_hparam(
+      "num_negative_samples",
+      1)  # Sampling number of different adversarial training examples
+  hparams.add_hparam("kb_train_weight", 0.0)
+  # KB_training loss weight which combines Language model and KB selection loss
   return hparams
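For orientation only, below is a minimal sketch (not code from neural_assistant.py) of how the three newly added hparams could be consumed: a hinge loss that asks each gold KB triple to outscore its sampled negatives by hparams.margin, scaled into the total objective by hparams.kb_train_weight. The function names and the pos_scores / neg_scores tensors are hypothetical; only the hparam names and defaults come from this commit.

import tensorflow as tf


def kb_max_margin_loss(pos_scores, neg_scores, hparams):
  """Hinge loss: each gold triple should outscore its sampled negatives by hparams.margin.

  Args:
    pos_scores: [batch] similarity score of the gold KB triple.
    neg_scores: [batch, num_negative_samples] scores of sampled negative triples.
    hparams: hparams object carrying the margin value.
  """
  # max(0, margin - positive_score + negative_score), averaged over samples.
  hinge = tf.nn.relu(
      hparams.margin - tf.expand_dims(pos_scores, -1) + neg_scores)
  return tf.reduce_mean(hinge)


def total_loss(lm_loss, pos_scores, neg_scores, hparams):
  # With the defaults added above (margin=0.0, kb_train_weight=0.0), the KB
  # term contributes nothing and the base model trains on the LM loss alone.
  return lm_loss + hparams.kb_train_weight * kb_max_margin_loss(
      pos_scores, neg_scores, hparams)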