From 7ea9b2db3732904014b9121fb8a5c896ae00d4cf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andr=C3=A9s=20Felipe=20Cruz?=
Date: Tue, 4 Aug 2020 00:23:28 -0700
Subject: [PATCH] Encoder decoder config docs (#6195)

* Adding docs for how to load encoder_decoder pretrained model with individual config objects

* Adding docs for loading encoder_decoder config from pretrained folder

* Fixing W293 blank line contains whitespace

* Update src/transformers/modeling_encoder_decoder.py

* Update src/transformers/modeling_encoder_decoder.py

* Update src/transformers/modeling_encoder_decoder.py

* Apply suggestions from code review

model file should only show examples for how to load save model

* Update src/transformers/configuration_encoder_decoder.py

* Update src/transformers/configuration_encoder_decoder.py

* fix space

Co-authored-by: Patrick von Platen
---
 src/transformers/configuration_encoder_decoder.py | 9 +++++++++
 src/transformers/modeling_encoder_decoder.py      | 8 +++++++-
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/src/transformers/configuration_encoder_decoder.py b/src/transformers/configuration_encoder_decoder.py
index ae71dbecc..785a4c654 100644
--- a/src/transformers/configuration_encoder_decoder.py
+++ b/src/transformers/configuration_encoder_decoder.py
@@ -56,6 +56,15 @@ class EncoderDecoderConfig(PretrainedConfig):
         >>> # Accessing the model configuration
         >>> config_encoder = model.config.encoder
         >>> config_decoder = model.config.decoder
+        >>> # set decoder config to causal lm
+        >>> config_decoder.is_decoder = True
+
+        >>> # Saving the model, including its configuration
+        >>> model.save_pretrained('my-model')
+
+        >>> # loading model and config from pretrained folder
+        >>> encoder_decoder_config = EncoderDecoderConfig.from_pretrained('my-model')
+        >>> model = EncoderDecoderModel.from_pretrained('my-model', config=encoder_decoder_config)
     """
     model_type = "encoder_decoder"
 
diff --git a/src/transformers/modeling_encoder_decoder.py b/src/transformers/modeling_encoder_decoder.py
index ec98a250d..772ae74e2 100644
--- a/src/transformers/modeling_encoder_decoder.py
+++ b/src/transformers/modeling_encoder_decoder.py
@@ -127,7 +127,13 @@ class EncoderDecoderModel(PreTrainedModel):
         Examples::
 
             >>> from transformers import EncoderDecoderModel
-            >>> model = EncoderDecoderModel.from_encoder_decoder_pretrained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert
+            >>> # initialize a bert2bert from two pretrained BERT models. Note that the cross-attention layers will be randomly initialized
+            >>> model = EncoderDecoderModel.from_encoder_decoder_pretrained('bert-base-uncased', 'bert-base-uncased')
+            >>> # saving model after fine-tuning
+            >>> model.save_pretrained("./bert2bert")
+            >>> # load fine-tuned model
+            >>> model = EncoderDecoderModel.from_pretrained("./bert2bert")
+
         """
 
         kwargs_encoder = {
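
Note: the first commit bullet above refers to building an `EncoderDecoderConfig` from individual config objects, a workflow the final diff does not show (it keeps only the save/load example). For reference, a minimal sketch of that pattern, using the `EncoderDecoderConfig.from_encoder_decoder_configs` classmethod already present in `configuration_encoder_decoder.py`; the choice of `BertConfig` here is illustrative only::

    >>> from transformers import BertConfig, EncoderDecoderConfig, EncoderDecoderModel

    >>> # build the encoder and decoder configurations individually
    >>> config_encoder = BertConfig()
    >>> config_decoder = BertConfig()

    >>> # combine the two into a single EncoderDecoderConfig
    >>> config = EncoderDecoderConfig.from_encoder_decoder_configs(config_encoder, config_decoder)

    >>> # initialize a (randomly weighted) Bert2Bert model from the combined configuration
    >>> model = EncoderDecoderModel(config=config)

The save/load examples added by the diff then apply unchanged to a model built this way: `model.save_pretrained('my-model')` writes both sub-configurations alongside the weights, and `EncoderDecoderConfig.from_pretrained('my-model')` recovers them.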