From 05432c0a45295dec8f7fe7236b67568c4f622449 Mon Sep 17 00:00:00 2001
From: lots-o <39071632+lots-o@users.noreply.github.com>
Date: Thu, 12 Oct 2023 16:19:58 +0900
Subject: [PATCH] Modify typo (#297)

Co-authored-by: zhezhaoa <1152543959@qq.com>
---
 scripts/convert_bert_from_uer_to_huggingface.py | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/scripts/convert_bert_from_uer_to_huggingface.py b/scripts/convert_bert_from_uer_to_huggingface.py
index 1805d904..664aef89 100644
--- a/scripts/convert_bert_from_uer_to_huggingface.py
+++ b/scripts/convert_bert_from_uer_to_huggingface.py
@@ -63,18 +63,6 @@ def main():
 
     convert_bert_transformer_encoder_from_uer_to_huggingface(input_model, output_model, args.layers_num)
 
-    if args.type == "bert":
-        output_model["bert.pooler.dense.weight"] = input_model["target.sp.linear_1.weight"]
-        output_model["bert.pooler.dense.bias"] = input_model["target.sp.linear_1.bias"]
-        output_model["cls.seq_relationship.weight"] = input_model["target.sp.linear_2.weight"]
-        output_model["cls.seq_relationship.bias"] = input_model["target.sp.linear_2.bias"]
-        output_model["cls.predictions.transform.dense.weight"] = input_model["target.mlm.linear_1.weight"]
-        output_model["cls.predictions.transform.dense.bias"] = input_model["target.mlm.linear_1.bias"]
-        output_model["cls.predictions.transform.LayerNorm.weight"] = input_model["target.layer_norm.gamma"]
-        output_model["cls.predictions.transform.LayerNorm.bias"] = input_model["target.layer_norm.beta"]
-        output_model["cls.predictions.decoder.weight"] = input_model["target.mlm.linear_2.weight"]
-        output_model["cls.predictions.bias"] = input_model["target.mlm.linear_2.bias"]
-
     torch.save(output_model, args.output_model_path)
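
For reference, the block removed by this patch copied the UER-py pooler and MLM-head tensors into their HuggingFace BERT parameter names. The sketch below restates that same key mapping over plain PyTorch state dicts, assuming simple dict-style checkpoints; it is illustrative only, the checkpoint paths are placeholders, and the helper name copy_head_weights is not part of the repository.

    # Illustrative sketch only: mirrors the key mapping in the block removed by this patch.
    import collections

    import torch

    # UER-py target/head parameter names -> HuggingFace BERT names,
    # exactly as they appeared in the removed block.
    HEAD_KEY_MAP = {
        "target.sp.linear_1.weight": "bert.pooler.dense.weight",
        "target.sp.linear_1.bias": "bert.pooler.dense.bias",
        "target.sp.linear_2.weight": "cls.seq_relationship.weight",
        "target.sp.linear_2.bias": "cls.seq_relationship.bias",
        "target.mlm.linear_1.weight": "cls.predictions.transform.dense.weight",
        "target.mlm.linear_1.bias": "cls.predictions.transform.dense.bias",
        "target.layer_norm.gamma": "cls.predictions.transform.LayerNorm.weight",
        "target.layer_norm.beta": "cls.predictions.transform.LayerNorm.bias",
        "target.mlm.linear_2.weight": "cls.predictions.decoder.weight",
        "target.mlm.linear_2.bias": "cls.predictions.bias",
    }

    def copy_head_weights(input_model, output_model):
        """Copy pooler/MLM-head tensors from a UER-style state dict into a HuggingFace-style one."""
        for uer_key, hf_key in HEAD_KEY_MAP.items():
            if uer_key in input_model:
                output_model[hf_key] = input_model[uer_key]
        return output_model

    if __name__ == "__main__":
        input_model = torch.load("uer_model.bin", map_location="cpu")   # placeholder path
        output_model = collections.OrderedDict()
        copy_head_weights(input_model, output_model)
        torch.save(output_model, "pytorch_model.bin")                   # placeholder path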