From 469bacc8511178ba94967b8f80e45429a22b82ac Mon Sep 17 00:00:00 2001
From: Koichi Yasuoka
Date: Fri, 5 Jul 2024 07:58:33 +0900
Subject: [PATCH] bug fix

---
 demo/2024-07-05/ja-swallow-upos.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/demo/2024-07-05/ja-swallow-upos.ipynb b/demo/2024-07-05/ja-swallow-upos.ipynb
index 79de870..b945f3b 100644
--- a/demo/2024-07-05/ja-swallow-upos.ipynb
+++ b/demo/2024-07-05/ja-swallow-upos.ipynb
@@ -21,7 +21,7 @@
 "!pip install transformers accelerate deplacy\n",
 "from transformers import AutoTokenizer,AutoModelForTokenClassification,TokenClassificationPipeline\n",
 "tkz=AutoTokenizer.from_pretrained(\"tokyotech-llm/Swallow-MS-7b-v0.1\")\n",
-"mdl=AutoModelForTokenClassification.from_pretrained(\"KoichiYasuoka/Swallow-MS-7b-char-upos",trust_remote_code=True,device_map=\"auto\")\n",
+"mdl=AutoModelForTokenClassification.from_pretrained(\"KoichiYasuoka/Swallow-MS-7b-char-upos\",trust_remote_code=True,device_map=\"auto\")\n",
 "class TCP(TokenClassificationPipeline):\n",
 " def check_model_type(self,supported_models):\n",
 "  pass\n",
@@ -43,7 +43,7 @@
 "cell_type":"code",
 "metadata":{ "colab_type":"code" },
 "source": [
-"tkz=AutoTokenizer.from_pretrained(\"KoichiYasuoka/Swallow-MS-7b-char-upos")\n",
+"tkz=AutoTokenizer.from_pretrained(\"KoichiYasuoka/Swallow-MS-7b-char-upos\")\n",
 "nlp=TCP(model=mdl,tokenizer=tkz)\n",
 "txt=\"どこの村でも作っていた。\"\n",
 "doc=\"\\n\".join(\"\\t\".join([str(i+1),txt[t[\"start\"]:t[\"end\"]],\"_\",t[\"entity\"].split(\"|\")[0]]+[\"_\"]*5+[\"SpaceAfter=No\"]) for i,t in enumerate(nlp(txt)))+\"\\n\\n\"\n",
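
The two -/+ pairs above are the whole fix: each model-id string in the notebook JSON was missing the \" escape on its closing quote, which made the .ipynb invalid. For reference, the corrected cells run roughly as the Python below when taken outside the notebook. This is a minimal sketch assembled from the diff context only: the first tokenizer load from tokyotech-llm/Swallow-MS-7b-v0.1 is omitted because the second cell overwrites it, and the final print(doc) is added here purely for illustration and is not part of the notebook.

# pip install transformers accelerate deplacy
from transformers import AutoTokenizer, AutoModelForTokenClassification, TokenClassificationPipeline

# Remote-code UPOS tagging model from the patch; device_map="auto" needs accelerate.
mdl = AutoModelForTokenClassification.from_pretrained(
    "KoichiYasuoka/Swallow-MS-7b-char-upos", trust_remote_code=True, device_map="auto")

class TCP(TokenClassificationPipeline):
    def check_model_type(self, supported_models):
        pass  # skip the pipeline's model-type check so the remote-code model is accepted

tkz = AutoTokenizer.from_pretrained("KoichiYasuoka/Swallow-MS-7b-char-upos")
nlp = TCP(model=mdl, tokenizer=tkz)
txt = "どこの村でも作っていた。"
# One 10-column CoNLL-U-style line per predicted token:
# ID, surface form, "_" lemma, UPOS, five padding columns, MISC.
doc = "\n".join("\t".join([str(i + 1), txt[t["start"]:t["end"]], "_",
                           t["entity"].split("|")[0]] + ["_"] * 5 + ["SpaceAfter=No"])
                for i, t in enumerate(nlp(txt))) + "\n\n"
print(doc)  # added for illustration; not in the notebook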