File tree Expand file tree Collapse file tree
Expand file tree Collapse file tree Original file line number Diff line number Diff line change @@ -14,17 +14,22 @@ def get_model() -> str:
1414 model_url = "https://huggingface.co/Sosaka/Alpaca-native-4bit-ggml/resolve/main/ggml-alpaca-7b-q4.bin"
1515 tokenizer_url = "https://huggingface.co/decapoda-research/llama-7b-hf/resolve/main/tokenizer.model"
1616 conversion_script = "https://github.com/ggerganov/llama.cpp/raw/master/convert-unversioned-ggml-to-ggml.py"
17+ migrate_script = "https://github.com/ggerganov/llama.cpp/raw/master/migrate-ggml-2023-03-30-pr613.py"
1718 local_filename = model_url.split("/")[-1]
19+ local_filename_ggjt = local_filename.split('.')[0] + '-ggjt.' + local_filename.split('.')[1]
1820
1921 if not os.path.exists("convert-unversioned-ggml-to-ggml.py"):
2022 urlretrieve(conversion_script, "convert-unversioned-ggml-to-ggml.py")
23+ if not os.path.exists("migrate-ggml-2023-03-30-pr613.py"):
24+ urlretrieve(migrate_script, "migrate-ggml-2023-03-30-pr613.py")
2125 if not os.path.exists("tokenizer.model"):
2226 urlretrieve(tokenizer_url, "tokenizer.model")
2327 if not os.path.exists(local_filename):
2428 urlretrieve(model_url, local_filename)
25- os.system("python convert-unversioned-ggml-to-ggml.py . tokenizer.model")
29+ os.system(f"python convert-unversioned-ggml-to-ggml.py . tokenizer.model")
30+ os.system(f"python migrate-ggml-2023-03-30-pr613.py {local_filename} {local_filename_ggjt}")
2631
27- return local_filename
32+ return local_filename_ggjt
2833
2934
3035 def test_llamacpp_embedding_documents() -> None:
Original file line number Diff line number Diff line change @@ -13,17 +13,22 @@ def get_model() -> str:
1313 model_url = "https://huggingface.co/Sosaka/Alpaca-native-4bit-ggml/resolve/main/ggml-alpaca-7b-q4.bin"
1414 tokenizer_url = "https://huggingface.co/decapoda-research/llama-7b-hf/resolve/main/tokenizer.model"
1515 conversion_script = "https://github.com/ggerganov/llama.cpp/raw/master/convert-unversioned-ggml-to-ggml.py"
16+ migrate_script = "https://github.com/ggerganov/llama.cpp/raw/master/migrate-ggml-2023-03-30-pr613.py"
1617 local_filename = model_url.split("/")[-1]
18+ local_filename_ggjt = local_filename.split('.')[0] + '-ggjt.' + local_filename.split('.')[1]
1719
1820 if not os.path.exists("convert-unversioned-ggml-to-ggml.py"):
1921 urlretrieve(conversion_script, "convert-unversioned-ggml-to-ggml.py")
22+ if not os.path.exists("migrate-ggml-2023-03-30-pr613.py"):
23+ urlretrieve(migrate_script, "migrate-ggml-2023-03-30-pr613.py")
2024 if not os.path.exists("tokenizer.model"):
2125 urlretrieve(tokenizer_url, "tokenizer.model")
2226 if not os.path.exists(local_filename):
2327 urlretrieve(model_url, local_filename)
2428 os.system(f"python convert-unversioned-ggml-to-ggml.py . tokenizer.model")
29+ os.system(f"python migrate-ggml-2023-03-30-pr613.py {local_filename} {local_filename_ggjt}")
2530
26- return local_filename
31+ return local_filename_ggjt
2732
2833
2934 def test_llamacpp_inference() -> None:
You can’t perform that action at this time.
0 commit comments