Colab Improvements (#498)
* Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb * Update colab.ipynb
This commit is contained in:
parent
15f525c580
commit
eb9a93097b
1 changed file with 17 additions and 3 deletions
18
colab.ipynb
18
colab.ipynb
|
@ -16,7 +16,7 @@
|
||||||
"source": [
|
"source": [
|
||||||
"## Welcome to the Official KoboldCpp Colab Notebook\r\n",
|
"## Welcome to the Official KoboldCpp Colab Notebook\r\n",
|
||||||
"It's really easy to get started. Just press the two **Play** buttons below, and then connect to the **Cloudflare URL** shown at the end. \r\n",
|
"It's really easy to get started. Just press the two **Play** buttons below, and then connect to the **Cloudflare URL** shown at the end. \r\n",
|
||||||
"You can also change the model URL to use any GGUF model from Huggingface."
|
"You can select a model from the dropdown or enter a URL into the Manual URL field to use any GGUF model from Huggingface. Formatting for a manual URL should look like `https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf`"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -42,9 +42,23 @@
|
||||||
"source": [
|
"source": [
|
||||||
"#@title <b>v-- Enter your model below and then click this to start Koboldcpp</b>\r\n",
|
"#@title <b>v-- Enter your model below and then click this to start Koboldcpp</b>\r\n",
|
||||||
"\r\n",
|
"\r\n",
|
||||||
"Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"\"]{allow-input: true}\r\n",
|
"Model = \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\",\"mythomax-l2-13b.Q4_K_M.gguf\",\"remm-slerp-l2-13b.Q4_K_M.gguf\",\"xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"stheno-l2-13b.Q4_K_M.gguf\"]{allow-input: true}\r\n",
|
||||||
|
"Manual_Link = False #@param {type:\"boolean\"}\r\n",
|
||||||
|
"Model_URL = \"OPTIONAL: Place URL Here\" #@param [\"\"]{allow-input: true}\r\n",
|
||||||
"Layers = 43 #@param [43]{allow-input: true}\r\n",
|
"Layers = 43 #@param [43]{allow-input: true}\r\n",
|
||||||
"ContextSize = 4096 #@param [4096] {allow-input: true}\r\n",
|
"ContextSize = 4096 #@param [4096] {allow-input: true}\r\n",
|
||||||
|
"if Model == \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\":\n",
|
||||||
|
" Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\"\n",
|
||||||
|
"elif Model == \"mythomax-l2-13b.Q4_K_M.gguf\":\n",
|
||||||
|
" Model = \"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\"\n",
|
||||||
|
"elif Model == \"remm-slerp-l2-13b.Q4_K_M.gguf\":\n",
|
||||||
|
" Model = \"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\"\n",
|
||||||
|
"elif Model == \"xwin-lm-13b-v0.2.Q4_K_M.gguf\":\n",
|
||||||
|
" Model = \"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\"\n",
|
||||||
|
"elif Model == \"stheno-l2-13b.Q4_K_M.gguf\":\n",
|
||||||
|
" Model = \"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\"\n",
|
||||||
|
"if Manual_Link:\n",
|
||||||
|
" Model = \"$Model_URL\"\n",
|
||||||
"\r\n",
|
"\r\n",
|
||||||
"%cd /content\r\n",
|
"%cd /content\r\n",
|
||||||
"!git clone https://github.com/LostRuins/koboldcpp\r\n",
|
"!git clone https://github.com/LostRuins/koboldcpp\r\n",
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue