├── 2025-01-14 15 37 24.png ├── 2025-01-14 15 39 21.png ├── 2025-01-14 15 40 05.png ├── testChoices.bat ├── batchCLORSescape.txt ├── LSERVER.bat └── README.md /2025-01-14 15 37 24.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fabiomatricardi/Batchfile-llms/main/2025-01-14 15 37 24.png -------------------------------------------------------------------------------- /2025-01-14 15 39 21.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fabiomatricardi/Batchfile-llms/main/2025-01-14 15 39 21.png -------------------------------------------------------------------------------- /2025-01-14 15 40 05.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fabiomatricardi/Batchfile-llms/main/2025-01-14 15 40 05.png -------------------------------------------------------------------------------- /testChoices.bat: -------------------------------------------------------------------------------- 1 | :: source https://stackoverflow.com/questions/14529246/multiple-choices-menu-on-batch-file 2 | :: color codes from 3 | :: https://gist.githubusercontent.com/mlocati/fdabcaeb8071d5c75a2d51712db24011/raw/b710612d6320df7e146508094e84b92b34c77d48/win10colors.cmd 4 | 5 | @echo off 6 | :START 7 | echo  8 | set SRV=C:\Users\FabioMatricardi\Documents\DEV\SmolLM2-360M_gradio\llamacpp 9 | echo. 10 | echo =================================================== 11 | echo What MODEL would you like to Run as a llama-server? 
12 | echo =================================================== 13 | echo 1 - SmolLM2-360M-Instruct.Q8_0.gguf 14 | echo 2 - SmolLM2-135M-Instruct-f16.gguf 15 | echo 3 - llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf 16 | echo 4 - EXIT 17 | 18 | set /p whatapp= 19 | :: Both sides of each comparison are quoted: with the bare form (if %whatapp%==1) an empty reply or one containing spaces expands to invalid IF syntax and cmd.exe aborts the script instead of reaching :INVALID. 20 | if "%whatapp%"=="1" (goto MODEL1 21 | ) else if "%whatapp%"=="2" (goto MODEL2 22 | ) else if "%whatapp%"=="3" (goto MODEL3 23 | ) else if "%whatapp%"=="4" (goto QUIT 24 | ) else (goto :INVALID) 25 | 26 | :MODEL1 27 | cls 28 | echo Starting llama-server API for SmolLM2-360M-Instruct.Q8_0.gguf 29 | echo start cmd.exe /k D:\SmolLM2-360M_gradio\llamacpp\llama-server.exe -m D:\SmolLM2-360M_gradio\llamacpp\model\SmolLM2-360M-Instruct.Q8_0.gguf -c 8192 -ngl 0 30 | start cmd.exe /k "%SRV%\llama-server.exe -m %SRV%\model\SmolLM2-360M-Instruct.Q8_0.gguf -c 8192 -ngl 0" 31 | goto :START 32 | 33 | :MODEL2 34 | cls 35 | echo Starting llama-server API for SmolLM2-135M-Instruct-f16.gguf 36 | start cmd.exe /k "%SRV%\llama-server.exe -m %SRV%\model\SmolLM2-135M-Instruct-f16.gguf -c 8192 -ngl 0" 37 | goto :START 38 | 39 | :MODEL3 40 | cls 41 | echo Starting llama-server API for llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf 42 | start cmd.exe /k "%SRV%\llama-server.exe -m %SRV%\model\llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf -c 8192 -ngl 0" 43 | goto :START 44 | 45 | :INVALID 46 | cls 47 | echo    48 | echo INVALID CHOICE 49 | echo  50 | goto :START 51 | 52 | :QUIT 53 | cls 54 | echo BYE BYE 55 | echo  56 | pause 57 | exit 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /batchCLORSescape.txt: -------------------------------------------------------------------------------- 1 | @echo off 2 | cls 3 | echo  STYLES  4 | echo ^[0m Reset 5 | echo ^[1m Bold 6 | echo ^[4m Underline 7 | echo ^[7m Inverse 8 | echo.
9 | echo  NORMAL FOREGROUND COLORS  10 | echo ^[30m Black (black) 11 | echo ^[31m Red 12 | echo ^[32m Green 13 | echo ^[33m Yellow 14 | echo ^[34m Blue 15 | echo ^[35m Magenta 16 | echo ^[36m Cyan 17 | echo ^[37m White 18 | echo. 19 | echo  NORMAL BACKGROUND COLORS  20 | echo ^[40m Black 21 | echo ^[41m Red 22 | echo ^[42m Green 23 | echo ^[43m Yellow 24 | echo ^[44m Blue 25 | echo ^[45m Magenta 26 | echo ^[46m Cyan 27 | echo ^[47m White (white) 28 | echo. 29 | echo  STRONG FOREGROUND COLORS  30 | echo ^[90m White 31 | echo ^[91m Red 32 | echo ^[92m Green 33 | echo ^[93m Yellow 34 | echo ^[94m Blue 35 | echo ^[95m Magenta 36 | echo ^[96m Cyan 37 | echo ^[97m White 38 | echo. 39 | echo  STRONG BACKGROUND COLORS  40 | echo ^[100m Black 41 | echo ^[101m Red 42 | echo ^[102m Green 43 | echo ^[103m Yellow 44 | echo ^[104m Blue 45 | echo ^[105m Magenta 46 | echo ^[106m Cyan 47 | echo ^[107m White 48 | echo. 49 | echo  COMBINATIONS  50 | echo ^[31m red foreground color 51 | echo ^[7m inverse foreground ^<-^> background 52 | echo ^[7;31m inverse red foreground color 53 | echo ^[7m and nested ^[31m before nested 54 | echo ^[31m and nested ^[7m before nested -------------------------------------------------------------------------------- /LSERVER.bat: -------------------------------------------------------------------------------- 1 | :: source https://stackoverflow.com/questions/14529246/multiple-choices-menu-on-batch-file 2 | :: color codes from 3 | :: https://gist.githubusercontent.com/mlocati/fdabcaeb8071d5c75a2d51712db24011/raw/b710612d6320df7e146508094e84b92b34c77d48/win10colors.cmd 4 | 5 | @echo off 6 | :START 7 | echo  8 | set SRV=D:\LLAMACPP-MASTER 9 | set DML=D:\LLM-Small 10 | echo. 11 | echo =================================================== 12 | echo What MODEL would you like to Run as a llama-server? 
13 | echo =================================================== 14 | echo 1 - SmolLM2-360M-Instruct.Q8_0.gguf 15 | echo 2 - SmolLM2-135M-Instruct-f16.gguf 16 | echo 3 - llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf 17 | echo 4 - gemma-2-2b-it-Q5_K_M.gguf 18 | echo 5 - granite-3.1-2b-instruct-Q5_K_L.gguf 19 | echo 6 - granite-3.1-3b-a800m-instruct-Q5_K_L.gguf 20 | echo 7 - flan-t5-small-q8_0.gguf 21 | echo 0 - EXIT 22 | 23 | set /p whatapp= 24 | :: Both sides of each comparison are quoted: with the bare form (if %whatapp%==1) an empty reply or one containing spaces expands to invalid IF syntax and cmd.exe aborts the script instead of reaching :INVALID. 25 | if "%whatapp%"=="1" (goto MODEL1 26 | ) else if "%whatapp%"=="2" (goto MODEL2 27 | ) else if "%whatapp%"=="3" (goto MODEL3 28 | ) else if "%whatapp%"=="4" (goto MODEL4 29 | ) else if "%whatapp%"=="5" (goto MODEL5 30 | ) else if "%whatapp%"=="6" (goto MODEL6 31 | ) else if "%whatapp%"=="7" (goto MODEL7 32 | ) else if "%whatapp%"=="0" (goto QUIT 33 | ) else (goto :INVALID) 34 | 35 | :MODEL1 36 | cls 37 | echo Starting llama-server API for SmolLM2-360M-Instruct.Q8_0.gguf 38 | echo start cmd.exe /k D:\SmolLM2-360M_gradio\llamacpp\llama-server.exe -m D:\SmolLM2-360M_gradio\llamacpp\model\SmolLM2-360M-Instruct.Q8_0.gguf -c 8192 -ngl 0 39 | start cmd.exe /k "%SRV%\llama-server.exe -m %DML%\SmolLM2-360M-Instruct.Q8_0.gguf -c 8192 -ngl 0" 40 | goto :START 41 | 42 | :MODEL2 43 | cls 44 | echo Starting llama-server API for SmolLM2-135M-Instruct-f16.gguf 45 | start cmd.exe /k "%SRV%\llama-server.exe -m %DML%\SmolLM2-135M-Instruct-f16.gguf -c 8192 -ngl 0" 46 | goto :START 47 | 48 | :MODEL3 49 | cls 50 | echo Starting llama-server API for llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf 51 | start cmd.exe /k "%SRV%\llama-server.exe -m %DML%\llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf -c 8192 -ngl 0" 52 | goto :START 53 | 54 | :MODEL4 55 | cls 56 | echo Starting llama-server API for GEMMA2-2B gemma-2-2b-it-Q5_K_M.gguf 57 | start cmd.exe /k "%SRV%\llama-server.exe -m %DML%\gemma-2-2b-it-Q5_K_M.gguf -c 8192 -ngl 0" 58 | goto :START 59 | 60 | :MODEL5 61 | cls 62 | echo Starting llama-server API for GRANITE-3.1-2B-DENSE granite-3.1-2b-instruct-Q5_K_L.gguf 63 | start cmd.exe /k
"%SRV%\llama-server.exe -m %DML%\granite-3.1-2b-instruct-Q5_K_L.gguf -c 8192 -ngl 0" 64 | goto :START 65 | 66 | :MODEL6 67 | cls 68 | echo Starting llama-server API for GRANITE-3.1-MOE 3B ACTIVE 800M 69 | start cmd.exe /k "%SRV%\llama-server.exe -m %DML%\granite-3.1-3b-a800m-instruct-Q5_K_L.gguf -c 8192 -ngl 0" 70 | goto :START 71 | 72 | :MODEL7 73 | cls 74 | echo Starting llama-server API for Encoder-Decoder flan-t5-small-q8_0.gguf 75 | start cmd.exe /k "%SRV%\llama-server.exe -m %DML%\flan-t5-small-q8_0.gguf -c 512 -ngl 0" 76 | goto :START 77 | 78 | 79 | :INVALID 80 | cls 81 | echo    82 | echo INVALID CHOICE 83 | echo  84 | goto :START 85 | 86 | :QUIT 87 | cls 88 | echo BYE BYE 89 | echo  90 | pause 91 | exit 92 | 93 | 94 | 95 | 96 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Batchfile-llms 2 | Multiple choice batch file to run llama-server instances from terminal 3 | 4 | 5 | 6 | ### The main idea: 7 | being able to run llama-server instances for a multiple choice of LLMs from the terminal 8 | 9 | ### Why? 10 | In the past months I worked on several projects using an unified process 11 | - llama-server as the backend API server 12 | - gradio/streamlit as GUI for the chatbots 13 | 14 | ### how it is done 15 | The re-compliled llama.cpp binaries are really convenient, so that simply extracting the ZIP archive makes you up and running. 
16 | 17 | This means that we can have a single place for the binaries, and another one for the models 18 | 19 | Using a batch file, easily customizable, with `PATH` for llama-cpp and `PATH` for the models, will make it fast to test them 20 | 21 | When you press a valid choice a new terminal will run the llama-server instance 22 | 23 | 24 | 25 | ### Content 26 | - an example file with my code - `testChoices.bat` 27 | - a text file with the correct escape code working fine on normal windows `cmd` 28 | 29 | 30 | Take a look 31 | ```batch 32 | :: source https://stackoverflow.com/questions/14529246/multiple-choices-menu-on-batch-file 33 | :: color codes from 34 | :: https://gist.githubusercontent.com/mlocati/fdabcaeb8071d5c75a2d51712db24011/raw/b710612d6320df7e146508094e84b92b34c77d48/win10colors.cmd 35 | 36 | @echo off 37 | :START 38 | echo  39 | set SRV=C:\Users\FabioMatricardi\Documents\DEV\SmolLM2-360M_gradio\llamacpp 40 | echo. 41 | echo =================================================== 42 | echo What MODEL would you like to Run as a llama-server? 
43 | echo =================================================== 44 | echo 1 - SmolLM2-360M-Instruct.Q8_0.gguf 45 | echo 2 - SmolLM2-135M-Instruct-f16.gguf 46 | echo 3 - llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf 47 | echo 4 - EXIT 48 | 49 | set /p whatapp= 50 | 51 | if %whatapp%==1 (goto MODEL1 52 | ) else if %whatapp%==2 (goto MODEL2 53 | ) else if %whatapp%==3 (goto MODEL3 54 | ) else if %whatapp%==4 (goto QUIT 55 | ) else (goto :INVALID) 56 | 57 | :MODEL1 58 | cls 59 | echo Starting llama-server API for SmolLM2-360M-Instruct.Q8_0.gguf 60 | echo start cmd.exe /k D:\SmolLM2-360M_gradio\llamacpp\llama-server.exe -m D:\SmolLM2-360M_gradio\llamacpp\model\SmolLM2-360M-Instruct.Q8_0.gguf -c 8192 -ngl 0 61 | start cmd.exe /k "%SRV%\llama-server.exe -m %SRV%\model\SmolLM2-360M-Instruct.Q8_0.gguf -c 8192 -ngl 0" 62 | goto :START 63 | 64 | :MODEL2 65 | cls 66 | echo Starting llama-server API for SmolLM2-135M-Instruct-f16.gguf 67 | start cmd.exe /k "%SRV%\llama-server.exe -m %SRV%\model\SmolLM2-135M-Instruct-f16.gguf -c 8192 -ngl 0" 68 | goto :START 69 | 70 | :MODEL3 71 | cls 72 | echo Starting llama-server API for llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf 73 | start cmd.exe /k "%SRV%\llama-server.exe -m %SRV%\model\llamaestra-3.2-1b-instruct-v0.1-q8_0.gguf -c 8192 -ngl 0" 74 | goto :START 75 | 76 | :INVALID 77 | cls 78 | echo    79 | echo INVALID CHOICE 80 | echo  81 | goto :START 82 | 83 | :QUIT 84 | cls 85 | echo BYE BYE 86 | echo  87 | pause 88 | exit 89 | 90 | ``` 91 | 92 | --------------------------------------------------------------------------------