Commit f2077b8

Merge pull request #179 from henk717/united
1.19.2
2 parents 09b5ffc + 2603f1f · commit f2077b8

18 files changed: +1184 -73 lines

aiserver.py

Lines changed: 47 additions & 14 deletions
@@ -1,7 +1,7 @@
 #!/usr/bin/python3
 #==================================================================#
 # KoboldAI
-# Version: 1.19.1
+# Version: 1.19.2
 # By: The KoboldAI Community
 #==================================================================#

@@ -125,6 +125,7 @@ class colors:
         ["NSFW Models", "nsfwlist", "", True],
         ["Untuned OPT", "optlist", "", True],
         ["Untuned GPT-Neo/J", "gptneolist", "", True],
+        ["Untuned Pythia", "pythialist", "", True],
         ["Untuned Fairseq Dense", "fsdlist", "", True],
         ["Untuned Bloom", "bloomlist", "", True],
         ["Untuned XGLM", "xglmlist", "", True],
@@ -154,6 +155,7 @@ class colors:
         ["OPT Nerys 6B V2 (Hybrid)", "KoboldAI/OPT-6B-nerys-v2", "16GB", False],
         ["Janeway FSD 6.7B", "KoboldAI/fairseq-dense-6.7B-Janeway", "16GB", False],
         ["Janeway Neo 6B", "KoboldAI/GPT-J-6B-Janeway", "16GB", False],
+        ["Qilin Lit 6B (SFW)", "rexwang8/qilin-lit-6b", "16GB", False],
         ["Janeway Neo 2.7B", "KoboldAI/GPT-Neo-2.7B-Janeway", "8GB", False],
         ["Janeway FSD 2.7B", "KoboldAI/fairseq-dense-2.7B-Janeway", "8GB", False],
         ["Nerys FSD 2.7B (Hybrid)", "KoboldAI/fairseq-dense-2.7B-Nerys", "8GB", False],
@@ -183,12 +185,31 @@ class colors:
         ],
     'gptneolist': [
         ["GPT-NeoX 20B", "EleutherAI/gpt-neox-20b", "64GB", False],
+        ["Pythia 13B (NeoX, Same dataset)", "EleutherAI/pythia-13b", "32GB", False],
         ["GPT-J 6B", "EleutherAI/gpt-j-6B", "16GB", False],
         ["GPT-Neo 2.7B", "EleutherAI/gpt-neo-2.7B", "8GB", False],
         ["GPT-Neo 1.3B", "EleutherAI/gpt-neo-1.3B", "6GB", False],
+        ["Pythia 800M (NeoX, Same dataset)", "EleutherAI/pythia-800m", "4GB", False],
+        ["Pythia 350M (NeoX, Same dataset)", "EleutherAI/pythia-350m", "2GB", False],
         ["GPT-Neo 125M", "EleutherAI/gpt-neo-125M", "2GB", False],
         ["Return to Main Menu", "mainmenu", "", True],
         ],
+    'pythialist': [
+        ["Pythia 13B Deduped", "EleutherAI/pythia-13b-deduped", "32GB", False],
+        ["Pythia 13B", "EleutherAI/pythia-13b", "32GB", False],
+        ["Pythia 6.7B Deduped", "EleutherAI/pythia-6.7b-deduped", "16GB", False],
+        ["Pythia 6.7B", "EleutherAI/pythia-6.7b", "16GB", False],
+        ["Pythia 1.3B Deduped", "EleutherAI/pythia-1.3b-deduped", "6GB", False],
+        ["Pythia 1.3B", "EleutherAI/pythia-1.3b", "6GB", False],
+        ["Pythia 800M", "EleutherAI/pythia-800m", "4GB", False],
+        ["Pythia 350M Deduped", "EleutherAI/pythia-350m-deduped", "2GB", False],
+        ["Pythia 350M", "EleutherAI/pythia-350m", "2GB", False],
+        ["Pythia 125M Deduped", "EleutherAI/pythia-125m-deduped", "2GB", False],
+        ["Pythia 125M", "EleutherAI/pythia-125m", "2GB", False],
+        ["Pythia 19M Deduped", "EleutherAI/pythia-19m-deduped", "1GB", False],
+        ["Pythia 19M", "EleutherAI/pythia-19m", "1GB", False],
+        ["Return to Main Menu", "mainmenu", "", True],
+        ],
     'gpt2list': [
         ["GPT-2 XL", "gpt2-xl", "6GB", False],
         ["GPT-2 Large", "gpt2-large", "4GB", False],
@@ -452,6 +473,7 @@ def emit(*args, **kwargs):
         return _emit(*args, **kwargs)
     except AttributeError:
         return socketio.emit(*args, **kwargs)
+utils.emit = emit
 
 # marshmallow/apispec setup
 from apispec import APISpec
@@ -756,6 +778,12 @@ def getmodelname():
         modelname = vars.model
     return modelname
 
+#==================================================================#
+# Get hidden size from model
+#==================================================================#
+def get_hidden_size_from_model(model):
+    return model.get_input_embeddings().embedding_dim
+
 #==================================================================#
 # Breakmodel configuration functions
 #==================================================================#
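The helper added in the hunk above is the same function removed from the lazy-load section further down: it is hoisted to the model-utility area so it can be called earlier during load. It derives the model's hidden size from the width of the input embedding layer. A quick illustration of what it returns, assuming the transformers library and using the small gpt2 checkpoint purely as an example:

from transformers import AutoModelForCausalLM

def get_hidden_size_from_model(model):
    # The input embedding matrix is (vocab_size x hidden_size), so its
    # embedding_dim equals the model's hidden size.
    return model.get_input_embeddings().embedding_dim

model = AutoModelForCausalLM.from_pretrained("gpt2")
print(get_hidden_size_from_model(model))  # 768 for gpt2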
@@ -873,7 +901,7 @@ def device_config(config):
             print(f"{colors.RED}Please enter an integer between -1 and {n_layers}.{colors.END}")
 
     logger.init_ok("Final device configuration:", status="Info")
-    device_list(n_layers)
+    device_list(n_layers, primary=breakmodel.primary_device)
 
     # If all layers are on the same device, use the old GPU generation mode
     while(len(breakmodel.gpu_blocks) and breakmodel.gpu_blocks[-1] == 0):
@@ -989,7 +1017,7 @@ def loadmodelsettings():
     if("nobreakmodel" in js):
         vars.nobreakmodel = js["nobreakmodel"]
     if("sampler_order" in js):
-        sampler_order = vars.sampler_order
+        sampler_order = js["sampler_order"]
         if(len(sampler_order) < 7):
             sampler_order = [6] + sampler_order
         vars.sampler_order = sampler_order
@@ -1127,7 +1155,7 @@ def processsettings(js):
     if("andepth" in js):
         vars.andepth = js["andepth"]
     if("sampler_order" in js):
-        sampler_order = vars.sampler_order
+        sampler_order = js["sampler_order"]
         if(len(sampler_order) < 7):
             sampler_order = [6] + sampler_order
         vars.sampler_order = sampler_order
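The two sampler_order hunks above (loadmodelsettings and processsettings) fix the same bug: both functions tested for "sampler_order" in the incoming JSON but then read the value from vars, so a saved sampler order was silently ignored. A minimal sketch of the corrected flow, with js as the parsed settings dict and state standing in for the global vars object (both names here are illustrative):

def apply_sampler_order(js: dict, state) -> None:
    if "sampler_order" in js:
        # Read from the saved settings, not the current state
        # (reading state.sampler_order here was the bug).
        sampler_order = js["sampler_order"]
        # Older saves know only six samplers; prepend id 6 so the
        # list reaches the seven entries newer builds expect.
        if len(sampler_order) < 7:
            sampler_order = [6] + sampler_order
        state.sampler_order = sampler_order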
@@ -1354,6 +1382,8 @@ def general_startup(override_args=None):
         args = parser.parse_args(shlex.split(os.environ["KOBOLDAI_ARGS"]))
     else:
         args = parser.parse_args()
+
+    utils.args = args
 
     set_logger_verbosity(args.verbosity)
     quiesce_logger(args.quiesce)
@@ -1790,7 +1820,9 @@ def new_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
         if not args.no_aria2:
             utils.aria2_hook(pretrained_model_name_or_path, **kwargs)
         return old_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
-    PreTrainedModel.from_pretrained = new_from_pretrained
+    if(not hasattr(PreTrainedModel, "_kai_patched")):
+        PreTrainedModel.from_pretrained = new_from_pretrained
+        PreTrainedModel._kai_patched = True
     if(hasattr(modeling_utils, "get_checkpoint_shard_files")):
         old_get_checkpoint_shard_files = modeling_utils.get_checkpoint_shard_files
         def new_get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, *args, **kwargs):
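The new hasattr guard (repeated in a second code path later in the file) makes the from_pretrained monkey patch idempotent: without it, every model load wrapped the already-wrapped method again, nesting the aria2 download hook. A minimal sketch of the sentinel-attribute pattern, with Target standing in for PreTrainedModel and the hook reduced to a print:

class Target:
    @classmethod
    def load(cls, name):
        return f"loaded {name}"

if not hasattr(Target, "_kai_patched"):
    _old_load = Target.load  # capture the original exactly once

    def _patched_load(cls, name):
        print("hook ran")       # extra work before deferring
        return _old_load(name)  # _old_load is already bound to Target

    Target.load = classmethod(_patched_load)
    Target._kai_patched = True  # sentinel: never wrap twice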
@@ -2424,9 +2456,6 @@ def get_original_key(key):
     return lazy_load_callback
 
 
-def get_hidden_size_from_model(model):
-    return model.get_input_embeddings().embedding_dim
-
 def maybe_low_cpu_mem_usage() -> Dict[str, Any]:
     if(packaging.version.parse(transformers_version) < packaging.version.parse("4.11.0")):
         logger.warning(f"Please upgrade to transformers 4.11.0 for lower RAM usage. You have transformers {transformers_version}.")
@@ -2668,7 +2697,9 @@ def new_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
         if not args.no_aria2:
             utils.aria2_hook(pretrained_model_name_or_path, **kwargs)
         return old_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
-    PreTrainedModel.from_pretrained = new_from_pretrained
+    if(not hasattr(PreTrainedModel, "_kai_patched")):
+        PreTrainedModel.from_pretrained = new_from_pretrained
+        PreTrainedModel._kai_patched = True
     if(hasattr(modeling_utils, "get_checkpoint_shard_files")):
         old_get_checkpoint_shard_files = modeling_utils.get_checkpoint_shard_files
         def new_get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, *args, **kwargs):
@@ -2914,7 +2945,7 @@ def lua_startup():
     except lupa.LuaError as e:
         print(colors.RED + "ERROR!" + colors.END)
         vars.lua_koboldbridge.obliterate_multiverse()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         exit(1)
     logger.init_ok("LUA bridge", status="OK")
@@ -3055,6 +3086,8 @@ def lua_compute_context(submission, entries, folders, kwargs):
         force_use_txt=True,
         scan_story=kwargs["scan_story"] if kwargs["scan_story"] != None else True,
     )
+    if kwargs["include_anote"] is not None and not kwargs["include_anote"]:
+        anotetxt = ""
     txt, _, _ = calcsubmitbudget(
         len(actions),
         winfo,
@@ -3470,7 +3503,7 @@ def execute_inmod():
         vars.lua_running = False
         emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
         sendUSStatItems()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         set_aibusy(0)
@@ -3487,7 +3520,7 @@ def execute_outmod():
         vars.lua_running = False
         emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
         sendUSStatItems()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         set_aibusy(0)
     if(vars.lua_koboldbridge.resend_settings_required):
@@ -4907,7 +4940,7 @@ def generate(txt, minimum, maximum, found_entries=None):
         vars.lua_running = False
         emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
         sendUSStatItems()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
     else:
         emit('from_server', {'cmd': 'errmsg', 'data': 'Error occurred during generator call; please check console.'}, broadcast=True)
@@ -5415,7 +5448,7 @@ def tpumtjgenerate(txt, minimum, maximum, found_entries=None):
         vars.lua_running = False
         emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
         sendUSStatItems()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
     else:
         emit('from_server', {'cmd': 'errmsg', 'data': 'Error occurred during generator call; please check console.'}, broadcast=True)

colab/GPU.ipynb

Lines changed: 9 additions & 1 deletion
@@ -70,10 +70,18 @@
 "Model = \"Nerys 2.7B\" #@param [\"Nerys 2.7B\", \"AID 2.7B\", \"Erebus 2.7B\", \"Janeway 2.7B\", \"Picard 2.7B\", \"Horni LN 2.7B\", \"Horni 2.7B\", \"Shinen 2.7B\", \"OPT 2.7B\", \"Fairseq Dense 2.7B\", \"Neo 2.7B\"] {allow-input: true}\n",
 "Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
 "Provider = \"Localtunnel\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
+"use_google_drive = True #@param {type:\"boolean\"}\n",
 "\n",
 "!nvidia-smi\n",
 "from google.colab import drive\n",
-"drive.mount('/content/drive/')\n",
+"if use_google_drive:\n",
+"  drive.mount('/content/drive/')\n",
+"else:\n",
+"  import os\n",
+"  if not os.path.exists(\"/content/drive\"):\n",
+"    os.mkdir(\"/content/drive\")\n",
+"  if not os.path.exists(\"/content/drive/MyDrive/\"):\n",
+"    os.mkdir(\"/content/drive/MyDrive/\")\n",
 "\n",
 "if Model == \"Nerys 2.7B\":\n",
 "  Model = \"KoboldAI/fairseq-dense-2.7B-Nerys\"\n",

colab/TPU.ipynb

Lines changed: 9 additions & 1 deletion
@@ -69,6 +69,7 @@
 "Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Erebus 13B\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 20B\", \"Erebus 20B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Lit V2 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
 "Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
 "Provider = \"Localtunnel\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
+"use_google_drive = True #@param {type:\"boolean\"}\n",
 "\n",
 "import os\n",
 "try:\n",
@@ -79,7 +80,14 @@
 "  raise RuntimeError(\"⚠️You can not run this notebook without the TPU accelerator, go to Runtime->Sessions, terminate your session and then try again.⚠️\")\n",
 "print('Now we will need your Google Drive to store settings and saves, you must login with the same account you used for Colab.')\n",
 "from google.colab import drive\n",
-"drive.mount('/content/drive/')\n",
+"if use_google_drive:\n",
+"  drive.mount('/content/drive/')\n",
+"else:\n",
+"  import os\n",
+"  if not os.path.exists(\"/content/drive\"):\n",
+"    os.mkdir(\"/content/drive\")\n",
+"  if not os.path.exists(\"/content/drive/MyDrive/\"):\n",
+"    os.mkdir(\"/content/drive/MyDrive/\")\n",
 "\n",
 "if Model == \"Janeway 13B\":\n",
 "  Model = \"KoboldAI/fairseq-dense-13B-Janeway\"\n",

commandline.bat

Lines changed: 2 additions & 0 deletions
@@ -1,5 +1,7 @@
 @echo off
 cd /D %~dp0
+SET CONDA_SHLVL=
+
 TITLE CMD for KoboldAI Runtime
 SET /P M=<loader.settings
 IF %M%==1 GOTO drivemap

environments/finetuneanon.yml

Lines changed: 0 additions & 26 deletions
This file was deleted.

environments/huggingface.yml

Lines changed: 3 additions & 0 deletions
@@ -20,10 +20,13 @@ dependencies:
   - marshmallow>=3.13
   - apispec-webframeworks
   - loguru
+  - termcolor
+  - psutil
   - pip:
     - flask-cloudflared
     - flask-ngrok
     - lupa==1.10
     - transformers>=4.20.1
     - huggingface_hub>=0.10.1
     - accelerate
+    - git+https://github.com/VE-FORBRYDERNE/mkultra

environments/rocm-finetune.yml

Lines changed: 0 additions & 25 deletions
This file was deleted.

environments/rocm.yml

Lines changed: 3 additions & 0 deletions
@@ -17,6 +17,8 @@ dependencies:
   - marshmallow>=3.13
   - apispec-webframeworks
   - loguru
+  - termcolor
+  - psutil
   - pip:
     - --extra-index-url https://download.pytorch.org/whl/rocm5.1.1
     - torch
@@ -27,3 +29,4 @@ dependencies:
     - transformers>=4.20.1
     - huggingface_hub>=0.10.1
     - accelerate
+    - git+https://github.com/VE-FORBRYDERNE/mkultra

fileops.py

Lines changed: 1 addition & 1 deletion
@@ -86,7 +86,7 @@ def uspath(filename):
 def getstoryfiles():
     list = []
     for file in listdir("stories"):
-        if file.endswith(".json"):
+        if file.endswith(".json") and not file.endswith(".v2.json"):
             ob = {}
             ob["name"] = file.replace(".json", "")
             f = open("stories/"+file, "r")

install_requirements.bat

Lines changed: 1 addition & 0 deletions
@@ -8,6 +8,7 @@ echo.
 
 Reg add "HKLM\SYSTEM\CurrentControlSet\Control\FileSystem" /v "LongPathsEnabled" /t REG_DWORD /d "1" /f 2>nul
 cd /D %~dp0
+SET CONDA_SHLVL=
 
 if exist miniconda3\ (
   echo Delete existing installation?
