11#!/usr/bin/python3
22#==================================================================#
33# KoboldAI
4- # Version: 1.19.1
4+ # Version: 1.19.2
55# By: The KoboldAI Community
66#==================================================================#
77
@@ -125,6 +125,7 @@ class colors:
125125 ["NSFW Models" , "nsfwlist" , "" , True ],
126126 ["Untuned OPT" , "optlist" , "" , True ],
127127 ["Untuned GPT-Neo/J" , "gptneolist" , "" , True ],
128+ ["Untuned Pythia" , "pythialist" , "" , True ],
128129 ["Untuned Fairseq Dense" , "fsdlist" , "" , True ],
129130 ["Untuned Bloom" , "bloomlist" , "" , True ],
130131 ["Untuned XGLM" , "xglmlist" , "" , True ],
@@ -154,6 +155,7 @@ class colors:
154155 ["OPT Nerys 6B V2 (Hybrid)" , "KoboldAI/OPT-6B-nerys-v2" , "16GB" , False ],
155156 ["Janeway FSD 6.7B" , "KoboldAI/fairseq-dense-6.7B-Janeway" , "16GB" , False ],
156157 ["Janeway Neo 6B" , "KoboldAI/GPT-J-6B-Janeway" , "16GB" , False ],
158+ ["Qilin Lit 6B (SFW)" , "rexwang8/qilin-lit-6b" , "16GB" , False ],
157159 ["Janeway Neo 2.7B" , "KoboldAI/GPT-Neo-2.7B-Janeway" , "8GB" , False ],
158160 ["Janeway FSD 2.7B" , "KoboldAI/fairseq-dense-2.7B-Janeway" , "8GB" , False ],
159161 ["Nerys FSD 2.7B (Hybrid)" , "KoboldAI/fairseq-dense-2.7B-Nerys" , "8GB" , False ],
@@ -183,12 +185,31 @@ class colors:
183185 ],
184186 'gptneolist' : [
185187 ["GPT-NeoX 20B" , "EleutherAI/gpt-neox-20b" , "64GB" , False ],
188+ ["Pythia 13B (NeoX, Same dataset)" , "EleutherAI/pythia-13b" , "32GB" , False ],
186189 ["GPT-J 6B" , "EleutherAI/gpt-j-6B" , "16GB" , False ],
187190 ["GPT-Neo 2.7B" , "EleutherAI/gpt-neo-2.7B" , "8GB" , False ],
188191 ["GPT-Neo 1.3B" , "EleutherAI/gpt-neo-1.3B" , "6GB" , False ],
192+ ["Pythia 800M (NeoX, Same dataset)" , "EleutherAI/pythia-800m" , "4GB" , False ],
193+ ["Pythia 350M (NeoX, Same dataset)" , "EleutherAI/pythia-350m" , "2GB" , False ],
189194 ["GPT-Neo 125M" , "EleutherAI/gpt-neo-125M" , "2GB" , False ],
190195 ["Return to Main Menu" , "mainmenu" , "" , True ],
191196 ],
197+ 'pythialist' : [
198+ ["Pythia 13B Deduped" , "EleutherAI/pythia-13b-deduped" , "32GB" , False ],
199+ ["Pythia 13B" , "EleutherAI/pythia-13b" , "32GB" , False ],
200+ ["Pythia 6.7B Deduped" , "EleutherAI/pythia-6.7b-deduped" , "16GB" , False ],
201+ ["Pythia 6.7B" , "EleutherAI/pythia-6.7b" , "16GB" , False ],
202+ ["Pythia 1.3B Deduped" , "EleutherAI/pythia-1.3b-deduped" , "6GB" , False ],
203+ ["Pythia 1.3B" , "EleutherAI/pythia-1.3b" , "6GB" , False ],
204+ ["Pythia 800M" , "EleutherAI/pythia-800m" , "4GB" , False ],
205+ ["Pythia 350M Deduped" , "EleutherAI/pythia-350m-deduped" , "2GB" , False ],
206+ ["Pythia 350M" , "EleutherAI/pythia-350m" , "2GB" , False ],
207+ ["Pythia 125M Deduped" , "EleutherAI/pythia-125m-deduped" , "2GB" , False ],
208+ ["Pythia 125M" , "EleutherAI/pythia-125m" , "2GB" , False ],
209+ ["Pythia 19M Deduped" , "EleutherAI/pythia-19m-deduped" , "1GB" , False ],
210+ ["Pythia 19M" , "EleutherAI/pythia-19m" , "1GB" , False ],
211+ ["Return to Main Menu" , "mainmenu" , "" , True ],
212+ ],
192213 'gpt2list' : [
193214 ["GPT-2 XL" , "gpt2-xl" , "6GB" , False ],
194215 ["GPT-2 Large" , "gpt2-large" , "4GB" , False ],
@@ -452,6 +473,7 @@ def emit(*args, **kwargs):
452473 return _emit (* args , ** kwargs )
453474 except AttributeError :
454475 return socketio .emit (* args , ** kwargs )
476+ utils.emit = emit
455477
456478# marshmallow/apispec setup
457479from apispec import APISpec
@@ -756,6 +778,12 @@ def getmodelname():
756778 modelname = vars .model
757779 return modelname
758780
781+ #==================================================================#
782+ #  Get hidden size from model
783+ #==================================================================#
784+ def get_hidden_size_from_model(model):
785+     return model.get_input_embeddings().embedding_dim
786+
759787#==================================================================#
760788# Breakmodel configuration functions
761789#==================================================================#
@@ -873,7 +901,7 @@ def device_config(config):
873901 print (f"{ colors .RED } Please enter an integer between -1 and { n_layers } .{ colors .END } " )
874902
875903 logger .init_ok ("Final device configuration:" , status = "Info" )
876- device_list(n_layers)
904+ device_list(n_layers, primary=breakmodel.primary_device)
877905
878906 # If all layers are on the same device, use the old GPU generation mode
879907 while (len (breakmodel .gpu_blocks ) and breakmodel .gpu_blocks [- 1 ] == 0 ):
@@ -989,7 +1017,7 @@ def loadmodelsettings():
9891017 if ("nobreakmodel" in js ):
9901018 vars .nobreakmodel = js ["nobreakmodel" ]
9911019 if ("sampler_order" in js ):
992- sampler_order = vars.sampler_order
1020+ sampler_order = js["sampler_order"]
9931021 if (len (sampler_order ) < 7 ):
9941022 sampler_order = [6 ] + sampler_order
9951023 vars .sampler_order = sampler_order
@@ -1127,7 +1155,7 @@ def processsettings(js):
11271155 if ("andepth" in js ):
11281156 vars .andepth = js ["andepth" ]
11291157 if ("sampler_order" in js ):
1130- sampler_order = vars.sampler_order
1158+ sampler_order = js["sampler_order"]
11311159 if (len (sampler_order ) < 7 ):
11321160 sampler_order = [6 ] + sampler_order
11331161 vars .sampler_order = sampler_order
@@ -1354,6 +1382,8 @@ def general_startup(override_args=None):
13541382 args = parser .parse_args (shlex .split (os .environ ["KOBOLDAI_ARGS" ]))
13551383 else :
13561384 args = parser .parse_args ()
1385+
1386+ utils.args = args
13571387
13581388 set_logger_verbosity (args .verbosity )
13591389 quiesce_logger (args .quiesce )
@@ -1790,7 +1820,9 @@ def new_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwarg
17901820 if not args .no_aria2 :
17911821 utils .aria2_hook (pretrained_model_name_or_path , ** kwargs )
17921822 return old_from_pretrained (cls , pretrained_model_name_or_path , * model_args , ** kwargs )
1793- PreTrainedModel.from_pretrained = new_from_pretrained
1823+ if (not hasattr(PreTrainedModel, "_kai_patched")):
1824+     PreTrainedModel.from_pretrained = new_from_pretrained
1825+     PreTrainedModel._kai_patched = True
17941826 if (hasattr (modeling_utils , "get_checkpoint_shard_files" )):
17951827 old_get_checkpoint_shard_files = modeling_utils .get_checkpoint_shard_files
17961828 def new_get_checkpoint_shard_files (pretrained_model_name_or_path , index_filename , * args , ** kwargs ):
@@ -2424,9 +2456,6 @@ def get_original_key(key):
24242456 return lazy_load_callback
24252457
24262458
2427- def get_hidden_size_from_model(model):
2428-     return model.get_input_embeddings().embedding_dim
2429-
24302459 def maybe_low_cpu_mem_usage () -> Dict [str , Any ]:
24312460 if (packaging .version .parse (transformers_version ) < packaging .version .parse ("4.11.0" )):
24322461 logger .warning (f"Please upgrade to transformers 4.11.0 for lower RAM usage. You have transformers { transformers_version } ." )
@@ -2668,7 +2697,9 @@ def new_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwarg
26682697 if not args .no_aria2 :
26692698 utils .aria2_hook (pretrained_model_name_or_path , ** kwargs )
26702699 return old_from_pretrained (cls , pretrained_model_name_or_path , * model_args , ** kwargs )
2671- PreTrainedModel.from_pretrained = new_from_pretrained
2700+ if (not hasattr(PreTrainedModel, "_kai_patched")):
2701+     PreTrainedModel.from_pretrained = new_from_pretrained
2702+     PreTrainedModel._kai_patched = True
26722703 if (hasattr (modeling_utils , "get_checkpoint_shard_files" )):
26732704 old_get_checkpoint_shard_files = modeling_utils .get_checkpoint_shard_files
26742705 def new_get_checkpoint_shard_files (pretrained_model_name_or_path , index_filename , * args , ** kwargs ):
@@ -2914,7 +2945,7 @@ def lua_startup():
29142945 except lupa .LuaError as e :
29152946 print (colors .RED + "ERROR!" + colors .END )
29162947 vars .lua_koboldbridge .obliterate_multiverse ()
2917- logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
2948+ logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
29182949 logger .warning ("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts." )
29192950 exit (1 )
29202951 logger .init_ok ("LUA bridge" , status = "OK" )
@@ -3055,6 +3086,8 @@ def lua_compute_context(submission, entries, folders, kwargs):
30553086 force_use_txt = True ,
30563087 scan_story = kwargs ["scan_story" ] if kwargs ["scan_story" ] != None else True ,
30573088 )
3089+ if kwargs["include_anote"] is not None and not kwargs["include_anote"]:
3090+     anotetxt = ""
30583091 txt , _ , _ = calcsubmitbudget (
30593092 len (actions ),
30603093 winfo ,
@@ -3470,7 +3503,7 @@ def execute_inmod():
34703503 vars .lua_running = False
34713504 emit ('from_server' , {'cmd' : 'errmsg' , 'data' : 'Lua script error; please check console.' }, broadcast = True )
34723505 sendUSStatItems ()
3473- logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
3506+ logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
34743507 logger .warning ("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts." )
34753508 set_aibusy (0 )
34763509
@@ -3487,7 +3520,7 @@ def execute_outmod():
34873520 vars .lua_running = False
34883521 emit ('from_server' , {'cmd' : 'errmsg' , 'data' : 'Lua script error; please check console.' }, broadcast = True )
34893522 sendUSStatItems ()
3490- logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
3523+ logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
34913524 logger .warning ("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts." )
34923525 set_aibusy (0 )
34933526 if (vars .lua_koboldbridge .resend_settings_required ):
@@ -4907,7 +4940,7 @@ def generate(txt, minimum, maximum, found_entries=None):
49074940 vars .lua_running = False
49084941 emit ('from_server' , {'cmd' : 'errmsg' , 'data' : 'Lua script error; please check console.' }, broadcast = True )
49094942 sendUSStatItems ()
4910- logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
4943+ logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
49114944 logger .warning ("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts." )
49124945 else :
49134946 emit ('from_server' , {'cmd' : 'errmsg' , 'data' : 'Error occurred during generator call; please check console.' }, broadcast = True )
@@ -5415,7 +5448,7 @@ def tpumtjgenerate(txt, minimum, maximum, found_entries=None):
54155448 vars .lua_running = False
54165449 emit ('from_server' , {'cmd' : 'errmsg' , 'data' : 'Lua script error; please check console.' }, broadcast = True )
54175450 sendUSStatItems ()
5418- logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
5451+ logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
54195452 logger .warning ("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts." )
54205453 else :
54215454 emit ('from_server' , {'cmd' : 'errmsg' , 'data' : 'Error occurred during generator call; please check console.' }, broadcast = True )
0 commit comments