GitHub Repository: koboldai/koboldai-client
Path: blob/main/userscripts/examples/various_examples.lua
-- Example script
-- Description goes on
--[[subsequent lines including
in multiline comments]]

kobold = require("bridge")() -- This line is optional and is only for EmmyLua type annotations

-- You can import libraries that are in extern/lualibs/
local inspect = require("inspect")
local mt19937ar = require("mt19937ar")
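-- "inspect" pretty-prints Lua tables as strings (used below to show the
-- generated sequences), and "mt19937ar" provides a seedable Mersenne Twister
-- random number generator (used to pick top-p values in genmod()).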


---@class KoboldUserScript
local userscript = {}


local twister = mt19937ar.new()
local seed = math.random(0, 2147483647)
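-- inmod() below feeds this same seed back into the twister on every submission,
-- so the sequence of top-p values chosen in genmod() repeats for each generation
-- while the script stays loaded.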

local token_num = 0
local lifetime_token_num = 0

-- This gets run when the user submits a string to the AI (right after the input
-- formatting is applied but before the string is actually sent to the AI)
function userscript.inmod()
    warn("\nINPUT MODIFIER")
    token_num = 0
    twister:init_genrand(seed)
    print("Submitted text: " .. kobold.submission) -- You can also write to kobold.submission to alter the user's input
    print("top-p sampling value: " .. kobold.settings.settopp)
end

-- This gets run every time the AI generates a token (before the token is
-- actually sampled, so this is where you can make certain tokens more likely
-- to appear than others)
function userscript.genmod()
    warn("\nGENERATION MODIFIER")

    print("Tokens generated in the current generation: " .. token_num)
    print("Tokens generated since this script started up: " .. lifetime_token_num)

    local r = twister:genrand_real3()
    print("Setting top-p sampling value to " .. r)
    kobold.settings.settopp = r
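    -- In the reference mt19937ar API, genrand_real3() generates on the open
    -- interval (0, 1), so the top-p value written here stays strictly between
    -- 0 and 1.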

    local generated = {}
    for sequence_number, tokens in ipairs(kobold.generated) do
        generated[sequence_number] = kobold.decode(tokens)
    end
    print("Current generated strings: " .. inspect(generated))

    if token_num == math.floor(kobold.settings.genamt/2) then
        print("\n\n\n\n\n\nMaking all subsequent tokens more likely to be exclamation marks...")
    end
    if token_num >= math.floor(kobold.settings.genamt/2) then
        for i = 1, kobold.settings.numseqs do
            kobold.logits[i][1] = 13.37
        end
    end
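    -- From the halfway point of the requested generation length (genamt) onward,
    -- the loop above pins one logit in every sequence to a large raw (pre-softmax)
    -- value, which makes the corresponding token (an exclamation mark, per the
    -- message printed above) far more likely to be sampled.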

    token_num = token_num + 1
    lifetime_token_num = lifetime_token_num + 1
end

-- This gets run right before the output formatting is applied after generation
-- is finished
function userscript.outmod()
    warn("\nOUTPUT MODIFIER")
    for chunk in kobold.story:reverse_iter() do
        print(chunk.num, chunk.content)
    end
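    -- kobold.story:reverse_iter() walks the story chunks starting from the most
    -- recent one; each chunk exposes its number (chunk.num) and its text
    -- (chunk.content).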
    print("Wrapping first output in brackets")
    kobold.outputs[1] = "[" .. kobold.outputs[1] .. "]"
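    -- kobold.outputs is indexable per generated sequence, and because outmod()
    -- runs before output formatting, the rewritten string above is what the
    -- output formatter receives.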
end


return userscript
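-- KoboldAI picks up the returned table and calls its inmod()/genmod()/outmod()
-- entries at the points described in the comments above.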