Add packaging & split code.

JOLIMAITRE Matthieu 2025-05-28 18:42:43 +02:00
parent 8346df8a57
commit 34123a7e5a
10 changed files with 445 additions and 93 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/target
/history

7
build.sh Executable file

@@ -0,0 +1,7 @@
#!/usr/bin/bash
set -e
cd "$(dirname "$(realpath "$0")")"
mkdir -p target
deno compile --allow-all --output=target/kub-tcp-service src/kub-tcp-service.ts

1
history.json Normal file

@@ -0,0 +1 @@
{"messages":[]}

24
package/aur/PKGBUILD Normal file

@@ -0,0 +1,24 @@
# Maintainer: JOLIMAITRE Matthieu <matthieu@imagevo.fr>
pkgname=kub-tcp
pkgver=1.0.0
pkgrel=1
pkgdesc="TODO list management service."
url="https://git.barnulf.net/mb/kub-tcp"
license=("GPL-3.0+")
arch=("x86_64")
makedepends=("deno")
depends=("ollama")
provides=("kub-tcp-service" "todo")
conflicts=("kub-tcp-service" "todo")
source=("git+https://git.barnulf.net/mb/kub-tcp.git#branch=master")
sha256sums=("SKIP")
options=(!strip !docs libtool emptydirs)
package() {
	# makepkg clones the git source above into "$srcdir/kub-tcp".
	kub-tcp/build.sh
	mkdir -p "$pkgdir/usr/bin/" "$pkgdir/usr/lib/systemd/user/"
	install -Dm755 kub-tcp/target/kub-tcp-service "$pkgdir/usr/bin/"
	install -Dm644 kub-tcp/kub-tcp.service "$pkgdir/usr/lib/systemd/user/"
}

17
src/kub-tcp-service.ts Executable file

@@ -0,0 +1,17 @@
#!/usr/bin/env -S deno run -A
import { History } from "./lib/history.ts"
import { pick_name_for } from "./lib/names.ts"
import { listen } from "./lib/listen.ts"
import { session } from "./lib/session.ts"
const history_limit = 500
const history_path = "./history.json"
const port = 4800
async function main() {
	// Reuse the persisted history when it exists, otherwise start a fresh one.
	const history = await History.load(history_path, history_limit) ?? await History.create(history_path, history_limit)
	listen(port, (conn) => session(conn, pick_name_for(conn.remoteAddr.hostname), history))
}
if (import.meta.main) await main()
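
For manual testing, a client sketch (not part of this commit) assuming the service runs locally on the port 4800 configured above; nc localhost 4800 works just as well:

// client.ts (hypothetical): send one prompt and print the streamed reply until interrupted (Ctrl-C).
const conn = await Deno.connect({ hostname: "127.0.0.1", port: 4800 })
await conn.write(new TextEncoder().encode("Hello Kub\n"))
for await (const chunk of conn.readable.pipeThrough(new TextDecoderStream())) {
	await Deno.stdout.write(new TextEncoder().encode(chunk))
}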

93
(deleted file)

@@ -1,93 +0,0 @@
#!/usr/bin/env -S deno run -A
import { TextLineStream } from "https://deno.land/std@0.224.0/streams/mod.ts"
import ollama, { Message } from "npm:ollama@0.5.15"
function main() {
const names = new Names()
const messages = new SpillingQueue<Message>(500)
listen(8080, (conn) => session(conn, names.pick_for(conn.remoteAddr.hostname), messages))
}
class SpillingQueue<T> {
public items: T[] = []
public constructor(
public limit: number,
) {}
public push(value: T) {
this.items.push(value)
if (this.items.length > this.limit) return this.items.splice(0, 1)[0]
}
}
async function listen(port: number, handler: (conn: Deno.TcpConn) => Promise<unknown>) {
console.log("Listening on port", port)
for await (const connection of Deno.listen({ transport: "tcp", port })) {
handler(connection).catch((except) => console.log("Session closed", except))
}
}
const header = (name: string) => `
KUB
Kub is a LLM running on TCP which includes all messages in the same context.
As Kub is having several discussions at once, he will identify you as : ${name}
`
async function session(conn: Deno.TcpConn, name: string, messages: SpillingQueue<Message>) {
console.log("Opening session for", name, "at", conn.remoteAddr.hostname, conn.remoteAddr.port)
conn.setNoDelay(true)
await conn.write(new TextEncoder().encode(header(name)))
async function prompt() {
await conn.write(new TextEncoder().encode("> "))
}
const lines = conn.readable
.pipeThrough(new TextDecoderStream())
.pipeThrough(new TextLineStream())
await prompt()
for await (const line of lines) {
await conn.write(new TextEncoder().encode("\n< "))
const user_content = `(message from:${name}) ${line}`
const user_message = { role: "user", content: user_content }
const response = await ollama.chat({
model: "llama3.2",
messages: request_messages(messages.items, user_message),
stream: true,
options: {},
})
let content = "", role = "assistant"
for await (const part of response) {
await conn.write(new TextEncoder().encode(part.message.content))
content += part.message.content
role = part.message.role
}
await conn.write(new TextEncoder().encode("\n\n"))
await prompt()
messages.push(user_message)
messages.push({ role, content })
}
}
function request_messages(history_messages: Message[], user_message: Message) {
return [
{ role: "system", content: "You are 'Kub', a bot which talks to people on the internet. Be concise and helpful." },
...history_messages,
user_message,
] as Message[]
}
class Names {
options = ["Adam", "Bastien", "Corentin", "Dominique", "Ethan"]
public pick_for(key: string) {
let sum = 0
for (const letter of key) sum += letter.charCodeAt(0)
const index = Math.floor((sum + Math.random() * 100) % this.options.length)
return this.options[index]
}
}
if (import.meta.main) main()

35
src/lib/history.ts Normal file

@@ -0,0 +1,35 @@
import { Message } from "npm:ollama@0.5.15"
export class History {
	private constructor(
		public messages: Message[],
		private path: string,
		private limit: number,
	) {}
	// Append a message and drop the oldest entries once the limit is exceeded.
	public push(value: Message) {
		this.messages.push(value)
		while (this.messages.length > this.limit) this.messages.splice(0, 1)
	}
	// Restore a history from disk; returns undefined if the file is missing or unreadable.
	public static async load(path: string, limit: number) {
		try {
			const content = await Deno.readTextFile(path)
			const { messages } = JSON.parse(content)
			return new History(messages, path, limit)
		} catch (except) {
			console.error("Failed to load history:", except)
			return undefined
		}
	}
	// Start with an empty history and write it out immediately.
	public static async create(path: string, limit: number) {
		const result = new History([], path, limit)
		await result.save()
		return result
	}
	private async save() {
		await Deno.writeTextFile(this.path, JSON.stringify({ messages: this.messages }))
	}
}

6
src/lib/listen.ts Normal file

@@ -0,0 +1,6 @@
export async function listen(port: number, handler: (conn: Deno.TcpConn) => Promise<unknown>) {
	console.log("Listening on port", port)
	// Accept connections forever; each handler runs concurrently and a failure only closes its own session.
	for await (const connection of Deno.listen({ transport: "tcp", port })) {
		handler(connection).catch((except) => console.log("Session closed", except))
	}
}
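
For reference, a hypothetical usage sketch showing the expected handler shape (the port 4801 and the echo behavior are made up for the example):

// echo.ts (hypothetical): echo every received chunk back to the client.
import { listen } from "./listen.ts"
await listen(4801, async (conn) => {
	for await (const chunk of conn.readable) await conn.write(chunk)
})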

284
src/lib/names.ts Normal file

@@ -0,0 +1,284 @@
export function pick_name_for(key: string) {
	// Sum the character codes of the address and add a small random offset,
	// then index into the name list below (the same address may therefore map to different names).
	let sum = 0
	for (const letter of key) sum += letter.charCodeAt(0)
	const index = Math.floor((sum + Math.random() * 100) % options.length)
	return options[index]
}
const options = [
"Abel",
"Absolon",
"Achille",
"Adam",
"Adolphe",
"Adrien",
"Aimé",
"Alain",
"Albert",
"Alexandre",
"Alexis",
"Alfred",
"Alison",
"Alphonse",
"Amaury",
"Ambroise",
"Amédée",
"Anatole",
"André",
"Anselme",
"Antoine",
"Apollinaire",
"Aristide",
"Armand",
"Armel",
"Arnaud",
"Auguste",
"Augustin",
"Aurèle",
"Aurelien",
"Baptiste",
"Barnabé",
"Barthélémy",
"Basile",
"Bastien",
"Baudouin",
"Benjamin",
"Benoit",
"Bernard",
"Bertrand",
"Blaise",
"Boniface",
"Brice",
"Bruno",
"Camille",
"Célestin",
"Cesaire",
"César",
"Charles",
"Charlot",
"Christian",
"Christophe",
"Claude",
"Clément",
"Colombain",
"Colombe",
"Constant",
"Constantin",
"Corentin",
"Corin",
"Cyrille",
"Damien",
"Daniel",
"David",
"Denis",
"Dennis",
"Désiré",
"Didier",
"Dieudonné",
"Dimitri",
"Diodore",
"Dion",
"Dominique",
"Donat",
"Donatien",
"Edgar",
"Edgard",
"Edmond",
"édouard",
"Eloi",
"émile",
"émilien",
"Emmanuel",
"Eric",
"Ermenegilde",
"Esmé",
"étienne",
"Eugène",
"Eustache",
"évariste",
"Evrard",
"Fabien",
"Fabrice",
"Felicien",
"Félix",
"Ferdinand",
"Fernand",
"Fiacre",
"Firmin",
"Florence",
"Florentin",
"Florian",
"Franck",
"François",
"Frédéric",
"Gabin",
"Gabriel",
"Gaétan",
"Gaspard",
"Gaston",
"Gautier",
"Geoffroi",
"Georges",
"Gerald",
"Gérard",
"Géraud",
"Germain",
"Gervais",
"Gervaise",
"Ghislain",
"Gilbert",
"Gilles",
"Godelieve",
"Gratien",
"Grégoire",
"Guillaume",
"Gustave",
"Guy",
"Hector",
"Henri",
"Herbert",
"Hercule",
"Hervé",
"Hilaire",
"Hippolyte",
"Honoré",
"Horace",
"Hubert",
"Hugues",
"Humbert",
"Ignace",
"Iréné",
"Isidore",
"Jacques",
"Jean",
"Jeannot",
"Jérémie",
"Jérôme",
"Joachim",
"Joël",
"Joseph",
"Josue",
"Jourdain",
"Jules",
"Julien",
"Juste",
"Justin",
"Lambert",
"Laurence",
"Laurent",
"Lazare",
"Léandre",
"Léon",
"Léonard",
"Léonce",
"Léopold",
"Lionel",
"Loic",
"Lothaire",
"Louis",
"Loup",
"Luc",
"Lucas",
"Lucien",
"Marc",
"Marcel",
"Marcellin",
"Marin",
"Marius",
"Martin",
"Mathieu",
"Mathis",
"Matthieu",
"Maurice",
"Maxime",
"Maximilien",
"Michel",
"Modeste",
"Modestine",
"Narcisse",
"Nazaire",
"Nicholas",
"Nicodème",
"Nicolas",
"Noah",
"Noé",
"Noel",
"Odilon",
"Olivier",
"Onesime",
"Osanne",
"Ozanne",
"Papillion",
"Pascal",
"Paschal",
"Patrice",
"Patrick",
"Paul",
"Perceval",
"Philbert",
"Philibert",
"Philippe",
"Pierre",
"Pierrick",
"Pons",
"Prosper",
"Quentin",
"Rainier",
"Raoul",
"Raphaël",
"Raphael",
"Raymond",
"Régis",
"Rémi",
"Rémy",
"Renard",
"Renaud",
"René",
"Reynaud",
"Richard",
"Robert",
"Roch",
"Rodolph",
"Rodolphe",
"Rodrigue",
"Roger",
"Roland",
"Romain",
"Sacha",
"Samuel",
"Sébastien",
"Serge",
"Séverin",
"Simon",
"Simone",
"Stéphane",
"Sylvain",
"Sylvestre",
"Telesphore",
"Theirn",
"Théo",
"Théodore",
"Théophile",
"Thibault",
"Thierry",
"Thomas",
"Timothée",
"Toussaint",
"Tristan",
"Ulrich",
"Urbain",
"Valentin",
"Valère",
"Valéry",
"Vespasien",
"Victor",
"Vincent",
"Vivien",
"Xavier",
"Yanick",
"Yann",
"Yannic",
"Yannick",
"Yves",
"Zacharie",
]

69
src/lib/session.ts Normal file

@@ -0,0 +1,69 @@
import { TextLineStream } from "https://deno.land/std@0.224.0/streams/mod.ts"
import { writeAll } from "https://deno.land/std@0.224.0/io/write_all.ts"
import ollama, { Message } from "npm:ollama@0.5.15"
import { History } from "./history.ts"
const header = (name: string) => `
KUB
Kub is an LLM running on TCP which includes all messages in the same context.
As Kub has several discussions at once, he will know you as: ${name}
`
export async function session(conn: Deno.TcpConn, name: string, history: History) {
	console.log("Opening session for", name, "at", conn.remoteAddr.hostname, conn.remoteAddr.port)
	conn.setNoDelay(true)
	await write_encoded(conn, header(name))
	await write_encoded(conn, "> ")
	const lines = conn.readable
		.pipeThrough(new TextDecoderStream())
		.pipeThrough(new TextLineStream())
	for await (const line of lines) {
		if (line === "") continue
		// Drop the session on absurdly long inputs.
		if (line.length > 10_000) return
		console.log("prompted '", line, "' by ", conn.remoteAddr.hostname)
		await write_encoded(conn, "\n< ")
		const user_message = make_message(name, line)
		const response_parts = await get_response_parts(history, user_message)
		// Stream the model output to the socket while accumulating it for the history.
		let content = "", role = "assistant"
		for await (const part of response_parts) {
			await write_encoded(conn, part.message.content)
			content += part.message.content
			role = part.message.role
		}
		await write_encoded(conn, "\n\n> ")
		history.push(user_message)
		history.push({ role, content })
	}
}
async function write_encoded(connection: Deno.TcpConn, text: string) {
	const encoded = new TextEncoder().encode(text)
	await writeAll(connection, encoded)
}
async function get_response_parts(messages: History, user_message: { role: string; content: string }) {
	// Ask ollama for a streamed chat completion over the whole shared history.
	return await ollama.chat({
		model: "llama3.2",
		messages: request_messages(messages.messages, user_message),
		stream: true,
		options: {},
	})
}
function make_message(name: string, line: string) {
	// Tag the prompt with the speaker's name so the shared context stays attributable.
	const user_content = `(message from:${name}) ${line}`
	const user_message = { role: "user", content: user_content }
	return user_message
}
function request_messages(history_messages: Message[], user_message: Message) {
	return [
		{ role: "system", content: "You are 'Kub', a bot which talks to people on the internet. Be concise and helpful." },
		...history_messages,
		user_message,
	] as Message[]
}
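
For illustration, the array handed to ollama.chat for a prompt from a visitor named "Adam" would look roughly like this (a sketch, with the history abridged):

// hypothetical shape of request_messages(history.messages, user_message)
[
	{ role: "system", content: "You are 'Kub', a bot which talks to people on the internet. Be concise and helpful." },
	// ...previous user and assistant messages from history.messages...
	{ role: "user", content: "(message from:Adam) Hello Kub" },
]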