From 38a77c88378de74f72517d2fd2420d19df14c4a0 Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Fri, 22 Nov 2024 21:30:21 -0800 Subject: [PATCH] Display name of model at top of web ui --- llamafile/server/flagz.cpp | 2 ++ llamafile/server/worker.cpp | 7 ++++++- llamafile/server/www/chatbot.js | 8 ++++++++ llamafile/server/www/index.html | 5 ++++- llamafile/string.cpp | 8 ++++++++ llamafile/string.h | 1 + 6 files changed, 29 insertions(+), 2 deletions(-) diff --git a/llamafile/server/flagz.cpp b/llamafile/server/flagz.cpp index 1e81039f54..5e6f462e97 100644 --- a/llamafile/server/flagz.cpp +++ b/llamafile/server/flagz.cpp @@ -18,6 +18,7 @@ #include "client.h" #include "llama.cpp/llama.h" #include "llamafile/llamafile.h" +#include "llamafile/string.h" #include "llamafile/server/json.h" namespace lf { @@ -27,6 +28,7 @@ bool Client::flagz() { jt::Json json; + json["model"] = stripext(basename(FLAG_model)); json["prompt"] = FLAG_prompt; json["no_display_prompt"] = FLAG_no_display_prompt; json["nologo"] = FLAG_nologo; diff --git a/llamafile/server/worker.cpp b/llamafile/server/worker.cpp index 668a115c5a..b04741a4ea 100644 --- a/llamafile/server/worker.cpp +++ b/llamafile/server/worker.cpp @@ -24,6 +24,7 @@ #include "llamafile/server/tokenbucket.h" #include "llamafile/threadlocal.h" #include "llamafile/trust.h" +#include <errno.h> #include #include #include @@ -109,7 +110,11 @@ void Worker::handle() { if ((client_.fd_ = server_->accept(&client_.client_ip_)) == -1) { - SLOG("accept returned %m"); + if (IsWindows() && errno == ENOTSOCK) { + // Server::shutdown() calls close() on the listening socket + } else { + SLOG("accept returned %m"); + } return; } diff --git a/llamafile/server/www/chatbot.js b/llamafile/server/www/chatbot.js index f0fc3699bf..19104f2c44 100644 --- a/llamafile/server/www/chatbot.js +++ b/llamafile/server/www/chatbot.js @@ -21,6 +21,7 @@ const DEFAULT_SYSTEM_PROMPT = "detailed, and polite answers to the human's questions."; const DEFAULT_FLAGZ = { + "model": 
null, "prompt": null, "no_display_prompt": false, "frequency_penalty": 0, @@ -342,6 +343,12 @@ function getSystemPrompt() { return prompt; } +function updateModelInfo() { + if (flagz.model) { + document.getElementById("model").textContent = flagz.model; + } +} + function startChat(history) { chatHistory = history; chatMessages.innerHTML = ""; @@ -356,6 +363,7 @@ function startChat(history) { async function chatbot() { flagz = await fetchFlagz(); + updateModelInfo(); startChat([{ role: "system", content: getSystemPrompt() }]); sendButton.addEventListener("click", sendMessage); stopButton.addEventListener("click", stopMessage); diff --git a/llamafile/server/www/index.html b/llamafile/server/www/index.html index 8866a784ec..b01c5fb16e 100644 --- a/llamafile/server/www/index.html +++ b/llamafile/server/www/index.html @@ -8,7 +8,10 @@
-

[logo] llamafile

+

+ [llamafile] + llamafile +

diff --git a/llamafile/string.cpp b/llamafile/string.cpp index 8462d79d8f..ef67d58144 100644 --- a/llamafile/string.cpp +++ b/llamafile/string.cpp @@ -102,6 +102,14 @@ std::string basename(const std::string_view &path) { } } +std::string stripext(const std::string &path) { + size_t i = path.size(); + while (i--) + if (path[i] == '.') + return path.substr(0, i); + return path; +} + std::string_view extname(const std::string_view &path) { size_t i = path.size(); while (i--) diff --git a/llamafile/string.h b/llamafile/string.h index 5b9b49eb2d..9a74b23510 100644 --- a/llamafile/string.h +++ b/llamafile/string.h @@ -33,6 +33,7 @@ std::string format(const char *, ...) __attribute__((format(printf, 1, 2))); std::string iso8601(struct timespec); std::string join(const std::vector &, const std::string_view &); std::string resolve(const std::string_view &, const std::string_view &); +std::string stripext(const std::string &); std::string tolower(const std::string_view &); std::string_view extname(const std::string_view &); void append_wchar(std::string *, wchar_t);