main: use dprintf and add --ctrl-token-no-out and --ctrl-token-fd-out
mofosyne committed May 20, 2024
1 parent bcd24f8 commit 9f445a7
Showing 3 changed files with 29 additions and 22 deletions.
13 changes: 12 additions & 1 deletion common/common.cpp
@@ -905,6 +905,14 @@ bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_pa
params.interactive_specials = true;
return true;
}
if (arg == "--ctrl-token-no-out") {
params.ctrl_token_no_out = true;
return true;
}
if (arg == "--ctrl-token-fd-out") {
params.ctrl_token_fd_out = true;
return true;
}
if (arg == "--embedding") {
params.embedding = true;
return true;
@@ -1433,7 +1441,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) {
printf(" --version show version and build info\n");
printf(" -i, --interactive run in interactive mode\n");
printf(" --interactive-specials allow special tokens in user text, in interactive mode\n");
printf(" --interactive-first run in interactive mode and wait for input right away\n");
printf(" --ctrl-token-no-out control tokens output disabled\n");
#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
printf(" --ctrl-token-fd-out control tokens sent to file descriptor 3 out of band\n");
#endif
printf(" -cnv, --conversation run in conversation mode (does not print special tokens and suffix/prefix)\n");
printf(" -ins, --instruct run in instruction mode (use with Alpaca models)\n");
printf(" -cml, --chatml run in chatml mode (use with ChatML-compatible models)\n");
2 changes: 2 additions & 0 deletions common/common.h
@@ -142,6 +142,8 @@ struct gpt_params {
bool use_color = false; // use color to distinguish generations and inputs
bool interactive = false; // interactive mode
bool interactive_specials = false; // whether to allow special tokens from user, during interactive mode
bool ctrl_token_no_out = false; // disable control token output
bool ctrl_token_fd_out = false; // enable control token output and redirect it to file descriptor 3
bool conversation = false; // conversation mode (does not print special tokens and suffix/prefix)
bool chatml = false; // chatml mode (used for models trained on chatml syntax)
bool prompt_cache_all = false; // save user input and generations to prompt cache
36 changes: 15 additions & 21 deletions examples/main/main.cpp
@@ -18,6 +18,7 @@
#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
#include <signal.h>
#include <unistd.h>
#define CONTROL_TOKEN_FILE_DESCRIPTOR (3)
#elif defined (_WIN32)
#define WIN32_LEAN_AND_MEAN
#ifndef NOMINMAX
@@ -528,21 +529,6 @@ int main(int argc, char ** argv) {
exit(1);
}

// Create the pipe for special token handling
int stok_pipe[2] = {0};
if (pipe(stok_pipe) == -1) {
fprintf(stderr, "%s: failed to initialize special token output stream\n", __func__);
exit(1);
}

close(stok_pipe[0]); // Read Special Token Not In Use

FILE *special_token_stream_output_fd = fdopen(stok_pipe[1], "w");
if (special_token_stream_output_fd == NULL) {
fprintf(stderr, "%s: failed to open special token output stream\n", __func__);
exit(1);
}

while ((n_remain != 0 && !is_antiprompt) || params.interactive) {
// predict
if (!embd.empty()) {
@@ -758,12 +744,22 @@ int main(int argc, char ** argv) {
const std::string token_str = llama_token_to_piece(ctx, id);

// Console/Stream Output
if (llama_token_is_control_token(llama_get_model(ctx), id)) {
// Stream Output Token To Special Token Output
fprintf(special_token_stream_output_fd, "%s", token_str.c_str());
} else {
if (!llama_token_is_control_token(llama_get_model(ctx), id)) {
// Stream Output Token To Standard Output
fprintf(stdout, "%s", token_str.c_str());
} else if (!params.ctrl_token_no_out) {
#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
if (params.ctrl_token_fd_out) {
// Stream Control Token To Special Token Output. Useful for debugging control token behaviour
dprintf(CONTROL_TOKEN_FILE_DESCRIPTOR, "%s", token_str.c_str());
}
else
#endif
if (!params.conversation && sparams.grammar.empty())
{
// Stream Control Token To Standard Output as long as we are not in a conversation or grammar output
fprintf(stdout, "%s", token_str.c_str());
}
}

// Record Displayed Tokens To Log
@@ -983,8 +979,6 @@ int main(int argc, char ** argv) {
llama_sampling_free(ctx_sampling);
llama_backend_free();

fclose(special_token_stream_output_fd);

#ifndef LOG_DISABLE_LOGS
LOG_TEE("Log end\n");
#endif // LOG_DISABLE_LOGS
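
The key mechanism in this commit is writing control tokens directly to file descriptor 3 with dprintf, replacing the earlier pipe/fdopen plumbing that main.cpp previously set up unconditionally. Below is a minimal standalone sketch of that pattern. It is not part of the commit: the token string, flag values, and file name are illustrative, and the helper emit_token only mirrors the branching added in examples/main/main.cpp under stated assumptions.

// Minimal sketch (not from the commit) of out-of-band control-token output on
// Unix-like systems. Token strings, flag values, and file names are illustrative.
#define _POSIX_C_SOURCE 200809L
#include <stdbool.h>
#include <stdio.h>      // dprintf, fprintf

#define CONTROL_TOKEN_FILE_DESCRIPTOR (3)

// Mirrors the branching added in examples/main/main.cpp: normal tokens always go
// to stdout; control tokens are dropped, sent to fd 3, or printed inline,
// depending on the flags.
static void emit_token(const char * token_str, bool is_control,
                       bool ctrl_token_no_out, bool ctrl_token_fd_out,
                       bool conversation_or_grammar) {
    if (!is_control) {
        fprintf(stdout, "%s", token_str);            // ordinary output
    } else if (!ctrl_token_no_out) {
        if (ctrl_token_fd_out) {
            // Out of band: whoever launched the process decides where fd 3
            // goes, e.g. `./a.out 3> control_tokens.txt` in a POSIX shell.
            dprintf(CONTROL_TOKEN_FILE_DESCRIPTOR, "%s", token_str);
        } else if (!conversation_or_grammar) {
            fprintf(stdout, "%s", token_str);        // inline, as before
        }
    }
}

int main(void) {
    emit_token("Hello",      /*is_control=*/false, false, true, false);
    emit_token("<|im_end|>", /*is_control=*/true,  false, true, false);
    fprintf(stdout, "\n");
    return 0;
}

Under this sketch's assumptions, redirecting with 3> control_tokens.txt makes the shell open that file on fd 3 before the program starts, so control tokens land in the file while regular text stays on stdout; if fd 3 is not redirected, the dprintf calls fail and nothing is written there.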
