Skip to content

Commit

Permalink
Merge pull request #172915 from Homebrew/llama.cpp-new
Browse files Browse the repository at this point in the history
llama.cpp b3003 (new formula)
  • Loading branch information
BrewTestBot authored May 27, 2024
2 parents 179f1ed + 7759a1f commit 67cd1c0
Show file tree
Hide file tree
Showing 2 changed files with 69 additions and 0 deletions.
1 change: 1 addition & 0 deletions .github/autobump.txt
Original file line number Diff line number Diff line change
Expand Up @@ -1374,6 +1374,7 @@ literate-git
little-cms2
livekit
livekit-cli
llama.cpp
llm
llvm
lmdb
Expand Down
68 changes: 68 additions & 0 deletions Formula/l/llama.cpp.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
class LlamaCpp < Formula
  desc "LLM inference in C/C++"
  homepage "https://github.com/ggerganov/llama.cpp"
  # Build from the Git checkout (not a tarball): CMake uses Git metadata
  # to generate version information at configure time.
  url "https://github.com/ggerganov/llama.cpp.git",
      tag:      "b3003",
      revision: "d298382ad977ec89c8de7b57459b9d7965d2c272"
  # Upstream tags are sequential build numbers, not semver.
  version "b3003"
  license "MIT"
  head "https://github.com/ggerganov/llama.cpp.git", branch: "master"

  bottle do
    sha256 cellar: :any,                 arm64_sonoma:   "8393f0f181d2e4874d6de3c2134ccde0f843dbdfefb5aa647acc0dd01629fecb"
    sha256 cellar: :any,                 arm64_ventura:  "2869e829ff336df54b963f290a55a9093e43c961301b25e4aac799c2cc32f12b"
    sha256 cellar: :any,                 arm64_monterey: "6d01d125f7366d138e91c2e1d2b7c7017f6673efa349b58d0abffc12f153b855"
    sha256 cellar: :any,                 sonoma:         "cc27f441a2ca1a8514b00b470affd3bee28e7a0f6bd3ab8cd297e6344f384a4a"
    sha256 cellar: :any,                 ventura:        "6ad4b827154f548407ff12bd569aa1bc37ca787873d74a00b7763a7e0beb687a"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "d76e6219a8d3b76b8736724f2904d59895db7b0b4bd32dcc7098fada637fc69c"
  end

  depends_on "cmake" => :build
  uses_from_macos "curl"

  on_linux do
    depends_on "openblas"
  end

  def install
    # -DLLAMA_NATIVE: disable -march=native for bottles so binaries are
    #   portable across CPUs; enable it for local from-source builds.
    # -DLLAMA_ACCELERATE: use Apple's Accelerate framework on macOS.
    #   (Fixed spelling — was misspelled ACCELLERATE, which CMake ignored
    #   as an unused variable.)
    # -DLLAMA_BLAS/-DLLAMA_METAL: OpenBLAS backend on Linux, Metal on macOS.
    args = %W[
      -DBUILD_SHARED_LIBS=ON
      -DLLAMA_LTO=ON
      -DLLAMA_CCACHE=OFF
      -DLLAMA_ALL_WARNINGS=OFF
      -DLLAMA_NATIVE=#{build.bottle? ? "OFF" : "ON"}
      -DLLAMA_ACCELERATE=#{OS.mac? ? "ON" : "OFF"}
      -DLLAMA_BLAS=#{OS.linux? ? "ON" : "OFF"}
      -DLLAMA_BLAS_VENDOR=OpenBLAS
      -DLLAMA_METAL=#{OS.mac? ? "ON" : "OFF"}
      -DLLAMA_METAL_EMBED_LIBRARY=ON
      -DLLAMA_CURL=ON
      -DCMAKE_INSTALL_RPATH=#{rpath}
    ]
    # Pin the Metal deployment target to the build host's macOS version.
    args << "-DLLAMA_METAL_MACOSX_VERSION_MIN=#{MacOS.version}" if OS.mac?

    system "cmake", "-S", ".", "-B", "build", *args, *std_cmake_args
    system "cmake", "--build", "build"
    system "cmake", "--install", "build"

    # Upstream installs generically-named executables ("main", "quantize", ...).
    # Move them to libexec and expose namespaced "llama-*" symlinks in bin to
    # avoid PATH collisions; "main" becomes plain "llama".
    libexec.install bin.children
    libexec.children.each do |file|
      next unless file.executable?

      new_name = if file.basename.to_s == "main"
        "llama"
      else
        "llama-#{file.basename}"
      end

      bin.install_symlink file => new_name
    end
  end

  test do
    # Smoke test: download a tiny model from Hugging Face via the built-in
    # curl support (-DLLAMA_CURL=ON) and run a short CPU-only generation.
    system bin/"llama", "--hf-repo", "ggml-org/tiny-llamas",
           "-m", "stories15M-q4_0.gguf",
           "-n", "400", "-p", "I", "-ngl", "0"
  end
end

0 comments on commit 67cd1c0

Please sign in to comment.