diff --git a/packages/llama-cpp/build.sh b/packages/llama-cpp/build.sh
new file mode 100644
index 000000000000000..7abc9b8fc552597
--- /dev/null
+++ b/packages/llama-cpp/build.sh
@@ -0,0 +1,26 @@
+TERMUX_PKG_HOMEPAGE=/~https://github.com/ggerganov/llama.cpp
+TERMUX_PKG_DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
+# Upstream llama.cpp is distributed under the MIT license, not GPL-3.0.
+TERMUX_PKG_LICENSE=MIT
+TERMUX_PKG_MAINTAINER=@termux
+TERMUX_PKG_VERSION=294f424
+TERMUX_PKG_SRCURL=$TERMUX_PKG_HOMEPAGE/archive/master-$TERMUX_PKG_VERSION.tar.gz
+TERMUX_PKG_SHA256=95effaa75fdf1e7fb4819500f3aa6a9c970dbe36392a51a4ead904660841cd93
+TERMUX_PKG_AUTO_UPDATE=true
+# openmpi is needed because LLAMA_MPI=ON; libopenblas backs LLAMA_BLAS.
+TERMUX_PKG_DEPENDS="openmpi, libopenblas"
+TERMUX_PKG_RECOMMENDS="python-numpy, python-sentencepiece"
+TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
+-DCMAKE_INSTALL_PREFIX=$TERMUX_PREFIX
+-DLLAMA_MPI=ON
+-DBUILD_SHARED_LIBS=ON
+-DLLAMA_BLAS=ON
+-DLLAMA_BLAS_VENDOR=OpenBLAS
+"
+
+# Rename upstream's generically-named binaries so they do not clash with
+# other packages. Use absolute paths: the build system does not guarantee
+# that the working directory is $TERMUX_PREFIX/bin during this step.
+termux_step_post_make_install() {
+	mv -- "$TERMUX_PREFIX/bin/main" "$TERMUX_PREFIX/bin/llama"
+	mv -- "$TERMUX_PREFIX/bin/server" "$TERMUX_PREFIX/bin/llama-server"
+}