-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathllama-cpp-hs.cabal
More file actions
94 lines (89 loc) · 2.85 KB
/
llama-cpp-hs.cabal
File metadata and controls
94 lines (89 loc) · 2.85 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
cabal-version: 1.12

-- This file has been generated from package.yaml by hpack version 0.38.0.
--
-- see: https://github.com/sol/hpack

name:           llama-cpp-hs
version:        0.1.0.0
synopsis:       Haskell FFI bindings to the llama.cpp LLM inference library
description:    Haskell bindings for [llama.cpp](https://github.com/ggerganov/llama.cpp), a performant, C++-based inference engine for running large language models (LLMs) like LLaMA, Mistral, Qwen, and others directly on local hardware.
category:       ai, ffi, natural-language-processing
homepage:       https://github.com/tusharad/llama-cpp-hs#readme
bug-reports:    https://github.com/tusharad/llama-cpp-hs/issues
author:         tushar
maintainer:     tusharadhatrao@gmail.com
copyright:      2025 tushar
license:        MIT
license-file:   LICENSE
build-type:     Simple
extra-source-files:
    README.md
    CHANGELOG.md

source-repository head
  type: git
  location: https://github.com/tusharad/llama-cpp-hs

library
  exposed-modules:
      Llama.Adapter
      Llama.Backend
      Llama.ChatTemplate
      Llama.Context
      Llama.Decode
      Llama.Internal.Foreign
      Llama.Internal.Foreign.Adapter
      Llama.Internal.Foreign.Backend
      Llama.Internal.Foreign.ChatTemplate
      Llama.Internal.Foreign.Context
      Llama.Internal.Foreign.Decode
      Llama.Internal.Foreign.GGML
      Llama.Internal.Foreign.KVCache
      Llama.Internal.Foreign.Model
      Llama.Internal.Foreign.Performance
      Llama.Internal.Foreign.Sampler
      Llama.Internal.Foreign.Split
      Llama.Internal.Foreign.State
      Llama.Internal.Foreign.Tokenize
      Llama.Internal.Foreign.Vocab
      Llama.Internal.Types
      Llama.Internal.Types.Params
      Llama.KVCache
      Llama.Model
      Llama.Performance
      Llama.Sampler
      Llama.Split
      Llama.State
      Llama.Tokenize
      Llama.Vocab
  other-modules:
      Paths_llama_cpp_hs
  hs-source-dirs:
      src
  ghc-options: -Wall -Wcompat -Widentities -Wincomplete-record-updates -Wincomplete-uni-patterns -Wmissing-home-modules -Wpartial-fields -Wredundant-constraints
  include-dirs:
      cbits
  c-sources:
      cbits/helper.c
  -- NOTE(review): hard-coded search path; assumes libllama is installed under
  -- /usr/local/lib — users on other prefixes must override via cabal flags.
  extra-lib-dirs:
      /usr/local/lib
  extra-libraries:
      llama
  build-depends:
      base >=4.7 && <5
    , bytestring >=0.9 && <0.13
    , derive-storable >=0.2 && <0.4
  default-language: Haskell2010

test-suite llama-cpp-hs-test
  type: exitcode-stdio-1.0
  main-is: Spec.hs
  other-modules:
      Paths_llama_cpp_hs
  hs-source-dirs:
      test
  ghc-options: -Wall -Wcompat -Widentities -Wincomplete-record-updates -Wincomplete-uni-patterns -Wmissing-home-modules -Wpartial-fields -Wredundant-constraints -threaded -rtsopts -with-rtsopts=-N
  build-depends:
      base >=4.7 && <5
    , bytestring >=0.9 && <0.13
    , derive-storable >=0.2 && <0.4
    , llama-cpp-hs
    , tasty
    , tasty-hunit
  default-language: Haskell2010