META6.json
{
    "auth": "zef:apogee",
    "authors": [
        "Matt Doughty"
    ],
    "build-depends": [
    ],
    "depends": [
        "Cro::HTTP:ver<0.8.11>:auth<zef:cro>",
        "Digest::SHA256::Native:ver<1.0.0>",
        "JSON::Fast:ver<0.19>:auth<cpan:TIMOTIMO>",
        "Template::Jinja2:auth<zef:apogee>",
        "Tokenizers:auth<zef:apogee>",
        "UUID::V4:ver<1.0.0>:auth<zef:masukomi>"
    ],
    "description": "Simple framework for LLM inferencing",
    "license": "Artistic-2.0",
    "name": "LLM::Chat",
    "provides": {
        "LLM::Chat": "lib/LLM/Chat.rakumod",
        "LLM::Chat::Backend": "lib/LLM/Chat/Backend.rakumod",
        "LLM::Chat::Backend::KoboldCpp": "lib/LLM/Chat/Backend/KoboldCpp.rakumod",
        "LLM::Chat::Backend::Mock": "lib/LLM/Chat/Backend/Mock.rakumod",
        "LLM::Chat::Backend::OpenAICommon": "lib/LLM/Chat/Backend/OpenAICommon.rakumod",
        "LLM::Chat::Backend::OpenRouter": "lib/LLM/Chat/Backend/OpenRouter.rakumod",
        "LLM::Chat::Backend::Response": "lib/LLM/Chat/Backend/Response.rakumod",
        "LLM::Chat::Backend::Response::OpenRouter": "lib/LLM/Chat/Backend/Response/OpenRouter.rakumod",
        "LLM::Chat::Backend::Response::OpenRouter::Stream": "lib/LLM/Chat/Backend/Response/OpenRouter/Stream.rakumod",
        "LLM::Chat::Backend::Response::Stream": "lib/LLM/Chat/Backend/Response/Stream.rakumod",
        "LLM::Chat::Backend::Settings": "lib/LLM/Chat/Backend/Settings.rakumod",
        "LLM::Chat::Conversation": "lib/LLM/Chat/Conversation.rakumod",
        "LLM::Chat::Conversation::Message": "lib/LLM/Chat/Conversation/Message.rakumod",
        "LLM::Chat::Debug": "lib/LLM/Chat/Debug.rakumod",
        "LLM::Chat::Template": "lib/LLM/Chat/Template.rakumod",
        "LLM::Chat::Template::ChatML": "lib/LLM/Chat/Template/ChatML.rakumod",
        "LLM::Chat::Template::Gemma2": "lib/LLM/Chat/Template/Gemma2.rakumod",
        "LLM::Chat::Template::Jinja2": "lib/LLM/Chat/Template/Jinja2.rakumod",
        "LLM::Chat::Template::Llama3": "lib/LLM/Chat/Template/Llama3.rakumod",
        "LLM::Chat::Template::Llama4": "lib/LLM/Chat/Template/Llama4.rakumod",
        "LLM::Chat::Template::MistralV7": "lib/LLM/Chat/Template/MistralV7.rakumod",
        "LLM::Chat::TokenCounter": "lib/LLM/Chat/TokenCounter.rakumod"
    },
    "resources": [
    ],
    "source-url": "https://github.com/m-doughty/LLM_Chat.git",
    "support": {
        "bugtracker": "https://github.com/m-doughty/LLM_Chat/issues",
        "source": "https://github.com/m-doughty/LLM_Chat"
    },
    "tags": [
        "llm",
        "inferencing",
        "ai"
    ],
    "test-depends": [
    ],
    "version": "0.5.1"
}
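
The version- and authority-qualified entries under "depends" use Raku's long-name syntax, so the same strings serve both the installer and ordinary use statements. A minimal sketch of working with this metadata, assuming a local checkout of the repository and an installed zef client; the commands and use statements below are illustrative and are not shipped by the distribution itself:

    # From the repository root:
    #   zef install --deps-only .    # install only the dependencies declared in "depends"
    #   zef install .                # build and install LLM::Chat itself

    # A pinned dependency can be loaded with the same long name that appears in "depends":
    use JSON::Fast:ver<0.19>:auth<cpan:TIMOTIMO>;

    # The distribution's top-level module, as declared under "provides":
    use LLM::Chat;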