1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
|
{ lib
, buildPythonPackage
, fetchFromGitHub
, setuptools
, aiohttp
, fastapi
, httpx
, markdown2
, nh3
, numpy
, prompt-toolkit
, pydantic
, requests
, rich
, shortuuid
, tiktoken
, uvicorn
, anthropic
, openai
, ray
, wandb
, einops
, gradio
, accelerate
, peft
, sentencepiece
, torch
, transformers
, protobuf
}:

# FastChat (PyPI name "fschat"): the serving/evaluation platform behind
# Vicuna and Chatbot Arena.
buildPythonPackage rec {
  pname = "fschat";
  version = "0.2.28";

  # Upstream builds from pyproject.toml.
  format = "pyproject";

  src = fetchFromGitHub {
    owner = "lm-sys";
    repo = "FastChat";
    rev = "v${version}";
    hash = "sha256-nTP4zY6mJykzKb6LBWosg77mwE33vq9eiYSpAlZU5NI=";
  };

  nativeBuildInputs = [ setuptools ];

  # Core runtime dependencies (the unconditional install_requires set).
  propagatedBuildInputs = [
    aiohttp
    fastapi
    httpx
    markdown2
    nh3
    numpy
    prompt-toolkit
    pydantic
    requests
    rich
    shortuuid
    tiktoken
    uvicorn
  ];

  # Optional extras, mirroring upstream's [project.optional-dependencies].
  passthru.optional-dependencies = {
    llm_judge = [
      anthropic
      openai
      ray
    ];
    train = [
      # flash-attn is intentionally omitted here.
      wandb
      einops
    ];
    webui = [ gradio ];
    model_worker = [
      accelerate
      peft
      sentencepiece
      torch
      transformers
      protobuf
    ];
  };

  pythonImportsCheck = [ "fastchat" ];

  # Upstream's test suite needs network access, which the sandbox forbids.
  doCheck = false;

  meta = with lib; {
    description = "An open platform for training, serving, and evaluating large language models. Release repo for Vicuna and Chatbot Arena";
    homepage = "https://github.com/lm-sys/FastChat";
    license = licenses.asl20;
    maintainers = with maintainers; [ happysalada ];
  };
}
|