@@ -4,6 +4,28 @@ import packages/docutils/highlite, terminal
 
 var all_models: JsonNode = nil
 
+proc show_help() =
+    echo """
+Usage: ./program [OPTIONS]
+
+A command-line interface for interacting with language models using libchatllm.
+
+Options:
+  -m, --model <model_id>          Specify the model to use (e.g., :qwen:1.5b)
+  --embedding_model <model_id>    Specify the embedding model to use
+  --reranker_model <model_id>     Specify the reranker model to use
+  -p, --prompt <prompt>           Set the initial prompt for the model
+  -i, --interactive               Enable interactive mode
+  --reversed_role                 Reverse the role of user and AI in interactive mode
+  --multi                         Allow multi-line input in interactive mode
+  -h, --help                      Show this help message
+
+Examples:
+  ./program -m :qwen:1.5b -p "Hello, world!"
+  ./program --interactive --model :qwen:1.5b
+  ./program --help
+"""
+
 proc get_model_url_on_modelscope(url: seq[string]): string =
     let proj = url[0]
     let fn = url[1]
@@ -190,6 +212,10 @@ var reversed_role = false
 var use_multiple_lines = false
 
 for i in 1 .. paramCount():
+    if paramStr(i) in ["-h", "--help"]:
+        show_help()
+        quit(0)
+
     if paramStr(i) in ["-i", "--interactive"]:
         interactive = true
 
@@ -239,4 +265,4 @@ if interactive:
 else:
     discard chatllm_user_input(chat, prompt.cstring)
 
-chatllm_show_statistics(chat)
+chatllm_show_statistics(chat)