Mirror of https://github.com/ggml-org/llama.cpp.git
server: enable token array inputs for OAI API (#15001)
@@ -4249,9 +4249,6 @@ int main(int argc, char ** argv) {
 
         // process prompt
         std::vector<server_tokens> inputs;
-        if (oaicompat && !prompt.is_string()) {
-            throw std::runtime_error("prompt must be a string");
-        }
 
         if (oaicompat && has_mtmd) {
             // multimodal
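The change removes the server-side check that rejected non-string prompts on the OpenAI-compatible endpoints, so the prompt field of a request may now also be an array of token IDs instead of a string. Below is a minimal sketch of the kind of prompt handling this enables, using nlohmann::json (which the llama.cpp server uses); the prompt_to_tokens helper, the example token IDs, and the omitted tokenization step are illustrative assumptions, not the server's actual implementation.

// Sketch only: accept either a string prompt or an array of token IDs,
// mirroring the behaviour the removed check used to forbid.
#include <cstdint>
#include <stdexcept>
#include <string>
#include <vector>

#include <nlohmann/json.hpp>

using json = nlohmann::json;

static std::vector<int32_t> prompt_to_tokens(const json & prompt) {
    std::vector<int32_t> tokens;
    if (prompt.is_string()) {
        // a real server would run the model's tokenizer on the string here;
        // omitted in this sketch
    } else if (prompt.is_array()) {
        // with the check removed, an OAI-compatible request may carry the
        // prompt as an array of token IDs, e.g. "prompt": [9038, 2501, 263]
        for (const auto & el : prompt) {
            if (!el.is_number_integer()) {
                throw std::runtime_error("prompt array must contain integer token IDs");
            }
            tokens.push_back(el.get<int32_t>());
        }
    } else {
        throw std::runtime_error("unsupported prompt type");
    }
    return tokens;
}

int main() {
    // parse a request body with a token-array prompt and convert it
    json body = json::parse(R"({"prompt": [9038, 2501, 263]})");
    auto tokens = prompt_to_tokens(body.at("prompt"));
    return tokens.empty() ? 1 : 0;
}

With the check gone, a request body such as {"prompt": [9038, 2501, 263]} can reach the server's prompt processing instead of being rejected with "prompt must be a string".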