Diffstat (limited to 'src/ai_providers')
-rw-r--r--   src/ai_providers/DeepSeek.cpp    4
-rw-r--r--   src/ai_providers/openAI.cpp     37
2 files changed, 38 insertions, 3 deletions
diff --git a/src/ai_providers/DeepSeek.cpp b/src/ai_providers/DeepSeek.cpp
index a1857e9..2bc4dde 100644
--- a/src/ai_providers/DeepSeek.cpp
+++ b/src/ai_providers/DeepSeek.cpp
@@ -46,7 +46,7 @@ static bool _DeepSeek_query_with_file(char* query, size_t query_length, char* fi
     size_t body_size = file_size + QUERY_BUFFER_SIZE;
     char* body = (char*)memops::alloc(body_size);
     strops::format(body, body_size,
-        "{\"model\":\"deepseek-reasoner\", \"messages\": [ { \"role\": \"user\", \"content\": \"%s\" } ] }", query_escaped);
+        "{\"model\":\"%s\", \"messages\": [ { \"role\": \"user\", \"content\": \"%s\" } ] }", administration::get_ai_service().model_name, query_escaped);
 
     httplib::Headers headers;
     headers.insert(std::make_pair("Authorization", std::string("Bearer ") + api_key));
@@ -113,6 +113,8 @@ static bool _DeepSeek_upload_file(char* file_path, char* file_id, size_t file_id
 
 importer::ai_provider_impl _deepseek_api_provider = {
     "DeekSeek",
+    "deepseek-reasoner",
     _DeepSeek_upload_file,
     _DeepSeek_query_with_file,
+    0,
 };
\ No newline at end of file
diff --git a/src/ai_providers/openAI.cpp b/src/ai_providers/openAI.cpp
index b55f191..fba050c 100644
--- a/src/ai_providers/openAI.cpp
+++ b/src/ai_providers/openAI.cpp
@@ -35,8 +35,8 @@ static bool _openAI_query_with_file(char* query, size_t query_length, char* file
     size_t body_size = query_length + 200;
     char* body = (char*)memops::alloc(body_size);
     strops::format(body, body_size,
-        "{\"model\":\"gpt-5-nano\", \"input\": [ { \"role\": \"user\", \"content\": [ { \"type\": \"input_file\", \"file_id\": \"%s\" }, "
-        "{ \"type\": \"input_text\", \"text\": \"%s\" } ] } ] }", file_id, query_escaped);
+        "{\"model\":\"%s\", \"input\": [ { \"role\": \"user\", \"content\": [ { \"type\": \"input_file\", \"file_id\": \"%s\" }, "
+        "{ \"type\": \"input_text\", \"text\": \"%s\" } ] } ] }", administration::get_ai_service().model_name, file_id, query_escaped);
 
     httplib::Headers headers;
     headers.insert(std::make_pair("Authorization", std::string("Bearer ") + api_key));
@@ -168,8 +168,41 @@ static bool _openAI_upload_file(char* file_path, char* file_id, size_t file_id_l
     return 1;
 }
 
+static bool _openAI_get_available_models(importer::model_list_request* buffer)
+{
+    const char *api_key = administration::get_ai_service().api_key_public;
+
+    httplib::SSLClient cli("api.openai.com", 443);
+
+    httplib::Headers headers;
+    headers.insert(std::make_pair("Authorization", std::string("Bearer ") + api_key));
+
+    httplib::Result res = cli.Get("/v1/models", headers);
+    if (!res || res->status != 200) {
+        logger::error("ERROR Failed to get models list.");
+        logger::error(res->body.c_str());
+        return 0;
+    }
+
+    char* completion_body_response = (char*)res->body.c_str();
+
+    u32 count = 0;
+    char model_name[MAX_LEN_SHORT_DESC];
+
+    while(1) {
+        if (!strops::get_json_value(completion_body_response, "id", model_name, MAX_LEN_SHORT_DESC, count++)) break;
+        if (count == MAX_MODEL_LIST_RESULT_COUNT) break;
+
+        strops::copy(buffer->result[buffer->result_count++], model_name, MAX_LEN_SHORT_DESC);
+    }
+
+    return 1;
+}
+
 importer::ai_provider_impl _chatgpt_api_provider = {
     "OpenAI",
+    "gpt-5-nano",
     _openAI_upload_file,
     _openAI_query_with_file,
+    _openAI_get_available_models,
 };
\ No newline at end of file
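As a usage note, the new third callback slot is optional, so any caller dispatching through it has to tolerate a null pointer (DeepSeek registers 0 above, OpenAI registers _openAI_get_available_models). The sketch below illustrates that, assuming the field names from the earlier sketch and the result / result_count members used inside _openAI_get_available_models; the helper name list_provider_models is invented for illustration and does not exist in this repository.

// Hypothetical caller, not code from this repository.
static bool list_provider_models(const importer::ai_provider_impl& provider,
                                 importer::model_list_request* buffer)
{
    // The callback is an optional table entry: providers that do not
    // support model listing register 0, so null-check before calling.
    if (!provider.get_available_models) {
        logger::error("Provider does not support model listing.");
        return 0;
    }

    // Assumes the provider appends into buffer->result[buffer->result_count++],
    // as _openAI_get_available_models does above, so reset the count first.
    buffer->result_count = 0;
    return provider.get_available_models(buffer);
}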
