diff --git a/flake.nix b/flake.nix index 87dd826..77623b0 100755 --- a/flake.nix +++ b/flake.nix @@ -176,10 +176,12 @@ $imgui/*.cpp \ -DRENDERER_OPENGL \ -DWINDOW_GLFW \ - -DMAINGUI_DYNAMIC \ + -DOLLAMA_DYNAMIC \ -fpic -shared \ -I src -I include -I $imgui -I . \ -lGL -lglfw -lGLEW \ + $(curl-config --cflags) \ + $(curl-config --libs) \ -Wall \ -o $name ''; diff --git a/include/pch.hpp b/include/pch.hpp index 97d7ef4..9738403 100644 --- a/include/pch.hpp +++ b/include/pch.hpp @@ -7,11 +7,12 @@ #include #include #include -#include + +#include #include #include #include -//#include +#include //#include #include diff --git a/modules/Ollama/src/Ollama.cpp b/modules/Ollama/src/Ollama.cpp index dccf092..bd70cc3 100644 --- a/modules/Ollama/src/Ollama.cpp +++ b/modules/Ollama/src/Ollama.cpp @@ -1,6 +1,8 @@ #include "Ollama.h" #include "modules/ImguiModule/src/ImguiModule.h" +#include <curl/curl.h> + Ollama::Ollama(Archimedes::App* a, void* h) : Archimedes::Module(a, h) { name = "Ollama"; @@ -10,7 +12,17 @@ Ollama::Ollama(Archimedes::App* a, void* h) : Archimedes::Module(a, h) { } Ollama::~Ollama() { + if(curl) { + curl_easy_cleanup(curl); + curl = nullptr; + } + curl_global_cleanup(); +} +static size_t WriteCallback(void* contents, size_t size, size_t nmemb, std::ostream* userp) +{ + userp->write(static_cast<const char*>(contents), size * nmemb); + return size * nmemb; } void Ollama::onLoad() { @@ -24,14 +36,31 @@ void Ollama::onLoad() { ImGui::SetCurrentContext(im->getContext()); + curl_global_init(CURL_GLOBAL_ALL); curl = curl_easy_init(); + if(curl) { + curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback); + //curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 1L); + //curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, 2L); + curl_easy_setopt(curl, CURLOPT_POST, 1L); + } } void Ollama::run() { static std::string s, url, response = ""; + static nlohmann::json sendObj; + + static std::ostringstream oss; + + static nlohmann::json jsonObj; + + static std::future<CURLcode> result; + + static bool inFlight = 
false; + ImGui::Begin("Ollama Module"); ImGui::InputText("url: ", &url); @@ -39,10 +68,34 @@ void Ollama::run() { ImGui::InputTextMultiline("prompt: ", &s); if(ImGui::Button("send")) { - + sendObj["model"] = "llama3.2"; + sendObj["stream"] = false; + sendObj["prompt"] = s; + curl_easy_setopt(curl, CURLOPT_COPYPOSTFIELDS, sendObj.dump().c_str()); + result = std::async(curl_easy_perform, curl); + inFlight = true; } - ImGui::Text("%s", response.c_str()); + ImGui::Text("%s", jsonObj["response"].dump().c_str()); ImGui::End(); + + if(curl) { + curl_easy_setopt(curl, CURLOPT_URL, (url + "/api/generate").c_str()); + curl_easy_setopt(curl, CURLOPT_WRITEDATA, &response); + } + + if(inFlight && result.wait_for(std::chrono::seconds(0)) == std::future_status::ready) { + CURLcode code = result.get(); + if(code != CURLE_OK) { + std::cerr << "curl_easy_perform() failed!: " << curl_easy_strerror(code) << std::endl; + app->stopModule(getName()); + return; + } else { + jsonObj = nlohmann::json::parse(response); + + std::cout << "Full json object:\n" << jsonObj.dump() << std::endl; + } + inFlight = false; + } }