using json = nlohmann::json;

// ...

// The Azure configuration is read from the environment; missing values put the
// device in offline mode.
azure_deployment_id = std::getenv("DEPLOYMENT_ID");
azure_resource = std::getenv("AZURE_RESOURCE");
// ... (when AZURE_RESOURCE is unset:)
yCWarning(GPTDEVICE) << "Could not read env variable AZURE_RESOURCE. Device set in offline mode";
// ...
if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY"))
{
    yCWarning(GPTDEVICE) << "Invalid or no azure key provided. Device set in offline mode.";
    // ...
}
// The prompt file is read into a string.
auto stream = std::ifstream(prompt_file_fullpath);
// ... (when the file cannot be opened:)
yCWarning(GPTDEVICE) << "File:" << prompt_file_fullpath << "does not exist or path is invalid";
// ...
std::ostringstream sstr;
sstr << stream.rdbuf();
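// Hedged sketch: how a prompt file could be resolved through YARP's
// ResourceFinder and slurped with the same rdbuf() idiom used above. The
// context name and file name are illustrative, not the device's defaults.
#include <yarp/os/ResourceFinder.h>
#include <fstream>
#include <sstream>
#include <string>

std::string loadPrompt()
{
    yarp::os::ResourceFinder rf;
    rf.setDefaultContext("gptdevice");            // assumed context name
    std::string path = rf.findFile("prompt.txt"); // assumed file name

    std::ifstream stream(path);
    if (!stream)
    {
        return {}; // missing prompt: the caller decides how to react
    }
    std::ostringstream sstr;
    sstr << stream.rdbuf();
    return sstr.str();
}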
// The optional functions file is parsed as JSON and handed to setFunctions()
// (an example of the expected layout follows the setFunctions() body below).
auto stream = std::ifstream(functions_file_fullpath);
// ... (when the file cannot be opened:)
yCWarning(GPTDEVICE) << "File: " << functions_file_fullpath << "does not exist or path is invalid.";
// ...
json function_js = json::parse(stream);
if (!setFunctions(function_js))
{
    // ...
}
// ask(): the question is appended to the conversation, a chat completion is
// requested from the Azure endpoint, and the reply is folded back into the
// conversation; network or API failures are caught.
m_convo->AddUserData(question);
// ...
try
{
    // ...
    liboai::Response res = oai.Azure->create_chat_completion(
        /* arguments elided in this excerpt */);
    // ...
    m_convo->Update(res);
}
catch (const std::exception &e)
{
    // ...
}
// If the model answered with a function call, its name and JSON-encoded
// arguments are unpacked into parallel parameter/argument lists.
if (m_convo->LastResponseIsFunctionCall())
{
    // ...
    auto str_args = m_convo->GetLastFunctionCallArguments();
    std::string function_call_name = m_convo->GetLastFunctionCallName();
    auto j_args = json::parse(str_args);

    std::vector<std::string> parameters_list;
    std::vector<std::string> arguments_list;

    for (const auto& [key, val] : j_args.items())
    {
        parameters_list.push_back(key);
        arguments_list.push_back(val);
    }
    // ... (an LLM_Message named function_call_message describing the call is
    //      built from function_call_name and the argument lists, and remembered
    //      at its index in the conversation)
    m_function_called.insert({m_convo_length, function_call_message});
    // ...
    oAnswer = function_call_message;
}
// ... (otherwise the answer is built from the plain text reply:)
    // ...
    m_convo->GetLastResponse(),
    std::vector<std::string>(),
    std::vector<std::string>()};
// ...
return yarp::dev::ReturnValue_ok;
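// Hedged sketch: what the loop in ask() does to a function-call payload. The
// argument JSON below is purely illustrative.
#include <nlohmann/json.hpp>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    auto j_args = nlohmann::json::parse(R"({"location": "Genoa", "units": "metric"})");

    std::vector<std::string> parameters_list;
    std::vector<std::string> arguments_list;
    for (const auto& [key, val] : j_args.items())
    {
        parameters_list.push_back(key); // "location", "units"
        arguments_list.push_back(val);  // "Genoa", "metric"
    }

    for (std::size_t i = 0; i < parameters_list.size(); ++i)
    {
        std::cout << parameters_list[i] << " = " << arguments_list[i] << "\n";
    }
    return 0;
}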
// setPrompt(): refuse to overwrite an existing prompt, otherwise install it as
// the system message of the conversation.
// ... (when a prompt is already present:)
yCError(GPTDEVICE) << "A prompt is already set. You must delete conversation first";
// ...
try
{
    m_convo->SetSystemData(prompt);
}
catch (const std::exception &e)
{
    // ...
}
// ...
return yarp::dev::ReturnValue_ok;
// readPrompt(): the prompt is the "system" message of the conversation JSON.
auto &convo_json = m_convo->GetJSON();
for (auto &message : convo_json["messages"])
{
    if (message["role"] == "system")
    {
        oPrompt = message["content"];
        return yarp::dev::ReturnValue_ok;
    }
}
// getConversation(): copy every message of the conversation JSON into
// LLM_Message structures, then re-insert the recorded function-call answers at
// the positions where they originally occurred.
std::vector<yarp::dev::LLM_Message> conversation;

auto &convo_json = m_convo->GetJSON();

if (convo_json["messages"].empty())
{
    // ...
}

for (auto &message : convo_json["messages"])
{
    std::string type = message["role"].get<std::string>();
    std::string content = message["content"].get<std::string>();
    conversation.push_back(
        yarp::dev::LLM_Message{type, content, std::vector<std::string>(), std::vector<std::string>()});
}

for (const auto& [i, answer] : m_function_called)
{
    auto conv_it = conversation.begin();
    conversation.insert(std::next(conv_it, i), answer);
}

oConversation = conversation;
return yarp::dev::ReturnValue_ok;
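// Hedged usage sketch: printing a conversation obtained from getConversation().
// The field names (type, content) mirror the aggregate initialization above;
// the LLM_Message header path is assumed.
#include <yarp/dev/LLM_Message.h>
#include <iostream>
#include <vector>

void printConversation(const std::vector<yarp::dev::LLM_Message>& conversation)
{
    for (const auto& message : conversation)
    {
        std::cout << "[" << message.type << "] " << message.content << "\n";
    }
}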
// deleteConversation(): start from a fresh liboai conversation and forget the
// recorded function calls.
m_convo.reset(new liboai::Conversation());
// ...
m_function_called.clear();
return yarp::dev::ReturnValue_ok;
// refreshConversation(): the conversation is rebuilt while preserving the
// current prompt.
std::string current_prompt = "";
// ...
return yarp::dev::ReturnValue_ok;
bool GPTDevice::setFunctions(const json& function_json)
{
    // ...
    for (auto& function : function_json.items())
    {
        // Every function must at least carry a name and a description.
        if (!function.value().contains("name") || !function.value().contains("description"))
        {
            yCError(GPTDEVICE) << "Function missing mandatory parameters <name> and/or <description>";
            // ...
        }

        std::string function_name = function.value()["name"].template get<std::string>();
        std::string function_desc = function.value()["description"].template get<std::string>();

        if (!m_functions->AddFunction(function_name))
        {
            yCError(GPTDEVICE) << module_name + "::setFunctions(). Cannot add function.";
            // ...
        }

        if (!m_functions->SetDescription(function_name, function_desc))
        {
            yCError(GPTDEVICE) << module_name + "::setFunctions(). Cannot set description";
            // ...
        }

        // Optional parameters: each property contributes a name, a description and a type.
        if (function.value().contains("parameters"))
        {
            auto parameters = function.value()["parameters"]["properties"];
            std::vector<liboai::Functions::FunctionParameter> parameters_vec;
            for (auto& params : parameters.items())
            {
                liboai::Functions::FunctionParameter param;
                param.name = params.key();
                param.description = params.value()["description"];
                param.type = params.value()["type"];
                parameters_vec.push_back(param);
            }
            if (!m_functions->SetParameters(function_name, parameters_vec))
            {
                yCError(GPTDEVICE) << module_name + "::setFunction(). Cannot set parameters";
                // ...
            }
        }
    }
    // ...
    if (!m_convo->SetFunctions(*m_functions))
    {
        // ...
    }
    // ...
}
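// Hedged sketch: a functions file that satisfies the checks in setFunctions()
// above. Mandatory "name" and "description", optional "parameters" whose
// "properties" each carry a "description" and a "type". The tool itself
// ("go_to") is illustrative, not one shipped with the device.
#include <nlohmann/json.hpp>

static const nlohmann::json example_functions = nlohmann::json::parse(R"([
    {
        "name": "go_to",
        "description": "Send the robot to a named location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "Name of the destination"
                }
            }
        }
    }
])");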