@@ -3156,14 +3156,14 @@ int main(int argc, char **argv) {
             {
                 res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin"));
                 const json body = json::parse(req.body);
-                json input;
+                json prompt;
                 if (body.count("content") != 0)
                 {
-                    input = body["content"];
+                    prompt = body["content"];
                 }
                 else
                 {
-                    input = "";
+                    prompt = "";
                 }

                 // create and queue the task
@@ -3171,7 +3171,7 @@ int main(int argc, char **argv) {
                 {
                     const int id_task = llama.queue_tasks.get_new_id();
                     llama.queue_results.add_waiting_task_id(id_task);
-                    llama.request_completion(id_task, {{"prompt", input}}, true, -1);
+                    llama.request_completion(id_task, {{"prompt", prompt}}, true, -1);

                     // get the result
                     task_result result = llama.queue_results.recv(id_task);
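For reference, the JSON handling that this rename touches can be exercised in isolation with nlohmann::json (the library the server uses for `json`). The sketch below is not part of the patch; it only mirrors the fallback-to-empty-string logic and the `{{"prompt", prompt}}` payload construction, with the task-queue plumbing omitted and a made-up request body standing in for `req.body`.

```cpp
// Standalone sketch of the renamed JSON handling; assumes nlohmann::json.
// The sample request body is hypothetical, and the task queue is omitted.
#include <iostream>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main() {
    // Stand-in for req.body as parsed in the handler.
    const json body = json::parse(R"({"content": "Hello, world"})");

    // Same fallback as the patched handler: use "content" if present, else "".
    json prompt;
    if (body.count("content") != 0) {
        prompt = body["content"];
    } else {
        prompt = "";
    }

    // The payload that the diff hands to request_completion.
    const json payload = {{"prompt", prompt}};
    std::cout << payload.dump() << std::endl;  // {"prompt":"Hello, world"}
    return 0;
}
```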