Commit b5de84b

Avoid code duplication in context (re)creation
1 parent c07c1ba commit b5de84b

2 files changed: +8, -15 lines


libraries/YarpPlugins/LlamaGPT/DeviceDriverImpl.cpp

Lines changed: 2 additions & 11 deletions
@@ -97,18 +97,9 @@ bool LlamaGPT::open(yarp::os::Searchable & config)
         }
     }
 
-    // initialize the context
-    llama_context_params ctx_params = llama_context_default_params();
-    ctx_params.n_ctx = m_tokens;
-    ctx_params.n_batch = m_tokens;
-    ctx_params.no_perf = false;
-
-    ctx = llama_init_from_model(model, ctx_params);
-
-    if (!ctx)
+    if (!clear(false)) // initialize the context
     {
-        yCError(LLAMA) << "Failed to create context";
-        return 1;
+        return false;
     }
 
     // initialize the sampler
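Taken together with the hunk above, context creation in the device-open path is now delegated to clear(). A minimal sketch of how the relevant excerpt of LlamaGPT::open() might read after this commit; the surrounding option parsing, model loading and sampler setup are elided, so treat it as an illustration rather than the full method:

// Sketch: open() no longer builds the llama context itself; it delegates
// to clear(false), which logs its own error on failure.
bool LlamaGPT::open(yarp::os::Searchable & config)
{
    // ... parse options, load the model, set m_tokens ...

    if (!clear(false)) // initialize the context
    {
        return false; // clear() already reported the error via yCError
    }

    // initialize the sampler
    // ...
    return true;
}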

libraries/YarpPlugins/LlamaGPT/LlamaGPT.cpp

Lines changed: 6 additions & 4 deletions
@@ -46,11 +46,13 @@ bool LlamaGPT::clear(bool preservePrompt, bool recreateContext)
 
     prev_len = 0;
 
-    if (recreateContext && ctx)
+    if (recreateContext)
     {
-        llama_free(ctx);
+        if (ctx)
+        {
+            llama_free(ctx);
+        }
 
-        // initialize the context
         llama_context_params ctx_params = llama_context_default_params();
         ctx_params.n_ctx = m_tokens;
         ctx_params.n_batch = m_tokens;
@@ -60,7 +62,7 @@ bool LlamaGPT::clear(bool preservePrompt, bool recreateContext)
 
         if (!ctx)
         {
-            yCError(LLAMA) << "Failed to recreate context";
+            yCError(LLAMA) << "Failed to (re)create context";
             return 1;
         }
     }
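For reference, a minimal sketch of the consolidated LlamaGPT::clear() after this commit. It assumes recreateContext defaults to true in the header (so the open() call above can pass only preservePrompt), that model, ctx and m_tokens are class members, and it returns false on failure where the file at this point still has return 1; it illustrates the deduplicated flow rather than copying the file verbatim:

bool LlamaGPT::clear(bool preservePrompt, bool recreateContext)
{
    // ... reset the chat history here, honoring preservePrompt ...
    prev_len = 0;

    if (recreateContext)
    {
        if (ctx)
        {
            llama_free(ctx); // drop the previous context, if any
        }

        // (re)create the context with the same parameters used on open
        llama_context_params ctx_params = llama_context_default_params();
        ctx_params.n_ctx = m_tokens;
        ctx_params.n_batch = m_tokens;
        ctx_params.no_perf = false;

        ctx = llama_init_from_model(model, ctx_params);

        if (!ctx)
        {
            yCError(LLAMA) << "Failed to (re)create context";
            return false;
        }
    }

    return true;
}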
