Commit fea2cf0

llama_cpp/llama_chat_format.py: free up image embed given a new image
1 parent 0597a75 commit fea2cf0

llama_cpp/llama_chat_format.py

Lines changed: 5 additions & 1 deletion
@@ -1165,6 +1165,10 @@ def __call__(
                             )
                         )
                 if content["type"] == "image_url":
+                    if self.current_image_embed is not None:
+                        with suppress_stdout_stderr(disable=self.verbose):
+                            self._llava_cpp.llava_image_embed_free(self.current_image_embed)
+
                     image_bytes = (
                         self.load_image(content["image_url"]["url"])
                         if isinstance(content["image_url"], dict)
@@ -1216,7 +1220,7 @@ def __call__(
                 llama.n_tokens = n_past.value
             except:
                 with suppress_stdout_stderr(disable=self.verbose):
-                    self._llava_cpp.llava_image_embed_free(embed)
+                    self._llava_cpp.llava_image_embed_free(self.current_image_embed)
 
             if message["role"] == "assistant" and message["content"] is not None:
                 llama.eval(
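
In short, the handler previously created a new native embed for each image_url block but only released the old one in the error path, so chats with more than one image leaked an embed per extra image. Below is a minimal, self-contained sketch of the free-before-replace pattern the commit introduces, assuming the handler caches the last embed in self.current_image_embed. The _FakeLlavaBindings class and its simplified llava_image_embed_make_with_bytes signature are stand-ins so the sketch runs without the native llava library; only llava_image_embed_free mirrors a call that actually appears in the diff.

# Sketch of the free-before-replace pattern this commit introduces,
# assuming the handler caches the most recent native embed in
# self.current_image_embed. _FakeLlavaBindings is a stand-in so the
# example runs without the native llava library.

class _FakeLlavaBindings:
    """Stand-in for the ctypes bindings exposed as self._llava_cpp."""

    def llava_image_embed_make_with_bytes(self, image_bytes: bytes) -> dict:
        # The real binding returns a pointer to a C-side llava_image_embed.
        return {"n_bytes": len(image_bytes)}

    def llava_image_embed_free(self, embed: dict) -> None:
        # The real binding releases the C-side buffer behind the pointer.
        print(f"freed embed built from {embed['n_bytes']} image bytes")


class ImageEmbedCache:
    """Owns at most one native image embed at a time."""

    def __init__(self) -> None:
        self._llava_cpp = _FakeLlavaBindings()
        self.current_image_embed = None

    def embed_image(self, image_bytes: bytes) -> dict:
        # Free the previous embed *before* allocating a new one -- the same
        # ordering the commit adds to the image_url branch -- so that chats
        # with several images do not leak one embed per image.
        if self.current_image_embed is not None:
            self._llava_cpp.llava_image_embed_free(self.current_image_embed)
        self.current_image_embed = self._llava_cpp.llava_image_embed_make_with_bytes(
            image_bytes
        )
        return self.current_image_embed


if __name__ == "__main__":
    cache = ImageEmbedCache()
    cache.embed_image(b"\xff" * 1024)  # first image: nothing to free yet
    cache.embed_image(b"\xff" * 2048)  # second image: frees the 1024-byte embed

The second hunk complements this by making the exception path free self.current_image_embed as well, so both the normal path and the error path release the same cached pointer.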
