From 3987a12ec1cb0259639ea435cd523f01e976cc09 Mon Sep 17 00:00:00 2001
From: Leodanis Pozo Ramos
Date: Tue, 6 Jan 2026 16:51:04 +0100
Subject: [PATCH 1/2] Sample code for the article on Ollama and Python

---
 ollama-python-sdk/README.md        |  3 ++
 ollama-python-sdk/chat.py          | 11 +++++++
 ollama-python-sdk/chat_context.py  | 13 ++++++++
 ollama-python-sdk/generate_code.py |  0
 ollama-python-sdk/generate_text.py |  8 +++++
 ollama-python-sdk/streams.py       | 15 +++++++++
 ollama-python-sdk/tool_calling.py  | 51 ++++++++++++++++++++++++++++++
 7 files changed, 101 insertions(+)
 create mode 100644 ollama-python-sdk/README.md
 create mode 100644 ollama-python-sdk/chat.py
 create mode 100644 ollama-python-sdk/chat_context.py
 create mode 100644 ollama-python-sdk/generate_code.py
 create mode 100644 ollama-python-sdk/generate_text.py
 create mode 100644 ollama-python-sdk/streams.py
 create mode 100644 ollama-python-sdk/tool_calling.py

diff --git a/ollama-python-sdk/README.md b/ollama-python-sdk/README.md
new file mode 100644
index 0000000000..4dd7472a90
--- /dev/null
+++ b/ollama-python-sdk/README.md
@@ -0,0 +1,3 @@
+# How to Integrate Local LLMs With Ollama and Python
+
+This folder provides the code examples for the Real Python tutorial [How to Integrate Local LLMs With Ollama and Python](https://realpython.com/ollama-python/).
diff --git a/ollama-python-sdk/chat.py b/ollama-python-sdk/chat.py
new file mode 100644
index 0000000000..0f6fb67d46
--- /dev/null
+++ b/ollama-python-sdk/chat.py
@@ -0,0 +1,11 @@
+from ollama import chat
+
+messages = [
+    {
+        "role": "user",
+        "content": "Explain what Python is in one sentence.",
+    },
+]
+
+response = chat(model="llama3.2:latest", messages=messages)
+print(response.message.content)
diff --git a/ollama-python-sdk/chat_context.py b/ollama-python-sdk/chat_context.py
new file mode 100644
index 0000000000..314c5b1dc0
--- /dev/null
+++ b/ollama-python-sdk/chat_context.py
@@ -0,0 +1,13 @@
+from ollama import chat
+
+messages = [
+    {"role": "system", "content": "You are an expert Python tutor."},
+    {"role": "user", "content": "Define list comprehensions in a sentence."},
+]
+response = chat(model="llama3.2:latest", messages=messages)
+print(response.message.content)
+
+messages.append(response.message)  # Keep context
+messages.append({"role": "user", "content": "Provide a short, practical example."})
+response = chat(model="llama3.2:latest", messages=messages)
+print(response.message.content)
diff --git a/ollama-python-sdk/generate_code.py b/ollama-python-sdk/generate_code.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/ollama-python-sdk/generate_text.py b/ollama-python-sdk/generate_text.py
new file mode 100644
index 0000000000..6011850322
--- /dev/null
+++ b/ollama-python-sdk/generate_text.py
@@ -0,0 +1,8 @@
+from ollama import generate
+
+response = generate(
+    model="llama3.2:latest",
+    prompt="Explain what Python is in one sentence.",
+)
+
+print(response.response)
diff --git a/ollama-python-sdk/streams.py b/ollama-python-sdk/streams.py
new file mode 100644
index 0000000000..c37cd839ae
--- /dev/null
+++ b/ollama-python-sdk/streams.py
@@ -0,0 +1,15 @@
+from ollama import chat
+
+stream = chat(
+    model="llama3.2:latest",
+    messages=[
+        {
+            "role": "user",
+            "content": "Explain Python dataclasses with a quick example.",
+        }
+    ],
+    stream=True,
+)
+
+for chunk in stream:
+    print(chunk.message.content, end="", flush=True)
diff --git a/ollama-python-sdk/tool_calling.py b/ollama-python-sdk/tool_calling.py
new file mode 100644
index 0000000000..7002a81080
--- /dev/null
+++ b/ollama-python-sdk/tool_calling.py
@@ -0,0 +1,51 @@
+import math
+
+from ollama import chat
+
+
+# Define a tool as a Python function
+def square_root(number: float) -> float:
+    """Calculate the square root of a number.
+
+    Args:
+        number: The number to calculate the square root for.
+
+    Returns:
+        The square root of the number.
+    """
+    return math.sqrt(number)
+
+
+messages = [
+    {
+        "role": "user",
+        "content": "What is the square root of 36?",
+    }
+]
+
+response = chat(
+    model="llama3.2:latest",
+    messages=messages,
+    tools=[square_root],  # Pass the tools along with the prompt
+)
+
+# Append the response for context
+messages.append(response.message)
+
+if response.message.tool_calls:
+    tool = response.message.tool_calls[0]
+    # Call the tool
+    result = square_root(float(tool.function.arguments["number"]))
+
+    # Append the tool result
+    messages.append(
+        {
+            "role": "tool",
+            "tool_name": tool.function.name,
+            "content": str(result),
+        }
+    )
+
+    # Obtain the final answer
+    final_response = chat(model="llama3.2:latest", messages=messages)
+    print(final_response.message.content)

From 5616673268a24c3e9adfe8aa21e71f5edc02ad81 Mon Sep 17 00:00:00 2001
From: Leodanis Pozo Ramos
Date: Tue, 6 Jan 2026 16:53:09 +0100
Subject: [PATCH 2/2] Fix linter issues

---
 ollama-python-sdk/chat_context.py | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/ollama-python-sdk/chat_context.py b/ollama-python-sdk/chat_context.py
index 314c5b1dc0..f426d37ed2 100644
--- a/ollama-python-sdk/chat_context.py
+++ b/ollama-python-sdk/chat_context.py
@@ -1,13 +1,24 @@
 from ollama import chat
 
 messages = [
-    {"role": "system", "content": "You are an expert Python tutor."},
-    {"role": "user", "content": "Define list comprehensions in a sentence."},
+    {
+        "role": "system",
+        "content": "You are an expert Python tutor.",
+    },
+    {
+        "role": "user",
+        "content": "Define list comprehensions in a sentence.",
+    },
 ]
 response = chat(model="llama3.2:latest", messages=messages)
 print(response.message.content)
 
 messages.append(response.message)  # Keep context
-messages.append({"role": "user", "content": "Provide a short, practical example."})
+messages.append(
+    {
+        "role": "user",
+        "content": "Provide a short, practical example.",
+    }
+)
 response = chat(model="llama3.2:latest", messages=messages)
 print(response.message.content)