diff --git a/ollama-python-sdk/README.md b/ollama-python-sdk/README.md
new file mode 100644
index 0000000000..4dd7472a90
--- /dev/null
+++ b/ollama-python-sdk/README.md
@@ -0,0 +1,3 @@
+# How to Integrate Local LLMs With Ollama and Python
+
+This folder provides the code examples for the Real Python tutorial [How to Integrate Local LLMs With Ollama and Python](https://realpython.com/ollama-python/).
diff --git a/ollama-python-sdk/chat.py b/ollama-python-sdk/chat.py
new file mode 100644
index 0000000000..0f6fb67d46
--- /dev/null
+++ b/ollama-python-sdk/chat.py
@@ -0,0 +1,11 @@
+from ollama import chat
+
+messages = [
+    {
+        "role": "user",
+        "content": "Explain what Python is in one sentence.",
+    },
+]
+
+response = chat(model="llama3.2:latest", messages=messages)
+print(response.message.content)
diff --git a/ollama-python-sdk/chat_context.py b/ollama-python-sdk/chat_context.py
new file mode 100644
index 0000000000..f426d37ed2
--- /dev/null
+++ b/ollama-python-sdk/chat_context.py
@@ -0,0 +1,24 @@
+from ollama import chat
+
+messages = [
+    {
+        "role": "system",
+        "content": "You are an expert Python tutor.",
+    },
+    {
+        "role": "user",
+        "content": "Define list comprehensions in a sentence.",
+    },
+]
+response = chat(model="llama3.2:latest", messages=messages)
+print(response.message.content)
+
+messages.append(response.message)  # Keep context
+messages.append(
+    {
+        "role": "user",
+        "content": "Provide a short, practical example.",
+    }
+)
+response = chat(model="llama3.2:latest", messages=messages)
+print(response.message.content)
diff --git a/ollama-python-sdk/generate_code.py b/ollama-python-sdk/generate_code.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/ollama-python-sdk/generate_text.py b/ollama-python-sdk/generate_text.py
new file mode 100644
index 0000000000..6011850322
--- /dev/null
+++ b/ollama-python-sdk/generate_text.py
@@ -0,0 +1,8 @@
+from ollama import generate
+
+response = generate(
+    model="llama3.2:latest",
+    prompt="Explain what Python is in one sentence.",
+)
+
+print(response.response)
diff --git a/ollama-python-sdk/streams.py b/ollama-python-sdk/streams.py
new file mode 100644
index 0000000000..c37cd839ae
--- /dev/null
+++ b/ollama-python-sdk/streams.py
@@ -0,0 +1,15 @@
+from ollama import chat
+
+stream = chat(
+    model="llama3.2:latest",
+    messages=[
+        {
+            "role": "user",
+            "content": "Explain Python dataclasses with a quick example.",
+        }
+    ],
+    stream=True,
+)
+
+for chunk in stream:
+    print(chunk.message.content, end="", flush=True)
diff --git a/ollama-python-sdk/tool_calling.py b/ollama-python-sdk/tool_calling.py
new file mode 100644
index 0000000000..7002a81080
--- /dev/null
+++ b/ollama-python-sdk/tool_calling.py
@@ -0,0 +1,51 @@
+import math
+
+from ollama import chat
+
+
+# Define a tool as a Python function
+def square_root(number: float) -> float:
+    """Calculate the square root of a number.
+
+    Args:
+        number: The number to calculate the square root for.
+
+    Returns:
+        The square root of the number.
+    """
+    return math.sqrt(number)
+
+
+messages = [
+    {
+        "role": "user",
+        "content": "What is the square root of 36?",
+    }
+]
+
+response = chat(
+    model="llama3.2:latest",
+    messages=messages,
+    tools=[square_root],  # Pass the tools along with the prompt
+)
+
+# Append the response for context
+messages.append(response.message)
+
+if response.message.tool_calls:
+    tool = response.message.tool_calls[0]
+    # Call the tool
+    result = square_root(float(tool.function.arguments["number"]))
+
+    # Append the tool result
+    messages.append(
+        {
+            "role": "tool",
+            "tool_name": tool.function.name,
+            "content": str(result),
+        }
+    )
+
+    # Obtain the final answer
+    final_response = chat(model="llama3.2:latest", messages=messages)
+    print(final_response.message.content)