# advanced_example.py
from ollama_deepseek_client import OllamaDeepSeekClient
def advanced_example():
    """Demonstrate three usage patterns of OllamaDeepSeekClient.

    Walks through (1) low-temperature code generation, (2) a streaming
    request, and (3) a multi-turn chat that carries conversation context
    between turns. Output goes to stdout; nothing is returned.
    """
    client = OllamaDeepSeekClient()
    divider = "\n" + "=" * 50 + "\n"

    # --- Example 1: code generation ---------------------------------
    print("Code Generation Example:")
    generated = client.generate_response(
        "Write a Python function to calculate fibonacci numbers",
        temperature=0.3,  # low temperature -> more deterministic code
        max_tokens=1000,
    )
    print(generated)
    print(divider)

    # --- Example 2: streaming response ------------------------------
    # NOTE(review): the return value is intentionally not printed here;
    # presumably the client emits tokens to stdout as they arrive when
    # stream=True — confirm against the client implementation.
    print("Streaming Response Example:")
    _ = client.generate_response(
        "Explain quantum computing in simple terms",
        stream=True,
        temperature=0.8,
    )
    print(divider)

    # --- Example 3: chat with carried context -----------------------
    # client.chat() returns (reply, updated_history); feeding the updated
    # history back in gives the model context from earlier turns.
    history = []
    for question in (
        "What's the capital of France?",
        "Tell me more about its history",
        "What are the main tourist attractions there?",
    ):
        print(f"You: {question}")
        reply, history = client.chat(question, history)
        print(f"Assistant: {reply}\n")
# Run the demo only when executed as a script (not when imported).
if __name__ == "__main__":
    advanced_example()