Last active
July 26, 2024 04:01
-
-
Save ausboss/3130993498e129d00e0836c71157dd17 to your computer and use it in GitHub Desktop.
ollama_math_expression.py
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import json | |
| import ollama | |
| import asyncio | |
| import sympy as sp | |
def evaluate_math_expression(expression: str) -> str:
    """Evaluate a math expression with sympy and return the outcome as JSON.

    Args:
        expression: The expression to evaluate, e.g. ``"2*(3+4)"``.

    Returns:
        A JSON string of the form ``{"result": "..."}`` on success, or
        ``{"error": "..."}`` if evaluation failed for any reason.
    """
    try:
        # NOTE(review): sympify parses arbitrary model-supplied text; confirm
        # this is acceptable for your threat model before exposing it widely.
        value = sp.sympify(expression).evalf()
    except Exception as exc:  # sympy raises many error types on bad input
        payload = {"error": str(exc)}
    else:
        payload = {"result": str(value)}
    return json.dumps(payload)
async def process_message(model: str, user_input: str):
    """Chat with the model, resolving any calculator tool calls it makes.

    Args:
        model: Name of the ollama model to use.
        user_input: The user's question.

    Returns:
        The model's final reply text (str).
    """
    client = ollama.AsyncClient()
    messages = [
        {'role': 'system', 'content': 'You have access to a calculator tool. Use the tool to answer the question from the user. The final response should be brief with little preamble.'},
        {'role': 'user', 'content': user_input},
    ]
    response = await client.chat(
        model=model,
        messages=messages,
        tools=[
            {
                'type': 'function',
                'function': {
                    'name': 'evaluate_math_expression',
                    'description': 'Evaluate a mathematical expression',
                    'parameters': {
                        'type': 'object',
                        'properties': {
                            'expression': {
                                'type': 'string',
                                'description': 'The mathematical expression to evaluate',
                            },
                        },
                        'required': ['expression'],
                    },
                },
            },
        ],
    )
    messages.append(response['message'])

    tool_calls = response['message'].get('tool_calls')
    if not tool_calls:
        # No tool use requested: the first reply already answers the user,
        # so skip the wasted second round-trip to the model.
        return response['message']['content']

    for tool in tool_calls:
        # Dispatch only tools we actually provide; a hallucinated tool name
        # would otherwise blow up on the arguments lookup below.
        if tool['function']['name'] != 'evaluate_math_expression':
            continue
        expression = tool['function']['arguments']['expression']
        print(f">>> <calling tool calculator ({{'expression': '{expression}'}})>")
        function_response = evaluate_math_expression(expression)
        messages.append(
            {
                'role': 'tool',
                'content': function_response,
            }
        )

    # Second pass: let the model turn the tool output into the final answer.
    final_response = await client.chat(model=model, messages=messages)
    return final_response['message']['content']
async def main():
    """Interactive loop: forward each typed line to the model until 'quit'."""
    model = 'llama3.1'  # You can change this to any other model you prefer
    while True:
        user_input = input("Enter a message (or 'quit' to exit): ")
        if user_input.lower() == 'quit':
            break
        print(await process_message(model, user_input))
        print()  # Add a blank line for better readability


if __name__ == "__main__":
    asyncio.run(main())
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment