@seanchen1991
Last active December 2, 2025 15:35
Demystifying AI Agents Presentation Code
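The first snippet is the full agent loop from the presentation: it advertises a single `ping` tool to the model, runs any requested tool calls locally, and keeps calling the model until no further tool calls come back.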
from openai import OpenAI
import subprocess
import json

client = OpenAI()
context = []

# JSON schema for the single tool the model is allowed to call.
tools = [{
    "type": "function",
    "name": "ping",
    "description": "ping some host on the internet",
    "parameters": {
        "type": "object",
        "properties": {
            "host": {
                "type": "string",
                "description": "hostname or IP",
            },
        },
        "required": ["host"],
    },
}]

def ping(host=""):
    # Run the system ping command and return its combined stdout/stderr.
    try:
        result = subprocess.run(
            ["ping", "-c", "5", host],
            text=True,
            stderr=subprocess.STDOUT,
            stdout=subprocess.PIPE)
        return result.stdout
    except Exception as e:
        return f"error: {e}"

def tool_call(item):
    # Execute the requested tool call and pair it with its output so both
    # can be appended to the conversation context.
    result = ping(**json.loads(item.arguments))
    return [item, {
        "type": "function_call_output",
        "call_id": item.call_id,
        "output": result
    }]

def call_llm(tools):
    return client.responses.create(model="gpt-5", tools=tools, input=context)

def handle_tools(response):
    # Append any leading reasoning item, run every requested tool call, and
    # report whether anything new was added to the context (i.e. whether the
    # model needs to be called again with the tool results).
    if response.output[0].type == "reasoning":
        context.append(response.output[0])
    context_size = len(context)
    for item in response.output:
        if item.type == "function_call":
            context.extend(tool_call(item))
    return len(context) != context_size

def process(line):
    context.append({"role": "user", "content": line})
    response = call_llm(tools)
    # Keep looping until the model stops asking for tool calls.
    while handle_tools(response):
        response = call_llm(tools)
    context.append({"role": "assistant", "content": response.output_text})
    return response.output_text

def main():
    while True:
        line = input("> ")
        result = process(line)
        print(f">>> {result}\n")

if __name__ == "__main__":
    main()
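The second snippet is the starting point before tools are added: the same REPL-style loop, but each user line goes straight to the model and the reply is simply appended to the running context.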
from openai import OpenAI

client = OpenAI()
context = []

def call_llm():
    return client.responses.create(model="gpt-5", input=context)

def process(line):
    # Append the user turn, call the model once, and record the reply.
    context.append({"role": "user", "content": line})
    response = call_llm()
    context.append({"role": "assistant", "content": response.output_text})
    return response.output_text

def main():
    while True:
        line = input("> ")
        result = process(line)
        print(f">>> {result}\n")

if __name__ == "__main__":
    main()
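Both scripts assume the openai Python package is installed and that the client can find an API key (by default the OpenAI client reads the OPENAI_API_KEY environment variable); the ping example additionally assumes a Unix-style ping that accepts the -c flag. As a sketch of how the agent might grow beyond one tool (an extension of the first snippet, not part of the gist), the hard-wired call to ping() in tool_call could be replaced with a lookup keyed on the tool name the model chose:

# Hypothetical extension (not in the original gist): dispatch each
# function_call item to a matching Python callable by its name.
TOOL_FUNCTIONS = {"ping": ping}

def tool_call(item):
    fn = TOOL_FUNCTIONS[item.name]              # tool name chosen by the model
    result = fn(**json.loads(item.arguments))   # arguments arrive as a JSON string
    return [item, {
        "type": "function_call_output",
        "call_id": item.call_id,
        "output": result,
    }]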