Skip to content

Instantly share code, notes, and snippets.

@mosajjal
Last active October 4, 2025 10:55
Show Gist options
  • Select an option

  • Save mosajjal/66a6ebb392f6ab573e90d65bf2256d37 to your computer and use it in GitHub Desktop.

Select an option

Save mosajjal/66a6ebb392f6ab573e90d65bf2256d37 to your computer and use it in GitHub Desktop.
Sed but with AI
// # AI Sed-like Tool
// A Go script that behaves like sed but uses AI models to edit files. It can read from stdin or edit files in-place with the `-i` flag. The tool supports splitting files into sections using delimiters and can work with multiple AI providers.
// ## Features
// - Edit files in-place with the `-i` flag
// - Read from stdin for pipe-based operations
// - Support for multiple AI providers (OpenAI, Anthropic, OpenRouter, Vertex AI)
// - Delimiter-based file sectioning for processing large files
// - Configurable models for each provider
// ## Installation
// ```bash
// cd aiscript
// go build -o aiscript .
// ```
// ## Usage
// ```bash
// # Basic usage - edit file and output to stdout
// ./aiscript -p "Your prompt here" input.txt
// # Edit file in-place
// ./aiscript -i -p "Your prompt here" input.txt
// # Use with stdin and pipes
// cat input.txt | ./aiscript -p "Your prompt here"
// # Specify API provider (openai, anthropic, openrouter)
// ./aiscript -api anthropic -p "Your prompt here" -key YOUR_API_KEY input.txt
// # Use specific model
// ./aiscript -api openai -m gpt-4 -p "Your prompt here" -key YOUR_API_KEY input.txt
// # Use delimiters to process sections separately
// ./aiscript -d "=== SECTION ===" -p "Your prompt here" -key YOUR_API_KEY input.txt
// # Use Vertex AI with Google Cloud
// ./aiscript -api vertex -project your-project-id -p "Your prompt here" input.txt
// ```
// ## Options
// - `-i`: Edit file in-place
// - `-p`: Prompt to send to the AI model (required)
// - `-api`: API provider (openai, anthropic, openrouter, vertex) - defaults to openai
// - `-m`: Model to use (optional, defaults to provider's default)
// - `-d`: Delimiter to split file into sections (optional)
// - `-key`: API key for the chosen provider (not needed for vertex)
// - `-project`: Google Cloud Project ID (required for Vertex AI)
// - `-location`: Google Cloud Location (optional for Vertex AI, defaults to us-central1)
// ## Supported AI Providers
// 1. **OpenAI**: Uses the OpenAI API (default provider)
// 2. **Anthropic**: Uses the Anthropic Claude API
// 3. **OpenRouter**: Uses the OpenRouter API
// 4. **Vertex AI**: Uses Google Cloud Vertex AI with Gemini models
// ## Examples
// ```bash
// # Format code in a file
// ./aiscript -i -p "Format this code according to best practices" -api openai -key sk-... myfile.py
// # Process sections separately
// ./aiscript -d "###" -p "Improve the clarity of this text" -api anthropic -key your-key file.md
// # Process stdin input
// echo "hello world" | ./aiscript -p "Convert to title case"
// # Use Vertex AI with Gemini
// ./aiscript -i -p "Format this code" -api vertex -project your-project-id myfile.py
// ```
// ## Requirements
// - API key for your chosen provider
// ## How It Works
// 1. The tool reads the input file or stdin
// 2. If a delimiter is specified, it splits the content into sections
// 3. For each section (or the entire content if no delimiter), it sends the content along with the provided prompt to the selected AI provider
// 4. The AI processes the content according to the prompt
// 5. The tool combines the processed sections (if delimiters were used) and outputs the result
// 6. If the `-i` flag is used, it writes the result back to the input file
// The tool maintains delimiters in the output when using the delimiter option, ensuring the structure of the file is preserved while only the content within sections is modified by the AI.
package main
import (
	"context"
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"net/http"
	"os"
	"os/exec"
	"strings"
	"time"
)
// OpenAIClient represents an OpenAI API client.
type OpenAIClient struct {
	apiKey string // bearer token for api.openai.com
	model  string // chat model name, e.g. "gpt-3.5-turbo"
}

// NewOpenAIClient creates a new OpenAI client.
// If model is empty, it defaults to "gpt-3.5-turbo".
func NewOpenAIClient(apiKey, model string) *OpenAIClient {
	if model == "" {
		model = "gpt-3.5-turbo"
	}
	return &OpenAIClient{
		apiKey: apiKey,
		model:  model,
	}
}

// Process sends the prompt and content to the OpenAI chat completions API
// and returns the model's edited text. It returns an error if the API key
// is missing, the HTTP request fails, the API responds with a non-200
// status, or the response contains no choices.
func (c *OpenAIClient) Process(ctx context.Context, prompt, content string) (string, error) {
	if c.apiKey == "" {
		return "", fmt.Errorf("OpenAI API key is required")
	}
	// Build the chat-completions payload. A low temperature keeps edits
	// deterministic, which suits a sed-like tool.
	requestBody := map[string]interface{}{
		"model": c.model,
		"messages": []interface{}{
			map[string]string{
				"role":    "system",
				"content": "You are an expert code editor. Follow the user's instructions precisely to edit the provided content. Return only the edited content without any additional explanation or formatting markers.",
			},
			map[string]string{
				"role":    "user",
				"content": fmt.Sprintf("%s\n\nContent to edit:\n%s", prompt, content),
			},
		},
		"temperature": 0.1,
	}
	jsonData, err := json.Marshal(requestBody)
	if err != nil {
		return "", err
	}
	// Make the HTTP request
	req, err := http.NewRequestWithContext(ctx, "POST", "https://api.openai.com/v1/chat/completions", strings.NewReader(string(jsonData)))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+c.apiKey)
	// Use a client timeout so a hung connection cannot block the tool
	// forever; model responses can be slow, so allow a generous window.
	client := &http.Client{Timeout: 120 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, string(body))
	}
	// Decode only the fields we need from the response stream.
	var response struct {
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
		return "", err
	}
	if len(response.Choices) == 0 {
		return "", fmt.Errorf("no choices returned from API")
	}
	return response.Choices[0].Message.Content, nil
}
// AnthropicClient represents an Anthropic API client.
type AnthropicClient struct {
	apiKey string // x-api-key value for api.anthropic.com
	model  string // Claude model name, e.g. "claude-3-sonnet-20240229"
}

// NewAnthropicClient creates a new Anthropic client.
// If model is empty, it defaults to "claude-3-sonnet-20240229".
func NewAnthropicClient(apiKey, model string) *AnthropicClient {
	if model == "" {
		model = "claude-3-sonnet-20240229"
	}
	return &AnthropicClient{
		apiKey: apiKey,
		model:  model,
	}
}

// Process sends the prompt and content to the Anthropic Messages API and
// returns the model's edited text. It returns an error if the API key is
// missing, the HTTP request fails, the API responds with a non-200 status,
// or the response contains no content blocks.
func (c *AnthropicClient) Process(ctx context.Context, prompt, content string) (string, error) {
	if c.apiKey == "" {
		return "", fmt.Errorf("Anthropic API key is required")
	}
	// Build the Messages API payload. Anthropic requires max_tokens; a low
	// temperature keeps edits deterministic.
	requestBody := map[string]interface{}{
		"model": c.model,
		"messages": []interface{}{
			map[string]string{
				"role":    "user",
				"content": fmt.Sprintf("%s\n\nContent to edit:\n%s", prompt, content),
			},
		},
		"max_tokens":  4096,
		"temperature": 0.1,
	}
	jsonData, err := json.Marshal(requestBody)
	if err != nil {
		return "", err
	}
	// Make the HTTP request to Anthropic
	req, err := http.NewRequestWithContext(ctx, "POST", "https://api.anthropic.com/v1/messages", strings.NewReader(string(jsonData)))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("x-api-key", c.apiKey)
	req.Header.Set("anthropic-version", "2023-06-01")
	// Use a client timeout so a hung connection cannot block the tool forever.
	client := &http.Client{Timeout: 120 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, string(body))
	}
	// Decode only the fields we need from the response stream.
	var response struct {
		Content []struct {
			Text string `json:"text"`
		} `json:"content"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
		return "", err
	}
	if len(response.Content) == 0 {
		return "", fmt.Errorf("no content returned from API")
	}
	return response.Content[0].Text, nil
}
// OpenRouterClient represents an OpenRouter API client.
type OpenRouterClient struct {
	apiKey string // bearer token for openrouter.ai
	model  string // routed model name, e.g. "openai/gpt-3.5-turbo"
}

// NewOpenRouterClient creates a new OpenRouter client.
// If model is empty, it defaults to "openai/gpt-3.5-turbo".
func NewOpenRouterClient(apiKey, model string) *OpenRouterClient {
	if model == "" {
		model = "openai/gpt-3.5-turbo"
	}
	return &OpenRouterClient{
		apiKey: apiKey,
		model:  model,
	}
}

// Process sends the prompt and content to the OpenRouter chat completions
// API (OpenAI-compatible schema) and returns the model's edited text. It
// returns an error if the API key is missing, the HTTP request fails, the
// API responds with a non-200 status, or the response contains no choices.
func (c *OpenRouterClient) Process(ctx context.Context, prompt, content string) (string, error) {
	if c.apiKey == "" {
		return "", fmt.Errorf("OpenRouter API key is required")
	}
	// Build the OpenAI-compatible payload. A low temperature keeps edits
	// deterministic, which suits a sed-like tool.
	requestBody := map[string]interface{}{
		"model": c.model,
		"messages": []interface{}{
			map[string]string{
				"role":    "system",
				"content": "You are an expert code editor. Follow the user's instructions precisely to edit the provided content. Return only the edited content without any additional explanation or formatting markers.",
			},
			map[string]string{
				"role":    "user",
				"content": fmt.Sprintf("%s\n\nContent to edit:\n%s", prompt, content),
			},
		},
		"temperature": 0.1,
	}
	jsonData, err := json.Marshal(requestBody)
	if err != nil {
		return "", err
	}
	// Make the HTTP request to OpenRouter
	req, err := http.NewRequestWithContext(ctx, "POST", "https://openrouter.ai/api/v1/chat/completions", strings.NewReader(string(jsonData)))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+c.apiKey)
	// Use a client timeout so a hung connection cannot block the tool forever.
	client := &http.Client{Timeout: 120 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, string(body))
	}
	// Decode only the fields we need from the response stream.
	var response struct {
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
		return "", err
	}
	if len(response.Choices) == 0 {
		return "", fmt.Errorf("no choices returned from API")
	}
	return response.Choices[0].Message.Content, nil
}
// VertexAIClient represents a Google Cloud Vertex AI client for Gemini.
type VertexAIClient struct {
	projectID string // Google Cloud project ID (required)
	location  string // Vertex AI region, e.g. "us-central1"
	model     string // Gemini model name, e.g. "gemini-pro"
}

// NewVertexAIClient creates a new Vertex AI client.
// An empty model defaults to "gemini-pro"; an empty location defaults to
// "us-central1".
func NewVertexAIClient(projectID, location, model string) *VertexAIClient {
	if model == "" {
		model = "gemini-pro" // Default Gemini model
	}
	if location == "" {
		location = "us-central1" // Default region
	}
	return &VertexAIClient{
		projectID: projectID,
		location:  location,
		model:     model,
	}
}

// Process sends the prompt and content to the Vertex AI generateContent
// endpoint and returns the model's edited text. It returns an error if the
// project ID is missing, an access token cannot be obtained, the HTTP
// request fails, the API responds with a non-200 status, or the response
// contains no candidates.
func (c *VertexAIClient) Process(ctx context.Context, prompt, content string) (string, error) {
	if c.projectID == "" {
		return "", fmt.Errorf("Vertex AI project ID is required")
	}
	// Build the generateContent payload. Safety filters are relaxed because
	// the tool edits arbitrary user files, not conversational content.
	requestBody := map[string]interface{}{
		"contents": []map[string]interface{}{
			{
				"role": "user",
				"parts": []map[string]string{
					{
						"text": fmt.Sprintf("%s\n\nContent to edit:\n%s", prompt, content),
					},
				},
			},
		},
		"generationConfig": map[string]interface{}{
			"temperature": 0.1,
		},
		"safetySettings": []map[string]interface{}{
			{
				"category":  "HARM_CATEGORY_DANGEROUS_CONTENT",
				"threshold": "BLOCK_NONE",
			},
			{
				"category":  "HARM_CATEGORY_HARASSMENT",
				"threshold": "BLOCK_NONE",
			},
			{
				"category":  "HARM_CATEGORY_HATE_SPEECH",
				"threshold": "BLOCK_NONE",
			},
			{
				"category":  "HARM_CATEGORY_SEXUALLY_EXPLICIT",
				"threshold": "BLOCK_NONE",
			},
		},
	}
	jsonData, err := json.Marshal(requestBody)
	if err != nil {
		return "", err
	}
	// The endpoint is regional: the host and the resource path both embed
	// the location.
	endpoint := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:generateContent",
		c.location, c.projectID, c.location, c.model)
	req, err := http.NewRequestWithContext(ctx, "POST", endpoint, strings.NewReader(string(jsonData)))
	if err != nil {
		return "", err
	}
	// Vertex AI uses Google Cloud OAuth tokens rather than an API key; see
	// getGoogleAccessToken for how the token is obtained.
	token, err := getGoogleAccessToken()
	if err != nil {
		return "", fmt.Errorf("could not get Google access token: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+token)
	// Use a client timeout so a hung connection cannot block the tool forever.
	client := &http.Client{Timeout: 120 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, string(body))
	}
	// Decode only the fields we need from the response stream.
	var response struct {
		Candidates []struct {
			Content struct {
				Parts []struct {
					Text string `json:"text"`
				} `json:"parts"`
			} `json:"content"`
		} `json:"candidates"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
		return "", err
	}
	if len(response.Candidates) == 0 || len(response.Candidates[0].Content.Parts) == 0 {
		return "", fmt.Errorf("no content returned from API")
	}
	return response.Candidates[0].Content.Parts[0].Text, nil
}

// getGoogleAccessToken retrieves a Google access token, preferring the
// GOOGLE_ACCESS_TOKEN environment variable and falling back to running
// `gcloud auth print-access-token` (which requires gcloud to be installed
// and authenticated).
func getGoogleAccessToken() (string, error) {
	if accessToken := os.Getenv("GOOGLE_ACCESS_TOKEN"); accessToken != "" {
		return accessToken, nil
	}
	cmd := exec.Command("gcloud", "auth", "print-access-token")
	output, err := cmd.Output()
	if err != nil {
		return "", fmt.Errorf("failed to get access token from gcloud: %v", err)
	}
	// gcloud prints the token followed by a newline; strip it.
	return strings.TrimSpace(string(output)), nil
}
// Config holds the configuration from command line arguments
type Config struct {
InPlace bool // -i: write the result back to the input file instead of stdout
Prompt string // -p: instruction sent to the AI model (required)
APIProvider string // -api: one of "openai", "anthropic", "openrouter", "vertex"
Model string // -m: model override; empty means the provider's default
Delimiter string // -d: if set, the input is split into sections on this string
APIKey string // -key: API key for the chosen provider (unused for vertex)
ProjectID string // For Vertex AI
Location string // For Vertex AI
File string // positional argument: input file; empty means read stdin
Input string // file or stdin contents, populated by main before processing
}
// API interface for different providers.
// Process sends the prompt together with the content to the provider and
// returns the edited content, or an error if the request cannot be
// completed.
type API interface {
Process(ctx context.Context, prompt, content string) (string, error)
}
// main parses command-line flags, reads the input from the named file or
// from stdin, runs it through the selected AI provider, and writes the
// result either back to the file (-i) or to stdout.
func main() {
	var config Config
	// Define command line flags
	flag.BoolVar(&config.InPlace, "i", false, "Edit file in-place")
	flag.StringVar(&config.Prompt, "p", "", "Prompt to send to the AI model (required)")
	flag.StringVar(&config.APIProvider, "api", "openai", "API provider: openai, anthropic, openrouter, vertex")
	flag.StringVar(&config.Model, "m", "", "Model to use (optional, defaults to provider's default)")
	flag.StringVar(&config.Delimiter, "d", "", "Delimiter to split file into sections (optional)")
	flag.StringVar(&config.APIKey, "key", "", "API key for the chosen provider (not needed for vertex)")
	flag.StringVar(&config.ProjectID, "project", "", "Google Cloud Project ID (for Vertex AI)")
	flag.StringVar(&config.Location, "location", "us-central1", "Google Cloud Location (for Vertex AI, default: us-central1)")
	flag.Parse()
	// The first non-flag argument, if any, is the input filename.
	if flag.NArg() > 0 {
		config.File = flag.Arg(0)
	}
	// Validate required arguments
	if config.Prompt == "" {
		fmt.Fprintln(os.Stderr, "Error: prompt is required (-p flag)")
		flag.Usage()
		os.Exit(1)
	}
	// Determine input source: the named file, or stdin for pipe usage.
	var input string
	if config.File != "" {
		content, err := os.ReadFile(config.File)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error reading file %s: %v\n", config.File, err)
			os.Exit(1)
		}
		input = string(content)
	} else {
		stdin, err := io.ReadAll(os.Stdin)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error reading from stdin: %v\n", err)
			os.Exit(1)
		}
		input = string(stdin)
	}
	config.Input = input
	// Process with AI
	result, err := processWithAI(config)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error processing with AI: %v\n", err)
		os.Exit(1)
	}
	// Output result
	if config.InPlace && config.File != "" {
		// Preserve the original file's permission bits instead of forcing
		// 0644; fall back to 0644 only if the file cannot be stat'd.
		mode := os.FileMode(0644)
		if info, statErr := os.Stat(config.File); statErr == nil {
			mode = info.Mode().Perm()
		}
		if err := os.WriteFile(config.File, []byte(result), mode); err != nil {
			fmt.Fprintf(os.Stderr, "Error writing to file %s: %v\n", config.File, err)
			os.Exit(1)
		}
	} else {
		// Write to stdout
		fmt.Print(result)
	}
}
// processWithAI selects the API client named by config.APIProvider and runs
// the prompt over config.Input: either as a single request, or — when a
// delimiter is configured — once per delimiter-separated section, rejoining
// the processed sections with the same delimiter so the file structure is
// preserved. It returns the combined result or an error from client
// selection or from any section's processing.
func processWithAI(config Config) (string, error) {
	// Pick the provider implementation; the comparison is case-insensitive.
	var apiClient API
	switch strings.ToLower(config.APIProvider) {
	case "openai":
		apiClient = NewOpenAIClient(config.APIKey, config.Model)
	case "anthropic":
		apiClient = NewAnthropicClient(config.APIKey, config.Model)
	case "openrouter":
		apiClient = NewOpenRouterClient(config.APIKey, config.Model)
	case "vertex":
		apiClient = NewVertexAIClient(config.ProjectID, config.Location, config.Model)
	default:
		return "", fmt.Errorf("unsupported API provider: %s", config.APIProvider)
	}
	ctx := context.Background()
	// Without a delimiter the whole input goes out as one request.
	if config.Delimiter == "" {
		return apiClient.Process(ctx, config.Prompt, config.Input)
	}
	// Delimiter mode: process every section independently, then join the
	// results back together with the delimiter between them.
	parts := strings.Split(config.Input, config.Delimiter)
	processed := make([]string, len(parts))
	for idx, part := range parts {
		edited, err := apiClient.Process(ctx, config.Prompt, part)
		if err != nil {
			return "", fmt.Errorf("error processing section %d: %w", idx, err)
		}
		processed[idx] = edited
	}
	return strings.Join(processed, config.Delimiter), nil
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment