Skip to content

Instantly share code, notes, and snippets.

@emptyflask
Last active July 17, 2024 15:02
Show Gist options
  • Save emptyflask/5aaaa6a216850d9c24fa4a3688868235 to your computer and use it in GitHub Desktop.
Convert Slim templates to HAML using the OpenAI API.
# frozen_string_literal: true
require 'openai'
require 'fileutils'
require 'haml'
# Initialize the OpenAI client. ENV.fetch raises KeyError if
# OPENAI_ACCESS_TOKEN is unset, so a missing token fails fast here.
openai_client = OpenAI::Client.new(access_token: ENV.fetch('OPENAI_ACCESS_TOKEN'), log_errors: true)
# Root directory to search (recursively) for Slim templates.
root_directory = 'app/views'
# Function to convert Slim to HAML using ChatGPT
# Converts Slim template source to HAML by asking the OpenAI chat API.
#
# openai_client - any object responding to #chat(parameters:) and returning
#                 an OpenAI chat-completion response hash.
# slim_content  - String, the Slim template source to convert.
# error_message - optional String describing a previous failed attempt;
#                 appended to the prompt so the model can correct itself.
#
# Returns the HAML source as a String, with any Markdown code fence the
# model wrapped around it stripped off.
# Raises RuntimeError if the response contains no message content.
def convert_slim_to_haml(openai_client, slim_content, error_message = nil)
  prompt = "Convert the following Slim template to HAML. Only provide the HAML code, without any explanations or extra text:\n\n#{slim_content}"
  prompt += "\n\nThe following error occurred during the conversion: #{error_message}" if error_message

  response = openai_client.chat(
    parameters: {
      model: 'gpt-4',
      messages: [
        { role: 'system', content: 'You are a helpful assistant.' },
        { role: 'user', content: prompt }
      ],
      response_format: { type: 'text' }
    }
  )

  content = response.dig('choices', 0, 'message', 'content')
  # Guard against empty/error responses instead of crashing with an
  # opaque NoMethodError on nil.strip.
  raise "OpenAI response contained no content: #{response.inspect}" if content.nil?

  # Strip a Markdown code fence if the model added one despite the prompt.
  # Anchored to the start (\A) and end (\z) so literal backticks inside the
  # template body are left untouched.
  content.strip.sub(/\A```(?:haml)?\s*\n/, '').sub(/\n```\s*\z/, '')
end
# Reports whether +haml_content+ parses cleanly under the Haml parser.
# On invalid markup, prints the parser's error and returns false.
def valid_haml?(haml_content)
  parser_options = Haml::Engine.options.to_h
  Haml::Parser.new(parser_options).call(haml_content)
  true
rescue Haml::Error => e
  puts "HAML validation error: #{e.message}"
  false
end
# Function to process a directory recursively
# Recursively walks +directory+, converting each *.slim template into a
# sibling *.haml file and deleting the original on success. A conversion
# that yields invalid HAML gets exactly one retry before the file is
# skipped (and left in place).
def process_directory(openai_client, directory)
  Dir.glob(File.join(directory, '**', '*.slim')).each do |slim_path|
    source = File.read(slim_path)

    converted = convert_slim_to_haml(openai_client, source)

    unless valid_haml?(converted)
      # Retry once, telling the model its first answer didn't parse.
      puts "First attempt failed, trying again with error message"
      converted = convert_slim_to_haml(openai_client, source, "Initial conversion produced invalid HAML.")
    end

    unless valid_haml?(converted)
      puts "Skipping invalid HAML content for #{slim_path}"
      next
    end

    # Same path, .haml extension; only remove the Slim source after the
    # HAML file has been written.
    haml_path = slim_path.sub(/\.slim$/, '.haml')
    File.write(haml_path, converted)
    FileUtils.rm(slim_path)
    puts "Converted #{slim_path} to #{haml_path}"
  end
end
# Entry point: convert every Slim template under app/views recursively.
process_directory(openai_client, root_directory)
puts "Conversion completed!"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment