import os
import morph
from morph import MorphGlobalContext
from morph_lib.stream import create_chunk
import json
from openai import OpenAI
from morph_lib.database import execute_sql
@morph.func
def chat_text_to_sql(context: MorphGlobalContext):
    """Stream a text-to-SQL chat answer.

    Two-phase generator:
      1. Ask gpt-4o (via function calling) to translate the user's prompt
         into a SQL query against ./example.csv, and yield the query.
      2. Run the query on DuckDB, feed the result table back to gpt-4o,
         and stream the model's markdown answer chunk by chunk.

    Args:
        context: Morph context; reads the user prompt from
            ``context.vars["prompt"]``.

    Yields:
        Morph stream chunks (via ``create_chunk``): first the generated
        SQL, then the streamed natural-language answer.

    Raises:
        ValueError: if the model does not return a function call, so the
            failure is explicit instead of an opaque AttributeError.
    """
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    prompt = context.vars["prompt"]

    # Phase 1: have the model generate SQL through a forced-schema
    # function call so the query arrives as structured JSON.
    messages = [{"role": "system", "content": """Please execute SQL queries on a table named ./example.csv in DuckDB with the following schema:
city: text - Names of cities in the United States
state: text - Names of states in the United States
population: int - Population
This table contains aggregated population data for the United States.
"""}]
    messages.append({"role": "user", "content": prompt})
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        functions=[{
            "name": "generate_sql",
            "description": "Generate SQL queries based on user prompt",
            "parameters": {
                "type": "object",
                "properties": {
                    "sql": {
                        "type": "string",
                        "description": "The SQL query generated based on the prompt",
                    },
                },
                "required": ["sql"],
            },
        }],
    )

    # The model may answer in plain text instead of calling the function;
    # fail loudly rather than crash on `None.arguments`.
    function_call = response.choices[0].message.function_call
    if function_call is None:
        raise ValueError("Model did not return a generate_sql function call")
    response_json = json.loads(function_call.arguments)
    sql = response_json['sql']
    yield create_chunk(f"""
## SQL Query
{sql}""")

    # Phase 2: execute the SQL and let the model explain the result.
    data = execute_sql(sql, "DUCKDB")
    data_md = data.to_markdown(index=False)
    messages = [
        {"role": "system", "content": f"""Please answer in markdown format.
You can use the following data:
{data_md}"""},
        {"role": "user", "content": prompt},
    ]
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        stream=True,
    )
    for chunk in response:
        # The final stream chunk (and some keep-alive chunks) carry no
        # content; skip them instead of yielding create_chunk(None).
        if chunk.choices and chunk.choices[0].delta.content is not None:
            yield create_chunk(chunk.choices[0].delta.content)
MDX
export const title = "Text to SQL app"

# Text to SQL app

<LLM postData="chat_text_to_sql" />