Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ __pycache__/
*.py[cod]
*$py.class

# .secrets
.streamlit/secrets.toml
# C extensions
*.so

Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@ altair<5
langchain
openai
tiktoken
python-dotenv
python-dotenv
trubrics
141 changes: 87 additions & 54 deletions src/prompt_based/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

import streamlit as st
from model import LogicModel, StreamlitModel
from trubrics.integrations.streamlit import FeedbackCollector

try:
from dotenv import load_dotenv
Expand All @@ -14,9 +15,14 @@
except Exception as e:
    logging.error("dotenv import error but not needed")


def main():

if "success" not in st.session_state:
st.session_state["success"] = False

if "streamlit_code" not in st.session_state:
st.session_state["streamlit_code"] = ""

num_of_iterations = 10

def generate_response(txt):
Expand All @@ -39,6 +45,9 @@ def generate_response(txt):
st.title(title)
# Text input

email = st.secrets.get("TRUBRICS_EMAIL")
password = st.secrets.get("TRUBRICS_PASSWORD")

openai_api_key = st.sidebar.text_input(
"OpenAI API Key",
placeholder="sk-...",
Expand Down Expand Up @@ -79,62 +88,86 @@ def generate_response(txt):

final_code_empty = st.empty()

with st.form("a", clear_on_submit=True):
submitted = st.form_submit_button("Submit")
for col, example in zip(cols, examples):
if col.button(example):
example_submitted = True
demo_idea = empty_idea.text_area(
"Enter your LLM-based demo idea", example, height=100
)
logging.info(f"Demo Idea:{demo_idea}")
model_name = ""
submitted = st.button("Submit")
for col, example in zip(cols, examples):
if col.button(example):
example_submitted = True
demo_idea = empty_idea.text_area(
"Enter your LLM-based demo idea", example, height=100
)
logging.info(f"Demo Idea:{demo_idea}")

if submitted or example_submitted:

if not openai_api_key.startswith("sk-"):
st.warning("Please enter your OpenAI API Key!", icon="⚠️")
else:
agent = LogicModel(openai_api_key=openai_api_key)
streamlit_agent = StreamlitModel(openai_api_key=openai_api_key)
model_name = streamlit_agent.llm.model_name

if st.session_state["pid"] != -1:
                logging.info(f"Terminating the previous application ...")
os.kill(st.session_state["pid"], signal.SIGTERM)
st.session_state["pid"] = -1

bar = st.progress(25, PROGRESS_BAR_TEXTS["start"])
for data in generate_response(demo_idea):
response = data["out"]
error = data["error"]
code = data["code"]
test_code = data["test_code"]
success = data["success"]
percentage = data["percentage"]

if success:
st.session_state["success"] = True
bar.progress(75, text=PROGRESS_BAR_TEXTS["creating"])
example_submitted = False
st.session_state["pid"], streamlit_code = streamlit_agent(
demo_idea,
demo_title,
code,
test_code,
bar.progress,
st.balloons,
)
st.session_state["streamlit_code"] = streamlit_code

sleep(5)
webbrowser.open("http://localhost:8502")

if submitted or example_submitted:

if not openai_api_key.startswith("sk-"):
st.warning("Please enter your OpenAI API Key!", icon="⚠️")
else:
agent = LogicModel(openai_api_key=openai_api_key)
streamlit_agent = StreamlitModel(openai_api_key=openai_api_key)

if st.session_state["pid"] != -1:
                logging.info(f"Terminating the previous application ...")
os.kill(st.session_state["pid"], signal.SIGTERM)
st.session_state["pid"] = -1

bar = st.progress(25, PROGRESS_BAR_TEXTS["start"])
for data in generate_response(demo_idea):
response = data["out"]
error = data["error"]
code = data["code"]
test_code = data["test_code"]
success = data["success"]
percentage = data["percentage"]

if success:
bar.progress(75, text=PROGRESS_BAR_TEXTS["creating"])
example_submitted = False
st.session_state["pid"], streamlit_code = streamlit_agent(
demo_idea,
demo_title,
code,
test_code,
bar.progress,
st.balloons,
)
with st.expander("Code"):
st.code(streamlit_code)
sleep(5)
webbrowser.open("http://localhost:8502")

else:
bar.progress(50, text=PROGRESS_BAR_TEXTS["refining"])

if success:
break
else:
bar.progress(100, text=PROGRESS_BAR_TEXTS["failed"])
bar.progress(50, text=PROGRESS_BAR_TEXTS["refining"])

if st.session_state["success"]:
break
else:
bar.progress(100, text=PROGRESS_BAR_TEXTS["failed"])

if st.session_state["success"]:
print(st.session_state['streamlit_code'])
with st.expander("Code"):
st.code(st.session_state["streamlit_code"])

collector = FeedbackCollector(
component_name="default",
email=email,
password=password,
)

feedback_main_code = collector.st_feedback(
feedback_type="faces",
model=model_name,
open_feedback_label="[Optional] Provide additional feedback",
metadata={
"response": st.session_state["streamlit_code"],
"demo_title": demo_title,
"demo_idea": demo_idea,
},
tags=["main_code"],
)

if __name__ == "__main__":
main()
30 changes: 28 additions & 2 deletions src/prompt_based/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def __init__(self, openai_api_key):
"""
self.openai_api_key = openai_api_key
self.llm = ChatOpenAI(openai_api_key=openai_api_key, temperature=0.0)

def refine_code(self, code):
"""
Refines the provided code by removing unnecessary parts.
Expand Down Expand Up @@ -284,7 +284,33 @@ def run_code(self, code):
tmp = tempfile.NamedTemporaryFile(
"w", suffix=".py", delete=False, encoding="utf-8"
)
tmp.write(self.normalize(code))

trubrics_feedback_code = """
from trubrics.integrations.streamlit import FeedbackCollector

email = st.secrets.get("TRUBRICS_EMAIL")
password = st.secrets.get("TRUBRICS_PASSWORD")

model_name = "{model_name}"
collector = FeedbackCollector(
component_name="default",
email=email,
password=password,
)

feedback_generated_code = collector.st_feedback(
feedback_type="thumbs",
model=model_name,
open_feedback_label="[Optional] Provide additional feedback",
metadata={{"response": 'result', "prompt": 'prompt'}},
tags=["generated_code"],

)
"""
trubrics_feedback_code = trubrics_feedback_code.format(model_name=self.llm.model_name)

tmp.write(self.normalize(code + trubrics_feedback_code))

tmp.flush()
environmental_variables = {
"OPENAI_API_KEY": self.openai_api_key,
Expand Down