Skip to content

Commit 9ee3750

Browse files
Python: Add Kelly Intelligence chat completion sample (related to #13694)
Adds a new concept sample showing how to use Semantic Kernel with Kelly Intelligence (https://api.thedailylesson.com), a hosted OpenAI-compatible API with a built-in 162,000-word vocabulary RAG layer and an AI tutor persona, built on top of Claude. Operated by Lesson of the Day, PBC, a public benefit corporation. The sample mirrors the existing `lm_studio_chat_completion.py` and `foundry_local_chatbot.py` patterns exactly: an `AsyncOpenAI` client pointed at a custom `base_url`, wrapped in `OpenAIChatCompletion`, with a `ChatHistory` and chat loop. No new connector, no new abstraction — this is a pure example of the existing OpenAI-compatible pattern applied to a third-party hosted endpoint. The sample uses the free `kelly-haiku` model id and reads `KELLY_API_KEY` from the environment. A free key (no credit card) is available at https://api.thedailylesson.com. Reviewers can also smoke-test the API without any signup using the public `/v1/demo` endpoint, which is rate-limited at 5 requests per hour per IP and uses the same wire format. Related to #13694.
1 parent 134e52e commit 9ee3750

File tree

1 file changed

+121
-0
lines changed

1 file changed

+121
-0
lines changed
Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
# Copyright (c) Microsoft. All rights reserved.


import asyncio
import os

from openai import AsyncOpenAI

from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.functions.kernel_arguments import KernelArguments
from semantic_kernel.kernel import Kernel
13+
14+
# This concept sample shows how to use the OpenAI connector to create a chat
# experience with Kelly Intelligence (https://api.thedailylesson.com), a hosted
# OpenAI-compatible API with a built-in 162,000-word vocabulary RAG layer and an
# AI tutor persona, built on top of Claude. It is operated by Lesson of the Day,
# PBC, a public benefit corporation. The free tier requires no credit card.
#
# Although this file lives under "local_models", the same `AsyncOpenAI` +
# `base_url` pattern that powers `lm_studio_chat_completion.py` and
# `foundry_local_chatbot.py` works for any OpenAI-compatible endpoint, local or
# hosted. Kelly Intelligence is a hosted instance of that pattern.
#
# Get a free API key (no credit card) at https://api.thedailylesson.com and
# export it before running this sample:
#
#   export KELLY_API_KEY=...
#   python kelly_intelligence_chat_completion.py
#
# You can also try the API with no signup at all using its public `/v1/demo`
# endpoint, which is rate-limited at 5 requests per hour per IP:
#
#   curl -X POST https://api.thedailylesson.com/v1/demo \
#     -H "Content-Type: application/json" \
#     -d '{"messages":[{"role":"user","content":"What does ephemeral mean?"}]}'
37+
38+
# System prompt that turns the model into a one-paragraph vocabulary tutor.
system_message = """
You are Kelly, a friendly vocabulary tutor. When the user gives you a word,
teach it in one short paragraph: definition, a memorable example sentence,
and one related word. Keep the tone warm and encouraging.
"""

kernel = Kernel()

service_id = "kelly-intelligence"

# Kelly Intelligence is OpenAI wire-format compatible, so we point a standard
# AsyncOpenAI client at its `/v1` base URL and let `OpenAIChatCompletion`
# handle the rest. The free model id is `kelly-haiku`; `kelly-sonnet` and
# `kelly-opus` are also available on paid tiers.
# NOTE(review): the "fake-key" fallback keeps the module importable without
# credentials, but any real request will be rejected; export KELLY_API_KEY
# before running the sample.
openAIClient: AsyncOpenAI = AsyncOpenAI(
    api_key=os.environ.get("KELLY_API_KEY", "fake-key"),
    base_url="https://api.thedailylesson.com/v1",
)
kernel.add_service(
    OpenAIChatCompletion(
        service_id=service_id,
        ai_model_id="kelly-haiku",
        async_client=openAIClient,
    )
)

# Pull the service's default execution settings and tune them for short,
# slightly creative tutoring replies.
settings = kernel.get_prompt_execution_settings_from_service_id(service_id)
settings.max_tokens = 800
settings.temperature = 0.7
settings.top_p = 0.9

# Minimal prompt template: render the running chat history followed by the
# newest user message.
chat_function = kernel.add_function(
    plugin_name="VocabularyTutor",
    function_name="Chat",
    prompt="{{$chat_history}}{{$user_input}}",
    template_format="semantic-kernel",
    prompt_execution_settings=settings,
)

# Seed the history with an opening exchange so the first real user turn
# lands mid-conversation.
chat_history = ChatHistory(system_message=system_message)
chat_history.add_user_message("Hi! I'd like to learn some new English words today.")
chat_history.add_assistant_message(
    "Wonderful! Give me any word you're curious about and I'll teach it to you in one short paragraph."
)
82+
83+
84+
async def chat() -> bool:
    """Run a single turn of the interactive chat loop.

    Reads one line from stdin, invokes the kernel chat function against the
    Kelly Intelligence service, prints the reply, and appends both sides of
    the exchange to the module-level ``chat_history``.

    Returns:
        bool: False when the user exits (Ctrl+C, EOF, or typing "exit"),
        True to continue the loop.
    """
    try:
        user_input = input("User:> ")
    except (KeyboardInterrupt, EOFError):
        # Ctrl+C and Ctrl+D both end the session gracefully.
        print("\n\nExiting chat...")
        return False

    if user_input == "exit":
        print("\n\nExiting chat...")
        return False

    answer = await kernel.invoke(
        chat_function,
        KernelArguments(user_input=user_input, chat_history=chat_history),
    )
    # Record the turn only after a successful invocation so a failed request
    # does not leave an unanswered user message in the transcript.
    chat_history.add_user_message(user_input)
    chat_history.add_assistant_message(str(answer))
    print(f"Kelly:> {answer}")
    return True
106+
107+
108+
async def main() -> None:
    """Drive the chat loop until the user chooses to exit."""
    chatting = True
    while chatting:
        chatting = await chat()


# Sample output:
# User:> serendipity
# Kelly:> "Serendipity" is the happy accident of finding something good without looking for it -
# like discovering a favorite cafe while you were just trying to get out of the rain.
# A close cousin is "fortuitous", which describes lucky timing more generally.
118+
119+
120+
if __name__ == "__main__":
    asyncio.run(main())

0 commit comments

Comments
 (0)