add feature to format note and save to local directory
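The script converts Q&A-style study notes (answer commands listed under each question, entries separated by a blank line) to markdown, and with this change it asks the model to save the result to a local file through a write_file tool instead of only printing it. For illustration only, here is a note entry in the shape the system prompt describes and roughly the markdown the assistant is expected to produce (the exact output depends on the model):

# Illustrative only: an input note entry in the Q&A shape the system prompt describes
sample_note = (
    "Create two namespaces and name them ns1 and ns2\n"
    "k create ns ns1\n"
    "k create ns ns2\n"
)

# Roughly the markdown the assistant is expected to write via the write_file tool:
# the question becomes a header and the answer commands a fenced code block
expected_markdown = (
    "## Create two namespaces and name them ns1 and ns2\n\n"
    "```bash\n"
    "k create ns ns1\n"
    "k create ns ns2\n"
    "```\n"
)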
note_chat.py | 65
@@ -1,15 +1,19 @@
 from dotenv import load_dotenv
-import os, sys
+import os, time
+import json
 from openai import OpenAI
-import time
+import chat_tools

 load_dotenv()

-# initialize then client
+# initialize parameters
 DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
 DEEPSEEK_API_URL = os.getenv("DEEPSEEK_API_URL")
 MODEL = "deepseek-chat"
+chat_tools = chat_tools.get_chat_tools()
+history = []

 # Initialize the OpenAI client with the API key and base URL
 client = OpenAI(
     api_key=DEEPSEEK_API_KEY,
     base_url=DEEPSEEK_API_URL,
@@ -23,38 +27,63 @@ system_prompt = "You are a helpful assistant. You can convert text notes to mark
 in Question and Answer format. for example: Create two namespaces and name them ns1 and ns2 \n k create ns ns1 \n \
 k create ns ns2. where k create ns ns1 and k create ns ns2 are answers. Sometimes questions can be more than one line, \
 and each Q&A pair is separated by an empty line. Please make the question a header and format answers in correct markdown format. \
-Please only return the markdown content. "
+Please write the markdown to a user-specified file."
 content = ""
-with open("k8s_day10.txt", "r") as f:
+with open("test_data/k8s_day10.txt", "r") as f:
     for line in f:
         content += line

 def chat_with_deepseek(max_retries: int = MAX_RETRIES, retry_delay: int = RETRY_DELAY):
+    # Initialize the chat history with the system prompt
+    global content
+    history.append({"role": "system", "content": system_prompt})
+    history.append({"role": "user", "content": f"please convert the notes below to markdown format and write to k8s_day12.md: \n {content} "})
     count = 0
     for _ in range(max_retries):
         try:
             response = client.chat.completions.create(
                 model=MODEL,
-                messages=[
-                    {"role": "system", "content": system_prompt},
-                    {"role": "user", "content": f"please covert notes below to markdown format: \n {content} "},
-                ],
-                stream=True,
+                messages=history,
+                tools=chat_tools.get_tools(),
             )
-            for chunk in response:
-                text = chunk.choices[0].delta.content
-                if text:
-                    sys.stdout.write(text)
-                    sys.stdout.flush()
-
+            if response.choices[0].message.tool_calls:
+                tool_call = response.choices[0].message.tool_calls[0]
+                tool_name = tool_call.function.name
+                tool_args = tool_call.function.arguments
+                tool_id = tool_call.id
+
+                # Call the function with the arguments
+                if tool_name == "write_file":
+                    params = json.loads(tool_args)
+                    file_path = params["file_path"]
+                    content = params["content"]
+                    result = chat_tools.util.write_file(file_path, content)
+                    history.append(response.choices[0].message)
+                    history.append({"role": "tool", "tool_call_id": tool_id, "content": result})
+
+                    response = client.chat.completions.create(
+                        model=MODEL,
+                        messages=history,
+                    )
+                    return response.choices[0].message.content
+            else:
+                # for chunk in response:
+                #     text = chunk.choices[0].delta.content
+                #     if text:
+                #         sys.stdout.write(text)
+                #         sys.stdout.flush()
+                return response.choices[0].message.content

             break
         except Exception as e:
             count += 1
+            print(f"An error occurred: {e}.")
             if count < max_retries:
-                print(f"An error occurre: {e}. Retrying in {retry_delay} seconds...")
+                print(f"Retrying in {retry_delay} seconds...")
                 time.sleep(retry_delay)
             else:
                 print("Max retries reached. Exiting.")
-                break
+                raise e

-chat_with_deepseek()
+print(chat_with_deepseek(max_retries=1))
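Note for reviewers: the diff depends on a chat_tools module that is not part of this commit. From the calls above, get_chat_tools() must return an object exposing get_tools() and util.write_file(file_path, content). A minimal sketch of an interface that would satisfy those calls, assuming the standard OpenAI function-calling tool schema; this is an illustration, not the project's actual module:

# chat_tools.py -- hypothetical sketch matching the calls made in note_chat.py
import os


class _Util:
    def write_file(self, file_path: str, content: str) -> str:
        """Write content to a path under the local working directory."""
        os.makedirs(os.path.dirname(file_path) or ".", exist_ok=True)
        with open(file_path, "w") as f:
            f.write(content)
        return f"wrote {len(content)} characters to {file_path}"


class _ChatTools:
    util = _Util()

    def get_tools(self):
        # OpenAI-style tool definition the model can call as "write_file"
        return [
            {
                "type": "function",
                "function": {
                    "name": "write_file",
                    "description": "Write markdown content to a local file",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "file_path": {"type": "string"},
                            "content": {"type": "string"},
                        },
                        "required": ["file_path", "content"],
                    },
                },
            }
        ]


def get_chat_tools():
    return _ChatTools()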