C01L04_blogger.py
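# Fetches the "blogger" task, asks an OpenAI chat model to write one blog
# section per outline header, and submits the collected sections as the answer.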
import sys
sys.path.append(r'..')
import openai
from task_handler import get_task_token, get_task_info_from_token, send_answer_by_task_token, apikey
# --------------------------------------------------------------
# Get task data
# --------------------------------------------------------------
task_token = get_task_token(taskname='blogger', apikey=apikey)
task_data = get_task_info_from_token(task_token)
# --------------------------------------------------------------
# Prepare answer
# --------------------------------------------------------------
outline = task_data['blog']

# Number the outline headers so the model can be asked for a specific section
outline_with_numbers = [f"{index}. {element}" for index, element in enumerate(outline, 1)]

models = ["gpt-4", "gpt-3.5-turbo"]
model = models[1]
# openai.api_key = openai_apikey #Needed if OPENAI_API_KEY has different name
output = []
for number in range(1, len(outline) + 1):  # one section per outline header
    messages = [
        {"role": "system", "content": "Write a blog post describing each part of the provided outline."},
        {"role": "user", "content": "Here is the outline:\n" + "\n".join(outline_with_numbers)},
        {"role": "user", "content": f"Generate the section for header number {number}"}
    ]
    response = openai.chat.completions.create(
        model=model,
        messages=messages,
    )
    output.append(response.choices[0].message.content)
    # print(json.dumps(json.loads(response.model_dump_json()), indent=4))

print(output)
data = {"answer": output}
# --------------------------------------------------------------
# send answer
# --------------------------------------------------------------
response = send_answer_by_task_token(task_token, data)
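# Optional sketch: assuming send_answer_by_task_token returns the platform's
# verification response, print it to confirm the answer was accepted.
print(response)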