run.py
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
import argparse
import logging
import os
import sys
from camel.typing import ModelType
root = os.path.dirname(__file__)
sys.path.append(root)  # make the local chatdev package importable regardless of the working directory

from chatdev.chat_chain import ChatChain

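# Probe for the typed tool-call / function-call classes that only ship with the
# OpenAI Python SDK v1+; whether this import succeeds tells us which API generation is installed.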
try:
    from openai.types.chat.chat_completion_message_tool_call import ChatCompletionMessageToolCall
    from openai.types.chat.chat_completion_message import FunctionCall

    openai_new_api = True  # new openai api version
except ImportError:
    openai_new_api = False  # old openai api version
    print(
        "Warning: Your OpenAI version is outdated. \n "
        "Please update as specified in requirements.txt. \n "
        "The old API interface is deprecated and will no longer be supported.")

def get_config(company):
    """
    Return the configuration JSON files used by ChatChain.

    A company config may customize only part of the configuration; any file it does not
    provide falls back to the Default config.

    Args:
        company: name of a customized configuration directory under CompanyConfig/

    Returns:
        Paths to the three configuration JSONs: [config_path, config_phase_path, config_role_path]
    """
    config_dir = os.path.join(root, "CompanyConfig", company)
    default_config_dir = os.path.join(root, "CompanyConfig", "Default")

    config_files = [
        "ChatChainConfig.json",
        "PhaseConfig.json",
        "RoleConfig.json"
    ]

    config_paths = []
    for config_file in config_files:
        company_config_path = os.path.join(config_dir, config_file)
        default_config_path = os.path.join(default_config_dir, config_file)
        # Prefer the company-specific file; otherwise use the Default one.
        if os.path.exists(company_config_path):
            config_paths.append(company_config_path)
        else:
            config_paths.append(default_config_path)

    return tuple(config_paths)

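# For example, `python run.py --config MyCompany` (a hypothetical directory name) would read
# CompanyConfig/MyCompany/ and fall back to CompanyConfig/Default for any JSON it does not provide.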
parser = argparse.ArgumentParser(description='argparse')
parser.add_argument('--config', type=str, default="Default",
                    help="Name of config, which is used to load configuration under CompanyConfig/")
parser.add_argument('--org', type=str, default="DefaultOrganization",
                    help="Name of organization, your software will be generated in WareHouse/name_org_timestamp")
parser.add_argument('--task', type=str, default="Develop a basic Gomoku game.",
                    help="Prompt of software")
parser.add_argument('--name', type=str, default="Gomoku",
                    help="Name of software, your software will be generated in WareHouse/name_org_timestamp")
parser.add_argument('--model', type=str, default="GPT_3_5_TURBO",
                    help="GPT Model, choose from {'GPT_3_5_TURBO', 'GPT_4', 'GPT_4_32K', 'GPT_4_TURBO'}")
parser.add_argument('--path', type=str, default="",
                    help="Your file directory, ChatDev will build upon your software in the Incremental mode")
args = parser.parse_args()
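# Example invocation (the values shown are simply the defaults declared above):
#   python run.py --task "Develop a basic Gomoku game." --name Gomoku --org DefaultOrganization --model GPT_3_5_TURBO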

# Start ChatDev

# ----------------------------------------
# Init ChatChain
# ----------------------------------------
config_path, config_phase_path, config_role_path = get_config(args.config)
args2type = {'GPT_3_5_TURBO': ModelType.GPT_3_5_TURBO,
             'GPT_4': ModelType.GPT_4,
             'GPT_4_32K': ModelType.GPT_4_32k,
             'GPT_4_TURBO': ModelType.GPT_4_TURBO,
             'GPT_4_TURBO_V': ModelType.GPT_4_TURBO_V
             }
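# When the v1 SDK was detected above, route GPT-3.5-Turbo to the ModelType variant that uses the new client.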
if openai_new_api:
    args2type['GPT_3_5_TURBO'] = ModelType.GPT_3_5_TURBO_NEW

chat_chain = ChatChain(config_path=config_path,
                       config_phase_path=config_phase_path,
                       config_role_path=config_role_path,
                       task_prompt=args.task,
                       project_name=args.name,
                       org_name=args.org,
                       model_type=args2type[args.model],
                       code_path=args.path)

# ----------------------------------------
# Init Log
# ----------------------------------------
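# Note: the `encoding` keyword of logging.basicConfig requires Python 3.9 or newer.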
logging.basicConfig(filename=chat_chain.log_filepath, level=logging.INFO,
                    format='[%(asctime)s %(levelname)s] %(message)s',
                    datefmt='%Y-%d-%m %H:%M:%S', encoding="utf-8")

# ----------------------------------------
# Pre Processing
# ----------------------------------------
chat_chain.pre_processing()

# ----------------------------------------
# Personnel Recruitment
# ----------------------------------------
chat_chain.make_recruitment()

# ----------------------------------------
# Chat Chain
# ----------------------------------------
chat_chain.execute_chain()

# ----------------------------------------
# Post Processing
# ----------------------------------------
chat_chain.post_processing()