Commit 8afc179
simply remove debug package
wladpaiva committed Oct 31, 2023
1 parent: b536e8e

Showing 5 changed files with 1 addition and 31 deletions.
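
For context, every deletion in this commit removes the same pattern: the debug package builds namespaced loggers that print only when the DEBUG environment variable matches their namespace. A minimal sketch of what is being deleted, using a namespace taken from the diff below (the call site is illustrative, not the exact removed code):

import debug from 'debug'

// One namespaced logger per module; silent unless the process is
// started with a matching filter such as DEBUG=autogen:*
const log = debug('autogen:chat-aibitat')

log('starting a chat')
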
bun.lockb (binary file modified, contents not shown)

package.json (2 changes: 0 additions & 2 deletions)

@@ -39,7 +39,6 @@
"devDependencies": {
"@changesets/cli": "^2.26.2",
"@ianvs/prettier-plugin-sort-imports": "^4.1.0",
"@types/debug": "^4.1.9",
"bun-types": "latest",
"husky": "^8.0.0",
"is-ci": "^3.0.1",
@@ -54,7 +53,6 @@
"@anthropic-ai/sdk": "^0.8.1",
"@inquirer/prompts": "^3.2.0",
"chalk": "^5.3.0",
"debug": "^4.3.4",
"langchain": "^0.0.169",
"node-html-markdown": "^1.3.0",
"openai": "^4.11.1"

src/aibitat.ts (15 changes: 0 additions & 15 deletions)

@@ -1,6 +1,5 @@
 import {EventEmitter} from 'events'
 import chalk from 'chalk'
-import debug from 'debug'

 import {APIError} from './error.ts'
 import {
@@ -11,8 +10,6 @@ import {
   type OpenAIModel,
 } from './providers/index.ts'

-const log = debug('autogen:chat-aibitat')
-
 /**
  * The provider config to use for the AI.
  */
@@ -449,12 +446,6 @@ export class AIbitat {
    * @param message The message to start the chat.
    */
   public async start(message: Message) {
-    log(
-      `starting a chat from ${chalk.yellow(message.from)} to ${chalk.yellow(
-        message.to,
-      )} with ${chalk.green(message.content)}`,
-    )
-
     // register the message in the chat history
     this.newMessage(message)
     this.emitter.emit('start', message, this)
@@ -475,12 +466,6 @@
    * @param keepAlive Whether to keep the chat alive.
    */
   private async chat(route: Route, keepAlive = true) {
-    log(
-      `executing a chat from ${chalk.yellow(route.from)} to ${chalk.green(
-        route.to,
-      )}`,
-    )
-
     // check if the message is for a group
     // if it is, select the next node to chat with from the group
     // and then ask them to reply.

src/providers/anthropic.ts (5 changes: 0 additions & 5 deletions)

@@ -1,5 +1,4 @@
 import Anthropic, {ClientOptions} from '@anthropic-ai/sdk'
-import debug from 'debug'

 import {FunctionDefinition} from '../aibitat.ts'
 import {
@@ -11,8 +10,6 @@ import {
 } from '../error.ts'
 import {AIProvider, Message} from './ai-provider.ts'

-const log = debug('autogen:provider:anthropic')
-
 /**
  * The model to use for the Anthropic API.
  */
@@ -67,8 +64,6 @@ export class AnthropicProvider extends AIProvider<Anthropic> {
     messages: Message[],
     functions?: FunctionDefinition[],
   ): Promise<string> {
-    log(`calling 'anthropic.completions.create' with model '${this.model}'`)
-
     // clone messages to avoid mutating the original array
     const promptMessages = [...messages]


src/providers/openai.ts (10 changes: 1 addition & 9 deletions)

@@ -1,4 +1,3 @@
-import debug from 'debug'
 import OpenAI, {
   ClientOptions,
   APIConnectionError as OpenAIAPIConnectionError,
@@ -25,8 +24,6 @@ import {
 } from '../error.ts'
 import {AIProvider} from './ai-provider.ts'

-const log = debug('autogen:provider:openai')
-
 /**
  * The model to use for the OpenAI API.
  */
@@ -100,8 +97,6 @@ export class OpenAIProvider extends AIProvider<OpenAI> {
     messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
     functions?: FunctionDefinition[],
   ): Promise<string> {
-    log(`calling 'openai.chat.completions.create' with model '${this.model}'`)
-
     try {
       const response = await this.client.chat.completions.create({
         model: this.model,
@@ -110,8 +105,6 @@ export class OpenAIProvider extends AIProvider<OpenAI> {
         functions,
       })

-      log('cost: ', this.getCost(response.usage))
-
       if (functions && response.choices[0].message.function_call) {
         // send the info on the function call and function response to GPT
         // and return the response
@@ -201,7 +194,6 @@ export class OpenAIProvider extends AIProvider<OpenAI> {
     if (!(model in OpenAIProvider.COST_PER_TOKEN)) {
       return 'unknown'
     }
-    log('model:', model)

     const costPerToken =
       OpenAIProvider.COST_PER_TOKEN[
@@ -230,7 +222,7 @@
     call: OpenAI.Chat.ChatCompletionMessage.FunctionCall,
   ) {
     const funcToCall = functions.find(f => f.name === call.name)
-    log(`calling function "${call.name}" with arguments: `, call.arguments)
+
     if (!funcToCall) {
       throw new Error(`Function '${call.name}' not found`)
     }
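
All four source-file changes delete logging only; no behavior changes. If opt-in tracing is ever wanted again without re-adding the dependency, a small local stand-in could mimic the same DEBUG convention. This is a hypothetical sketch (makeLog is not part of this commit or codebase, and the matching is simplified compared to the real debug package):

// Hypothetical replacement for the removed loggers: gate console output
// on the DEBUG environment variable, as the debug package does.
function makeLog(namespace: string) {
  const patterns = (process.env.DEBUG ?? '').split(',').filter(Boolean)
  const enabled = patterns.some(
    p =>
      p === namespace ||
      (p.endsWith('*') && namespace.startsWith(p.slice(0, -1))),
  )
  return (...args: unknown[]) => {
    if (enabled) console.error(namespace, ...args)
  }
}

const log = makeLog('autogen:provider:openai')
log('calling openai.chat.completions.create')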
