Commit
Create the chat package and finalise the dashboard API (#68)
Moves the websocket handler from main.go into its own package, chat, and moves the dashboard and chat logs into their own packages.
Showing 4 changed files with 139 additions and 116 deletions.
chat package (new file)
@@ -0,0 +1,123 @@
package chat

import (
    "encoding/json"
    "fmt"
    "net/http"
    "reflect"
    "time"

    "github.com/olivia-ai/olivia/network"

    "github.com/gookit/color"
    "github.com/gorilla/websocket"
    "github.com/olivia-ai/olivia/analysis"
    "github.com/olivia-ai/olivia/user"
    "github.com/olivia-ai/olivia/util"
    gocache "github.com/patrickmn/go-cache"
)

var (
    // Create the neural network variable to use it everywhere
    neuralNetwork network.Network
    // Initializes the cache with a 5 minute lifetime
    cache = gocache.New(5*time.Minute, 5*time.Minute)
)

// Configure the upgrader
var upgrader = websocket.Upgrader{
    CheckOrigin: func(r *http.Request) bool {
        return true
    },
}

// RequestMessage is the structure used by entry connections to chat over the websocket
type RequestMessage struct {
    Content     string           `json:"content"`
    Token       string           `json:"user_token"`
    Information user.Information `json:"information"`
}

// ResponseMessage is the structure used to reply to the user through the websocket
type ResponseMessage struct {
    Content     string           `json:"content"`
    Tag         string           `json:"tag"`
    Information user.Information `json:"information"`
}

// Serve serves the websocket on the given port
func Serve(_neuralNetwork network.Network, port string) {
    // Set the given network as the package-level variable
    neuralNetwork = _neuralNetwork

    http.HandleFunc("/", Handle)

    magenta := color.FgMagenta.Render
    fmt.Printf("\nChat Websocket listening on the port %s...\n", magenta(port))

    // Serves the chat
    err := http.ListenAndServe(":"+port, nil)
    if err != nil {
        panic(err)
    }
}

// Handle manages the entry connections and replies with the neural network
func Handle(w http.ResponseWriter, r *http.Request) {
    conn, _ := upgrader.Upgrade(w, r, nil)
    fmt.Println(color.FgGreen.Render("A new connection has been opened"))

    for {
        // Read message from browser
        msgType, msg, err := conn.ReadMessage()
        if err != nil {
            continue
        }

        // Deserialize the JSON content of the message
        var request RequestMessage
        if err = json.Unmarshal(msg, &request); err != nil {
            continue
        }

        // Set the information from the client into the cache
        if reflect.DeepEqual(user.GetUserInformation(request.Token), user.Information{}) {
            user.SetUserInformation(request.Token, request.Information)
        }

        // Write message back to browser
        response := Reply(request)
        if err = conn.WriteMessage(msgType, response); err != nil {
            continue
        }
    }
}

// Reply takes the entry message and returns an array of bytes for the answer
func Reply(request RequestMessage) []byte {
    var responseSentence, responseTag string

    // Send a message from res/messages.json if it is too long
    if len(request.Content) > 500 {
        responseTag = "too long"
        responseSentence = util.GetMessage(responseTag)
    } else {
        responseTag, responseSentence = analysis.NewSentence(
            request.Content,
        ).Calculate(*cache, neuralNetwork, request.Token)
    }

    // Marshal the response into JSON
    response := ResponseMessage{
        Content:     responseSentence,
        Tag:         responseTag,
        Information: user.GetUserInformation(request.Token),
    }

    bytes, err := json.Marshal(response)
    if err != nil {
        panic(err)
    }

    return bytes
}
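To illustrate the message format the new chat package expects, here is a minimal client sketch (not part of the commit) built on the same gorilla/websocket dependency. It assumes the server is running locally on port 8080, as wired up in main.go below, and that the information field can be omitted from the request JSON so it unmarshals to a zero user.Information; the token value and local struct names are purely illustrative.

package main

import (
    "encoding/json"
    "fmt"
    "log"

    "github.com/gorilla/websocket"
)

// request mirrors chat.RequestMessage; the information field is left out here,
// so the server falls back to a zero user.Information for this token.
type request struct {
    Content string `json:"content"`
    Token   string `json:"user_token"`
}

// response mirrors chat.ResponseMessage, ignoring the information field.
type response struct {
    Content string `json:"content"`
    Tag     string `json:"tag"`
}

func main() {
    // Dial the chat websocket served by chat.Serve on port 8080.
    conn, _, err := websocket.DefaultDialer.Dial("ws://localhost:8080/", nil)
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    // Send a request; a text frame in yields a text frame out,
    // since Handle replies with the same message type it received.
    msg, _ := json.Marshal(request{Content: "Hello!", Token: "some-user-token"})
    if err = conn.WriteMessage(websocket.TextMessage, msg); err != nil {
        log.Fatal(err)
    }

    // Read and decode the reply produced by chat.Reply.
    _, raw, err := conn.ReadMessage()
    if err != nil {
        log.Fatal(err)
    }

    var res response
    if err = json.Unmarshal(raw, &res); err != nil {
        log.Fatal(err)
    }
    fmt.Printf("[%s] %s\n", res.Tag, res.Content)
}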
main.go
@@ -1,129 +1,22 @@
package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "os"
    "reflect"
    "time"

    "github.com/olivia-ai/olivia/chat"
    "github.com/olivia-ai/olivia/dashboard"

    "github.com/gookit/color"
    "github.com/gorilla/websocket"
    "github.com/olivia-ai/olivia/analysis"
    "github.com/olivia-ai/olivia/training"
    "github.com/olivia-ai/olivia/user"
    "github.com/olivia-ai/olivia/util"
    gocache "github.com/patrickmn/go-cache"
)

var (
    model = training.CreateNeuralNetwork()
    cache = gocache.New(5*time.Minute, 5*time.Minute)
    // Initialize the neural network by training it
    neuralNetwork = training.CreateNeuralNetwork()
)

// Configure the upgrader
var upgrader = websocket.Upgrader{
    CheckOrigin: func(r *http.Request) bool {
        return true
    },
}

type RequestMessage struct {
    Content     string           `json:"content"`
    Token       string           `json:"user_token"`
    Information user.Information `json:"information"`
}

type ResponseMessage struct {
    Content     string           `json:"content"`
    Tag         string           `json:"tag"`
    Information user.Information `json:"information"`
}

func main() {
    http.HandleFunc("/", Handle)

    port := "8080"
    if os.Getenv("PORT") != "" {
        port = os.Getenv("PORT")
    }

    magenta := color.FgMagenta.Render

    // Serve the REST API inside a go routine
    go func() {
        fmt.Printf("Dashboard API listening on the port %s...\n", magenta(8081))

        // Serve the API
        dashboard.Serve(model)
        dashboard.Serve(neuralNetwork, "8081")
    }()

    fmt.Printf("\nChat Websocket listening on the port %s...\n", magenta(port))

    // Serves the websocket
    err := http.ListenAndServe(":"+port, nil)
    if err != nil {
        panic(err)
    }
}

func Handle(w http.ResponseWriter, r *http.Request) {
    conn, _ := upgrader.Upgrade(w, r, nil)
    fmt.Println(color.FgGreen.Render("A new connection has been opened"))

    for {
        // Read message from browser
        msgType, msg, err := conn.ReadMessage()
        if err != nil {
            continue
        }

        // Unserialize the json content of the message
        var request RequestMessage
        if err = json.Unmarshal(msg, &request); err != nil {
            continue
        }

        // Set the informations from the client into the cache
        if reflect.DeepEqual(user.GetUserInformation(request.Token), user.Information{}) {
            user.SetUserInformation(request.Token, request.Information)
        }

        // Write message back to browser
        response := Reply(request)
        if err = conn.WriteMessage(msgType, response); err != nil {
            continue
        }
    }
}

func Reply(request RequestMessage) []byte {
    var responseSentence, responseTag string

    // Send a message from res/messages.json if it is too long
    if len(request.Content) > 500 {
        responseTag = "too long"
        responseSentence = util.GetMessage(responseTag)
    } else {
        responseTag, responseSentence = analysis.NewSentence(
            request.Content,
        ).Calculate(*cache, model, request.Token)
    }

    // Marshall the response in json
    response := ResponseMessage{
        Content:     responseSentence,
        Tag:         responseTag,
        Information: user.GetUserInformation(request.Token),
    }

    bytes, err := json.Marshal(response)
    if err != nil {
        panic(err)
    }

    return bytes
    // Serves the chat
    chat.Serve(neuralNetwork, "8080")
}
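Since the rendered hunk above interleaves removed and added lines without markers, the following is a rough sketch of what main.go presumably reduces to after this commit (the new file is 22 lines per the hunk header). The remaining imports and comments, and whether the PORT environment variable handling was kept, are assumptions; only the dashboard.Serve(neuralNetwork, "8081") and chat.Serve(neuralNetwork, "8080") calls are taken from the diff.

package main

import (
    "github.com/olivia-ai/olivia/chat"
    "github.com/olivia-ai/olivia/dashboard"
    "github.com/olivia-ai/olivia/training"
)

// Initialize the neural network by training it
var neuralNetwork = training.CreateNeuralNetwork()

func main() {
    // Serve the dashboard REST API inside a go routine
    go func() {
        dashboard.Serve(neuralNetwork, "8081")
    }()

    // Serves the chat websocket (blocks)
    chat.Serve(neuralNetwork, "8080")
}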
The remaining changed files are large diffs and are not rendered by default.