Skip to content

Commit

Permalink
Adding a signal handler to the server.
Browse files Browse the repository at this point in the history
Various small changes in raw_http_req_tests.py.
  • Loading branch information
akira.kurogane@gmail.com committed Sep 30, 2011
1 parent 25133ec commit 2b317e2
Show file tree
Hide file tree
Showing 2 changed files with 60 additions and 27 deletions.
45 changes: 31 additions & 14 deletions furiganaserver/fiserver/raw_http_req_tests.py
Expand Up @@ -43,7 +43,7 @@ def __socket_req_test(buf, sample_filename, skip_known_bad_samples=False):
elif hrt == "POST" and re.search("User-Agent: Pingdom", buf):
r.update({"expected_fail": True, "fail_reason": "Pingdom POST"})
else:
print("Non-OK response to {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, buf[:10], len(buf), buf[-10:-4]))
print("Non-OK response to {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, buf[:10], len(buf), buf[-10:].rstrip()))
r.update({"raw_reply": repl_data})

elif hrt == "HEAD":
Expand All @@ -59,7 +59,7 @@ def __socket_req_test(buf, sample_filename, skip_known_bad_samples=False):
except Exception:
r["fail_reason"] = "Invalid json"
r.update({"raw_reply": repl_data})
print("JSON parsing failed on the response to {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, repl_content[:20], len(repl_content), repl_data[-10:]))
print("JSON parsing failed on the response to {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, repl_content[:20], len(repl_content), repl_data[-10:].rstrip()))


except (socket.timeout, socket.error) as ex:
Expand All @@ -74,9 +74,9 @@ def __socket_req_test(buf, sample_filename, skip_known_bad_samples=False):
r.update({"expected_fail": True, "fail_reason": "whitespace-only request"})
else:
if ex is socket.timeout:
print("Socket timeout on {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, buf[:10], len(buf), buf[-10:-4]))
print("Socket timeout on {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, buf[:10], len(buf), buf[-10:].rstrip()))
else:
print("Socket error on {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, buf[:10], len(buf), buf[-10:-4]))
print("Socket error on {0}:\n {1} ({2} chars) ...{3}".format(sample_filename, buf[:10], len(buf), buf[-10:].rstrip()))
finally:
s.close()

Expand All @@ -90,6 +90,7 @@ def __iterate_sample_files():
for req_file in os.listdir(__http_dump_dir):
#for req_file in ["002.102.167.116.64079-192.168.011.016.00080"]:
#for req_file in ["219.110.205.086.56328-192.168.011.016.00080"]:
#for req_file in ["069.011.096.181.63083-192.168.011.016.00080", "069.011.096.181.62841-192.168.011.016.00080", "061.114.219.065.03200-192.168.011.016.00080", "061.114.219.065.03217-192.168.011.016.00080", "076.114.133.223.40003-192.168.011.016.00080", "114.042.238.129.13825-192.168.011.016.00080"]: #cases of post data that seems to be truncated according to this script
buf = ""
hs = open(__http_dump_dir + "/" + req_file)

Expand Down Expand Up @@ -148,11 +149,13 @@ def __tally(results):
req_cnts[hrt] += 1

if hrt == "HEAD" and not "head_reply_ok" in r:
req_cnts[hrt] -= 1
if "expected_fail" in r:
exp_fail_cnts[hrt] += 1
else:
fail_cnts[hrt] += 1
elif hrt == "POST" and not "jbody" in r:
req_cnts[hrt] -= 1
if "expected_fail" in r:
exp_fail_cnts[hrt] += 1
else:
Expand All @@ -161,7 +164,7 @@ def __tally(results):
stats = {
"tests_datetime": datetime.datetime.now(),
"POST_count": req_cnts.get("POST", 0),
"POST_avg_time_µs": reqtime_sums["POST"] / req_cnts["POST"] * 1000000 if req_cnts["POST"] else "N/A",
"POST_avg_time_µs": reqtime_sums["POST"] / req_cnts["POST"] * 1000000 if req_cnts["POST"] else None,
"POST_fail_count": fail_cnts.get("POST", 0),
"POST_expected_fail_count" : exp_fail_cnts.get("POST", 0),
"GET_count": req_cnts.get("GET", 0),
Expand All @@ -171,20 +174,34 @@ def __tally(results):
"HEAD_avg_time_µs": reqtime_sums["HEAD"] / req_cnts["HEAD"] * 1000000 if "HEAD" in req_cnts else None,
"HEAD_fail_count": fail_cnts.get("HEAD", 0),
"HEAD_expected_fail_count" : exp_fail_cnts.get("HEAD", 0),
"longest_req": longest_req if longest_req else "N/A",
"longest_req_time": longest_req_time * 1000000 if longest_req_time else "--",
"longest_req": longest_req if longest_req else None,
"longest_req_time": longest_req_time * 1000000 if longest_req_time else None,
}

return stats

def __print_summary(stats):
# Print a human-readable summary of the request-tally stats dict built by __tally().
# NOTE(review): this span is scraped from a commit-diff listing with the +/- markers
# lost. The monolithic triple-quoted print just below appears to be the REMOVED side
# of the hunk, and the per-section conditional prints after it the ADDED side --
# confirm against the checked-out file; only one variant should be live.
print("""POST: {POST_count} requests at average of {POST_avg_time_µs:.0f} µs each.
{POST_fail_count} POSTs unexpectedly failed. ({POST_expected_fail_count} known bad request samples ignored.)
HEAD: {HEAD_count} requests at average of {HEAD_avg_time_µs:.0f} µs each.
{HEAD_fail_count} HEADs unexpectedly failed. ({HEAD_expected_fail_count} known bad request samples ignored.)
Other: {GET_count} GET and {No_http_type_count} other junk requests processed at avg of {junk_req_avg_time_µs:.0f} µs each.
Longest-running request: {longest_req_time:.0f} µs for {longest_req}.
""".format(**stats))
# Added style: each section is printed only when its stats are meaningful. The avg
# fields can be None when no request of that type succeeded (see __tally), and None
# cannot take the numeric :.0f format spec, so guard before formatting.
if "POST_avg_time_µs" in stats and stats["POST_avg_time_µs"]:
print("POST: {POST_count} requests at average of {POST_avg_time_µs:.0f} µs each.".format(**stats))
else:
print("POST: {POST_count} requests (no avg time info- all failed?).".format(**stats))
if stats["POST_fail_count"] or stats["POST_expected_fail_count"]:
print("  {POST_fail_count} POSTs unexpectedly failed. ({POST_expected_fail_count} known bad request samples ignored.)".format(**stats))
if "HEAD_avg_time_µs" in stats and stats["HEAD_avg_time_µs"]:
print("HEAD: {HEAD_count} requests at average of {HEAD_avg_time_µs:.0f} µs each.".format(**stats))
elif not stats["HEAD_count"]:
# No HEAD requests at all: stay silent rather than print an empty section.
pass
else:
print("HEAD: {HEAD_count} requests (no avg time info- all failed?).".format(**stats))
if stats["HEAD_fail_count"] or stats["HEAD_expected_fail_count"]:
print("  {HEAD_fail_count} HEADs unexpectedly failed. ({HEAD_expected_fail_count} known bad request samples ignored.)".format(**stats))
# GET and untyped requests are lumped together as "junk" traffic.
if stats["GET_count"] or stats["No_http_type_count"]:
if "junk_req_avg_time_µs" in stats and stats["junk_req_avg_time_µs"]:
print("Other: {GET_count} GET and {No_http_type_count} other junk requests processed at avg of {junk_req_avg_time_µs:.0f} µs each.".format(**stats))
else:
print("Other: {GET_count} GET and {No_http_type_count} other junk requests processed (no avg time info- all failed?).".format(**stats))
# longest_req may be None (no timed requests); skip the line in that case.
if "longest_req_time" in stats and "longest_req" in stats and stats["longest_req"]:
print("Longest-running request: {longest_req_time:.0f} µs for {longest_req}.".format(**stats))


def __log_perf_stats(stats):
Expand Down
42 changes: 29 additions & 13 deletions furiganaserver/fiserver/server.c
Expand Up @@ -3,6 +3,7 @@
#include <stdlib.h>
#include <assert.h>
#include <string.h>
#include <signal.h>
#include <time.h>
#include <stdarg.h>
#include <pthread.h>
Expand All @@ -11,10 +12,15 @@
#include "mongoose.h"
#include "json.h"

// Set by signal_handler() when SIGTERM/SIGINT arrives; polled by main()'s
// shutdown loop. volatile sig_atomic_t is the only object type the C standard
// guarantees may be safely written from an async signal handler (C99 7.14.1.1);
// a plain int could be cached in a register by the polling loop and the write
// is not guaranteed atomic.
static volatile sig_atomic_t exit_flag;
static size_t mg_read_max_req_size; //Will be initialized after mongoose context is created.
//N.B. The Mecab lib doesn't seem to have maximum buffer size for node parsing. The mecab executable does
// but that's not relevant.

// Async signal handler: record which signal fired so main() can shut the
// server down cleanly. Kept async-signal-safe -- a single flag assignment,
// no library calls.
static void signal_handler(int sig_num) {
    exit_flag = sig_num;
}

static const char *ajax_service_url = "/furiganainjector";
static const char *test_form_url = "/test_form";
/*static const char *ajax_reply_start =
Expand Down Expand Up @@ -58,23 +64,23 @@ static void print_robots_txt(struct mg_connection *conn) {
// Split up form data values as required, print the result from furigana_decorate
static void handle_furigana_request(struct mg_connection *conn) {

char tmp_inp_copy_filename[] = "/tmp/fiserver_input_copy_XXXXXX"; //The XXXXXX will be updated with a random value
int ic_tmp_fd = mkstemp(tmp_inp_copy_filename); //Debug use
//File tracing: char tmp_inp_copy_filename[] = "/tmp/fiserver_input_copy_XXXXXX"; //The XXXXXX will be updated with a random value
//File tracing: int ic_tmp_fd = mkstemp(tmp_inp_copy_filename); //Debug use
//printf("Copy of input being saved at %s\n", tmp_inp_copy_filename);

unsigned int buf_chunks = 2; //multiple of mg_read_max_req_size to make the buf size
char* buf = (char*)malloc(mg_read_max_req_size * buf_chunks + 1);
buf[0] = '\0'; //in case mg_read doesn't return anything
char* decorated_result;
size_t curr_buf_len = 0, temp_len;
while (temp_len = mg_read(conn, buf + curr_buf_len, mg_read_max_req_size)) { //Eh? Doesn't mg_read() alter the last parameter?
while ((temp_len = mg_read(conn, buf + curr_buf_len, mg_read_max_req_size))) {
curr_buf_len += temp_len;
if (curr_buf_len >= mg_read_max_req_size * (buf_chunks - 1))
buf = (char*)realloc(buf, mg_read_max_req_size * ++buf_chunks);
}
*(buf + curr_buf_len) = '\0';
write(ic_tmp_fd, buf, strlen(buf)); //debug use
close(ic_tmp_fd); //This file will be deleted at the end of this function. I.e. it will only survive if the program aborts due to a segfault/etc.
//File tracing: write(ic_tmp_fd, buf, strlen(buf)); //debug use
//File tracing: close(ic_tmp_fd); //This file will be deleted at the end of this function. I.e. it will only survive if the program aborts due to a segfault/etc.
char* tok_start_ptr = buf; //buf pointer that will set to null after the first use in strtok_r()
char* key;
char* val;
Expand Down Expand Up @@ -110,14 +116,15 @@ static void handle_furigana_request(struct mg_connection *conn) {

mg_printf(conn, "HTTP/1.1 200 OK\r\n"
"Server: Mongoose-not-mod_furiganainjector\r\n" //Have to include the substring "mod_furiganainjector" until clients re-written
"Connection: close\r\n"
"Content-Type: application/json\r\n"
"\r\n");
//Todo: process in chunks < 4096
const char * temp_json_str = json_object_to_json_string(reply_json_obj);
mg_write(conn, temp_json_str, strlen(temp_json_str));

json_object_put(reply_json_obj);
unlink(tmp_inp_copy_filename);
//File tracing: unlink(tmp_inp_copy_filename);
}

static void *fiserver_event_handler(enum mg_event event,
Expand Down Expand Up @@ -152,12 +159,11 @@ static void *fiserver_event_handler(enum mg_event event,
// Mongoose start-up options: a NULL-terminated list of name/value string pairs
// passed to mg_start() in main().
// NOTE(review): scraped from a commit-diff listing with +/- markers lost --
// both the old ("8081") and new ("80") "listening_ports" rows appear below,
// and it is not visible here whether "num_threads"/"max_request_size" were
// kept or removed. Confirm against the checked-out file; main() reads
// mg_get_option(ctx, "max_request_size"), so that row matters.
static const char *options[] = {
"enable_directory_listing", "no",
//"document_root", "html",
"listening_ports", "8081",
"listening_ports", "80",
//"ssl_certificate", "ssl_cert.pem",
//"access_log_file", "fiserver.access.log",
//"error_log_file", "fiserver.err.log",
"num_threads", "5",
"max_request_size", "16384",
//"enable_keep_alive", "no",
NULL
};

Expand All @@ -167,15 +173,25 @@ int main(void) {

init_mecab_context(/*args*/);

/* Setup signal handler: quit on Ctrl-C */
signal(SIGTERM, signal_handler);
signal(SIGINT, signal_handler);

// Setup and start Mongoose
ctx = mg_start(&fiserver_event_handler, NULL, options);
assert(ctx != NULL);
mg_read_max_req_size = atoi(mg_get_option(ctx, "max_request_size"));

// Wait until enter is pressed, then exit
printf("Furigana Injector server started on port(s) %s, press enter to quit.\n",
mg_get_option(ctx, "listening_ports"));
getchar();
//// Wait until enter is pressed, then exit
//printf("Furigana Injector server started on port(s) %s, press enter to quit.\n",
// mg_get_option(ctx, "listening_ports"));
//getchar();
while (exit_flag == 0) {
sleep(1);
}
printf("Exiting on signal %d, waiting for all threads to finish...",
exit_flag);
fflush(stdout);
mg_stop(ctx);
printf("%s\n", "Furigana Injector server stopped.");

Expand Down

0 comments on commit 2b317e2

Please sign in to comment.