Skip to content

Commit

Permalink
Logs pagination
Browse files Browse the repository at this point in the history
  • Loading branch information
t348575 committed Apr 25, 2024
1 parent 1dba50a commit 2c597b3
Show file tree
Hide file tree
Showing 4 changed files with 80 additions and 25 deletions.
2 changes: 1 addition & 1 deletion app/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ async fn main() -> Result<()> {
tracing_subscriber::fmt::layer()
.with_timer(ChronoLocal::new("%v %k:%M:%S %z".to_owned()))
.with_target(false)
.compact()
.compact(),
);

let file_appender = tracing_appender::rolling::never(".", "twitch-points-miner.log");
Expand Down
44 changes: 30 additions & 14 deletions app/src/web_api/mod.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use std::{io::SeekFrom, path::Path, sync::Arc};

use axum::{
extract::State,
extract::{Query, State},
http::StatusCode,
response::{Html, IntoResponse},
routing::get,
Expand All @@ -14,6 +14,7 @@ use common::{
twitch::auth::Token,
types::*,
};
use serde::Deserialize;
use tokio::{
fs::File,
io::{AsyncReadExt, AsyncSeekExt, BufReader},
Expand Down Expand Up @@ -82,7 +83,7 @@ pub async fn get_api_server(
components(
schemas(
PubSub, StreamerState, StreamerConfigRefWrapper, ConfigTypeRef, StreamerConfig, PredictionConfig, StreamerInfo, Event,
Filter, Strategy, UserId, Game, Detailed, Timestamp, DefaultPrediction, DetailedOdds, Points, OddsComparisonType
Filter, Strategy, UserId, Game, Detailed, Timestamp, DefaultPrediction, DetailedOdds, Points, OddsComparisonType, LogQuery
),
),
tags(
Expand Down Expand Up @@ -227,16 +228,17 @@ impl PubSub {
}
}

async fn read_last_n_lines(file: &mut File, mut n: usize) -> Result<Vec<String>> {
async fn read_sliced_lines(file: &mut File, log_query: LogQuery) -> Result<Vec<String>> {
let mut lines = Vec::new();
let mut n = log_query.per_page;
let mut current_page = 0;
let mut total_lines = 0;

let file_size = file.metadata().await?.len();
let mut file = BufReader::new(file);
file.seek(SeekFrom::End(0)).await?;

let mut prev_buffer: Vec<u8> = Vec::new();

file.seek(SeekFrom::End(0)).await?;
while n > 0 && file_size > 0 {
while current_page <= log_query.page {
file.seek(SeekFrom::Current(-1024)).await?;
let mut buffer = [0; 1024];
let bytes_read = file.read(&mut buffer).await?;
Expand All @@ -262,14 +264,21 @@ async fn read_last_n_lines(file: &mut File, mut n: usize) -> Result<Vec<String>>
break;
}

if current_page > log_query.page {
break;
}

let line = String::from_utf8(line.to_vec())?;
if !line.trim().is_empty() {
if idx + 1 == size {
prev_buffer = line.as_bytes().to_vec();
} else {
break;
} else if current_page == log_query.page {
lines.push(format!("{line}\n"));
n -= 1;
}
n -= 1;
total_lines += 1;
current_page = total_lines / log_query.per_page;
}
}
file.seek(SeekFrom::Current((-1 * bytes_read as i64) - 1))
Expand All @@ -285,14 +294,21 @@ async fn read_last_n_lines(file: &mut File, mut n: usize) -> Result<Vec<String>>
Ok(lines)
}

#[derive(Deserialize, utoipa::ToSchema, utoipa::IntoParams)]
struct LogQuery {
per_page: usize,
page: usize,
}

#[utoipa::path(
get,
path = "/api/logs",
responses(
(status = 200, description = "Get last logs as rendered html", body = String, content_type = "text/html"),
)
),
params(LogQuery)
)]
async fn get_logs() -> Result<Html<String>, ApiError> {
async fn get_logs(Query(log_query): Query<LogQuery>) -> Result<Html<String>, ApiError> {
if !Path::new("twitch-points-miner.log").exists() {
return Ok(Html(
"Logging to file not enabled, use the --log-to-file flag!".to_string(),
Expand All @@ -306,17 +322,17 @@ async fn get_logs() -> Result<Html<String>, ApiError> {
.context("Opening log file")
.map_err(ApiError::internal_error)?;

let text = read_last_n_lines(&mut file, 30)
let text = read_sliced_lines(&mut file, log_query)
.await
.context("Grabbing log lines")
.context("grabbing log lines")
.map_err(ApiError::internal_error)?
.into_iter()
.filter(|x| !x.trim().is_empty())
.filter(|x| !x.starts_with('\n'))
.collect::<Vec<_>>()
.join("");
let html = ansi_to_html::convert(&text)
.context("Rendering log lines")
.context("rendering log lines")
.map_err(ApiError::internal_error)?;
Ok(Html(html))
}
55 changes: 47 additions & 8 deletions frontend/src/Logs.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,56 @@
import * as Card from "$lib/components/ui/card";
import { onMount } from "svelte";
import { get_logs } from "./common";
import { ScrollArea } from "$lib/components/ui/scroll-area";
import { Button } from "$lib/components/ui/button";
import { ChevronLeft, ChevronRight } from "lucide-svelte";
import { Input } from "$lib/components/ui/input";
let text = "";
let page = 0;
let page_size = 30;
onMount(async () => {
text = await get_logs();
await render_logs();
});
async function render_logs() {
text = await get_logs(page, page_size);
}
</script>

<Card.Root>
<Card.Content>
<pre>
{@html text}
</pre>
</Card.Content>
</Card.Root>
<!-- Paginated log viewer: a scrollable pane of server-rendered log HTML,
     followed by pager controls (page-size input, prev/next buttons, and a
     1-based page indicator). -->
<div class="flex flex-col">
<Card.Root>
<Card.Content class="max-h-[80vh]">
<ScrollArea orientation="both">
<!-- `text` is HTML produced by the backend (ANSI colours converted to
     HTML), hence {@html}. -->
<pre class="max-h-[80vh]">
{@html text}
</pre>
</ScrollArea>
</Card.Content>
</Card.Root>

<!-- Pager controls.
     NOTE(review): the LEFT chevron *increments* page (older entries — the
     server reads the log backwards from the end, so page 0 is newest) and
     the RIGHT chevron decrements toward page 0; confirm this direction is
     the intended UX.
     NOTE(review): the input declares min="1" but typed values are not
     clamped, so a page size of 0 can reach the server — verify server-side
     handling. -->
<div class="flex gap-1 self-center content-center mt-1">
<Input
type="number"
min="1"
placeholder="Lines per page"
bind:value={page_size}
/>
<Button
variant="outline"
on:click={() => {
page++;
render_logs();
}}><ChevronLeft /></Button
>
<!-- Display is 1-based; internal `page` state is 0-based. -->
<p class="p-2">{page + 1}</p>
<Button
variant="outline"
on:click={() => {
page--;
render_logs();
}}
disabled={page === 0}><ChevronRight /></Button
>
</div>
</div>
4 changes: 2 additions & 2 deletions frontend/src/common.ts
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ export async function set_watch_priority(
}
}

export async function get_logs(): Promise<string> {
const res = await fetch(`${baseUrl}api/logs`);
/**
 * Fetch one page of server-rendered log output.
 *
 * @param page      zero-based page index (0 = newest slice)
 * @param page_size number of log lines per page
 * @returns the rendered HTML for that slice of the log
 */
export async function get_logs(page: number, page_size: number): Promise<string> {
    const query = new URLSearchParams({ page: `${page}`, per_page: `${page_size}` });
    const response = await fetch(`${baseUrl}api/logs?${query}`);
    return response.text();
}

0 comments on commit 2c597b3

Please sign in to comment.