diff --git a/server/Cargo.toml b/server/Cargo.toml
index 5b6dd56..b17627e 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "lemmy-search"
-version = "0.3.3"
+version = "0.3.4"
 edition = "2021"
 authors = ["MarSara9"]
 
@@ -11,19 +11,16 @@ lazy_static = "1.4.*"
 robotstxt = "0.3.*"
 deadpool = "0.9.*"
 deadpool-r2d2 = "0.2.*"
-url = { version = "2.4.*", features = ["serde"] }
 serde = { version = "1.0.*", features = ["derive"] }
-serde_json = { version = "1.0.*", features = ["default"] }
 reqwest = { version = "0.11.*", features = ["json"] }
-tokio = { version = "1.28.*", features = ["rt-multi-thread"] }
+tokio = { version = "1.29.*", features = ["rt-multi-thread", "fs"] }
 actix-web = { version = "4.3.*", features = [] }
 actix-files = "0.6.*"
-async-std = { version = "1.12.*", features = [] }
 futures = { version = "0.3.*", features = ["std", "async-await", "executor"] }
 clokwerk = "0.4.*"
 chrono = { version = "0.4.*", features = [] }
 config-file = { version = "0.2.3", features = ["yaml"] }
-uuid = { version = "1.3.*", features = ["v4"] }
-postgres = { version = "0.19.3", features = ["with-uuid-1", "with-chrono-0_4"] }
+uuid = { version = "1.4.*", features = ["v4"] }
+postgres = { version = "0.19.*", features = ["with-uuid-1", "with-chrono-0_4"] }
 r2d2_postgres = { version = "0.18.*", features = [] }
-regex = { version = "1.8.*", features = [] }
\ No newline at end of file
+regex = { version = "1.9.*", features = [] }
\ No newline at end of file
diff --git a/server/src/crawler/crawler.rs b/server/src/crawler/crawler.rs
index 141c45b..cac7cea 100644
--- a/server/src/crawler/crawler.rs
+++ b/server/src/crawler/crawler.rs
@@ -96,21 +96,27 @@ impl Crawler {
             .linked;
 
         for instance in federated_instances {
-            if match instance.software {
-                Some(value) => value == "lemmy",
+            if !match instance.software {
+                Some(value) => value.to_lowercase() == "lemmy",
                 None => false
             } {
-                let crawler = Crawler::new(
-                    instance.domain,
-                    self.context.clone(),
-                    true
-                );
-
-                let _ = match crawler {
-                    Ok(crawler) => crawler.crawl().await,
-                    Err(_) => Ok(())
-                };
+                continue;
             }
+
+            if instance.domain == self.context.config.crawler.seed_instance {
+                continue;
+            }
+
+            let crawler = Crawler::new(
+                instance.domain,
+                self.context.clone(),
+                true
+            );
+
+            let _ = match crawler {
+                Ok(crawler) => crawler.crawl().await,
+                Err(_) => Ok(())
+            };
         }
     }
 }
diff --git a/server/src/crawler/mod.rs b/server/src/crawler/mod.rs
index c784c1f..73f7853 100644
--- a/server/src/crawler/mod.rs
+++ b/server/src/crawler/mod.rs
@@ -6,7 +6,6 @@ use std::{
     time::Duration,
     path::Path
 };
-use async_std::fs::remove_file;
 use tokio::task::JoinHandle;
 use crate::{
     database::Context,
@@ -70,7 +69,7 @@
     ) {
         let file = Path::new("/lemmy/config/crawl");
         if file.exists() {
-            match remove_file("/lemmy/config/crawl")
+            match tokio::fs::remove_file("/lemmy/config/crawl")
                 .await {
                     Ok(_) => {
                         Self::run(context)
diff --git a/server/src/main.rs b/server/src/main.rs
index 0396947..ba07eb0 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -30,7 +30,7 @@ async fn main() -> std::io::Result<()> {
     let config = config::Config::load();
 
     println!("Giving time for database to come online...");
-    async_std::task::sleep(Duration::from_secs( 1 )).await;
+    tokio::time::sleep(Duration::from_secs( 1 )).await;
 
     let database = match Database::create(&config).await {
         Ok(value) => value,
diff --git a/ui/results/results.js b/ui/results/results.js
index 5b799fd..f21311a 100644
--- a/ui/results/results.js
+++ b/ui/results/results.js
@@ -1,11 +1,20 @@
-function checkQueryParameters() {
+function getQueryParameters() {
     const urlParameters = new URLSearchParams(window.location.search);
-    $("#search").val(urlParameters.get("query"));
-    return urlParameters.has("query");
+    return {
+        "query": urlParameters.get("query"),
+        "page": urlParameters.get("page") || 1
+    };
 }
 
-function query(queryString) {
-    fetchJson("/search" + queryString, result => {
+function query(queryString, page, instance) {
+
+    queryParameters = new URLSearchParams({
+        "query" : queryString,
+        "page" : page,
+        "home_instance" : dropSchema(instance)
+    }).toString()
+
+    fetchJson("/search?" + queryParameters, result => {
         let response_time = Math.round((result.time_taken.secs + 
             (result.time_taken.nanos / 1_000_000_000)) * 100) / 100;
 
@@ -177,7 +186,6 @@ function getPostQueryBody(queryTerms, body) {
         return span;
     }).filter(span => span != null);
 
-    // if(spans.length < split_body.length) {
     if(body.length > 200) {
         let more = $("");
         more.text("...");
@@ -192,9 +200,16 @@ function isImage(url) {
 
 function onReady() {
 
-    if(!checkQueryParameters()) {
+    const queryParameters = getQueryParameters();
+    if(!queryParameters["query"]) {
         window.location = "/";
     }
 
-    query(window.location.search);
+    $("#search").val(queryParameters["query"]);
+
+    query(
+        queryParameters["query"],
+        queryParameters["page"],
+        home_instance
+    );
 }
diff --git a/ui/shared/common.js b/ui/shared/common.js
index ca34001..ef3bba9 100644
--- a/ui/shared/common.js
+++ b/ui/shared/common.js
@@ -18,7 +18,6 @@ function populateInstances() {
         select.append(option);
     })
 
-    // $("#instance-select").val(home_instance);
     $("#instance-select").selectize({
         sortField: 'text'
    });
@@ -44,7 +43,6 @@ function onSearch() {
 
     let params = {
         "query" : query,
-        "home_instance" : dropSchema(home_instance),
         "page" : 1
     };
 
@@ -61,11 +59,7 @@ function initializeUI() {
     $( "#search-form" ).on( "submit", function( event ) {
         onSearch();
         event.preventDefault();
-    });
-
-    // $("#instance-select").selectize({
-    //     sortField: 'text'
-    // });
+    });
 
     $("#instance-select").on("change", function() {
         home_instance = this.value;