nit: add rustfmt config to sort imports, run rustfmt
K900 committed Jan 5, 2023
1 parent f78ee58 commit 7440ec1
Showing 10 changed files with 78 additions and 59 deletions.
2 changes: 2 additions & 0 deletions rustfmt.toml
@@ -0,0 +1,2 @@
reorder_imports = true
group_imports = "StdExternalCrate"
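
The effect of these two options, roughly: rustfmt alphabetizes `use` items and splits them into three blank-line-separated groups, std first, then external crates, then crate-local paths. A minimal illustration of the resulting layout (the module names are examples, not taken from this repository):

    // Illustrative only: with reorder_imports = true and
    // group_imports = "StdExternalCrate", rustfmt emits imports in three
    // sorted groups: std, external crates, crate-local.
    use std::collections::HashMap;
    use std::io::{self, Write};

    use clap::Parser;
    use futures::{future, StreamExt};

    use crate::files::FileTree;

The per-file diffs below are exactly this transformation applied across the crate.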
19 changes: 8 additions & 11 deletions src/bin/nix-channel-index.rs
@@ -1,24 +1,21 @@
//! Tool for generating a nix-index database.
use clap::Parser;
use error_chain::ChainedError;
use futures::{future, StreamExt};
use nix_index::files::FileNode;
use rusqlite::{Connection, DatabaseName};
use std::ffi::OsString;
use std::io::{self, Write};
use stderr::*;

use std::os::unix::ffi::OsStringExt;
use std::path::PathBuf;

use std::process;

use nix_index::{errors::*, CACHE_URL};

use clap::Parser;
use error_chain::ChainedError;
use futures::{future, StreamExt};
use nix_index::files::FileNode;
use nix_index::hydra::Fetcher;
use nix_index::listings::fetch_file_listings;
use nix_index::nixpkgs;
use nix_index::package::StorePath;
use nix_index::{errors::*, CACHE_URL};
use rusqlite::{Connection, DatabaseName};
use stderr::*;

const EXTRA_SCOPES: [&str; 5] = [
"xorg",
@@ -219,7 +216,7 @@ struct Args {
/// Show a stack trace in the case of a Nix evaluation error
#[clap(long)]
show_trace: bool,
}
}

#[tokio::main]
async fn main() {
47 changes: 31 additions & 16 deletions src/bin/nix-index.rs
@@ -1,27 +1,27 @@
//! Tool for generating a nix-index database.
use error_chain::ChainedError;
use separator::Separatable;
use stderr::*;

use clap::Parser;
use futures::{future, StreamExt};
use std::ffi::OsStr;
use std::fs::{self, File};
use std::io::{self, Write};
use std::iter;
use std::sync::mpsc::channel;
use std::thread;
use std::path::PathBuf;
use std::process;
use std::str;
use std::sync::mpsc::channel;
use std::thread;

use clap::Parser;
use error_chain::ChainedError;
use futures::{future, StreamExt};
use nix_index::database::Writer;
use nix_index::errors::*;
use nix_index::files::FileTree;
use nix_index::hydra::Fetcher;
use nix_index::listings::{fetch_file_listings, try_load_paths_cache};
use nix_index::nixpkgs;
use nix_index::package::StorePath;
use nix_index::CACHE_URL;
use separator::Separatable;
use stderr::*;

/// The URL of the binary cache that we use to fetch file listings and references.
///
@@ -45,7 +45,12 @@ async fn update_index(args: &Args) -> Result<()> {
Some(v) => v,
None => {
// These are the paths that show up in `nix-env -qa`.
let normal_paths = nixpkgs::query_packages(&args.nixpkgs, args.system.as_deref(), None, args.show_trace);
let normal_paths = nixpkgs::query_packages(
&args.nixpkgs,
args.system.as_deref(),
None,
args.show_trace,
);

// We also add some additional sets that only show up in `nix-env -qa -A someSet`.
//
@@ -62,20 +67,31 @@ async fn update_index(args: &Args) -> Result<()> {
"rPackages",
"nodePackages",
"coqPackages",
].iter().map(|scope| nixpkgs::query_packages(&args.nixpkgs, args.system.as_deref(), Some(scope), args.show_trace));
]
.iter()
.map(|scope| {
nixpkgs::query_packages(
&args.nixpkgs,
args.system.as_deref(),
Some(scope),
args.show_trace,
)
});

// Collect results in parallel.
let rx = {
let (tx, rx) = channel();
let handles : Vec<thread::JoinHandle<_>> =
iter::once(normal_paths).chain(extra_scopes).map(|path_iter| {
let handles: Vec<thread::JoinHandle<_>> = iter::once(normal_paths)
.chain(extra_scopes)
.map(|path_iter| {
let tx = tx.clone();
thread::spawn(move || {
for path in path_iter {
tx.send(path).unwrap();
}
})
}).collect();
})
.collect();

for h in handles {
h.join().unwrap();
@@ -155,7 +171,6 @@ async fn update_index(args: &Args) -> Result<()> {
Ok(())
}


fn cache_dir() -> &'static OsStr {
let base = xdg::BaseDirectories::with_prefix("nix-index").unwrap();
let cache_dir = Box::new(base.get_cache_home());
@@ -186,7 +201,7 @@ struct Args {
/// Zstandard compression level
#[clap(short, long = "compression", default_value = "22")]
compression_level: i32,

/// Show a stack trace in the case of a Nix evaluation error
#[clap(long)]
show_trace: bool,
@@ -195,7 +210,7 @@ struct Args {
#[clap(long, default_value = "")]
filter_prefix: String,

/// Store and load results of fetch phase in a file called paths.cache. This speeds up testing
/// Store and load results of fetch phase in a file called paths.cache. This speeds up testing
/// different database formats / compression.
///
/// Note: does not check if the cached data is up to date! Use only for development.
14 changes: 7 additions & 7 deletions src/bin/nix-locate.rs
@@ -1,20 +1,20 @@
//! Tool for searching for files in nixpkgs packages
use ansi_term::Colour::Red;
use clap::Parser;
use error_chain::error_chain;
use regex::bytes::Regex;
use separator::Separatable;
use std::collections::HashSet;
use std::ffi::OsStr;
use std::path::PathBuf;
use std::process;
use std::result;
use std::str;
use std::str::FromStr;
use stderr::{err, errln};

use ansi_term::Colour::Red;
use clap::Parser;
use error_chain::error_chain;
use nix_index::database;
use nix_index::files::{self, FileTreeEntry, FileType};
use regex::bytes::Regex;
use separator::Separatable;
use stderr::{err, errln};

error_chain! {
errors {
@@ -259,7 +259,7 @@ struct Opts {
#[clap(long)]
top_level: bool,

/// Only print matches for files that have this type. If the option is given multiple times,
/// Only print matches for files that have this type. If the option is given multiple times,
/// a file will be printed if it has any of the given types.
#[clap(short, long, possible_values=["d", "x", "r", "s"])]
r#type: Option<Vec<FileType>>,
15 changes: 8 additions & 7 deletions src/database.rs
@@ -1,3 +1,11 @@
use std::fs::File;
/// Creating and searching file databases.
///
/// This module implements an abstraction for creating an index of files with meta information
/// and searching that index for paths matching a specific pattern.
use std::io::{self, BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::path::Path;

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use error_chain::error_chain;
use grep::matcher::{LineMatchKind, Match, Matcher, NoError};
@@ -8,13 +16,6 @@ use regex_syntax::ast::{
Alternation, Assertion, AssertionKind, Ast, Concat, Group, Literal, Repetition,
};
use serde_json;
use std::fs::File;
/// Creating and searching file databases.
///
/// This module implements an abstraction for creating an index of files with meta information
/// and searching that index for paths matching a specific pattern.
use std::io::{self, BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::path::Path;
use zstd;

use crate::files::{FileTree, FileTreeEntry};
7 changes: 4 additions & 3 deletions src/files.rs
@@ -2,13 +2,14 @@
//!
//! The main type here is `FileTree` which represents
//! such as the file listing for a store path.
use memchr::memchr;
use serde::{Deserialize, Serialize};
use serde_bytes::ByteBuf;
use std::collections::HashMap;
use std::io::{self, Write};
use std::str::{self, FromStr};

use memchr::memchr;
use serde::{Deserialize, Serialize};
use serde_bytes::ByteBuf;

use crate::frcode;

/// This enum represents a single node in a file tree.
5 changes: 3 additions & 2 deletions src/frcode.rs
@@ -52,12 +52,13 @@
//! The last entry shares four bytes less than the second to last one did with its predecessor, so here the differential is negative.
//!
//! Through this encoding, the size of the index is typically reduces by a factor of 3 to 5.
use error_chain::{bail, error_chain};
use memchr;
use std::cmp;
use std::io::{self, BufRead, Write};
use std::ops::{Deref, DerefMut};

use error_chain::{bail, error_chain};
use memchr;

error_chain! {
foreign_links {
Io(io::Error);
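
The `src/frcode.rs` doc comment quoted above describes the differential front-coding used for the index: each entry records how much its shared prefix with the previous entry grew or shrank, plus the new suffix. A minimal sketch of that idea (illustrative paths only, not the crate's actual encoder):

    fn shared_prefix_len(a: &str, b: &str) -> usize {
        a.bytes().zip(b.bytes()).take_while(|(x, y)| x == y).count()
    }

    fn main() {
        // Encode each path as (change in shared-prefix length, remaining suffix).
        let paths = [
            "/nix/store/aaa-foo/bin/foo",
            "/nix/store/aaa-foo/share/man",
            "/nix/store/bbb-bar/bin/bar",
        ];
        let (mut prev, mut prev_shared) = ("", 0isize);
        for p in paths {
            let shared = shared_prefix_len(prev, p) as isize;
            // The differential is negative when less of the prefix is shared
            // than for the previous entry, as the doc comment notes.
            println!("{:+} {}", shared - prev_shared, &p[shared as usize..]);
            prev = p;
            prev_shared = shared;
        }
    }
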
22 changes: 11 additions & 11 deletions src/hydra.rs
@@ -3,8 +3,15 @@
//! This module has all functions that deal with accessing hydra or the binary cache.
//! Currently, it only provides two functions: `fetch_files` to get the file listing for
//! a store path and `fetch_references` to retrieve the references from the narinfo.
use serde::{self, Deserialize};
use serde_json;
use std::collections::HashMap;
use std::env::var;
use std::fmt;
use std::io::{self, Write};
use std::path::PathBuf;
use std::pin::Pin;
use std::result;
use std::str::{self, FromStr, Utf8Error};
use std::time::{Duration, Instant};

use brotli2::write::BrotliDecoder;
use error_chain::error_chain;
@@ -15,16 +22,9 @@ use hyper::client::{Client as HyperClient, HttpConnector};
use hyper::{self, Body, Request, StatusCode, Uri};
use hyper_proxy::{Custom, Intercept, Proxy, ProxyConnector};
use serde::de::{Deserializer, MapAccess, Visitor};
use serde::{self, Deserialize};
use serde_bytes::ByteBuf;
use std::collections::HashMap;
use std::env::var;
use std::fmt;
use std::io::{self, Write};
use std::path::PathBuf;
use std::pin::Pin;
use std::result;
use std::str::{self, FromStr, Utf8Error};
use std::time::{Duration, Instant};
use serde_json;
use tokio::time::error::Elapsed;
use tokio::time::timeout;
use tokio_retry::strategy::ExponentialBackoff;
1 change: 1 addition & 0 deletions src/nixpkgs.rs
@@ -7,6 +7,7 @@ use std::error;
use std::fmt;
use std::io::{self, Read};
use std::process::{Child, ChildStdout, Command, Stdio};

use xml;
use xml::common::{Position, TextPosition};
use xml::reader::{EventReader, XmlEvent};
5 changes: 3 additions & 2 deletions src/workset.rs
@@ -52,8 +52,6 @@
//! // and pkgD itself
//! }
//! ```
use futures::Stream;
use indexmap::IndexMap;
use std::cell::RefCell;
use std::collections::HashSet;
use std::hash::Hash;
@@ -62,6 +60,9 @@ use std::pin::Pin;
use std::rc::{Rc, Weak};
use std::task::{Context, Poll};

use futures::Stream;
use indexmap::IndexMap;

/// This structure holds the internal state of our queue.
struct Shared<K, V> {
/// The set of keys that have already been added to the queue sometime in the past.
