Revert "treewide: Bump rust version to nightly-2023-12-28" #1241

Open
wants to merge 1 commit into base: I6adededdb24ad5d991870dff159eb1ece5074ed6

22 changes: 11 additions & 11 deletions array2/src/lib.rs
@@ -20,8 +20,9 @@
//! Internally, values are stored in a single continuous allocation row-first, alongside the length
//! of the row.

#![feature(int_roundings)]
#![feature(core_intrinsics, int_roundings)]
use std::fmt::Debug;
use std::intrinsics::unlikely;
use std::ops::{Index, IndexMut};
use std::usize;

@@ -84,16 +85,11 @@ impl<T> Array2<T> {
/// passed an empty vector or if the rows are a different size.
#[inline]
pub fn try_from_rows(rows: Vec<Vec<T>>) -> Result<Self> {
#[cold]
fn not_equal(x: usize, y: usize) -> bool {
x != y
}

let row_size = rows.first().ok_or(Error::Empty)?.len();
let mut elems = Vec::with_capacity(row_size * rows.len());

for (row_index, row) in rows.into_iter().enumerate() {
if not_equal(row.len(), row_size) {
if unlikely(row.len() != row_size) {
return Err(Error::InconsistentRowSize {
row_index,
row_size,
@@ -203,7 +199,9 @@ impl<T> Array2<T> {
/// );
/// ```
#[inline]
pub fn rows(&self) -> impl ExactSizeIterator<Item = &[T]> + DoubleEndedIterator + '_ {
pub fn rows(
&self,
) -> impl Iterator<Item = &[T]> + ExactSizeIterator + DoubleEndedIterator + '_ {
self.cells.chunks(self.row_size)
}

@@ -222,7 +220,7 @@ impl<T> Array2<T> {
/// )
/// ```
#[inline]
pub fn entries(&self) -> impl ExactSizeIterator<Item = ((usize, usize), &T)> + '_ {
pub fn entries(&self) -> impl Iterator<Item = ((usize, usize), &T)> + ExactSizeIterator + '_ {
self.cells.iter().enumerate().map(move |(i, v)| {
let row = i.div_floor(self.row_size);
let col = i % self.row_size;
@@ -245,7 +243,7 @@ impl<T> Array2<T> {
/// assert_eq!(my_array2, Array2::from_rows(vec![vec![1, 3], vec![4, 6]]))
/// ```
#[inline]
pub fn entries_mut(&mut self) -> impl ExactSizeIterator<Item = ((usize, usize), &mut T)> + '_ {
pub fn entries_mut(
&mut self,
) -> impl Iterator<Item = ((usize, usize), &mut T)> + ExactSizeIterator + '_ {
let row_size = self.row_size;
self.cells.iter_mut().enumerate().map(move |(i, v)| {
let row = i.div_floor(row_size);
@@ -270,7 +270,7 @@ impl<T> Array2<T> {
/// )
/// ```
#[inline]
pub fn into_entries(self) -> impl ExactSizeIterator<Item = ((usize, usize), T)> {
pub fn into_entries(self) -> impl Iterator<Item = ((usize, usize), T)> + ExactSizeIterator {
self.cells
.into_vec()
.into_iter()
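Reviewer note on the array2 hunks above: the revert drops the `#[cold]` helper and brings back the nightly-only `std::intrinsics::unlikely` hint (plus the older `impl Iterator + ExactSizeIterator` return-type spelling). A minimal sketch of the two branch-hinting shapes, not code from this crate; the function names and error strings below are illustrative only:

```rust
#![allow(internal_features)] // silences the `internal_features` lint on toolchains that have it
#![feature(core_intrinsics)]

use std::intrinsics::unlikely;

// Nightly path (restored by this revert): `unlikely` marks the mismatch branch as cold.
fn check_row_len_nightly(len: usize, expected: usize) -> Result<(), String> {
    if unlikely(len != expected) {
        return Err(format!("expected {expected} columns, got {len}"));
    }
    Ok(())
}

// Stable-friendly path (what the bump had switched to): calling a `#[cold]`
// function gives the optimizer a similar hint without any feature gate.
#[cold]
fn not_equal(x: usize, y: usize) -> bool {
    x != y
}

fn check_row_len_stable(len: usize, expected: usize) -> Result<(), String> {
    if not_equal(len, expected) {
        return Err(format!("expected {expected} columns, got {len}"));
    }
    Ok(())
}
```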
6 changes: 6 additions & 0 deletions nom-sql/src/sql_identifier.rs
@@ -307,6 +307,12 @@ impl std::borrow::Borrow<str> for SqlIdentifier {
}
}

impl std::borrow::Borrow<[u8]> for SqlIdentifier {
fn borrow(&self) -> &[u8] {
self.as_ref()
}
}

impl PartialOrd for SqlIdentifier {
#[inline]
#[allow(clippy::non_canonical_partial_ord_impl)]
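The added `Borrow<[u8]>` impl presumably lets a plain byte slice drive lookups in collections keyed by `SqlIdentifier`. A rough sketch of that pattern with a stand-in type (not the real `SqlIdentifier`, whose `Hash` impl I'm not asserting); the key requirement is that the owned key hashes the same way `[u8]` does, otherwise `Borrow`-based map lookups misbehave:

```rust
use std::borrow::Borrow;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Stand-in for `SqlIdentifier`; purely illustrative.
#[derive(Debug, PartialEq, Eq)]
struct Ident(String);

impl Hash for Ident {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the raw bytes so the hash agrees with `<[u8] as Hash>`,
        // which `Borrow<[u8]>`-based lookups rely on.
        self.0.as_bytes().hash(state);
    }
}

impl Borrow<[u8]> for Ident {
    fn borrow(&self) -> &[u8] {
        self.0.as_bytes()
    }
}

fn main() {
    let mut tables: HashMap<Ident, u32> = HashMap::new();
    tables.insert(Ident("users".into()), 1);

    // With `Borrow<[u8]>` in place, a byte slice can be used as the lookup key.
    assert_eq!(tables.get(b"users".as_slice()), Some(&1));
}
```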
1 change: 0 additions & 1 deletion readyset-alloc/src/lib.rs
@@ -59,7 +59,6 @@
#![cfg_attr(test, feature(test))]
#![cfg_attr(test, feature(custom_test_frameworks))]
#![cfg_attr(test, test_runner(runner::run_env_conditional_tests))]
#![allow(internal_features)]
#![feature(core_intrinsics)]

#[macro_use]
2 changes: 1 addition & 1 deletion readyset-clustertest/src/readyset_mysql.rs
@@ -1599,7 +1599,7 @@ async fn views_synchronize_between_deployments() {

// Eventually it should show up in adapter 1 too
eventually! {
adapter_1.as_mysql_conn().unwrap().query_drop("SELECT * FROM t1;").await.unwrap();
adapter_1.as_mysql_conn().unwrap().query_drop("SELECT * FROM t1;");
last_statement_destination(adapter_1.as_mysql_conn().unwrap()).await == QueryDestination::Readyset
}

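For context on the test hunk above: `eventually!` retries its body until the condition holds (I'm not asserting the macro's exact semantics here). A rough stand-alone sketch of that kind of polling loop, with assumed names and a tokio dependency:

```rust
use std::future::Future;
use std::time::{Duration, Instant};

// Assumed retry-until-true helper, for illustration only; the repo's real
// `eventually!` macro may differ in timeout, backoff, and failure behavior.
async fn eventually<F, Fut>(mut check: F, timeout: Duration) -> bool
where
    F: FnMut() -> Fut,
    Fut: Future<Output = bool>,
{
    let deadline = Instant::now() + timeout;
    while Instant::now() < deadline {
        if check().await {
            return true;
        }
        // Back off briefly before re-checking.
        tokio::time::sleep(Duration::from_millis(100)).await;
    }
    false
}
```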
10 changes: 6 additions & 4 deletions readyset-mir/src/graph.rs
@@ -378,10 +378,12 @@ impl MirGraph {
},
// otherwise, just look up in the column set
// Compare by name if there is no table
_ => match if c.table.is_none() {
self.columns(node).iter().position(|cc| cc.name == c.name)
} else {
self.columns(node).iter().position(|cc| cc == c)
_ => match {
if c.table.is_none() {
self.columns(node).iter().position(|cc| cc.name == c.name)
} else {
self.columns(node).iter().position(|cc| cc == c)
}
} {
Some(id) => Ok(id),
None => err,
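The graph.rs hunk only reshapes how the `match` scrutinee is written: the revert wraps the `if`/`else` back in a block instead of matching on the `if` expression directly. A small sketch of the same control flow with illustrative names (not the MIR types), written with a plain `let` binding, which is equivalent to both shapes:

```rust
// Illustrative only; `columns`, `table`, and the error stand in for graph.rs types.
fn column_position(
    columns: &[(Option<String>, String)], // (table, name) pairs
    table: Option<&str>,
    name: &str,
) -> Result<usize, String> {
    // Compare by name if there is no table, otherwise compare the full column.
    let found = if table.is_none() {
        columns.iter().position(|(_, n)| n == name)
    } else {
        columns
            .iter()
            .position(|(t, n)| t.as_deref() == table && n == name)
    };

    match found {
        Some(id) => Ok(id),
        None => Err(format!("column {name} not found")),
    }
}
```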
1 change: 1 addition & 0 deletions readyset-tracing/src/lib.rs
@@ -15,6 +15,7 @@
//! [presampling](presampled) - sampling spans at creation time rather than when a subscriber would
//! send them to a collector.

#![feature(core_intrinsics)]
use std::fs::File;
use std::path::{Path, PathBuf};
use std::sync::Arc;
14 changes: 6 additions & 8 deletions replicators/src/mysql_connector/snapshot.rs
@@ -204,7 +204,7 @@ impl MySqlReplicator {
let mut bad_tables = Vec::new();
// Process `CREATE TABLE` statements
for (db, table) in replicated_tables.iter() {
let res = create_for_table(&mut tx, db, table, TableKind::BaseTable)
match create_for_table(&mut tx, db, table, TableKind::BaseTable)
.map_err(|e| e.into())
.and_then(|create_table| {
debug!(%create_table, "Extending recipe");
@@ -222,9 +222,8 @@
changelist.with_schema_search_path(vec![db.clone().into()]),
)
})
.await;

match res {
.await
{
Ok(_) => {}
Err(error) => {
warn!(%error, "Error extending CREATE TABLE, table will not be used");
@@ -257,7 +256,7 @@ impl MySqlReplicator {

// Process `CREATE VIEW` statements
for (db, view) in all_views.iter() {
let res = create_for_table(&mut tx, db, view, TableKind::View)
match create_for_table(&mut tx, db, view, TableKind::View)
.map_err(|e| e.into())
.and_then(|create_view| {
db_schemas.extend_create_schema_for_view(
@@ -273,9 +272,8 @@
changelist.with_schema_search_path(vec![db.clone().into()]),
)
})
.await;

match res {
.await
{
Ok(_) => {}
Err(error) => {
warn!(%view, %error, "Error extending CREATE VIEW, view will not be used");
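Both snapshot.rs hunks above (and the postgres ones below) are the same mechanical change: the intermediate `let res = … .await;` binding goes away and the `match` consumes the awaited expression directly. A minimal stand-alone sketch of the two shapes; the async fn and its error type are made up for illustration:

```rust
// Hypothetical stand-in for `create_for_table`; only the control flow matters here.
async fn create_for_table(table: &str) -> Result<String, String> {
    if table.is_empty() {
        Err("empty table name".to_owned())
    } else {
        Ok(format!("CREATE TABLE `{table}` (...)"))
    }
}

async fn process_table(table: &str) {
    // Shape restored by this revert: match directly on the awaited expression.
    match create_for_table(table).await {
        Ok(create_table) => println!("extending recipe with {create_table}"),
        Err(error) => eprintln!("error extending CREATE TABLE: {error}"),
    }

    // Shape being reverted: bind the result first, then match on it.
    let res = create_for_table(table).await;
    match res {
        Ok(create_table) => println!("extending recipe with {create_table}"),
        Err(error) => eprintln!("error extending CREATE TABLE: {error}"),
    }
}
```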
14 changes: 6 additions & 8 deletions replicators/src/postgres_connector/snapshot.rs
@@ -774,7 +774,7 @@ impl<'a> PostgresReplicator<'a> {
let mut tables = Vec::with_capacity(table_list.len());
for table in table_list {
let table_name = &table.name.clone().to_string();
let res = table
match table
.get_table(get_transaction!(self))
.and_then(|create_table| {
future::ready(
@@ -809,9 +809,8 @@ impl<'a> PostgresReplicator<'a> {
))
.map_ok(|_| create_table)
})
.await;

match res {
.await
{
Ok(create_table) => {
tables.push(create_table);
}
@@ -837,7 +836,7 @@ impl<'a> PostgresReplicator<'a> {
let view_name = view.name.clone();
let view_schema = view.schema.clone();

let res = view
match view
.get_create_view(get_transaction!(self))
.map_err(|e| e.into())
.and_then(|create_view| {
@@ -857,9 +856,8 @@ impl<'a> PostgresReplicator<'a> {
.with_schema_search_path(vec![view_schema.clone().into()]),
)
})
.await;

match res {
.await
{
Ok(_) => {}
Err(error) => {
warn!(
2 changes: 1 addition & 1 deletion rust-toolchain
@@ -1 +1 @@
nightly-2023-12-28
nightly-2023-11-09