Fix inlined increments

All inlined usages of `value++` were converted to an unreadable block expression of the form:

```rust
{
    let _old = value;
    value = value.wrapping_add(1);
    _old
}
```

So I fixed them by moving the `++` portion to the statement that immediately follows.
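
A minimal sketch of the rewrite (placeholder names `buf`, `idx`, and `byte`; this is the general shape, not taken from any specific hunk below):

```rust
// Schematic only: the transpiled post-increment is split into a plain index
// followed by a separate wrapping increment.
fn push_byte(buf: &mut [u8], idx: &mut usize, byte: u8) {
    // Before (transpiled form):
    // buf[{ let _old = *idx; *idx = (*idx).wrapping_add(1); _old }] = byte;

    // After: use the current value, then increment on the next statement.
    buf[*idx] = byte;
    *idx = (*idx).wrapping_add(1);
}
```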

Search string:

```
\.wrapping_add\(1\)\;\s*_
```
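
For illustration, a quick check (assuming the `regex` crate; not part of this commit) that the pattern matches the transpiled form but not the rewritten one:

```rust
use regex::Regex;

fn main() {
    // The search string from above.
    let re = Regex::new(r"\.wrapping_add\(1\)\;\s*_").unwrap();

    // The transpiled block keeps `_old` right after the increment, so it matches.
    let before = "buf[{ let _old = idx; idx = idx.wrapping_add(1); _old }] = byte;";
    assert!(re.is_match(before));

    // After the fix the increment ends its own statement, so there is no match.
    let after = "buf[idx] = byte;\nidx = idx.wrapping_add(1);";
    assert!(!re.is_match(after));
}
```
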
nyurik authored and danielrh committed Mar 16, 2024
1 parent d7d8b2d commit d66b801
Showing 7 changed files with 48 additions and 130 deletions.
14 changes: 4 additions & 10 deletions src/enc/backward_references/hq.rs
```diff
@@ -571,11 +571,8 @@ fn StartPosQueueSize(xself: &StartPosQueue) -> usize {
 }
 
 fn StartPosQueuePush(xself: &mut StartPosQueue, posdata: &PosData) {
-    let mut offset: usize = !{
-        let _old = xself.idx_;
-        xself.idx_ = xself.idx_.wrapping_add(1);
-        _old
-    } & 7usize;
+    let mut offset: usize = !xself.idx_ & 7usize;
+    xself.idx_ = xself.idx_.wrapping_add(1);
     let len: usize = StartPosQueueSize(xself);
     let mut i: usize;
     let q: &mut [PosData; 8] = &mut xself.q_;
@@ -1518,11 +1515,8 @@ pub fn BrotliCreateHqZopfliBackwardReferences<
         if match_len > 325usize {
             let skip: usize = match_len.wrapping_sub(1);
             let tmp = matches.slice()[(cur_match_end.wrapping_sub(1) as usize)];
-            matches.slice_mut()[{
-                let _old = cur_match_pos;
-                cur_match_pos = cur_match_pos.wrapping_add(1);
-                _old
-            }] = tmp;
+            matches.slice_mut()[cur_match_pos] = tmp;
+            cur_match_pos = cur_match_pos.wrapping_add(1);
             num_matches.slice_mut()[i] = 1u32;
             hasher.StoreRange(
                 ringbuffer,
```

54 changes: 14 additions & 40 deletions src/enc/block_splitter.rs
```diff
@@ -593,15 +593,9 @@ fn ClusterBlocks<
             {
                 HistogramAddItem(
                     &mut histograms.slice_mut()[j],
-                    u64::from(
-                        data[{
-                            let _old = pos;
-                            pos = pos.wrapping_add(1);
-                            _old
-                        }]
-                        .clone(),
-                    ) as usize,
+                    u64::from(data[pos].clone()) as usize,
                 );
+                pos = pos.wrapping_add(1);
             }
             k = k.wrapping_add(1);
         }
@@ -670,16 +664,11 @@ fn ClusterBlocks<
     j = 0usize;
     while j < num_new_clusters {
         {
-            all_histograms.slice_mut()[{
-                let _old = all_histograms_size;
-                all_histograms_size = all_histograms_size.wrapping_add(1);
-                _old
-            }] = histograms.slice()[(new_clusters[j] as usize)].clone();
-            cluster_size.slice_mut()[{
-                let _old = cluster_size_size;
-                cluster_size_size = cluster_size_size.wrapping_add(1);
-                _old
-            }] = sizes[new_clusters[j] as usize];
+            all_histograms.slice_mut()[all_histograms_size] =
+                histograms.slice()[new_clusters[j] as usize].clone();
+            all_histograms_size = all_histograms_size.wrapping_add(1);
+            cluster_size.slice_mut()[cluster_size_size] = sizes[new_clusters[j] as usize];
+            cluster_size_size = cluster_size_size.wrapping_add(1);
             remap[new_clusters[j] as usize] = j as u32;
         }
         j = j.wrapping_add(1);
@@ -750,17 +739,8 @@ fn ClusterBlocks<
         j = 0usize;
         while j < block_lengths.slice()[i] as usize {
             {
-                HistogramAddItem(
-                    &mut histo,
-                    u64::from(
-                        data[{
-                            let _old = pos;
-                            pos = pos.wrapping_add(1);
-                            _old
-                        }]
-                        .clone(),
-                    ) as usize,
-                );
+                HistogramAddItem(&mut histo, u64::from(data[pos].clone()) as usize);
+                pos = pos.wrapping_add(1);
             }
             j = j.wrapping_add(1);
         }
@@ -790,12 +770,9 @@ fn ClusterBlocks<
             j = j.wrapping_add(1);
         }
         histogram_symbols.slice_mut()[i] = best_out;
-        if new_index.slice()[(best_out as usize)] == kInvalidIndex {
-            new_index.slice_mut()[(best_out as usize)] = {
-                let _old = next_index;
-                next_index = next_index.wrapping_add(1);
-                _old
-            };
+        if new_index.slice()[best_out as usize] == kInvalidIndex {
+            new_index.slice_mut()[best_out as usize] = next_index;
+            next_index = next_index.wrapping_add(1);
         }
     }
     i = i.wrapping_add(1);
@@ -1102,11 +1079,8 @@ pub fn BrotliSplitBlock<
         {
             let cmd = &cmds[i];
             if CommandCopyLen(cmd) != 0 && (cmd.cmd_prefix_ as i32 >= 128i32) {
-                distance_prefixes.slice_mut()[{
-                    let _old = j;
-                    j = j.wrapping_add(1);
-                    _old
-                }] = cmd.dist_prefix_ & 0x3ff;
+                distance_prefixes.slice_mut()[j] = cmd.dist_prefix_ & 0x3ff;
+                j = j.wrapping_add(1);
             }
         }
         i = i.wrapping_add(1);
```

7 changes: 2 additions & 5 deletions src/enc/compress_fragment.rs
```diff
@@ -760,11 +760,8 @@ fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
         'break15: loop {
             {
                 let hash = next_hash;
-                let bytes_between_hash_lookups: u32 = {
-                    let _old = skip;
-                    skip = skip.wrapping_add(1);
-                    _old
-                } >> 5;
+                let bytes_between_hash_lookups: u32 = skip >> 5;
+                skip = skip.wrapping_add(1);
                 ip_index = next_ip;
                 next_ip =
                     ip_index.wrapping_add(bytes_between_hash_lookups as usize);
```

7 changes: 2 additions & 5 deletions src/enc/compress_fragment_two_pass.rs
```diff
@@ -204,11 +204,8 @@ fn CreateCommands(
         'break3: loop {
             {
                 let hash: u32 = next_hash;
-                let bytes_between_hash_lookups: u32 = ({
-                    let _old = skip;
-                    skip = skip.wrapping_add(1);
-                    _old
-                }) >> 5;
+                let bytes_between_hash_lookups: u32 = skip >> 5;
+                skip = skip.wrapping_add(1);
                 ip_index = next_ip;
                 0i32;
                 next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
```

56 changes: 16 additions & 40 deletions src/enc/encode.rs
```diff
@@ -1457,16 +1457,10 @@ fn MakeUncompressedStream(input: &[u8], input_size: usize, output: &mut [u8]) ->
         output[0] = 6u8;
         return 1;
     }
-    output[{
-        let _old = result;
-        result = result.wrapping_add(1);
-        _old
-    }] = 0x21u8;
-    output[{
-        let _old = result;
-        result = result.wrapping_add(1);
-        _old
-    }] = 0x3u8;
+    output[result] = 0x21u8;
+    result = result.wrapping_add(1);
+    output[result] = 0x3u8;
+    result = result.wrapping_add(1);
     while size > 0usize {
         let mut nibbles: u32 = 0u32;

@@ -1481,39 +1475,24 @@ fn MakeUncompressedStream(input: &[u8], input_size: usize, output: &mut [u8]) ->
         let bits: u32 = nibbles << 1
             | chunk_size.wrapping_sub(1) << 3
             | 1u32 << (19u32).wrapping_add((4u32).wrapping_mul(nibbles));
-        output[{
-            let _old = result;
-            result = result.wrapping_add(1);
-            _old
-        }] = bits as u8;
-        output[{
-            let _old = result;
-            result = result.wrapping_add(1);
-            _old
-        }] = (bits >> 8) as u8;
-        output[{
-            let _old = result;
-            result = result.wrapping_add(1);
-            _old
-        }] = (bits >> 16) as u8;
+        output[result] = bits as u8;
+        result = result.wrapping_add(1);
+        output[result] = (bits >> 8) as u8;
+        result = result.wrapping_add(1);
+        output[result] = (bits >> 16) as u8;
+        result = result.wrapping_add(1);
         if nibbles == 2u32 {
-            output[{
-                let _old = result;
-                result = result.wrapping_add(1);
-                _old
-            }] = (bits >> 24) as u8;
+            output[result] = (bits >> 24) as u8;
+            result = result.wrapping_add(1);
         }
         output[result..(result + chunk_size as usize)]
             .clone_from_slice(&input[offset..(offset + chunk_size as usize)]);
         result = result.wrapping_add(chunk_size as usize);
         offset = offset.wrapping_add(chunk_size as usize);
         size = size.wrapping_sub(chunk_size as usize);
     }
-    output[{
-        let _old = result;
-        result = result.wrapping_add(1);
-        _old
-    }] = 3u8;
+    output[result] = 3u8;
+    result = result.wrapping_add(1);
     result
 }
 pub fn BrotliEncoderCompress<
@@ -2594,13 +2573,10 @@ where
     }
     if s.last_insert_len_ > 0usize {
         InitInsertCommand(
-            &mut s.commands_.slice_mut()[{
-                let _old = s.num_commands_;
-                s.num_commands_ = s.num_commands_.wrapping_add(1);
-                _old
-            }],
+            &mut s.commands_.slice_mut()[s.num_commands_],
             s.last_insert_len_,
         );
+        s.num_commands_ = s.num_commands_.wrapping_add(1);
         s.num_literals_ = s.num_literals_.wrapping_add(s.last_insert_len_);
         s.last_insert_len_ = 0usize;
     }
```

12 changes: 2 additions & 10 deletions src/enc/entropy_encode.rs
```diff
@@ -175,16 +175,8 @@ pub fn BrotliCreateHuffmanTree(
         i = i.wrapping_sub(1);
         if data[i] != 0 {
             let count: u32 = brotli_max_uint32_t(data[i], count_limit);
-            InitHuffmanTree(
-                &mut tree[{
-                    let _old = n;
-                    n = n.wrapping_add(1);
-                    _old
-                }],
-                count,
-                -1i16,
-                i as i16,
-            );
+            InitHuffmanTree(&mut tree[n], count, -1i16, i as i16);
+            n = n.wrapping_add(1);
         }
     }
     if n == 1 {
```

28 changes: 8 additions & 20 deletions src/enc/static_dict.rs
```diff
@@ -450,11 +450,8 @@ pub fn BrotliFindAllStaticDictionaryMatches(
     let mut offset: usize = kStaticDictionaryBuckets[Hash(data) as usize] as usize;
     let mut end: i32 = (offset == 0) as i32;
     while end == 0 {
-        let mut w: DictWord = kStaticDictionaryWords[{
-            let _old = offset;
-            offset = offset.wrapping_add(1);
-            _old
-        }];
+        let mut w: DictWord = kStaticDictionaryWords[offset];
+        offset = offset.wrapping_add(1);
         let l: usize = (w.len() as i32 & 0x1fi32) as usize;
         let n: usize = 1usize << dictionary.size_bits_by_length[l] as i32;
         let id: usize = w.idx() as usize;
@@ -1101,11 +1098,8 @@ pub fn BrotliFindAllStaticDictionaryMatches(
         kStaticDictionaryBuckets[Hash(data.split_at(1).1) as usize] as usize;
     let mut end: i32 = (offset == 0) as i32;
     while end == 0 {
-        let mut w: DictWord = kStaticDictionaryWords[{
-            let _old = offset;
-            offset = offset.wrapping_add(1);
-            _old
-        }];
+        let mut w: DictWord = kStaticDictionaryWords[offset];
+        offset = offset.wrapping_add(1);
         let l: usize = (w.len() as i32 & 0x1fi32) as usize;
         let n: usize = 1usize << dictionary.size_bits_by_length[l] as i32;
         let id: usize = w.idx() as usize;
@@ -1321,11 +1315,8 @@ pub fn BrotliFindAllStaticDictionaryMatches(
         kStaticDictionaryBuckets[Hash(data.split_at(2).1) as usize] as usize;
     let mut end: i32 = (offset == 0) as i32;
     while end == 0 {
-        let mut w: DictWord = kStaticDictionaryWords[{
-            let _old = offset;
-            offset = offset.wrapping_add(1);
-            _old
-        }];
+        let mut w: DictWord = kStaticDictionaryWords[offset];
+        offset = offset.wrapping_add(1);
         let l: usize = (w.len() as i32 & 0x1fi32) as usize;
         let n: usize = 1usize << dictionary.size_bits_by_length[l] as i32;
         let id: usize = w.idx() as usize;
@@ -1386,11 +1377,8 @@ pub fn BrotliFindAllStaticDictionaryMatches(
         kStaticDictionaryBuckets[Hash(data.split_at(5).1) as usize] as usize;
     let mut end: i32 = (offset == 0) as i32;
     while end == 0 {
-        let mut w: DictWord = kStaticDictionaryWords[{
-            let _old = offset;
-            offset = offset.wrapping_add(1);
-            _old
-        }];
+        let mut w: DictWord = kStaticDictionaryWords[offset];
+        offset = offset.wrapping_add(1);
         let l: usize = (w.len() as i32 & 0x1fi32) as usize;
         let n: usize = 1usize << dictionary.size_bits_by_length[l] as i32;
         let id: usize = w.idx() as usize;
```
