Mirror of https://github.com/martinvonz/jj.git
index, stacked_table: use u32::try_from() instead of numeric cast
These .unwrap()s wouldn't be compiled out, but I don't think they would have measurable impact. Let's use the safer method.
parent 9ec89bcf86
commit 6f5096e266

3 changed files with 25 additions and 13 deletions
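Note on the conversion itself (an illustration, not part of the commit): a numeric cast like `count as u32` silently truncates a `usize` value that does not fit in 32 bits, while `u32::try_from(...)` surfaces the overflow as an error, which the new call sites turn into a panic via `.unwrap()`. A minimal standalone sketch of the difference:

    fn main() {
        // A value that cannot fit in u32 (u64 keeps the example platform-independent).
        let n: u64 = u64::from(u32::MAX) + 1;

        // Numeric cast: silently keeps only the low 32 bits.
        assert_eq!(n as u32, 0);

        // Checked conversion: the overflow is reported as an error...
        assert!(u32::try_from(n).is_err());

        // ...which the call sites in this commit turn into a panic:
        // u32::try_from(n).unwrap(); // would panic here instead of truncating
    }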
@@ -124,7 +124,7 @@ impl<'a> CompositeIndex<'a> {
             }
             change_ids.insert(entry.change_id());
         }
-        let num_heads = is_head.iter().filter(|is_head| **is_head).count() as u32;
+        let num_heads = u32::try_from(is_head.iter().filter(|is_head| **is_head).count()).unwrap();
 
         let mut levels = self
             .ancestor_index_segments()
@@ -140,7 +140,7 @@ impl<'a> CompositeIndex<'a> {
             num_merges,
             max_generation_number,
             num_heads,
-            num_changes: change_ids.len() as u32,
+            num_changes: change_ids.len().try_into().unwrap(),
             levels,
         }
     }
@@ -122,7 +122,7 @@ impl MutableIndexSegment {
         }
         self.lookup.insert(
             entry.commit_id.clone(),
-            IndexPosition(self.graph.len() as u32 + self.num_parent_commits),
+            IndexPosition(u32::try_from(self.graph.len()).unwrap() + self.num_parent_commits),
         );
         self.graph.push(entry);
     }
@@ -171,7 +171,11 @@ impl MutableIndexSegment {
 
     fn serialize_parent_filename(&self, buf: &mut Vec<u8>) {
         if let Some(parent_file) = &self.parent_file {
-            buf.extend((parent_file.name().len() as u32).to_le_bytes());
+            buf.extend(
+                u32::try_from(parent_file.name().len())
+                    .unwrap()
+                    .to_le_bytes(),
+            );
             buf.extend_from_slice(parent_file.name().as_bytes());
         } else {
             buf.extend(0_u32.to_le_bytes());
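For orientation, `serialize_parent_filename()` writes the parent file name as a little-endian `u32` length prefix followed by the raw name bytes, with a zero length meaning there is no parent file. A hedged sketch of the matching decode step, using a hypothetical helper that is not part of the jj codebase:

    // Hypothetical reader mirroring serialize_parent_filename() above:
    // a u32 little-endian length, then that many name bytes; a length of 0
    // means there is no parent file. Panics on malformed input for brevity.
    fn read_parent_filename(buf: &[u8]) -> (Option<String>, &[u8]) {
        let (len_bytes, rest) = buf.split_at(4);
        let len = u32::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
        if len == 0 {
            return (None, rest);
        }
        let (name, rest) = rest.split_at(len);
        (Some(String::from_utf8(name.to_vec()).unwrap()), rest)
    }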
@@ -181,7 +185,7 @@ impl MutableIndexSegment {
     fn serialize_local_entries(&self, buf: &mut Vec<u8>) {
         assert_eq!(self.graph.len(), self.lookup.len());
 
-        let num_commits = self.graph.len() as u32;
+        let num_commits = u32::try_from(self.graph.len()).unwrap();
         buf.extend(num_commits.to_le_bytes());
         // We'll write the actual value later
         let parent_overflow_offset = buf.len();
@@ -194,9 +198,13 @@ impl MutableIndexSegment {
 
             buf.extend(entry.generation_number.to_le_bytes());
 
-            buf.extend((entry.parent_positions.len() as u32).to_le_bytes());
+            buf.extend(
+                u32::try_from(entry.parent_positions.len())
+                    .unwrap()
+                    .to_le_bytes(),
+            );
             let mut parent1_pos = IndexPosition(0);
-            let parent_overflow_pos = parent_overflow.len() as u32;
+            let parent_overflow_pos = u32::try_from(parent_overflow.len()).unwrap();
             for (i, parent_pos) in entry.parent_positions.iter().enumerate() {
                 if i == 0 {
                     parent1_pos = *parent_pos;
@@ -220,7 +228,7 @@ impl MutableIndexSegment {
         }
 
         buf[parent_overflow_offset..][..4]
-            .copy_from_slice(&(parent_overflow.len() as u32).to_le_bytes());
+            .copy_from_slice(&u32::try_from(parent_overflow.len()).unwrap().to_le_bytes());
         for parent_pos in parent_overflow {
             buf.extend(parent_pos.0.to_le_bytes());
         }
@@ -303,7 +311,7 @@ impl IndexSegment for MutableIndexSegment {
     }
 
     fn segment_num_commits(&self) -> u32 {
-        self.graph.len() as u32
+        self.graph.len().try_into().unwrap()
     }
 
     fn segment_parent_file(&self) -> Option<&Arc<ReadonlyIndexSegment>> {
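In `segment_num_commits()` the bare `.try_into().unwrap()` needs no turbofish because the target type is inferred from the function's `-> u32` return type; the same pattern appears in `segment_num_parents()` in the next hunk. A small standalone illustration with a hypothetical function name:

    // The usize -> u32 target of try_into() is inferred from the return type;
    // unwrap() panics if the length exceeds u32::MAX instead of truncating.
    fn len_as_u32(items: &[u8]) -> u32 {
        items.len().try_into().unwrap()
    }

    fn main() {
        assert_eq!(len_as_u32(b"abc"), 3);
    }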
@@ -363,7 +371,11 @@ impl IndexSegment for MutableIndexSegment {
     }
 
     fn segment_num_parents(&self, local_pos: u32) -> u32 {
-        self.graph[local_pos as usize].parent_positions.len() as u32
+        self.graph[local_pos as usize]
+            .parent_positions
+            .len()
+            .try_into()
+            .unwrap()
     }
 
     fn segment_parent_positions(&self, local_pos: u32) -> SmallIndexPositionsVec {
@@ -256,19 +256,19 @@ impl MutableTable {
         let mut buf = vec![];
 
         if let Some(parent_file) = &self.parent_file {
-            buf.extend((parent_file.name.len() as u32).to_le_bytes());
+            buf.extend(u32::try_from(parent_file.name.len()).unwrap().to_le_bytes());
             buf.extend_from_slice(parent_file.name.as_bytes());
         } else {
             buf.extend(0_u32.to_le_bytes());
         }
 
-        buf.extend((self.entries.len() as u32).to_le_bytes());
+        buf.extend(u32::try_from(self.entries.len()).unwrap().to_le_bytes());
 
         let mut value_offset = 0_u32;
        for (key, value) in &self.entries {
             buf.extend_from_slice(key);
             buf.extend(value_offset.to_le_bytes());
-            value_offset += value.len() as u32;
+            value_offset += u32::try_from(value.len()).unwrap();
         }
         for value in self.entries.values() {
             buf.extend_from_slice(value);
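The `MutableTable` hunk builds an offset table: each key is written together with the starting offset of its value inside one concatenated value section that follows, and the checked conversion now guards the offset accumulation. A hedged, self-contained sketch of that layout with hypothetical names (not the jj API):

    // Illustration of the offset-table idea used above: first the per-value
    // starting offsets, then one concatenated blob holding all values.
    fn build_value_section(values: &[&[u8]]) -> (Vec<u32>, Vec<u8>) {
        let mut offsets = Vec::with_capacity(values.len());
        let mut blob = Vec::new();
        let mut value_offset = 0_u32;
        for value in values {
            offsets.push(value_offset);
            // Overflow-checked accumulation, matching the commit above.
            value_offset += u32::try_from(value.len()).unwrap();
        }
        for value in values {
            blob.extend_from_slice(value);
        }
        (offsets, blob)
    }

    fn main() {
        let (offsets, blob) = build_value_section(&[b"foo".as_slice(), b"quux".as_slice()]);
        assert_eq!(offsets, vec![0, 3]);
        assert_eq!(blob, b"fooquux".to_vec());
    }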
|
|