Skip to content
21 changes: 13 additions & 8 deletions gitoxide-core/src/hours/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,18 +17,23 @@ const MINUTES_PER_HOUR: f32 = 60.0;
pub const HOURS_PER_WORKDAY: f32 = 8.0;

pub fn estimate_hours(
commits: &[(u32, super::SignatureRef<'static>)],
first_commit: &(u32, super::SignatureRef<'static>),
other_commits: &[(u32, super::SignatureRef<'static>)],
stats: &[(u32, FileStats, LineStats)],
) -> WorkByEmail {
assert!(!commits.is_empty());
const MAX_COMMIT_DIFFERENCE_IN_MINUTES: f32 = 2.0 * MINUTES_PER_HOUR;
const FIRST_COMMIT_ADDITION_IN_MINUTES: f32 = 2.0 * MINUTES_PER_HOUR;

let hours_for_commits = {
let mut hours = 0.0;

let mut commits = commits.iter().map(|t| &t.1).rev();
let mut cur = commits.next().expect("at least one commit if we are here");
let mut commits = std::iter::once(first_commit)
.chain(other_commits.iter())
.map(|t| &t.1)
.rev();
let mut cur = commits
.next()
.expect("a commit sequence with explicit first commit is never empty");

for next in commits {
let change_in_minutes = (next.seconds().saturating_sub(cur.seconds())) as f32 / MINUTES_PER_HOUR;
Expand All @@ -43,11 +48,11 @@ pub fn estimate_hours(
hours
};

let author = &commits[0].1;
let author = &first_commit.1;
let (files, lines) = if !stats.is_empty() {
{
commits
.iter()
std::iter::once(first_commit)
.chain(other_commits.iter())
.map(|t| &t.0)
.fold((FileStats::default(), LineStats::default()), |mut acc, id| match stats
.binary_search_by(|t| t.0.cmp(id))
Expand All @@ -68,7 +73,7 @@ pub fn estimate_hours(
name: author.name,
email: author.email,
hours: FIRST_COMMIT_ADDITION_IN_MINUTES / 60.0 + hours_for_commits,
num_commits: commits.len() as u32,
num_commits: (1 + other_commits.len()) as u32,
files,
lines,
}
Expand Down
12 changes: 9 additions & 3 deletions gitoxide-core/src/hours/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,10 @@ where
let mut ignored_bot_commits = 0_u32;
for (idx, (_, elm)) in commit_authors.iter().enumerate() {
if elm.email != *current_email {
let estimate = estimate_hours(&commit_authors[slice_start..idx], &stats);
let (first, rest) = commit_authors[slice_start..idx]
.split_first()
.expect("a changed email always implies a non-empty range");
let estimate = estimate_hours(first, rest, &stats);
slice_start = idx;
current_email = &elm.email;
if ignore_bots && estimate.name.contains_str(b"[bot]") {
Expand All @@ -253,8 +256,11 @@ where
results_by_hours.push(estimate);
}
}
if let Some(commits) = commit_authors.get(slice_start..) {
results_by_hours.push(estimate_hours(commits, &stats));
if let Some((first, rest)) = commit_authors
.get(slice_start..)
.and_then(|commits| commits.split_first())
{
results_by_hours.push(estimate_hours(first, rest, &stats));
}

let num_authors = results_by_hours.len();
Expand Down
4 changes: 3 additions & 1 deletion gix-commitgraph/src/init.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::{File, Graph, MAX_COMMITS};
use crate::{File, Graph, NonEmptyFiles, MAX_COMMITS};
use gix_error::{message, ErrorExt, Exn, Message, ResultExt};
use std::{
io::{BufRead, BufReader},
Expand Down Expand Up @@ -54,6 +54,8 @@ impl Graph {

/// Create a new commit graph from a list of `files`.
pub fn new(files: Vec<File>) -> Result<Self, Message> {
let files =
NonEmptyFiles::from_vec(files).ok_or_else(|| message!("Commit-graph must contain at least one file"))?;
let num_commits: u64 = files.iter().map(|f| u64::from(f.num_commits())).sum();
if num_commits > u64::from(MAX_COMMITS) {
return Err(message!(
Expand Down
27 changes: 26 additions & 1 deletion gix-commitgraph/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ pub struct File {
/// may come from one or more `objects/info/commit-graphs/graph-*.graph` files. These files are
/// generated via `git commit-graph write ...` commands.
pub struct Graph {
files: Vec<File>,
files: NonEmptyFiles,
}

/// Instantiate a commit graph from an `.git/objects/info` directory, or one of the various commit-graph files.
Expand Down Expand Up @@ -76,3 +76,28 @@ impl std::fmt::Display for Position {
self.0.fmt(f)
}
}

/// A list of commit-graph [`File`]s that is guaranteed to contain at least one entry.
struct NonEmptyFiles(Vec<File>);

impl NonEmptyFiles {
    /// Wrap `files`, or return `None` if the vector is empty.
    fn from_vec(files: Vec<File>) -> Option<Self> {
        if files.is_empty() {
            None
        } else {
            Some(Self(files))
        }
    }
}

impl std::ops::Deref for NonEmptyFiles {
    type Target = [File];

    /// Expose the underlying files as a slice for read-only access.
    fn deref(&self) -> &Self::Target {
        self.0.as_slice()
    }
}

impl<'a> IntoIterator for &'a NonEmptyFiles {
    type Item = &'a File;
    type IntoIter = std::slice::Iter<'a, File>;

    /// Iterate over the contained files by reference.
    fn into_iter(self) -> Self::IntoIter {
        self.0.as_slice().iter()
    }
}
15 changes: 8 additions & 7 deletions gix-merge/src/commit/function.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,14 +60,15 @@ pub fn commit<'objects>(
|commit_id: gix_hash::ObjectId| objects.find_commit(&commit_id, &mut state.buf1).map(|c| c.tree());

let (merge_base_tree_id, ancestor_name): (_, Cow<'_, str>) = match merge_bases.clone() {
Some(base_commit) if base_commit.len() == 1 => {
(commit_to_tree(base_commit[0])?, abbreviate_hash(&base_commit[0]).into())
}
Some(mut base_commits) => {
Some(base_commit) if base_commit.as_slice().len() == 1 => (
commit_to_tree(*base_commit.first())?,
abbreviate_hash(base_commit.first()).into(),
),
Some(base_commits) => {
let virtual_base_tree = if options.use_first_merge_base {
let first = base_commits.first().expect("if Some() there is at least one.");
commit_to_tree(*first)?
commit_to_tree(*base_commits.first())?
} else {
let mut base_commits = base_commits.into_vec();
let first = base_commits.pop().expect("at least two");
let second = base_commits.pop().expect("at least one left");
let out = crate::commit::virtual_merge_base(
Expand All @@ -81,7 +82,7 @@ pub fn commit<'objects>(
abbreviate_hash,
options.tree_merge.clone(),
)?;
virtual_merge_bases = out.virtual_merge_bases;
virtual_merge_bases = out.virtual_merge_bases.into_vec();
out.tree_id
};
(virtual_base_tree, "merged common ancestors".into())
Expand Down
2 changes: 1 addition & 1 deletion gix-merge/src/commit/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ pub struct Outcome<'a> {
/// * an empty tree, if [`allow_missing_merge_base`](Options::allow_missing_merge_base) is enabled.
pub merge_base_tree_id: gix_hash::ObjectId,
/// The object ids of all the commits which were found to be merge-bases, or `None` if there was no merge-base.
pub merge_bases: Option<Vec<gix_hash::ObjectId>>,
pub merge_bases: Option<gix_revision::merge_base::Bases>,
/// A list of virtual commits that were created to merge multiple merge-bases into one, the last one being
/// the one we used as merge-base for the merge.
/// As they are not reachable by anything they will be garbage collected, but knowing them provides options.
Expand Down
5 changes: 3 additions & 2 deletions gix-merge/src/commit/virtual_merge_base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ pub struct Outcome {
/// As they have been written to the object database, they are still available until they are garbage collected.
/// The last one is the most recently produced and the one returned as `commit_id`.
/// This is never empty.
pub virtual_merge_bases: Vec<gix_hash::ObjectId>,
pub virtual_merge_bases: gix_revision::merge_base::Bases,
/// The id of the commit that was created to hold the merged tree.
pub commit_id: gix_hash::ObjectId,
/// The hash of the merged tree.
Expand Down Expand Up @@ -111,7 +111,8 @@ pub(super) mod function {
}

Ok(super::Outcome {
virtual_merge_bases,
virtual_merge_bases: gix_revision::merge_base::Bases::from_vec(virtual_merge_bases)
.expect("the virtual merge-base process always creates at least one commit"),
commit_id: merged_commit_id,
tree_id: tree_id.map_or_else(
|| {
Expand Down
61 changes: 47 additions & 14 deletions gix-merge/src/tree/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -335,7 +335,40 @@ impl PossibleConflict {

/// The flat list of all tree-nodes so we can avoid having a linked-tree using pointers
/// which is useful for traversal and initial setup as that can then trivially be non-recursive.
pub struct TreeNodes(Vec<TreeNode>);
pub struct TreeNodes(NonEmptyVec<TreeNode>);

/// A `Vec` wrapper that is guaranteed to hold at least one element for its entire lifetime:
/// it is created with a first element and elements can only be added, never removed.
#[derive(Debug, Clone)]
struct NonEmptyVec<T>(Vec<T>);

impl<T> NonEmptyVec<T> {
    /// Create a new vector seeded with `first` as its only element.
    fn new(first: T) -> Self {
        let mut inner = Vec::with_capacity(1);
        inner.push(first);
        Self(inner)
    }

    /// Append `value` to the end.
    fn push(&mut self, value: T) {
        self.0.push(value);
    }

    /// The number of contained elements, always at least 1.
    fn len(&self) -> usize {
        self.0.len()
    }

    /// Borrow the element at `idx`, if it exists.
    fn get(&self, idx: usize) -> Option<&T> {
        self.0.get(idx)
    }

    /// Mutably borrow the element at `idx`, if it exists.
    fn get_mut(&mut self, idx: usize) -> Option<&mut T> {
        self.0.get_mut(idx)
    }

    /// The first element, which always exists by construction.
    fn first(&self) -> &T {
        self.0.first().expect("non-empty by construction")
    }

    /// The first element for mutation, which always exists by construction.
    fn first_mut(&mut self) -> &mut T {
        self.0.first_mut().expect("non-empty by construction")
    }
}

/// Trees lead to other trees, or leafs (without children), and it can be represented by a renamed directory.
#[derive(Debug, Default, Clone)]
Expand Down Expand Up @@ -367,7 +400,7 @@ impl TreeNode {

impl TreeNodes {
pub fn new() -> Self {
TreeNodes(vec![TreeNode::default()])
TreeNodes(NonEmptyVec::new(TreeNode::default()))
}

/// Insert our `change` at `change_idx`, into a linked-tree, assuring that each `change` is non-conflicting
Expand All @@ -386,7 +419,7 @@ impl TreeNodes {
{
let mut components = to_components(path).peekable();
let mut next_index = self.0.len();
let mut cursor = &mut self.0[0];
let mut cursor = self.0.first_mut();
while let Some(component) = components.next() {
let is_last = components.peek().is_none();
match cursor.children.get(component).copied() {
Expand All @@ -398,11 +431,11 @@ impl TreeNodes {
};
cursor.children.insert(component.to_owned(), next_index);
self.0.push(new_node);
cursor = &mut self.0[next_index];
cursor = self.0.get_mut(next_index).expect("inserted above");
next_index += 1;
}
Some(index) => {
cursor = &mut self.0[index];
cursor = self.0.get_mut(index).expect("child indices are internal");
if is_last && !cursor.is_leaf_node() {
// NOTE: we might encounter the same path multiple times in rare conditions.
// At least we avoid overwriting existing intermediate changes, for good measure.
Expand All @@ -423,7 +456,7 @@ impl TreeNodes {
return None;
}
let components = to_components(theirs_location);
let mut cursor = &self.0[0];
let mut cursor = self.0.first();
let mut cursor_idx = 0;
let mut intermediate_change = None;
for component in components {
Expand All @@ -440,7 +473,7 @@ impl TreeNodes {
} else {
// a change somewhere else, i.e. `a/c` and we know `a/b` only.
intermediate_change.and_then(|(change, cursor_idx)| {
let cursor = &self.0[cursor_idx];
let cursor = self.0.get(cursor_idx).expect("cursor indices are internal");
// If this is a destination location of a rename, then the `their_location`
// is already at the right spot, and we can just ignore it.
if matches!(cursor.location, ChangeLocation::CurrentLocation) {
Expand All @@ -454,7 +487,7 @@ impl TreeNodes {
}
Some(child_idx) => {
cursor_idx = child_idx;
cursor = &self.0[cursor_idx];
cursor = self.0.get(cursor_idx).expect("child indices are internal");
}
}
}
Expand Down Expand Up @@ -497,22 +530,22 @@ impl TreeNodes {

fn remove_leaf_inner(&mut self, location: &BStr, must_exist: bool) {
let mut components = to_components(location).peekable();
let mut cursor = &mut self.0[0];
let mut cursor = self.0.first_mut();
while let Some(component) = components.next() {
match cursor.children.get(component).copied() {
None => debug_assert!(!must_exist, "didn't find '{location}' for removal"),
Some(existing_idx) => {
let is_last = components.peek().is_none();
if is_last {
cursor.children.remove(component);
cursor = &mut self.0[existing_idx];
cursor = self.0.get_mut(existing_idx).expect("child indices are internal");
debug_assert!(
cursor.is_leaf_node(),
"BUG: we should really only try to remove leaf nodes: {cursor:?}"
);
cursor.change_idx = None;
} else {
cursor = &mut self.0[existing_idx];
cursor = self.0.get_mut(existing_idx).expect("child indices are internal");
}
}
}
Expand All @@ -523,17 +556,17 @@ impl TreeNodes {
/// Panic if that change already exists as it must be made so that it definitely doesn't overlap with this tree.
pub fn insert(&mut self, new_change: &Change, new_change_idx: usize) {
let mut next_index = self.0.len();
let mut cursor = &mut self.0[0];
let mut cursor = self.0.first_mut();
for component in to_components(new_change.location()) {
match cursor.children.get(component).copied() {
None => {
cursor.children.insert(component.to_owned(), next_index);
self.0.push(TreeNode::default());
cursor = &mut self.0[next_index];
cursor = self.0.get_mut(next_index).expect("inserted above");
next_index += 1;
}
Some(existing_idx) => {
cursor = &mut self.0[existing_idx];
cursor = self.0.get_mut(existing_idx).expect("child indices are internal");
}
}
}
Expand Down
12 changes: 7 additions & 5 deletions gix-odb/src/store_impls/dynamic/iter.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::{ops::Deref, option::Option::None, sync::Arc, vec::IntoIter};
use std::{ops::Deref, option::Option::None, vec::IntoIter};

use gix_hash::ObjectId;

Expand Down Expand Up @@ -56,7 +56,7 @@ pub enum Ordering {
pub struct AllObjects {
state: State,
num_objects: usize,
loose_dbs: Arc<Vec<loose::Store>>,
loose_dbs: dynamic::load_index::NonEmptyLooseDbs,
order: Ordering,
}

Expand All @@ -78,8 +78,10 @@ impl AllObjects {
.indices
.iter()
.fold(0usize, |dbc, index| dbc.saturating_add(index.num_objects() as usize));
let loose_dbs = snapshot
.non_empty_loose_dbs()
.expect("initialized snapshots always have at least one loose object db");
let mut index_iter = snapshot.indices.into_iter();
let loose_dbs = snapshot.loose_dbs;
let order = Default::default();
let state = match index_iter.next() {
Some(index) => {
Expand All @@ -95,7 +97,7 @@ impl AllObjects {
None => {
let index = 0;
State::Loose {
iter: loose_dbs.get(index).expect("at least one loose db").iter(),
iter: loose_dbs.first().iter(),
index,
}
}
Expand Down Expand Up @@ -180,7 +182,7 @@ impl Iterator for AllObjects {
None => {
let index = 0;
self.state = State::Loose {
iter: self.loose_dbs.get(index).expect("at least one loose odb").iter(),
iter: self.loose_dbs.first().iter(),
index,
}
}
Expand Down
Loading
Loading