bugs ironed out, pre clippy
benartuso committed Aug 18, 2023
1 parent 4611bb5 commit 739c453
Showing 18 changed files with 285 additions and 233 deletions.
10 changes: 8 additions & 2 deletions src/lib/src/api/local/commits.rs
@@ -112,7 +112,10 @@ pub fn commit_from_branch_or_commit_id<S: AsRef<str>>(
Ok(None)
}

pub fn list_with_missing_dbs(repo: &LocalRepository, commit_id: &str) -> Result<Vec<Commit>, OxenError> {
pub fn list_with_missing_dbs(
repo: &LocalRepository,
commit_id: &str,
) -> Result<Vec<Commit>, OxenError> {
let mut missing_db_commits: Vec<Commit> = vec![];

// Get full commit history for this repo to report any missing commits
@@ -130,7 +133,10 @@ pub fn list_with_missing_dbs(repo: &LocalRepository, commit_id: &str) -> Result<
Ok(missing_db_commits)
}

pub fn list_with_missing_entries(repo: &LocalRepository, commit_id: &str) -> Result<Vec<Commit>, OxenError> {
pub fn list_with_missing_entries(
repo: &LocalRepository,
commit_id: &str,
) -> Result<Vec<Commit>, OxenError> {
log::debug!("In here working on finding some commit entries");
let mut missing_entry_commits: Vec<Commit> = vec![];

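For readers following the reshaped signatures above, here is a minimal, self-contained sketch of the pattern these functions implement: walk the commit history and collect the commits whose on-disk cache directory is missing. The Commit struct and the .oxen/history/<commit_id> layout below are assumptions for illustration, not the crate's real types or paths.

use std::path::{Path, PathBuf};

// Hypothetical stand-in for the crate's Commit model.
struct Commit {
    id: String,
}

// Assumed cache layout: one history directory per commit id.
fn history_dir(repo_root: &Path, commit_id: &str) -> PathBuf {
    repo_root.join(".oxen").join("history").join(commit_id)
}

// Collect every commit whose history directory is absent on disk.
fn commits_with_missing_dbs(repo_root: &Path, history: &[Commit]) -> Vec<String> {
    history
        .iter()
        .filter(|c| !history_dir(repo_root, &c.id).exists())
        .map(|c| c.id.clone())
        .collect()
}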
4 changes: 1 addition & 3 deletions src/lib/src/api/local/diff.rs
@@ -453,7 +453,7 @@ pub fn list_diff_entries(
page: usize,
page_size: usize,
) -> Result<EntriesDiff, OxenError> {
// Okay, this part is working correctly.
// Okay, this part is working correctly.
log::debug!(
"list_diff_entries base_commit: '{}', head_commit: '{}'",
base_commit,
@@ -546,7 +546,6 @@ pub fn list_diff_entries(

log::debug!("Here are our counts...{:?}", counts);


let mut combined: Vec<_> = added_commit_entries
.into_iter()
.chain(removed_commit_entries)
@@ -586,7 +585,6 @@ pub fn list_diff_entries(

log::debug!("Final result all {:?}", all);


Ok(EntriesDiff {
entries: all,
counts,
46 changes: 44 additions & 2 deletions src/lib/src/api/local/entries.rs
@@ -56,16 +56,31 @@ pub fn meta_entry_from_dir(
log::debug!("Reading commit here...{:?}", commit);
let latest_commit_path =
core::cache::cachers::repo_size::dir_latest_commit_path(repo, commit, path);
// Check if latest_commit_path exists.

if latest_commit_path.exists() {
log::debug!("latest_commit_path does exist at commit {:?}", commit);
} else {
log::debug!("latest_commit_path does not exist at commit {:?}", commit);
}

let latest_commit = match util::fs::read_from_path(latest_commit_path) {
Ok(id) => commit_reader.get_commit_by_id(id)?,
Err(_) => {
Err(e) => {
// Log the error
log::debug!("Error reading latest commit from path: {:?}", e);
// cache failed, go compute it
log::debug!("going to compute the file, then.");
compute_latest_commit(repo, commit, path, commit_reader)?
}
};

log::debug!("Reading latest commit here...{:?}", latest_commit);

log::debug!("About to micro nap server");
// std::thread::sleep(std::time::Duration::from_millis(100));
log::debug!("finished micro nap server");

let total_size_path = core::cache::cachers::repo_size::dir_size_path(repo, commit, path);
let total_size = match util::fs::read_from_path(total_size_path) {
Ok(total_size_str) => total_size_str
@@ -106,6 +121,11 @@ fn compute_latest_commit(
let commits: HashMap<String, Commit> = HashMap::new();
let mut latest_commit = Some(commit.to_owned());
// This lists all the committed dirs
log::debug!(
"computing latest commit for path {:?} at commit {:?}",
path,
commit
);
let dirs = entry_reader.list_dirs()?;
for dir in dirs {
// Have to make sure we are in a subset of the dir (not really a tree structure)
@@ -119,11 +139,33 @@
};

if latest_commit.is_none() {
log::debug!("updating latest commit bc there wasn't one to {:?}", commit);
latest_commit = commit.clone();
}

if latest_commit.as_ref().unwrap().timestamp > commit.as_ref().unwrap().timestamp {
if latest_commit.as_ref().unwrap().timestamp < commit.as_ref().unwrap().timestamp {
log::debug!(
"updating latest commit bc it's newer to {:?} from {:?}",
commit,
latest_commit
);
log::debug!(
"timestamp {:?} > {:?}",
latest_commit.as_ref().unwrap().timestamp,
commit.as_ref().unwrap().timestamp
);
latest_commit = commit.clone();
} else {
log::debug!(
"skipping commit bc it's older than latest commit {:?} to {:?}",
commit,
latest_commit
);
log::debug!(
"timestamp {:?} < {:?}",
latest_commit.as_ref().unwrap().timestamp,
commit.as_ref().unwrap().timestamp
);
}
}
}
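The substantive fix in compute_latest_commit above is the flipped timestamp comparison: a candidate commit should replace the current latest only when it is strictly newer. A stand-alone sketch of that selection logic, using a plain integer timestamp in place of the crate's real commit type (an assumption made for brevity):

#[derive(Clone, Debug)]
struct StampedCommit {
    id: String,
    timestamp: i64,
}

// Keep the newest commit seen so far. A candidate wins only when
// latest.timestamp < candidate.timestamp, i.e. the candidate is newer.
fn newest_commit(commits: &[StampedCommit]) -> Option<StampedCommit> {
    let mut latest: Option<StampedCommit> = None;
    for candidate in commits {
        let is_newer = match &latest {
            None => true,
            Some(current) => current.timestamp < candidate.timestamp,
        };
        if is_newer {
            latest = Some(candidate.clone());
        }
    }
    latest
}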
38 changes: 19 additions & 19 deletions src/lib/src/api/remote/commits.rs
@@ -171,9 +171,10 @@ pub async fn commit_is_synced(
}

pub async fn latest_commit_synced(
remote_repo: &RemoteRepository,
remote_repo: &RemoteRepository,
commit_id: &str,
) -> Result<CommitSyncStatusResponse, OxenError> {
let uri = format!("/commits/latest_synced");
let uri = format!("/commits/{commit_id}/latest_synced");
let url = api::endpoint::url_from_repo(remote_repo, &uri)?;
log::debug!("latest_commit_synced checking URL: {}", url);

@@ -186,8 +187,9 @@

let body = client::parse_json_body(&url, res).await?;
log::debug!("latest_commit_synced got response body: {}", body);
// Sync status response
let response: Result<CommitSyncStatusResponse, serde_json::Error> = serde_json::from_str(&body);
// Sync status response
let response: Result<CommitSyncStatusResponse, serde_json::Error> =
serde_json::from_str(&body);
match response {
Ok(result) => Ok(result),
Err(err) => {
@@ -198,7 +200,9 @@
}
}
} else {
Err(OxenError::basic_str("latest_commit_synced() Request failed"))
Err(OxenError::basic_str(
"latest_commit_synced() Request failed",
))
}
}
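The body-parsing pattern in this endpoint (deserialize into a typed response, and surface an error that echoes the URL and body when that fails) repeats across these remote API calls. A small stand-alone sketch of the same idea; the SyncStatus shape here is illustrative, not the crate's actual CommitSyncStatusResponse:

use serde::Deserialize;

// Illustrative response shape only.
#[derive(Deserialize, Debug)]
struct SyncStatus {
    status: String,
}

// Parse a JSON body, reporting the URL and raw body on failure.
fn parse_sync_status(url: &str, body: &str) -> Result<SyncStatus, String> {
    serde_json::from_str::<SyncStatus>(body)
        .map_err(|err| format!("could not deserialize response from {url}: {err}\n{body}"))
}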

@@ -379,17 +383,15 @@

// Commits in oldest-to-newest-order
pub async fn bulk_post_push_complete(
remote_repo: &RemoteRepository,
branch: &Branch,
commits: &Vec<Commit>,) -> Result<(), OxenError> {
remote_repo: &RemoteRepository,
commits: &Vec<Commit>,
) -> Result<(), OxenError> {
use serde_json::json;

let uri = format!("/commits/complete");
let url = api::endpoint::url_from_repo(remote_repo, &uri)?;
log::debug!("bulk_post_push_complete: {}", url);
let body = serde_json::to_string(&json!(
commits
)).unwrap();
let body = serde_json::to_string(&json!(commits)).unwrap();

log::debug!("Sending this body... {:?}", body);

@@ -404,13 +406,15 @@ pub async fn bulk_post_push_complete(
))),
}
} else {
Err(OxenError::basic_str("bulk_post_push_complete() Request failed"))
Err(OxenError::basic_str(
"bulk_post_push_complete() Request failed",
))
}
}
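A side note on the request body assembled above: wrapping the commit list in json! before serializing works, but assuming Commit derives Serialize, the vector can be passed to serde_json::to_string directly and produces the same JSON. A sketch with a hypothetical payload struct:

use serde::Serialize;

// Hypothetical payload; the real Commit type carries more fields.
#[derive(Serialize)]
struct CommitPayload {
    id: String,
    message: String,
}

// Serialize the whole slice in one call.
fn body_for(commits: &[CommitPayload]) -> serde_json::Result<String> {
    serde_json::to_string(commits)
}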

pub async fn get_commits_with_unsynced_dbs(
remote_repo: &RemoteRepository,
branch: &Branch
branch: &Branch,
) -> Result<Vec<Commit>, OxenError> {
let revision = branch.commit_id.clone();

@@ -436,10 +440,10 @@

pub async fn get_commits_with_unsynced_entries(
remote_repo: &RemoteRepository,
branch: &Branch
branch: &Branch,
) -> Result<Vec<Commit>, OxenError> {
let commit_id = branch.commit_id.clone();

let uri = format!("/commits/{commit_id}/entries_status");
let url = api::endpoint::url_from_repo(remote_repo, &uri)?;

@@ -699,7 +703,6 @@ pub async fn post_data_to_server(
chunk_size,
is_compressed,
filename,
bar,
)
.await?;
} else {
@@ -788,7 +791,6 @@ async fn upload_data_to_server_in_chunks(
chunk_size: usize,
is_compressed: bool,
filename: &Option<String>,
bar: Arc<ProgressBar>,
) -> Result<(), OxenError> {
let total_size = buffer.len();
log::debug!(
@@ -810,8 +812,6 @@
ByteSize::b(chunks.len() as u64)
);



let params = ChunkParams {
chunk_num: i,
total_chunks: chunks.len(),
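To make the chunked-upload path above easier to follow, here is a minimal sketch of splitting an in-memory buffer into fixed-size pieces and pairing each piece with its metadata. The field names mirror the ChunkParams visible in the diff, but the sketch is an illustration, not the server's actual protocol:

// Metadata describing one piece of a chunked upload.
struct ChunkParams {
    chunk_num: usize,
    total_chunks: usize,
    total_size: usize,
}

// Split the buffer into chunk_size-byte pieces; the last piece may be shorter.
// chunk_size must be greater than zero.
fn chunk_buffer(buffer: &[u8], chunk_size: usize) -> Vec<(ChunkParams, &[u8])> {
    let pieces: Vec<&[u8]> = buffer.chunks(chunk_size).collect();
    let total_chunks = pieces.len();
    pieces
        .into_iter()
        .enumerate()
        .map(|(chunk_num, data)| {
            (
                ChunkParams {
                    chunk_num,
                    total_chunks,
                    total_size: buffer.len(),
                },
                data,
            )
        })
        .collect()
}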
8 changes: 8 additions & 0 deletions src/lib/src/api/remote/diff.rs
@@ -79,6 +79,7 @@ mod tests {

use std::path::Path;
use std::path::PathBuf;
use std::time::Instant;

use crate::api;
use crate::command;
@@ -891,6 +892,11 @@ who won the game?,The packers beat up on the bears,packers
// Push new branch real good
command::push_remote_branch(&repo, constants::DEFAULT_REMOTE_NAME, branch_name).await?;

log::debug!("Push is done, moving onto diff.");
// Log the current time
let now = Instant::now();
log::debug!("Push finished at {:?}", now);

let compare = api::remote::diff::list_diff_entries(
&remote_repo,
&og_branch.name,
@@ -900,6 +906,8 @@
)
.await?;

log::debug!("Diff is done.");

log::debug!("Here's our total compare situation: {:#?}", compare);

// Added 4 dogs, one dir
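One note on the timing logs added to this test: Instant::now() is a monotonic clock whose Debug output is an opaque value, so logging it directly mostly marks that a point was reached. If the goal is to see how long the push or the diff call took, capturing a start time and logging elapsed() is more informative. A small sketch, assuming the log crate the codebase already uses:

use std::time::Instant;

// Run a step and log how long it took. The label is whatever is being timed.
fn time_step<T>(label: &str, step: impl FnOnce() -> T) -> T {
    let start = Instant::now();
    let result = step();
    log::debug!("{label} took {:?}", start.elapsed());
    result
}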
29 changes: 25 additions & 4 deletions src/lib/src/core/cache/cachers/repo_size.rs
@@ -3,6 +3,7 @@
use fs_extra::dir::get_size;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::time::Instant;

use crate::api;
use crate::constants::{CACHE_DIR, DIRS_DIR, HISTORY_DIR};
@@ -69,20 +70,37 @@ pub fn compute(repo: &LocalRepository, commit: &Commit) -> Result<(), OxenError>

// TODO: do not copy pasta this code
for entry in entries {
log::debug!("looking for latest commit entry for {:?} on commit {:?}", entry.path, entry.commit_id);
log::debug!(
"looking for latest commit entry for {:?} on commit {:?}",
entry.path,
entry.commit_id
);
let commit = if commits.contains_key(&entry.commit_id) {
Some(commits[&entry.commit_id].clone())
} else {
commit_reader.get_commit_by_id(&entry.commit_id)?
};

if latest_commit.is_none() {
log::debug!("FOUND LATEST COMMIT PARENT EMPTY {:?} -> {:?}", entry.path, commit);
log::debug!(
"FOUND LATEST COMMIT PARENT EMPTY {:?} -> {:?}",
entry.path,
commit
);
latest_commit = commit.clone();
} else {
log::debug!("CONSIDERING COMMIT PARENT TIMESTAMP {:?} {:?} < {:?}", entry.path, latest_commit.as_ref().unwrap().timestamp, commit.as_ref().unwrap().timestamp);
log::debug!(
"CONSIDERING COMMIT PARENT TIMESTAMP {:?} {:?} < {:?}",
entry.path,
latest_commit.as_ref().unwrap().timestamp,
commit.as_ref().unwrap().timestamp
);
if latest_commit.as_ref().unwrap().timestamp < commit.as_ref().unwrap().timestamp {
log::debug!("FOUND LATEST COMMIT PARENT TIMESTAMP {:?} -> {:?}", entry.path, commit);
log::debug!(
"FOUND LATEST COMMIT PARENT TIMESTAMP {:?} -> {:?}",
entry.path,
commit
);
latest_commit = commit.clone();
}
}
@@ -138,6 +156,9 @@ pub fn compute(repo: &LocalRepository, commit: &Commit) -> Result<(), OxenError>
latest_commit.id,
latest_commit_path
);
// Write timestamp
let now = Instant::now();
log::debug!("Wrote latest commit at {:?}", now);
// create parent directory if not exists
if let Some(parent) = latest_commit_path.parent() {
util::fs::create_dir_all(parent)?;
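The latest-commit and size caches written in this file follow a simple write-through pattern: ensure the parent directory exists, then write the computed value to a well-known path so later reads can skip the computation. A stand-alone sketch of that pattern (the path and value here are illustrative):

use std::fs;
use std::io::Write;
use std::path::Path;

// Persist a computed cache value, creating the parent directory if needed.
fn write_cache_value(path: &Path, value: &str) -> std::io::Result<()> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    let mut file = fs::File::create(path)?;
    file.write_all(value.as_bytes())
}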
11 changes: 10 additions & 1 deletion src/lib/src/core/cache/commit_cacher.rs
@@ -149,7 +149,10 @@ fn get_db_connection(

/// Run all the cachers and update their status's as you go
pub fn run_all(repo: &LocalRepository, commit: &Commit, force: bool) -> Result<(), OxenError> {
// Write the LOCK file and delete when we are done processing
// Write the LOCK file and delete when we are done processin

// std::thread::sleep(std::time::Duration::from_millis(500));

let lock_path = cached_status_lock_path(repo, commit);
log::warn!("run_all called on commit {} force? {}", commit, force);

@@ -194,16 +197,22 @@ pub fn run_all(repo: &LocalRepository, commit: &Commit, force: bool) -> Result<(
}
Err(err) => {
let err = format!("{err}");
log::debug!("issue with running cachers?");
log::error!("{}", err);
let status_failed = CacherStatus::failed(&err);
str_json_db::put(&db, name, &status_failed)?;
}
}
// Right after each cacher?
}

// Right after the cachers...
// Sleep 500 ms

// Delete the LOCK file
log::debug!("run_all Deleting lock file {:?}", lock_path);
util::fs::remove_file(lock_path)?;
log::debug!("lock file deleted for commit {:?}", commit);

Ok(())
}
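run_all writes the LOCK file up front and removes it after the cachers finish; since several fallible steps sit in between, an early return with ? could in principle leave a stale lock behind. One possible alternative, shown only as a sketch and not as what the code above does, is an RAII guard that removes the lock when dropped:

use std::fs;
use std::path::PathBuf;

// Create the lock file on acquire; remove it automatically on drop,
// even if the work in between returns early with an error.
struct LockFile {
    path: PathBuf,
}

impl LockFile {
    fn acquire(path: PathBuf) -> std::io::Result<Self> {
        fs::write(&path, b"")?;
        Ok(LockFile { path })
    }
}

impl Drop for LockFile {
    fn drop(&mut self) {
        let _ = fs::remove_file(&self.path);
    }
}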