Skip to content

Commit

Permalink
feat(sqlite): start importing the gcroots data
Browse files Browse the repository at this point in the history
This is quite rough at the moment, but ideally we want to save most
of the data in the database instead of indirectly in the filesystem.
That gives us better leverage for storing other useful things, like
which files we last watched for a project, or how long reacting to
file changes took.
  • Loading branch information
Profpatsch committed Apr 19, 2024
1 parent dd33b50 commit 8d04978
Show file tree
Hide file tree
Showing 3 changed files with 87 additions and 20 deletions.
2 changes: 2 additions & 0 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ fn main() {
let logger = logging::root(verbosity);
debug!(logger, "input options"; "options" => ?opts);

lorri::sqlite::migrate_db(&logger).unwrap();

match run_command(&logger, opts) {
Err(err) => {
error!(logger, "{}", err.message());
Expand Down
14 changes: 7 additions & 7 deletions src/ops.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
mod direnv;
pub mod error;

use crate::sqlite;
use crate::build_loop::BuildLoop;
use crate::build_loop::Event;
use crate::build_loop::Reason;
Expand All @@ -26,6 +25,7 @@ use crate::path_to_json_string;
use crate::project::{NixGcRootUserDir, Project};
use crate::run_async::Async;
use crate::socket::path::SocketPath;
use crate::sqlite;
use crate::AbsPathBuf;
use crate::NixFile;
use crate::VERSION_BUILD_REV;
Expand Down Expand Up @@ -962,15 +962,15 @@ fn main_run_once(
}

/// Represents a gc root along with some metadata, used for json output of lorri gc info
struct GcRootInfo {
pub struct GcRootInfo {
/// directory where root is stored
gc_dir: AbsPathBuf,
pub gc_dir: AbsPathBuf,
/// nix file from which the root originates. If None, then the root is considered dead.
nix_file: Option<PathBuf>,
pub nix_file: Option<PathBuf>,
/// timestamp of the last build
timestamp: SystemTime,
pub timestamp: SystemTime,
/// whether `nix_file` still exists
alive: bool,
pub alive: bool,
}

impl GcRootInfo {
Expand Down Expand Up @@ -998,7 +998,7 @@ impl GcRootInfo {
}

/// Returns a list of existing gc roots along with some metadata
fn list_roots(logger: &slog::Logger) -> Result<Vec<GcRootInfo>, ExitError> {
pub fn list_roots(logger: &slog::Logger) -> Result<Vec<GcRootInfo>, ExitError> {
let paths = crate::ops::get_paths()?;
let mut res = Vec::new();
let gc_root_dir = paths.gc_root_dir();
Expand Down
91 changes: 78 additions & 13 deletions src/sqlite.rs
Original file line number Diff line number Diff line change
@@ -1,22 +1,87 @@
use std::{
ffi::OsString,
os::unix::ffi::{OsStrExt, OsStringExt},
path::PathBuf,
time::{Duration, SystemTime},
};

use rusqlite as sqlite;
use sqlite::Connection;
use slog::info;
use sqlite::{named_params, Connection};

use crate::ops::{self, error::ExitError};

#[test]
fn migrate_db() {
pub fn migrate_db(logger: &slog::Logger) -> Result<(), ExitError> {
let conn = sqlite::Connection::open_in_memory().expect("cannot open sqlite db");
conn.execute_batch(
r#"CREATE TABLE foo(id INTEGER PRIMARY KEY, bla TEXT);
INSERT INTO foo (id, bla) VALUES (1, 'blabla');
INSERT INTO foo (id, bla) VALUES (2, 'bubatz');
"#,
r#"CREATE TABLE gc_roots(
id INTEGER PRIMARY KEY,
nix_file PATH,
last_updated EPOCH_TIME
);
"#,
)
.unwrap();
let mut stmt = conn.prepare("SELECT * from foo;").unwrap();
let res = stmt
.query_map((), |row| row.get::<_, String>("bla"))

let infos = ops::list_roots(logger)?;

let mut stmt = conn
.prepare("INSERT INTO gc_roots (nix_file, last_updated) VALUES (:nix_file, :last_updated);")
.unwrap();
for info in infos {
let ts = info
.timestamp
.duration_since(SystemTime::UNIX_EPOCH)
.expect("expect file timestamp to be a unix timestamp")
.as_secs();
stmt.execute(named_params! {
":nix_file": info.nix_file.map(|pb| pb.as_os_str().as_bytes().to_owned()),
":last_updated": ts
})
.expect("cannot insert");
}

let mut stmt = conn
.prepare("SELECT nix_file, last_updated from gc_roots")
.unwrap();
let mut res = stmt
.query_map((), |row| {
let nix_file = row
.get::<_, Option<Vec<u8>>>("nix_file")
.unwrap()
.map(|v: Vec<u8>| OsString::from_vec(v));
let t = row.get::<_, Option<u64>>("last_updated").unwrap().map(|u| {
SystemTime::elapsed(&(SystemTime::UNIX_EPOCH + Duration::from_secs(u))).unwrap()
});
Ok((nix_file, t, t.map(ago)))
})
.unwrap()
.map(|x| x.unwrap())
.collect::<Vec<String>>();
.filter_map(|r| match r {
Err(_) => None,
Ok(r) => r.0.map(|nix| (nix, r.1, r.2)),
})
.collect::<Vec<_>>();
res.sort_by_key(|r| r.1);
info!(logger, "We have these nix files: {:#?}", res);

Ok(())
}

/// Render a `Duration` as a coarse, human-readable "… ago" string,
/// using the largest unit that fits (days > hours > minutes > seconds).
///
/// Fixes: removed a stray diff-artifact line (`assert_eq!(res, …)` referencing
/// an undefined `res`, left over from the deleted test) and hoisted the
/// repeated `dur.as_secs()` calls into a single binding.
fn ago(dur: Duration) -> String {
    let secs = dur.as_secs();
    let mins = secs / 60;
    let hours = secs / (60 * 60);
    let days = secs / (60 * 60 * 24);

    if days > 0 {
        return format!("{} days ago", days);
    }
    if hours > 0 {
        return format!("{} hours ago", hours);
    }
    if mins > 0 {
        return format!("{} minutes ago", mins);
    }

    format!("{} seconds ago", secs)
}

0 comments on commit 8d04978

Please sign in to comment.