Mirror of https://github.com/RGBCube/dix, synced 2025-10-31 00:42:44 +00:00

Commit db09147da6: feat: refactor store.rs
Parent commit: 531fa0278f

14 changed files with 1168 additions and 839 deletions

.gitignore (vendored): 7 lines changed

@@ -1,9 +1,2 @@
 /.direnv
 /target
-
-
-# Added by cargo
-#
-# already existing elements were commented out
-
-#/target

.rustfmt.toml (new file): 30 lines changed

@@ -0,0 +1,30 @@
+# Taken from https://github.com/cull-os/carcass.
+# Modified to have 2 space indents and 80 line width.
+
+# float_literal_trailing_zero  = "Always" # TODO: Warning for some reason?
+condense_wildcard_suffixes   = true
+doc_comment_code_block_width = 80
+edition                      = "2024"             # Keep in sync with Cargo.toml.
+enum_discrim_align_threshold = 60
+force_explicit_abi           = false
+force_multiline_blocks       = true
+format_code_in_doc_comments  = true
+format_macro_matchers        = true
+format_strings               = true
+group_imports                = "StdExternalCrate"
+hex_literal_case             = "Upper"
+imports_granularity          = "Crate"
+imports_layout               = "Vertical"
+inline_attribute_width       = 60
+match_block_trailing_comma   = true
+max_width                    = 80
+newline_style                = "Unix"
+normalize_comments           = true
+normalize_doc_attributes     = true
+overflow_delimited_expr      = true
+struct_field_align_threshold = 60
+tab_spaces                   = 2
+unstable_features            = true
+use_field_init_shorthand     = true
+use_try_shorthand            = true
+wrap_comments                = true
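Most of the churn in the Rust hunks below is mechanical fallout from this config: group_imports = "StdExternalCrate" and imports_granularity = "Crate" regroup use items into std / external / crate blocks with one use per crate, imports_layout = "Vertical" puts one imported item per line, and max_width = 80 plus match_block_trailing_comma = true account for the re-wrapped signatures and the commas added after match arms. A small illustration of the import rules (not lines taken from the repo); since unstable_features = true, formatting presumably runs on a nightly cargo fmt:

// Before formatting (single line, external crate listed first):
//   use regex::Regex;
//   use std::{collections::HashMap, sync::OnceLock};
//
// After `cargo fmt` with the config above: the std group comes first,
// each crate gets one `use`, and nested items are broken one per line.
use std::{
  collections::HashMap,
  sync::OnceLock,
};

use regex::Regex;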
							
								
								
									

.taplo.toml (new file): 15 lines changed

@@ -0,0 +1,15 @@
+# Taken from https://github.com/cull-os/carcass.
+
+[formatting]
+align_entries         = true
+column_width          = 100
+compact_arrays        = false
+reorder_inline_tables = true
+reorder_keys          = true
+
+[[rule]]
+include = [ "**/Cargo.toml" ]
+keys    = [ "package" ]
+
+[rule.formatting]
+reorder_keys = false

Cargo.lock (generated): 79 lines changed

							|  | @ -61,6 +61,12 @@ dependencies = [ | ||||||
|  "windows-sys", |  "windows-sys", | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "anyhow" | ||||||
|  | version = "1.0.98" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "atty" | name = "atty" | ||||||
| version = "0.2.14" | version = "0.2.14" | ||||||
|  | @ -174,6 +180,15 @@ version = "1.0.3" | ||||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
| checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" | checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" | ||||||
| 
 | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "convert_case" | ||||||
|  | version = "0.7.1" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7" | ||||||
|  | dependencies = [ | ||||||
|  |  "unicode-segmentation", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "criterion" | name = "criterion" | ||||||
| version = "0.3.6" | version = "0.3.6" | ||||||
|  | @ -256,6 +271,28 @@ dependencies = [ | ||||||
|  "memchr", |  "memchr", | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "derive_more" | ||||||
|  | version = "2.0.1" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" | ||||||
|  | dependencies = [ | ||||||
|  |  "derive_more-impl", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "derive_more-impl" | ||||||
|  | version = "2.0.1" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" | ||||||
|  | dependencies = [ | ||||||
|  |  "convert_case", | ||||||
|  |  "proc-macro2", | ||||||
|  |  "quote", | ||||||
|  |  "syn", | ||||||
|  |  "unicode-xid", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "diff" | name = "diff" | ||||||
| version = "0.1.13" | version = "0.1.13" | ||||||
|  | @ -266,14 +303,18 @@ checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" | ||||||
| name = "dix" | name = "dix" | ||||||
| version = "0.1.0" | version = "0.1.0" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  |  "anyhow", | ||||||
|  "clap 4.5.37", |  "clap 4.5.37", | ||||||
|  "criterion", |  "criterion", | ||||||
|  |  "derive_more", | ||||||
|  "diff", |  "diff", | ||||||
|  "env_logger", |  "env_logger", | ||||||
|  "libc", |  "libc", | ||||||
|  "log", |  "log", | ||||||
|  |  "ref-cast", | ||||||
|  "regex", |  "regex", | ||||||
|  "rusqlite", |  "rusqlite", | ||||||
|  |  "rustc-hash", | ||||||
|  "thiserror", |  "thiserror", | ||||||
|  "yansi", |  "yansi", | ||||||
| ] | ] | ||||||
|  | @ -562,6 +603,26 @@ dependencies = [ | ||||||
|  "crossbeam-utils", |  "crossbeam-utils", | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "ref-cast" | ||||||
|  | version = "1.0.24" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" | ||||||
|  | dependencies = [ | ||||||
|  |  "ref-cast-impl", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "ref-cast-impl" | ||||||
|  | version = "1.0.24" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" | ||||||
|  | dependencies = [ | ||||||
|  |  "proc-macro2", | ||||||
|  |  "quote", | ||||||
|  |  "syn", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "regex" | name = "regex" | ||||||
| version = "1.11.1" | version = "1.11.1" | ||||||
|  | @ -605,6 +666,12 @@ dependencies = [ | ||||||
|  "smallvec", |  "smallvec", | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "rustc-hash" | ||||||
|  | version = "2.1.1" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "rustversion" | name = "rustversion" | ||||||
| version = "1.0.20" | version = "1.0.20" | ||||||
|  | @ -742,12 +809,24 @@ version = "1.0.18" | ||||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
| checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" | checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" | ||||||
| 
 | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "unicode-segmentation" | ||||||
|  | version = "1.12.0" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "unicode-width" | name = "unicode-width" | ||||||
| version = "0.1.14" | version = "0.1.14" | ||||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
| checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" | checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" | ||||||
| 
 | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "unicode-xid" | ||||||
|  | version = "0.2.6" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "utf8parse" | name = "utf8parse" | ||||||
| version = "0.2.2" | version = "0.2.2" | ||||||

Cargo.toml: 118 lines changed

							|  | @ -3,37 +3,117 @@ name = "dix" | ||||||
| version = "0.1.0" | version = "0.1.0" | ||||||
| edition = "2024" | edition = "2024" | ||||||
| 
 | 
 | ||||||
| [[bin]] |  | ||||||
| name = "dix" |  | ||||||
| path = "src/main.rs" |  | ||||||
| 
 |  | ||||||
| [lib] |  | ||||||
| name = "dixlib" |  | ||||||
| path = "src/lib.rs" |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| [dependencies] | [dependencies] | ||||||
| clap = { version = "4.5.37", features = ["derive"] } | anyhow     = "1.0.98" | ||||||
| regex = "1.11.1" | clap       = { version = "4.5.37", features = [ "derive" ] } | ||||||
| yansi = "1.0.1" | derive_more = { version = "2.0.1", features = ["full"] } | ||||||
| thiserror = "2.0.12" |  | ||||||
| log = "0.4.20" |  | ||||||
| env_logger = "0.11.3" |  | ||||||
| rusqlite = { version = "0.35.0", features = ["bundled"] } |  | ||||||
| diff       = "0.1.13" | diff       = "0.1.13" | ||||||
|  | env_logger = "0.11.3" | ||||||
|  | log        = "0.4.20" | ||||||
|  | ref-cast = "1.0.24" | ||||||
|  | regex      = "1.11.1" | ||||||
|  | rusqlite   = { version = "0.35.0", features = [ "bundled" ] } | ||||||
|  | rustc-hash = "2.1.1" | ||||||
|  | thiserror  = "2.0.12" | ||||||
|  | yansi      = "1.0.1" | ||||||
| 
 | 
 | ||||||
| [dev-dependencies] | [dev-dependencies] | ||||||
| criterion = "0.3" | criterion = "0.3" | ||||||
| libc      = "0.2" | libc      = "0.2" | ||||||
| 
 | 
 | ||||||
| [[bench]] | [[bench]] | ||||||
|  | harness = false | ||||||
| name    = "store" | name    = "store" | ||||||
| harness=false |  | ||||||
| 
 | 
 | ||||||
| [[bench]] | [[bench]] | ||||||
|  | harness = false | ||||||
| name    = "print" | name    = "print" | ||||||
| harness=false |  | ||||||
| 
 | 
 | ||||||
| [[bench]] | [[bench]] | ||||||
|  | harness = false | ||||||
| name    = "util" | name    = "util" | ||||||
| harness=false | 
 | ||||||
|  | [lints.clippy] | ||||||
|  | pedantic = { level = "warn", priority = -1 } | ||||||
|  | 
 | ||||||
|  | blanket_clippy_restriction_lints = "allow" | ||||||
|  | restriction                      = { level = "warn", priority = -1 } | ||||||
|  | 
 | ||||||
|  | alloc_instead_of_core             = "allow" | ||||||
|  | allow_attributes_without_reason   = "allow" | ||||||
|  | arbitrary_source_item_ordering    = "allow" | ||||||
|  | arithmetic_side_effects           = "allow" | ||||||
|  | as_conversions                    = "allow" | ||||||
|  | as_pointer_underscore             = "allow" | ||||||
|  | as_underscore                     = "allow" | ||||||
|  | big_endian_bytes                  = "allow" | ||||||
|  | clone_on_ref_ptr                  = "allow" | ||||||
|  | dbg_macro                         = "allow" | ||||||
|  | disallowed_script_idents          = "allow" | ||||||
|  | else_if_without_else              = "allow" | ||||||
|  | error_impl_error                  = "allow" | ||||||
|  | exhaustive_enums                  = "allow" | ||||||
|  | exhaustive_structs                = "allow" | ||||||
|  | expect_used                       = "allow" | ||||||
|  | field_scoped_visibility_modifiers = "allow" | ||||||
|  | float_arithmetic                  = "allow" | ||||||
|  | host_endian_bytes                 = "allow" | ||||||
|  | impl_trait_in_params              = "allow" | ||||||
|  | implicit_return                   = "allow" | ||||||
|  | indexing_slicing                  = "allow" | ||||||
|  | inline_asm_x86_intel_syntax       = "allow" | ||||||
|  | integer_division                  = "allow" | ||||||
|  | integer_division_remainder_used   = "allow" | ||||||
|  | large_include_file                = "allow" | ||||||
|  | let_underscore_must_use           = "allow" | ||||||
|  | let_underscore_untyped            = "allow" | ||||||
|  | little_endian_bytes               = "allow" | ||||||
|  | map_err_ignore                    = "allow" | ||||||
|  | match_same_arms                   = "allow" | ||||||
|  | missing_assert_message            = "allow" | ||||||
|  | missing_docs_in_private_items     = "allow" | ||||||
|  | missing_errors_doc                = "allow" | ||||||
|  | missing_inline_in_public_items    = "allow" | ||||||
|  | missing_panics_doc                = "allow" | ||||||
|  | missing_trait_methods             = "allow" | ||||||
|  | mod_module_files                  = "allow" | ||||||
|  | multiple_inherent_impl            = "allow" | ||||||
|  | mutex_atomic                      = "allow" | ||||||
|  | mutex_integer                     = "allow" | ||||||
|  | new_without_default               = "allow" | ||||||
|  | non_ascii_literal                 = "allow" | ||||||
|  | panic                             = "allow" | ||||||
|  | panic_in_result_fn                = "allow" | ||||||
|  | partial_pub_fields                = "allow" | ||||||
|  | print_stderr                      = "allow" | ||||||
|  | print_stdout                      = "allow" | ||||||
|  | pub_use                           = "allow" | ||||||
|  | pub_with_shorthand                = "allow" | ||||||
|  | pub_without_shorthand             = "allow" | ||||||
|  | question_mark_used                = "allow" | ||||||
|  | ref_patterns                      = "allow" | ||||||
|  | renamed_function_params           = "allow" | ||||||
|  | same_name_method                  = "allow" | ||||||
|  | semicolon_outside_block           = "allow" | ||||||
|  | separated_literal_suffix          = "allow" | ||||||
|  | shadow_reuse                      = "allow" | ||||||
|  | shadow_same                       = "allow" | ||||||
|  | shadow_unrelated                  = "allow" | ||||||
|  | single_call_fn                    = "allow" | ||||||
|  | single_char_lifetime_names        = "allow" | ||||||
|  | single_match_else                 = "allow" | ||||||
|  | std_instead_of_alloc              = "allow" | ||||||
|  | std_instead_of_core               = "allow" | ||||||
|  | string_add                        = "allow" | ||||||
|  | string_slice                      = "allow" | ||||||
|  | todo                              = "allow" | ||||||
|  | too_many_lines                    = "allow" | ||||||
|  | try_err                           = "allow" | ||||||
|  | unimplemented                     = "allow" | ||||||
|  | unnecessary_safety_comment        = "allow" | ||||||
|  | unnecessary_safety_doc            = "allow" | ||||||
|  | unreachable                       = "allow" | ||||||
|  | unwrap_in_result                  = "allow" | ||||||
|  | unwrap_used                       = "allow" | ||||||
|  | use_debug                         = "allow" | ||||||
|  | wildcard_enum_match_arm           = "allow" | ||||||
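Each [[bench]] table sets harness = false, which tells Cargo to skip the built-in libtest harness so Criterion can provide main through its macros. A minimal sketch of such a bench target (the file name and benchmark body are invented for illustration; the real benches follow below):

// benches/example.rs (hypothetical file, for illustration only).
use criterion::{
  Criterion,
  black_box,
  criterion_group,
  criterion_main,
};

fn bench_addition(c: &mut Criterion) {
  c.bench_function("addition", |b| {
    // black_box keeps the optimizer from constant-folding the work away.
    b.iter(|| black_box(2_u64) + black_box(40));
  });
}

// With `harness = false`, these macros generate the `main` entry point.
criterion_group!(benches, bench_addition);
criterion_main!(benches);

The hunks that follow cover the shared bench helper (benches/common.rs) and the print, store and util benches declared above.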
|  | @ -1,11 +1,14 @@ | ||||||
| use std::{ | use std::{ | ||||||
|   env, |   env, | ||||||
|     fs::{self, DirEntry}, |   fs, | ||||||
|   path::PathBuf, |   path::PathBuf, | ||||||
|   sync::OnceLock, |   sync::OnceLock, | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| use dixlib::{store, util::PackageDiff}; | use dix::{ | ||||||
|  |   store, | ||||||
|  |   util::PackageDiff, | ||||||
|  | }; | ||||||
| 
 | 
 | ||||||
| /// tries to get the path of the oldest nixos system derivation
 | /// tries to get the path of the oldest nixos system derivation
 | ||||||
| /// this function is pretty hacky and only used so that
 | /// this function is pretty hacky and only used so that
 | ||||||
|  | @ -25,8 +28,8 @@ fn get_oldest_nixos_system() -> Option<PathBuf> { | ||||||
|     // extract all digits from the file name and use that as key
 |     // extract all digits from the file name and use that as key
 | ||||||
|     let p = path.as_os_str().to_str().unwrap_or_default(); |     let p = path.as_os_str().to_str().unwrap_or_default(); | ||||||
|     let digits: String = p.chars().filter(|c| c.is_ascii_digit()).collect(); |     let digits: String = p.chars().filter(|c| c.is_ascii_digit()).collect(); | ||||||
|         // if we are not able to produce a key (e.g. because the path does not contain digits)
 |     // if we are not able to produce a key (e.g. because the path does not
 | ||||||
|         // we put it last
 |     // contain digits) we put it last
 | ||||||
|     digits.parse::<u32>().unwrap_or(u32::MAX) |     digits.parse::<u32>().unwrap_or(u32::MAX) | ||||||
|   }) |   }) | ||||||
| } | } | ||||||
|  | @ -55,13 +58,15 @@ pub fn get_deriv_query_old() -> &'static PathBuf { | ||||||
| 
 | 
 | ||||||
| pub fn get_packages() -> &'static (Vec<String>, Vec<String>) { | pub fn get_packages() -> &'static (Vec<String>, Vec<String>) { | ||||||
|   static _PKGS: OnceLock<(Vec<String>, Vec<String>)> = OnceLock::new(); |   static _PKGS: OnceLock<(Vec<String>, Vec<String>)> = OnceLock::new(); | ||||||
|     _PKGS.get_or_init(|| { |   _PKGS.get_or_init(|| { | ||||||
|         let pkgs_before = store::get_packages(std::path::Path::new(get_deriv_query_old())) |     let pkgs_before = | ||||||
|  |       store::query_packages(std::path::Path::new(get_deriv_query_old())) | ||||||
|         .unwrap() |         .unwrap() | ||||||
|         .into_iter() |         .into_iter() | ||||||
|             .map(|(_, name)| name) |         .map(|(_, name)| name) | ||||||
|         .collect::<Vec<String>>(); |         .collect::<Vec<String>>(); | ||||||
|         let pkgs_after = store::get_packages(std::path::Path::new(get_deriv_query())) |     let pkgs_after = | ||||||
|  |       store::query_packages(std::path::Path::new(get_deriv_query())) | ||||||
|         .unwrap() |         .unwrap() | ||||||
|         .into_iter() |         .into_iter() | ||||||
|         .map(|(_, name)| name) |         .map(|(_, name)| name) | ||||||
|  |  | ||||||
|  | @ -1,10 +1,21 @@ | ||||||
| mod common; | mod common; | ||||||
| 
 | 
 | ||||||
| use std::{fs::File, os::fd::AsRawFd}; | use std::{ | ||||||
|  |   fs::File, | ||||||
|  |   os::fd::AsRawFd, | ||||||
|  | }; | ||||||
| 
 | 
 | ||||||
| use common::{get_pkg_diff, print_used_nixos_systems}; | use common::{ | ||||||
| use criterion::{Criterion, black_box, criterion_group, criterion_main}; |   get_pkg_diff, | ||||||
| use dixlib::print; |   print_used_nixos_systems, | ||||||
|  | }; | ||||||
|  | use criterion::{ | ||||||
|  |   Criterion, | ||||||
|  |   black_box, | ||||||
|  |   criterion_group, | ||||||
|  |   criterion_main, | ||||||
|  | }; | ||||||
|  | use dix::print; | ||||||
| 
 | 
 | ||||||
| /// reroutes stdout and stderr to the null device before
 | /// reroutes stdout and stderr to the null device before
 | ||||||
| /// executing `f`
 | /// executing `f`
 | ||||||
|  |  | ||||||
|  | @ -1,6 +1,11 @@ | ||||||
| mod common; | mod common; | ||||||
| use criterion::{Criterion, black_box, criterion_group, criterion_main}; | use criterion::{ | ||||||
| use dixlib::store; |   Criterion, | ||||||
|  |   black_box, | ||||||
|  |   criterion_group, | ||||||
|  |   criterion_main, | ||||||
|  | }; | ||||||
|  | use dix::store; | ||||||
| 
 | 
 | ||||||
| // basic benchmarks using the current system
 | // basic benchmarks using the current system
 | ||||||
| //
 | //
 | ||||||
|  | @ -13,17 +18,19 @@ use dixlib::store; | ||||||
| 
 | 
 | ||||||
| pub fn bench_get_packages(c: &mut Criterion) { | pub fn bench_get_packages(c: &mut Criterion) { | ||||||
|   c.bench_function("get_packages", |b| { |   c.bench_function("get_packages", |b| { | ||||||
|         b.iter(|| store::get_packages(black_box(common::get_deriv_query()))); |     b.iter(|| store::query_depdendents(black_box(common::get_deriv_query()))); | ||||||
|   }); |   }); | ||||||
| } | } | ||||||
| pub fn bench_get_closure_size(c: &mut Criterion) { | pub fn bench_get_closure_size(c: &mut Criterion) { | ||||||
|   c.bench_function("get_closure_size", |b| { |   c.bench_function("get_closure_size", |b| { | ||||||
|         b.iter(|| store::get_closure_size(black_box(common::get_deriv_query()))); |     b.iter(|| store::query_closure_size(black_box(common::get_deriv_query()))); | ||||||
|   }); |   }); | ||||||
| } | } | ||||||
| pub fn bench_get_dependency_graph(c: &mut Criterion) { | pub fn bench_get_dependency_graph(c: &mut Criterion) { | ||||||
|   c.bench_function("get_dependency_graph", |b| { |   c.bench_function("get_dependency_graph", |b| { | ||||||
|         b.iter(|| store::get_dependency_graph(black_box(common::get_deriv_query()))); |     b.iter(|| { | ||||||
|  |       store::query_dependency_graph(black_box(common::get_deriv_query())) | ||||||
|  |     }); | ||||||
|   }); |   }); | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -1,8 +1,13 @@ | ||||||
| mod common; | mod common; | ||||||
| 
 | 
 | ||||||
| use common::get_packages; | use common::get_packages; | ||||||
| use criterion::{Criterion, black_box, criterion_group, criterion_main}; | use criterion::{ | ||||||
| use dixlib::util::PackageDiff; |   Criterion, | ||||||
|  |   black_box, | ||||||
|  |   criterion_group, | ||||||
|  |   criterion_main, | ||||||
|  | }; | ||||||
|  | use dix::util::PackageDiff; | ||||||
| 
 | 
 | ||||||
| pub fn bench_package_diff(c: &mut Criterion) { | pub fn bench_package_diff(c: &mut Criterion) { | ||||||
|   let (pkgs_before, pkgs_after) = get_packages(); |   let (pkgs_before, pkgs_after) = get_packages(); | ||||||

src/error.rs: 16 lines changed

							|  | @ -76,7 +76,11 @@ impl From<regex::Error> for AppError { | ||||||
| 
 | 
 | ||||||
| impl AppError { | impl AppError { | ||||||
|   /// Create a command failure error with context
 |   /// Create a command failure error with context
 | ||||||
|     pub fn command_failed<S: Into<String>>(command: S, args: &[&str], message: S) -> Self { |   pub fn command_failed<S: Into<String>>( | ||||||
|  |     command: S, | ||||||
|  |     args: &[&str], | ||||||
|  |     message: S, | ||||||
|  |   ) -> Self { | ||||||
|     Self::CommandFailed { |     Self::CommandFailed { | ||||||
|       command: command.into(), |       command: command.into(), | ||||||
|       args:    args.iter().map(|&s| s.to_string()).collect(), |       args:    args.iter().map(|&s| s.to_string()).collect(), | ||||||
|  | @ -106,7 +110,10 @@ impl AppError { | ||||||
|   } |   } | ||||||
| 
 | 
 | ||||||
|   /// Create a regex error with context
 |   /// Create a regex error with context
 | ||||||
|     pub fn regex_error<C: Into<String>>(source: regex::Error, context: C) -> Self { |   pub fn regex_error<C: Into<String>>( | ||||||
|  |     source: regex::Error, | ||||||
|  |     context: C, | ||||||
|  |   ) -> Self { | ||||||
|     Self::RegexError { |     Self::RegexError { | ||||||
|       source, |       source, | ||||||
|       context: context.into(), |       context: context.into(), | ||||||
|  | @ -114,7 +121,10 @@ impl AppError { | ||||||
|   } |   } | ||||||
| 
 | 
 | ||||||
|   /// Create a command output error with context
 |   /// Create a command output error with context
 | ||||||
|     pub fn command_output_error<C: Into<String>>(source: std::str::Utf8Error, context: C) -> Self { |   pub fn command_output_error<C: Into<String>>( | ||||||
|  |     source: std::str::Utf8Error, | ||||||
|  |     context: C, | ||||||
|  |   ) -> Self { | ||||||
|     Self::CommandOutputError { |     Self::CommandOutputError { | ||||||
|       source, |       source, | ||||||
|       context: context.into(), |       context: context.into(), | ||||||

src/main.rs: 53 lines changed

							|  | @ -1,13 +1,19 @@ | ||||||
| use clap::Parser; |  | ||||||
| use core::str; | use core::str; | ||||||
| use dixlib::print; |  | ||||||
| use dixlib::store; |  | ||||||
| use dixlib::util::PackageDiff; |  | ||||||
| use log::{debug, error}; |  | ||||||
| use std::{ | use std::{ | ||||||
|     collections::{HashMap, HashSet}, |   collections::HashSet, | ||||||
|   thread, |   thread, | ||||||
| }; | }; | ||||||
|  | 
 | ||||||
|  | use clap::Parser; | ||||||
|  | use dixlib::{ | ||||||
|  |   print, | ||||||
|  |   store, | ||||||
|  |   util::PackageDiff, | ||||||
|  | }; | ||||||
|  | use log::{ | ||||||
|  |   debug, | ||||||
|  |   error, | ||||||
|  | }; | ||||||
| use yansi::Paint; | use yansi::Paint; | ||||||
| 
 | 
 | ||||||
| #[derive(Parser, Debug)] | #[derive(Parser, Debug)] | ||||||
|  | @ -66,7 +72,8 @@ fn main() { | ||||||
| 
 | 
 | ||||||
|   // Configure logger based on verbosity flags and environment variables
 |   // Configure logger based on verbosity flags and environment variables
 | ||||||
|   // Respects RUST_LOG environment variable if present.
 |   // Respects RUST_LOG environment variable if present.
 | ||||||
|     // XXX:We can also dedicate a specific env variable for this tool, if we want to.
 |   // XXX:We can also dedicate a specific env variable for this tool, if we want
 | ||||||
|  |   // to.
 | ||||||
|   let env = env_logger::Env::default().filter_or( |   let env = env_logger::Env::default().filter_or( | ||||||
|     "RUST_LOG", |     "RUST_LOG", | ||||||
|     if args.quiet { |     if args.quiet { | ||||||
|  | @ -100,11 +107,11 @@ fn main() { | ||||||
|   }; |   }; | ||||||
| 
 | 
 | ||||||
|   // Get package lists and handle potential errors
 |   // Get package lists and handle potential errors
 | ||||||
|     let package_list_pre = match store::get_packages(&args.path) { |   let package_list_pre = match store::query_packages(&args.path) { | ||||||
|     Ok(packages) => { |     Ok(packages) => { | ||||||
|       debug!("Found {} packages in first closure", packages.len()); |       debug!("Found {} packages in first closure", packages.len()); | ||||||
|       packages.into_iter().map(|(_, path)| path).collect() |       packages.into_iter().map(|(_, path)| path).collect() | ||||||
|         } |     }, | ||||||
|     Err(e) => { |     Err(e) => { | ||||||
|       error!( |       error!( | ||||||
|         "Error getting packages from path {}: {}", |         "Error getting packages from path {}: {}", | ||||||
|  | @ -117,14 +124,14 @@ fn main() { | ||||||
|         e |         e | ||||||
|       ); |       ); | ||||||
|       Vec::new() |       Vec::new() | ||||||
|         } |     }, | ||||||
|   }; |   }; | ||||||
| 
 | 
 | ||||||
|     let package_list_post = match store::get_packages(&args.path2) { |   let package_list_post = match store::query_packages(&args.path2) { | ||||||
|     Ok(packages) => { |     Ok(packages) => { | ||||||
|       debug!("Found {} packages in second closure", packages.len()); |       debug!("Found {} packages in second closure", packages.len()); | ||||||
|       packages.into_iter().map(|(_, path)| path).collect() |       packages.into_iter().map(|(_, path)| path).collect() | ||||||
|         } |     }, | ||||||
|     Err(e) => { |     Err(e) => { | ||||||
|       error!( |       error!( | ||||||
|         "Error getting packages from path {}: {}", |         "Error getting packages from path {}: {}", | ||||||
|  | @ -137,7 +144,7 @@ fn main() { | ||||||
|         e |         e | ||||||
|       ); |       ); | ||||||
|       Vec::new() |       Vec::new() | ||||||
|         } |     }, | ||||||
|   }; |   }; | ||||||
| 
 | 
 | ||||||
|   let PackageDiff { |   let PackageDiff { | ||||||
|  | @ -156,10 +163,12 @@ fn main() { | ||||||
|     "Changed packages: {}", |     "Changed packages: {}", | ||||||
|     changed |     changed | ||||||
|       .iter() |       .iter() | ||||||
|             .filter(|p| !p.is_empty() |       .filter(|p| { | ||||||
|  |         !p.is_empty() | ||||||
|           && match (pre.get(*p), post.get(*p)) { |           && match (pre.get(*p), post.get(*p)) { | ||||||
|             (Some(ver_pre), Some(ver_post)) => ver_pre != ver_post, |             (Some(ver_pre), Some(ver_post)) => ver_pre != ver_post, | ||||||
|             _ => false, |             _ => false, | ||||||
|  |           } | ||||||
|       }) |       }) | ||||||
|       .count() |       .count() | ||||||
|   ); |   ); | ||||||
|  | @ -167,11 +176,11 @@ fn main() { | ||||||
|   println!("Difference between the two generations:"); |   println!("Difference between the two generations:"); | ||||||
|   println!(); |   println!(); | ||||||
| 
 | 
 | ||||||
|     let width_changes = changed |   let width_changes = changed.iter().filter(|&&p| { | ||||||
|         .iter() |     match (pre.get(p), post.get(p)) { | ||||||
|         .filter(|&&p| match (pre.get(p), post.get(p)) { |  | ||||||
|       (Some(version_pre), Some(version_post)) => version_pre != version_post, |       (Some(version_pre), Some(version_post)) => version_pre != version_post, | ||||||
|       _ => false, |       _ => false, | ||||||
|  |     } | ||||||
|   }); |   }); | ||||||
| 
 | 
 | ||||||
|   let col_width = added |   let col_width = added | ||||||
|  | @ -200,15 +209,17 @@ fn main() { | ||||||
|         println!("Before: {pre_size} MiB"); |         println!("Before: {pre_size} MiB"); | ||||||
|         println!("After: {post_size} MiB"); |         println!("After: {post_size} MiB"); | ||||||
|         println!("Difference: {} MiB", post_size - pre_size); |         println!("Difference: {} MiB", post_size - pre_size); | ||||||
|             } |       }, | ||||||
|       (Ok(Err(e)), _) | (_, Ok(Err(e))) => { |       (Ok(Err(e)), _) | (_, Ok(Err(e))) => { | ||||||
|         error!("Error getting closure size: {e}"); |         error!("Error getting closure size: {e}"); | ||||||
|         eprintln!("Error getting closure size: {e}"); |         eprintln!("Error getting closure size: {e}"); | ||||||
|             } |       }, | ||||||
|       _ => { |       _ => { | ||||||
|         error!("Failed to get closure size information due to a thread error"); |         error!("Failed to get closure size information due to a thread error"); | ||||||
|                 eprintln!("Error: Failed to get closure size information due to a thread error"); |         eprintln!( | ||||||
|             } |           "Error: Failed to get closure size information due to a thread error" | ||||||
|  |         ); | ||||||
|  |       }, | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
| } | } | ||||||
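main computes the two closure sizes on separate threads (hence the thread import and the dedicated "thread error" branch above), so the match pairs an outer Result from joining each thread with an inner Result from the store query itself. A minimal sketch of that shape, with a stand-in query (fake_closure_size is invented so the sketch runs on its own):

use std::thread;

// Stand-in for the real store query; only here to make the sketch self-contained.
fn fake_closure_size(path: &'static str) -> Result<i64, String> {
  Ok(path.len() as i64)
}

fn main() {
  let pre = thread::spawn(|| fake_closure_size("/run/booted-system"));
  let post = thread::spawn(|| fake_closure_size("/run/current-system"));

  match (pre.join(), post.join()) {
    // Both threads joined and both queries succeeded.
    (Ok(Ok(pre_size)), Ok(Ok(post_size))) => {
      println!("Difference: {}", post_size - pre_size);
    },
    // One of the queries failed inside its thread.
    (Ok(Err(e)), _) | (_, Ok(Err(e))) => {
      eprintln!("Error getting closure size: {e}");
    },
    // A thread panicked before returning a result.
    _ => eprintln!("Failed to get closure size information due to a thread error"),
  }
}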

src/print.rs: 52 lines changed

							|  | @ -1,10 +1,14 @@ | ||||||
| use core::str; | use core::str; | ||||||
| use regex::Regex; |  | ||||||
| use std::{ | use std::{ | ||||||
|     collections::{HashMap, HashSet}, |   collections::{ | ||||||
|  |     HashMap, | ||||||
|  |     HashSet, | ||||||
|  |   }, | ||||||
|   string::ToString, |   string::ToString, | ||||||
|   sync::OnceLock, |   sync::OnceLock, | ||||||
| }; | }; | ||||||
|  | 
 | ||||||
|  | use regex::Regex; | ||||||
| use yansi::Paint; | use yansi::Paint; | ||||||
| 
 | 
 | ||||||
| /// diffs two strings character by character, and returns a tuple of strings
 | /// diffs two strings character by character, and returns a tuple of strings
 | ||||||
|  | @ -12,13 +16,14 @@ use yansi::Paint; | ||||||
| ///
 | ///
 | ||||||
| /// # Returns:
 | /// # Returns:
 | ||||||
| ///
 | ///
 | ||||||
| /// * (String, String) - The differing chars being red in the left, and green in the right one.
 | /// * (String, String) - The differing chars being red in the left, and green in
 | ||||||
|  | ///   the right one.
 | ||||||
| fn diff_versions(left: &str, right: &str) -> (String, String) { | fn diff_versions(left: &str, right: &str) -> (String, String) { | ||||||
|   let mut prev = "\x1b[33m".to_string(); |   let mut prev = "\x1b[33m".to_string(); | ||||||
|   let mut post = "\x1b[33m".to_string(); |   let mut post = "\x1b[33m".to_string(); | ||||||
| 
 | 
 | ||||||
|     // We only have to filter the left once, since we stop if the left one is empty.
 |   // We only have to filter the left once, since we stop if the left one is
 | ||||||
|     // We do this to display things like -man, -dev properly.
 |   // empty. We do this to display things like -man, -dev properly.
 | ||||||
|   let matches = name_regex().captures(left); |   let matches = name_regex().captures(left); | ||||||
|   let mut suffix = String::new(); |   let mut suffix = String::new(); | ||||||
| 
 | 
 | ||||||
|  | @ -36,16 +41,16 @@ fn diff_versions(left: &str, right: &str) -> (String, String) { | ||||||
|         let string_to_push = format!("{l}"); |         let string_to_push = format!("{l}"); | ||||||
|         prev.push_str(&string_to_push); |         prev.push_str(&string_to_push); | ||||||
|         post.push_str(&string_to_push); |         post.push_str(&string_to_push); | ||||||
|             } |       }, | ||||||
|       diff::Result::Left(l) => { |       diff::Result::Left(l) => { | ||||||
|         let string_to_push = format!("\x1b[1;91m{l}"); |         let string_to_push = format!("\x1b[1;91m{l}"); | ||||||
|         prev.push_str(&string_to_push); |         prev.push_str(&string_to_push); | ||||||
|             } |       }, | ||||||
| 
 | 
 | ||||||
|       diff::Result::Right(r) => { |       diff::Result::Right(r) => { | ||||||
|         let string_to_push = format!("\x1b[1;92m{r}"); |         let string_to_push = format!("\x1b[1;92m{r}"); | ||||||
|         post.push_str(&string_to_push); |         post.push_str(&string_to_push); | ||||||
|             } |       }, | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
| 
 | 
 | ||||||
|  | @ -53,7 +58,7 @@ fn diff_versions(left: &str, right: &str) -> (String, String) { | ||||||
|   prev.push_str(&format!("\x1b[33m{}", &suffix)); |   prev.push_str(&format!("\x1b[33m{}", &suffix)); | ||||||
|   post.push_str(&format!("\x1b[33m{}", &suffix)); |   post.push_str(&format!("\x1b[33m{}", &suffix)); | ||||||
| 
 | 
 | ||||||
|     //reset
 |   // reset
 | ||||||
|   prev.push_str("\x1b[0m"); |   prev.push_str("\x1b[0m"); | ||||||
|   post.push_str("\x1b[0m"); |   post.push_str("\x1b[0m"); | ||||||
| 
 | 
 | ||||||
|  | @ -61,7 +66,11 @@ fn diff_versions(left: &str, right: &str) -> (String, String) { | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| /// print the packages added between two closures.
 | /// print the packages added between two closures.
 | ||||||
| pub fn print_added(set: &HashSet<&str>, post: &HashMap<&str, HashSet<&str>>, col_width: usize) { | pub fn print_added( | ||||||
|  |   set: &HashSet<&str>, | ||||||
|  |   post: &HashMap<&str, HashSet<&str>>, | ||||||
|  |   col_width: usize, | ||||||
|  | ) { | ||||||
|   println!("{}", "Packages added:".underline().bold()); |   println!("{}", "Packages added:".underline().bold()); | ||||||
| 
 | 
 | ||||||
|   // Use sorted outpu
 |   // Use sorted outpu
 | ||||||
|  | @ -87,7 +96,11 @@ pub fn print_added(set: &HashSet<&str>, post: &HashMap<&str, HashSet<&str>>, col | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| /// print the packages removed between two closures.
 | /// print the packages removed between two closures.
 | ||||||
| pub fn print_removed(set: &HashSet<&str>, pre: &HashMap<&str, HashSet<&str>>, col_width: usize) { | pub fn print_removed( | ||||||
|  |   set: &HashSet<&str>, | ||||||
|  |   pre: &HashMap<&str, HashSet<&str>>, | ||||||
|  |   col_width: usize, | ||||||
|  | ) { | ||||||
|   println!("{}", "Packages removed:".underline().bold()); |   println!("{}", "Packages removed:".underline().bold()); | ||||||
| 
 | 
 | ||||||
|   // Use sorted output for more predictable and readable results
 |   // Use sorted output for more predictable and readable results
 | ||||||
|  | @ -118,7 +131,7 @@ pub fn print_changes( | ||||||
|   post: &HashMap<&str, HashSet<&str>>, |   post: &HashMap<&str, HashSet<&str>>, | ||||||
|   col_width: usize, |   col_width: usize, | ||||||
| ) { | ) { | ||||||
|     println!("{}", "Version changes:".underline().bold()); |   println!("{}", "Versions changed:".underline().bold()); | ||||||
| 
 | 
 | ||||||
|   // Use sorted output for more predictable and readable results
 |   // Use sorted output for more predictable and readable results
 | ||||||
|   let mut changes = Vec::new(); |   let mut changes = Vec::new(); | ||||||
|  | @ -132,11 +145,13 @@ pub fn print_changes( | ||||||
|   } |   } | ||||||
| 
 | 
 | ||||||
|   // Sort by package name for consistent output
 |   // Sort by package name for consistent output
 | ||||||
|     changes.sort_by(|(a, _, _), (b, _, _)| a.cmp(b)); |   changes.sort_by(|(a, ..), (b, ..)| a.cmp(b)); | ||||||
| 
 | 
 | ||||||
|   for (p, ver_pre, ver_post) in changes { |   for (p, ver_pre, ver_post) in changes { | ||||||
|         let mut version_vec_pre = ver_pre.difference(ver_post).copied().collect::<Vec<_>>(); |     let mut version_vec_pre = | ||||||
|         let mut version_vec_post = ver_post.difference(ver_pre).copied().collect::<Vec<_>>(); |       ver_pre.difference(ver_post).copied().collect::<Vec<_>>(); | ||||||
|  |     let mut version_vec_post = | ||||||
|  |       ver_post.difference(ver_pre).copied().collect::<Vec<_>>(); | ||||||
| 
 | 
 | ||||||
|     version_vec_pre.sort_unstable(); |     version_vec_pre.sort_unstable(); | ||||||
|     version_vec_post.sort_unstable(); |     version_vec_post.sort_unstable(); | ||||||
|  | @ -158,11 +173,12 @@ pub fn print_changes( | ||||||
|     } else { |     } else { | ||||||
|       let version_str_pre = version_vec_pre.join(", "); |       let version_str_pre = version_vec_pre.join(", "); | ||||||
|       let version_str_post = version_vec_post.join(", "); |       let version_str_post = version_vec_post.join(", "); | ||||||
|             (diffed_pre, diffed_post) = diff_versions(&version_str_pre, &version_str_post); |       (diffed_pre, diffed_post) = | ||||||
|  |         diff_versions(&version_str_pre, &version_str_post); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|         // push a space to the diffed_pre, if it is non-empty, we do this here and not in the println
 |     // push a space to the diffed_pre, if it is non-empty, we do this here and
 | ||||||
|         // in order to properly align the ±.
 |     // not in the println in order to properly align the ±.
 | ||||||
|     if !version_vec_pre.is_empty() { |     if !version_vec_pre.is_empty() { | ||||||
|       let mut tmp = " ".to_string(); |       let mut tmp = " ".to_string(); | ||||||
|       tmp.push_str(&diffed_pre); |       tmp.push_str(&diffed_pre); | ||||||
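diff_versions colours a character-level diff of two version strings with raw ANSI escapes: yellow as the base colour, bright red for characters only in the old version, bright green for characters only in the new one. A stripped-down sketch of the same idea using the diff crate the project already depends on, without the suffix handling the real function does for parts like "-man" or "-dev":

// Returns the old and new version strings with per-character colouring.
fn color_char_diff(left: &str, right: &str) -> (String, String) {
  let mut old = String::from("\x1b[33m");
  let mut new = String::from("\x1b[33m");

  for step in diff::chars(left, right) {
    match step {
      // Character present in both versions: keep the base colour.
      diff::Result::Both(c, _) => {
        old.push(c);
        new.push(c);
      },
      // Only in the old version: bright red on the left.
      diff::Result::Left(c) => old.push_str(&format!("\x1b[1;91m{c}")),
      // Only in the new version: bright green on the right.
      diff::Result::Right(c) => new.push_str(&format!("\x1b[1;92m{c}")),
    }
  }

  old.push_str("\x1b[0m");
  new.push_str("\x1b[0m");
  (old, new)
}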

src/store.rs: 194 lines changed

							|  | @ -1,15 +1,69 @@ | ||||||
| use std::collections::HashMap; | use std::{ | ||||||
|  |   path::{ | ||||||
|  |     Path, | ||||||
|  |     PathBuf, | ||||||
|  |   }, | ||||||
|  |   result, | ||||||
|  | }; | ||||||
| 
 | 
 | ||||||
| use crate::error::AppError; | use anyhow::{ | ||||||
|  |   Context as _, | ||||||
|  |   Result, | ||||||
|  | }; | ||||||
|  | use derive_more::Deref; | ||||||
|  | use ref_cast::RefCast; | ||||||
| use rusqlite::Connection; | use rusqlite::Connection; | ||||||
|  | use rustc_hash::{ | ||||||
|  |   FxBuildHasher, | ||||||
|  |   FxHashMap, | ||||||
|  | }; | ||||||
| 
 | 
 | ||||||
| // Use type alias for Result with our custom error type
 | macro_rules! path_to_str { | ||||||
| type Result<T> = std::result::Result<T, AppError>; |   ($path:ident) => { | ||||||
|  |     let $path = $path.canonicalize().with_context(|| { | ||||||
|  |       format!( | ||||||
|  |         "failed to canonicalize path '{path}'", | ||||||
|  |         path = $path.display(), | ||||||
|  |       ) | ||||||
|  |     })?; | ||||||
| 
 | 
 | ||||||
| const DATABASE_URL: &str = "/nix/var/nix/db/db.sqlite"; |     let $path = $path.to_str().with_context(|| { | ||||||
|  |       format!( | ||||||
|  |         "failed to convert path '{path}' to valid unicode", | ||||||
|  |         path = $path.display(), | ||||||
|  |       ) | ||||||
|  |     })?; | ||||||
|  |   }; | ||||||
|  | } | ||||||
| 
 | 
 | ||||||
| const QUERY_PKGS: &str = " | #[derive(Deref, Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||||||
| WITH RECURSIVE | pub struct DerivationId(i64); | ||||||
|  | 
 | ||||||
|  | #[expect(clippy::module_name_repetitions)] | ||||||
|  | #[derive(RefCast, Deref, Debug, PartialEq, Eq)] | ||||||
|  | #[repr(transparent)] | ||||||
|  | pub struct StorePath(Path); | ||||||
|  | 
 | ||||||
|  | #[expect(clippy::module_name_repetitions)] | ||||||
|  | #[derive(Deref, Debug, Clone, PartialEq, Eq)] | ||||||
|  | pub struct StorePathBuf(PathBuf); | ||||||
|  | 
 | ||||||
|  | /// Connects to the Nix database.
 | ||||||
|  | pub fn connect() -> Result<Connection> { | ||||||
|  |   const DATABASE_PATH: &str = "/nix/var/nix/db/db.sqlite"; | ||||||
|  | 
 | ||||||
|  |   Connection::open(DATABASE_PATH).with_context(|| { | ||||||
|  |     format!("failed to connect to Nix database at {DATABASE_PATH}") | ||||||
|  |   }) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | /// Gathers all derivations that the given store path depends on.
 | ||||||
|  | pub fn query_depdendents( | ||||||
|  |   connection: &mut Connection, | ||||||
|  |   path: &StorePath, | ||||||
|  | ) -> Result<Vec<(DerivationId, StorePathBuf)>> { | ||||||
|  |   const QUERY: &str = " | ||||||
|  |     WITH RECURSIVE | ||||||
|       graph(p) AS ( |       graph(p) AS ( | ||||||
|         SELECT id 
 |         SELECT id 
 | ||||||
|         FROM ValidPaths |         FROM ValidPaths | ||||||
|  | @ -18,12 +72,34 @@ WITH RECURSIVE | ||||||
|         SELECT reference FROM Refs |         SELECT reference FROM Refs | ||||||
|         JOIN graph ON referrer = p |         JOIN graph ON referrer = p | ||||||
|       ) |       ) | ||||||
| SELECT id, path from graph |     SELECT id, path from graph | ||||||
| JOIN ValidPaths ON id = p; |     JOIN ValidPaths ON id = p; | ||||||
| ";
 |   ";
 | ||||||
| 
 | 
 | ||||||
| const QUERY_CLOSURE_SIZE: &str = " |   path_to_str!(path); | ||||||
| WITH RECURSIVE | 
 | ||||||
|  |   let packages: result::Result<Vec<(DerivationId, StorePathBuf)>, _> = | ||||||
|  |     connection | ||||||
|  |       .prepare_cached(QUERY)? | ||||||
|  |       .query_map([path], |row| { | ||||||
|  |         Ok(( | ||||||
|  |           DerivationId(row.get(0)?), | ||||||
|  |           StorePathBuf(row.get::<_, String>(1)?.into()), | ||||||
|  |         )) | ||||||
|  |       })? | ||||||
|  |       .collect(); | ||||||
|  | 
 | ||||||
|  |   Ok(packages?) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | /// Gets the total closure size of the given store path by summing up the nar
 | ||||||
|  | /// size of all depdendent derivations.
 | ||||||
|  | pub fn query_closure_size( | ||||||
|  |   connection: &mut Connection, | ||||||
|  |   path: &StorePath, | ||||||
|  | ) -> Result<usize> { | ||||||
|  |   const QUERY: &str = " | ||||||
|  |     WITH RECURSIVE | ||||||
|       graph(p) AS ( |       graph(p) AS ( | ||||||
|         SELECT id 
 |         SELECT id 
 | ||||||
|         FROM ValidPaths |         FROM ValidPaths | ||||||
|  | @ -32,12 +108,31 @@ WITH RECURSIVE | ||||||
|         SELECT reference FROM Refs |         SELECT reference FROM Refs | ||||||
|         JOIN graph ON referrer = p |         JOIN graph ON referrer = p | ||||||
|       ) |       ) | ||||||
| SELECT SUM(narSize) as sum from graph |     SELECT SUM(narSize) as sum from graph | ||||||
| JOIN ValidPaths ON p = id; |     JOIN ValidPaths ON p = id; | ||||||
| ";
 |   ";
 | ||||||
| 
 | 
 | ||||||
| const QUERY_DEPENDENCY_GRAPH: &str = " |   path_to_str!(path); | ||||||
| WITH RECURSIVE | 
 | ||||||
|  |   let closure_size = connection | ||||||
|  |     .prepare_cached(QUERY)? | ||||||
|  |     .query_row([path], |row| row.get(0))?; | ||||||
|  | 
 | ||||||
|  |   Ok(closure_size) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | /// Gathers the complete dependency graph of of the store path as an adjacency
 | ||||||
|  | /// list.
 | ||||||
|  | ///
 | ||||||
|  | /// We might want to collect the paths in the graph directly as
 | ||||||
|  | /// well in the future, depending on how much we use them
 | ||||||
|  | /// in the operations on the graph.
 | ||||||
|  | pub fn query_dependency_graph( | ||||||
|  |   connection: &mut Connection, | ||||||
|  |   path: &StorePath, | ||||||
|  | ) -> Result<FxHashMap<DerivationId, Vec<DerivationId>>> { | ||||||
|  |   const QUERY: &str = " | ||||||
|  |     WITH RECURSIVE | ||||||
|       graph(p, c) AS ( |       graph(p, c) AS ( | ||||||
|         SELECT id as par, reference as chd 
 |         SELECT id as par, reference as chd 
 | ||||||
|         FROM ValidPaths |         FROM ValidPaths | ||||||
|  | @ -47,66 +142,23 @@ WITH RECURSIVE | ||||||
|         SELECT referrer as par, reference as chd FROM Refs |         SELECT referrer as par, reference as chd FROM Refs | ||||||
|         JOIN graph ON referrer = c |         JOIN graph ON referrer = c | ||||||
|       ) |       ) | ||||||
| SELECT p, c from graph; |     SELECT p, c from graph; | ||||||
| ";
 |   ";
 | ||||||
| 
 | 
 | ||||||
| /// executes a query on the nix db directly
 |   path_to_str!(path); | ||||||
| /// to gather all derivations that the derivation given by the path
 |  | ||||||
| /// depends on
 |  | ||||||
| ///
 |  | ||||||
| /// The ids of the derivations in the database are returned as well, since these
 |  | ||||||
| /// can be used to later convert nodes (represented by the the ids) of the
 |  | ||||||
| /// dependency graph to actual paths
 |  | ||||||
| ///
 |  | ||||||
| /// in the future, we might wan't to switch to async
 |  | ||||||
| pub fn get_packages(path: &std::path::Path) -> Result<Vec<(i64, String)>> { |  | ||||||
|     // resolve symlinks and convert to a string
 |  | ||||||
|     let p: String = path.canonicalize()?.to_string_lossy().into_owned(); |  | ||||||
|     let conn = Connection::open(DATABASE_URL)?; |  | ||||||
| 
 | 
 | ||||||
|     let mut stmt = conn.prepare_cached(QUERY_PKGS)?; |   let mut adj = | ||||||
|     let queried_pkgs: std::result::Result<Vec<(i64, String)>, _> = stmt |     FxHashMap::<DerivationId, Vec<DerivationId>>::with_hasher(FxBuildHasher); | ||||||
|         .query_map([p], |row| Ok((row.get(0)?, row.get(1)?)))? |  | ||||||
|         .collect(); |  | ||||||
|     Ok(queried_pkgs?) |  | ||||||
| } |  | ||||||
| 
 | 
 | ||||||
| /// executes a query on the nix db directly
 |   let mut statement = connection.prepare_cached(QUERY)?; | ||||||
| /// to get the total closure size of the derivation
 |  | ||||||
| /// by summing up the nar size of all derivations
 |  | ||||||
| /// depending on the derivation
 |  | ||||||
| ///
 |  | ||||||
| /// in the future, we might wan't to switch to async
 |  | ||||||
| pub fn get_closure_size(path: &std::path::Path) -> Result<i64> { |  | ||||||
|     // resolve symlinks and convert to a string
 |  | ||||||
|     let p: String = path.canonicalize()?.to_string_lossy().into_owned(); |  | ||||||
|     let conn = Connection::open(DATABASE_URL)?; |  | ||||||
| 
 | 
 | ||||||
|     let mut stmt = conn.prepare_cached(QUERY_CLOSURE_SIZE)?; |   let edges = statement.query_map([path], |row| { | ||||||
|     let queried_sum = stmt.query_row([p], |row| row.get(0))?; |     Ok((DerivationId(row.get(0)?), DerivationId(row.get(1)?))) | ||||||
|     Ok(queried_sum) |   })?; | ||||||
| } |  | ||||||
| 
 | 
 | ||||||
| /// returns the complete dependency graph of
 |   for row in edges { | ||||||
| /// of the derivation as an adjacency list. The nodes are
 |  | ||||||
| /// represented by the DB ids
 |  | ||||||
| ///
 |  | ||||||
| /// We might want to collect the paths in the graph directly as
 |  | ||||||
| /// well in the future, depending on how much we use them
 |  | ||||||
| /// in the operations on the graph
 |  | ||||||
| ///
 |  | ||||||
| /// The mapping from id to graph can be obtained by using [``get_packages``]
 |  | ||||||
| pub fn get_dependency_graph(path: &std::path::Path) -> Result<HashMap<i64, Vec<i64>>> { |  | ||||||
|     // resolve symlinks and convert to a string
 |  | ||||||
|     let p: String = path.canonicalize()?.to_string_lossy().into_owned(); |  | ||||||
|     let conn = Connection::open(DATABASE_URL)?; |  | ||||||
| 
 |  | ||||||
|     let mut stmt = conn.prepare_cached(QUERY_DEPENDENCY_GRAPH)?; |  | ||||||
|     let mut adj = HashMap::<i64, Vec<i64>>::new(); |  | ||||||
|     let queried_edges = |  | ||||||
|         stmt.query_map([p], |row| Ok::<(i64, i64), _>((row.get(0)?, row.get(1)?)))?; |  | ||||||
|     for row in queried_edges { |  | ||||||
|     let (from, to) = row?; |     let (from, to) = row?; | ||||||
|  | 
 | ||||||
|     adj.entry(from).or_default().push(to); |     adj.entry(from).or_default().push(to); | ||||||
|     adj.entry(to).or_default(); |     adj.entry(to).or_default(); | ||||||
|   } |   } | ||||||
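The refactor replaces the old free functions, which each opened their own SQLite connection, with a single connect() plus query_* functions that borrow the connection, and it wraps raw ids and paths in the DerivationId, StorePath and StorePathBuf newtypes. A sketch of how a caller might use the new API (illustrative only; in particular, constructing a borrowed StorePath with ref_cast is an assumption based on the derived RefCast impl):

use std::path::Path;

use dix::store::{
  self,
  StorePath,
};
use ref_cast::RefCast;

fn example() -> anyhow::Result<()> {
  let mut connection = store::connect()?;
  // RefCast turns a &Path into a &StorePath without copying (assumed usage).
  let system = StorePath::ref_cast(Path::new("/run/current-system"));

  // Every derivation the path depends on (function name spelled as in the diff).
  let packages = store::query_depdendents(&mut connection, system)?;
  println!("{} paths in the closure", packages.len());

  // Total nar size of the closure (bytes), shown here in MiB.
  let size = store::query_closure_size(&mut connection, system)?;
  println!("closure size: {} MiB", size / (1024 * 1024));

  // Dependency graph as an adjacency list keyed by derivation id.
  let graph = store::query_dependency_graph(&mut connection, system)?;
  println!("{} nodes in the dependency graph", graph.len());

  Ok(())
}

Keeping one Connection and passing it by &mut also lets prepare_cached reuse prepared statements across calls instead of reopening the database for every query.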

src/util.rs: 53 lines changed

							|  | @ -1,13 +1,17 @@ | ||||||
| use std::{ | use std::{ | ||||||
|   cmp::Ordering, |   cmp::Ordering, | ||||||
|     collections::{HashMap, HashSet}, |   collections::{ | ||||||
|  |     HashMap, | ||||||
|  |     HashSet, | ||||||
|  |   }, | ||||||
|   sync::OnceLock, |   sync::OnceLock, | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| use crate::error::AppError; |  | ||||||
| use log::debug; | use log::debug; | ||||||
| use regex::Regex; | use regex::Regex; | ||||||
| 
 | 
 | ||||||
|  | use crate::error::AppError; | ||||||
|  | 
 | ||||||
| // Use type alias for Result with our custom error type
 | // Use type alias for Result with our custom error type
 | ||||||
| type Result<T> = std::result::Result<T, AppError>; | type Result<T> = std::result::Result<T, AppError>; | ||||||
| 
 | 
 | ||||||
|  | @ -21,13 +25,18 @@ enum VersionComponent { | ||||||
| 
 | 
 | ||||||
| impl std::cmp::Ord for VersionComponent { | impl std::cmp::Ord for VersionComponent { | ||||||
|   fn cmp(&self, other: &Self) -> Ordering { |   fn cmp(&self, other: &Self) -> Ordering { | ||||||
|         use VersionComponent::{Number, Text}; |     use VersionComponent::{ | ||||||
|  |       Number, | ||||||
|  |       Text, | ||||||
|  |     }; | ||||||
|     match (self, other) { |     match (self, other) { | ||||||
|       (Number(x), Number(y)) => x.cmp(y), |       (Number(x), Number(y)) => x.cmp(y), | ||||||
|             (Text(x), Text(y)) => match (x.as_str(), y.as_str()) { |       (Text(x), Text(y)) => { | ||||||
|  |         match (x.as_str(), y.as_str()) { | ||||||
|           ("pre", _) => Ordering::Less, |           ("pre", _) => Ordering::Less, | ||||||
|           (_, "pre") => Ordering::Greater, |           (_, "pre") => Ordering::Greater, | ||||||
|           _ => x.cmp(y), |           _ => x.cmp(y), | ||||||
|  |         } | ||||||
|       }, |       }, | ||||||
|       (Text(_), Number(_)) => Ordering::Less, |       (Text(_), Number(_)) => Ordering::Less, | ||||||
|       (Number(_), Text(_)) => Ordering::Greater, |       (Number(_), Text(_)) => Ordering::Greater, | ||||||
|  | @ -77,7 +86,8 @@ impl Iterator for VersionComponentIterator<'_> { | ||||||
|       .take_while(|&c| c.is_ascii_digit() == is_digit && c != b'.' && c != b'-') |       .take_while(|&c| c.is_ascii_digit() == is_digit && c != b'.' && c != b'-') | ||||||
|       .count(); |       .count(); | ||||||
|     let component = |     let component = | ||||||
|             String::from_utf8_lossy(&self.v[self.pos..(self.pos + component_len)]).into_owned(); |       String::from_utf8_lossy(&self.v[self.pos..(self.pos + component_len)]) | ||||||
|  |         .into_owned(); | ||||||
| 
 | 
 | ||||||
|     // remember what chars we used
 |     // remember what chars we used
 | ||||||
|     self.pos += component_len; |     self.pos += component_len; | ||||||
|  | @ -106,12 +116,14 @@ pub fn compare_versions(a: &str, b: &str) -> Ordering { | ||||||
| 
 | 
 | ||||||
| /// Parses a nix store path to extract the packages name and version
 | /// Parses a nix store path to extract the packages name and version
 | ||||||
| ///
 | ///
 | ||||||
| /// This function first drops the inputs first 44 chars, since that is exactly the length of the /nix/store/... prefix. Then it matches that against our store path regex.
 | /// This function first drops the inputs first 44 chars, since that is exactly
 | ||||||
|  | /// the length of the /nix/store/... prefix. Then it matches that against our
 | ||||||
|  | /// store path regex.
 | ||||||
| ///
 | ///
 | ||||||
| /// # Returns
 | /// # Returns
 | ||||||
| ///
 | ///
 | ||||||
| /// * Result<(&'a str, &'a str)> - The Package's name and version, or an error if
 | /// * Result<(&'a str, &'a str)> - The Package's name and version, or an error
 | ||||||
| ///   one or both cannot be retrieved.
 | ///   if one or both cannot be retrieved.
 | ||||||
| pub fn get_version<'a>(pack: impl Into<&'a str>) -> Result<(&'a str, &'a str)> { | pub fn get_version<'a>(pack: impl Into<&'a str>) -> Result<(&'a str, &'a str)> { | ||||||
|   let path = pack.into(); |   let path = pack.into(); | ||||||
| 
 | 
 | ||||||
|  | @ -172,7 +184,10 @@ pub struct PackageDiff<'a> { | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| impl<'a> PackageDiff<'a> { | impl<'a> PackageDiff<'a> { | ||||||
|     pub fn new<S: AsRef<str> + 'a>(pkgs_pre: &'a [S], pkgs_post: &'a [S]) -> Self { |   pub fn new<S: AsRef<str> + 'a>( | ||||||
|  |     pkgs_pre: &'a [S], | ||||||
|  |     pkgs_post: &'a [S], | ||||||
|  |   ) -> Self { | ||||||
|     // Map from packages of the first closure to their version
 |     // Map from packages of the first closure to their version
 | ||||||
|     let mut pre = HashMap::<&str, HashSet<&str>>::new(); |     let mut pre = HashMap::<&str, HashSet<&str>>::new(); | ||||||
|     let mut post = HashMap::<&str, HashSet<&str>>::new(); |     let mut post = HashMap::<&str, HashSet<&str>>::new(); | ||||||
|  | @ -181,10 +196,10 @@ impl<'a> PackageDiff<'a> { | ||||||
|       match get_version(p.as_ref()) { |       match get_version(p.as_ref()) { | ||||||
|         Ok((name, version)) => { |         Ok((name, version)) => { | ||||||
|           pre.entry(name).or_default().insert(version); |           pre.entry(name).or_default().insert(version); | ||||||
|                 } |         }, | ||||||
|         Err(e) => { |         Err(e) => { | ||||||
|           debug!("Error parsing package version: {e}"); |           debug!("Error parsing package version: {e}"); | ||||||
|                 } |         }, | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  | @ -192,10 +207,10 @@ impl<'a> PackageDiff<'a> { | ||||||
|       match get_version(p.as_ref()) { |       match get_version(p.as_ref()) { | ||||||
|         Ok((name, version)) => { |         Ok((name, version)) => { | ||||||
|           post.entry(name).or_default().insert(version); |           post.entry(name).or_default().insert(version); | ||||||
|                 } |         }, | ||||||
|         Err(e) => { |         Err(e) => { | ||||||
|           debug!("Error parsing package version: {e}"); |           debug!("Error parsing package version: {e}"); | ||||||
|                 } |         }, | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  | @ -225,14 +240,15 @@ mod test { | ||||||
| 
 | 
 | ||||||
|   #[test] |   #[test] | ||||||
|   fn test_version_component_iter() { |   fn test_version_component_iter() { | ||||||
|         use super::VersionComponent::{Number, Text}; |     use super::VersionComponent::{ | ||||||
|  |       Number, | ||||||
|  |       Text, | ||||||
|  |     }; | ||||||
|     use crate::util::VersionComponentIterator; |     use crate::util::VersionComponentIterator; | ||||||
|     let v = "132.1.2test234-1-man----.--.......---------..---"; |     let v = "132.1.2test234-1-man----.--.......---------..---"; | ||||||
| 
 | 
 | ||||||
|     let comp: Vec<_> = VersionComponentIterator::new(v).collect(); |     let comp: Vec<_> = VersionComponentIterator::new(v).collect(); | ||||||
|         assert_eq!( |     assert_eq!(comp, [ | ||||||
|             comp, |  | ||||||
|             [ |  | ||||||
|       Number(132), |       Number(132), | ||||||
|       Number(1), |       Number(1), | ||||||
|       Number(2), |       Number(2), | ||||||
|  | @ -240,7 +256,6 @@ mod test { | ||||||
|       Number(234), |       Number(234), | ||||||
|       Number(1), |       Number(1), | ||||||
|       Text("man".into()) |       Text("man".into()) | ||||||
|             ] |     ]); | ||||||
|         ); |  | ||||||
|   } |   } | ||||||
| } | } | ||||||
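compare_versions walks both strings as streams of VersionComponents, so digit runs compare numerically rather than lexicographically and the textual "pre" marker sorts below everything else. Two checks of what that ordering implies, assuming components are compared left to right under the Ord impl shown above:

use std::cmp::Ordering;

use dix::util::compare_versions;

fn main() {
  // Numeric components compare as numbers, not strings: 10 > 9.
  assert_eq!(compare_versions("2.10", "2.9"), Ordering::Greater);

  // A "pre" component sorts below a numeric component at the same position.
  assert_eq!(compare_versions("1.2pre3", "1.2.3"), Ordering::Less);

  println!("ordering checks hold");
}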

Author: bloxx12