mirror of
				https://github.com/RGBCube/dix
				synced 2025-10-31 08:52:44 +00:00 
			
		
		
		
	feat: refactor store.rs
This commit is contained in:
		
							parent
							
								
									531fa0278f
								
							
						
					
					
						commit
						db09147da6
					
				
					 14 changed files with 1168 additions and 839 deletions
				
			
		
							
								
								
									
										7
									
								
								.gitignore
									
										
									
									
										vendored
									
									
								
							
							
						
						
									
										7
									
								
								.gitignore
									
										
									
									
										vendored
									
									
								
							|  | @ -1,9 +1,2 @@ | |||
| /.direnv | ||||
| /target | ||||
| 
 | ||||
| 
 | ||||
| # Added by cargo | ||||
| # | ||||
| # already existing elements were commented out | ||||
| 
 | ||||
| #/target | ||||
|  |  | |||
							
								
								
									
										30
									
								
								.rustfmt.toml
									
										
									
									
									
										Normal file
									
								
							
							
						
						
									
										30
									
								
								.rustfmt.toml
									
										
									
									
									
										Normal file
									
								
							|  | @ -0,0 +1,30 @@ | |||
| # Taken from https://github.com/cull-os/carcass. | ||||
| # Modified to have 2 space indents and 80 line width. | ||||
| 
 | ||||
| # float_literal_trailing_zero  = "Always" # TODO: Warning for some reason? | ||||
| condense_wildcard_suffixes   = true | ||||
| doc_comment_code_block_width = 80 | ||||
| edition                      = "2024"             # Keep in sync with Cargo.toml. | ||||
| enum_discrim_align_threshold = 60 | ||||
| force_explicit_abi           = false | ||||
| force_multiline_blocks       = true | ||||
| format_code_in_doc_comments  = true | ||||
| format_macro_matchers        = true | ||||
| format_strings               = true | ||||
| group_imports                = "StdExternalCrate" | ||||
| hex_literal_case             = "Upper" | ||||
| imports_granularity          = "Crate" | ||||
| imports_layout               = "Vertical" | ||||
| inline_attribute_width       = 60 | ||||
| match_block_trailing_comma   = true | ||||
| max_width                    = 80 | ||||
| newline_style                = "Unix" | ||||
| normalize_comments           = true | ||||
| normalize_doc_attributes     = true | ||||
| overflow_delimited_expr      = true | ||||
| struct_field_align_threshold = 60 | ||||
| tab_spaces                   = 2 | ||||
| unstable_features            = true | ||||
| use_field_init_shorthand     = true | ||||
| use_try_shorthand            = true | ||||
| wrap_comments                = true | ||||
							
								
								
									
										15
									
								
								.taplo.toml
									
										
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								.taplo.toml
									
										
									
									
									
										Normal file
									
								
							|  | @ -0,0 +1,15 @@ | |||
| # Taken from https://github.com/cull-os/carcass. | ||||
| 
 | ||||
| [formatting] | ||||
| align_entries         = true | ||||
| column_width          = 100 | ||||
| compact_arrays        = false | ||||
| reorder_inline_tables = true | ||||
| reorder_keys          = true | ||||
| 
 | ||||
| [[rule]] | ||||
| include = [ "**/Cargo.toml" ] | ||||
| keys    = [ "package" ] | ||||
| 
 | ||||
| [rule.formatting] | ||||
| reorder_keys = false | ||||
							
								
								
									
										79
									
								
								Cargo.lock
									
										
									
										generated
									
									
									
								
							
							
						
						
									
										79
									
								
								Cargo.lock
									
										
									
										generated
									
									
									
								
							|  | @ -61,6 +61,12 @@ dependencies = [ | |||
|  "windows-sys", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "anyhow" | ||||
| version = "1.0.98" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "atty" | ||||
| version = "0.2.14" | ||||
|  | @ -174,6 +180,15 @@ version = "1.0.3" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "convert_case" | ||||
| version = "0.7.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7" | ||||
| dependencies = [ | ||||
|  "unicode-segmentation", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "criterion" | ||||
| version = "0.3.6" | ||||
|  | @ -256,6 +271,28 @@ dependencies = [ | |||
|  "memchr", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "derive_more" | ||||
| version = "2.0.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" | ||||
| dependencies = [ | ||||
|  "derive_more-impl", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "derive_more-impl" | ||||
| version = "2.0.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" | ||||
| dependencies = [ | ||||
|  "convert_case", | ||||
|  "proc-macro2", | ||||
|  "quote", | ||||
|  "syn", | ||||
|  "unicode-xid", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "diff" | ||||
| version = "0.1.13" | ||||
|  | @ -266,14 +303,18 @@ checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" | |||
| name = "dix" | ||||
| version = "0.1.0" | ||||
| dependencies = [ | ||||
|  "anyhow", | ||||
|  "clap 4.5.37", | ||||
|  "criterion", | ||||
|  "derive_more", | ||||
|  "diff", | ||||
|  "env_logger", | ||||
|  "libc", | ||||
|  "log", | ||||
|  "ref-cast", | ||||
|  "regex", | ||||
|  "rusqlite", | ||||
|  "rustc-hash", | ||||
|  "thiserror", | ||||
|  "yansi", | ||||
| ] | ||||
|  | @ -562,6 +603,26 @@ dependencies = [ | |||
|  "crossbeam-utils", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "ref-cast" | ||||
| version = "1.0.24" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" | ||||
| dependencies = [ | ||||
|  "ref-cast-impl", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "ref-cast-impl" | ||||
| version = "1.0.24" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" | ||||
| dependencies = [ | ||||
|  "proc-macro2", | ||||
|  "quote", | ||||
|  "syn", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "regex" | ||||
| version = "1.11.1" | ||||
|  | @ -605,6 +666,12 @@ dependencies = [ | |||
|  "smallvec", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rustc-hash" | ||||
| version = "2.1.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rustversion" | ||||
| version = "1.0.20" | ||||
|  | @ -742,12 +809,24 @@ version = "1.0.18" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "unicode-segmentation" | ||||
| version = "1.12.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "unicode-width" | ||||
| version = "0.1.14" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "unicode-xid" | ||||
| version = "0.2.6" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "utf8parse" | ||||
| version = "0.2.2" | ||||
|  |  | |||
							
								
								
									
										128
									
								
								Cargo.toml
									
										
									
									
									
								
							
							
						
						
									
										128
									
								
								Cargo.toml
									
										
									
									
									
								
							|  | @ -1,39 +1,119 @@ | |||
| [package] | ||||
| name = "dix" | ||||
| name    = "dix" | ||||
| version = "0.1.0" | ||||
| edition = "2024" | ||||
| 
 | ||||
| [[bin]] | ||||
| name = "dix" | ||||
| path = "src/main.rs" | ||||
| 
 | ||||
| [lib] | ||||
| name = "dixlib" | ||||
| path = "src/lib.rs" | ||||
| 
 | ||||
| 
 | ||||
| [dependencies] | ||||
| clap = { version = "4.5.37", features = ["derive"] } | ||||
| regex = "1.11.1" | ||||
| yansi = "1.0.1" | ||||
| thiserror = "2.0.12" | ||||
| log = "0.4.20" | ||||
| anyhow     = "1.0.98" | ||||
| clap       = { version = "4.5.37", features = [ "derive" ] } | ||||
| derive_more = { version = "2.0.1", features = ["full"] } | ||||
| diff       = "0.1.13" | ||||
| env_logger = "0.11.3" | ||||
| rusqlite = { version = "0.35.0", features = ["bundled"] } | ||||
| diff = "0.1.13" | ||||
| log        = "0.4.20" | ||||
| ref-cast = "1.0.24" | ||||
| regex      = "1.11.1" | ||||
| rusqlite   = { version = "0.35.0", features = [ "bundled" ] } | ||||
| rustc-hash = "2.1.1" | ||||
| thiserror  = "2.0.12" | ||||
| yansi      = "1.0.1" | ||||
| 
 | ||||
| [dev-dependencies] | ||||
| criterion = "0.3" | ||||
| libc = "0.2" | ||||
| libc      = "0.2" | ||||
| 
 | ||||
| [[bench]] | ||||
| name = "store" | ||||
| harness=false | ||||
| harness = false | ||||
| name    = "store" | ||||
| 
 | ||||
| [[bench]] | ||||
| name = "print" | ||||
| harness=false | ||||
| harness = false | ||||
| name    = "print" | ||||
| 
 | ||||
| [[bench]] | ||||
| name = "util" | ||||
| harness=false | ||||
| harness = false | ||||
| name    = "util" | ||||
| 
 | ||||
| [lints.clippy] | ||||
| pedantic = { level = "warn", priority = -1 } | ||||
| 
 | ||||
| blanket_clippy_restriction_lints = "allow" | ||||
| restriction                      = { level = "warn", priority = -1 } | ||||
| 
 | ||||
| alloc_instead_of_core             = "allow" | ||||
| allow_attributes_without_reason   = "allow" | ||||
| arbitrary_source_item_ordering    = "allow" | ||||
| arithmetic_side_effects           = "allow" | ||||
| as_conversions                    = "allow" | ||||
| as_pointer_underscore             = "allow" | ||||
| as_underscore                     = "allow" | ||||
| big_endian_bytes                  = "allow" | ||||
| clone_on_ref_ptr                  = "allow" | ||||
| dbg_macro                         = "allow" | ||||
| disallowed_script_idents          = "allow" | ||||
| else_if_without_else              = "allow" | ||||
| error_impl_error                  = "allow" | ||||
| exhaustive_enums                  = "allow" | ||||
| exhaustive_structs                = "allow" | ||||
| expect_used                       = "allow" | ||||
| field_scoped_visibility_modifiers = "allow" | ||||
| float_arithmetic                  = "allow" | ||||
| host_endian_bytes                 = "allow" | ||||
| impl_trait_in_params              = "allow" | ||||
| implicit_return                   = "allow" | ||||
| indexing_slicing                  = "allow" | ||||
| inline_asm_x86_intel_syntax       = "allow" | ||||
| integer_division                  = "allow" | ||||
| integer_division_remainder_used   = "allow" | ||||
| large_include_file                = "allow" | ||||
| let_underscore_must_use           = "allow" | ||||
| let_underscore_untyped            = "allow" | ||||
| little_endian_bytes               = "allow" | ||||
| map_err_ignore                    = "allow" | ||||
| match_same_arms                   = "allow" | ||||
| missing_assert_message            = "allow" | ||||
| missing_docs_in_private_items     = "allow" | ||||
| missing_errors_doc                = "allow" | ||||
| missing_inline_in_public_items    = "allow" | ||||
| missing_panics_doc                = "allow" | ||||
| missing_trait_methods             = "allow" | ||||
| mod_module_files                  = "allow" | ||||
| multiple_inherent_impl            = "allow" | ||||
| mutex_atomic                      = "allow" | ||||
| mutex_integer                     = "allow" | ||||
| new_without_default               = "allow" | ||||
| non_ascii_literal                 = "allow" | ||||
| panic                             = "allow" | ||||
| panic_in_result_fn                = "allow" | ||||
| partial_pub_fields                = "allow" | ||||
| print_stderr                      = "allow" | ||||
| print_stdout                      = "allow" | ||||
| pub_use                           = "allow" | ||||
| pub_with_shorthand                = "allow" | ||||
| pub_without_shorthand             = "allow" | ||||
| question_mark_used                = "allow" | ||||
| ref_patterns                      = "allow" | ||||
| renamed_function_params           = "allow" | ||||
| same_name_method                  = "allow" | ||||
| semicolon_outside_block           = "allow" | ||||
| separated_literal_suffix          = "allow" | ||||
| shadow_reuse                      = "allow" | ||||
| shadow_same                       = "allow" | ||||
| shadow_unrelated                  = "allow" | ||||
| single_call_fn                    = "allow" | ||||
| single_char_lifetime_names        = "allow" | ||||
| single_match_else                 = "allow" | ||||
| std_instead_of_alloc              = "allow" | ||||
| std_instead_of_core               = "allow" | ||||
| string_add                        = "allow" | ||||
| string_slice                      = "allow" | ||||
| todo                              = "allow" | ||||
| too_many_lines                    = "allow" | ||||
| try_err                           = "allow" | ||||
| unimplemented                     = "allow" | ||||
| unnecessary_safety_comment        = "allow" | ||||
| unnecessary_safety_doc            = "allow" | ||||
| unreachable                       = "allow" | ||||
| unwrap_in_result                  = "allow" | ||||
| unwrap_used                       = "allow" | ||||
| use_debug                         = "allow" | ||||
| wildcard_enum_match_arm           = "allow" | ||||
|  |  | |||
|  | @ -1,89 +1,94 @@ | |||
| use std::{ | ||||
|     env, | ||||
|     fs::{self, DirEntry}, | ||||
|     path::PathBuf, | ||||
|     sync::OnceLock, | ||||
|   env, | ||||
|   fs, | ||||
|   path::PathBuf, | ||||
|   sync::OnceLock, | ||||
| }; | ||||
| 
 | ||||
| use dixlib::{store, util::PackageDiff}; | ||||
| use dix::{ | ||||
|   store, | ||||
|   util::PackageDiff, | ||||
| }; | ||||
| 
 | ||||
| /// tries to get the path of the oldest nixos system derivation
 | ||||
| /// this function is pretty hacky and only used so that
 | ||||
| /// you don't have to specify a specific derivation to
 | ||||
| /// run the benchmarks
 | ||||
| fn get_oldest_nixos_system() -> Option<PathBuf> { | ||||
|     let profile_dir = fs::read_dir("/nix/var/nix/profiles").ok()?; | ||||
|   let profile_dir = fs::read_dir("/nix/var/nix/profiles").ok()?; | ||||
| 
 | ||||
|     let files = profile_dir.filter_map(Result::ok).filter_map(|entry| { | ||||
|         entry | ||||
|             .file_type() | ||||
|             .ok() | ||||
|             .and_then(|f| f.is_symlink().then_some(entry.path())) | ||||
|     }); | ||||
|   let files = profile_dir.filter_map(Result::ok).filter_map(|entry| { | ||||
|     entry | ||||
|       .file_type() | ||||
|       .ok() | ||||
|       .and_then(|f| f.is_symlink().then_some(entry.path())) | ||||
|   }); | ||||
| 
 | ||||
|     files.min_by_key(|path| { | ||||
|         // extract all digits from the file name and use that as key
 | ||||
|         let p = path.as_os_str().to_str().unwrap_or_default(); | ||||
|         let digits: String = p.chars().filter(|c| c.is_ascii_digit()).collect(); | ||||
|         // if we are not able to produce a key (e.g. because the path does not contain digits)
 | ||||
|         // we put it last
 | ||||
|         digits.parse::<u32>().unwrap_or(u32::MAX) | ||||
|     }) | ||||
|   files.min_by_key(|path| { | ||||
|     // extract all digits from the file name and use that as key
 | ||||
|     let p = path.as_os_str().to_str().unwrap_or_default(); | ||||
|     let digits: String = p.chars().filter(|c| c.is_ascii_digit()).collect(); | ||||
|     // if we are not able to produce a key (e.g. because the path does not
 | ||||
|     // contain digits) we put it last
 | ||||
|     digits.parse::<u32>().unwrap_or(u32::MAX) | ||||
|   }) | ||||
| } | ||||
| 
 | ||||
| pub fn get_deriv_query() -> &'static PathBuf { | ||||
|     static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new(); | ||||
|     _QUERY_DERIV.get_or_init(|| { | ||||
|         let path = PathBuf::from( | ||||
|             env::var("DIX_BENCH_NEW_SYSTEM") | ||||
|                 .unwrap_or_else(|_| "/run/current-system/system".into()), | ||||
|         ); | ||||
|         path | ||||
|     }) | ||||
|   static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new(); | ||||
|   _QUERY_DERIV.get_or_init(|| { | ||||
|     let path = PathBuf::from( | ||||
|       env::var("DIX_BENCH_NEW_SYSTEM") | ||||
|         .unwrap_or_else(|_| "/run/current-system/system".into()), | ||||
|     ); | ||||
|     path | ||||
|   }) | ||||
| } | ||||
| pub fn get_deriv_query_old() -> &'static PathBuf { | ||||
|     static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new(); | ||||
|     _QUERY_DERIV.get_or_init(|| { | ||||
|         let path = env::var("DIX_BENCH_OLD_SYSTEM") | ||||
|             .ok() | ||||
|             .map(PathBuf::from) | ||||
|             .or(get_oldest_nixos_system()) | ||||
|             .unwrap_or_else(|| PathBuf::from("/run/current-system/system")); | ||||
|         path | ||||
|     }) | ||||
|   static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new(); | ||||
|   _QUERY_DERIV.get_or_init(|| { | ||||
|     let path = env::var("DIX_BENCH_OLD_SYSTEM") | ||||
|       .ok() | ||||
|       .map(PathBuf::from) | ||||
|       .or(get_oldest_nixos_system()) | ||||
|       .unwrap_or_else(|| PathBuf::from("/run/current-system/system")); | ||||
|     path | ||||
|   }) | ||||
| } | ||||
| 
 | ||||
| pub fn get_packages() -> &'static (Vec<String>, Vec<String>) { | ||||
|     static _PKGS: OnceLock<(Vec<String>, Vec<String>)> = OnceLock::new(); | ||||
|     _PKGS.get_or_init(|| { | ||||
|         let pkgs_before = store::get_packages(std::path::Path::new(get_deriv_query_old())) | ||||
|             .unwrap() | ||||
|             .into_iter() | ||||
|             .map(|(_, name)| name) | ||||
|             .collect::<Vec<String>>(); | ||||
|         let pkgs_after = store::get_packages(std::path::Path::new(get_deriv_query())) | ||||
|             .unwrap() | ||||
|             .into_iter() | ||||
|             .map(|(_, name)| name) | ||||
|             .collect::<Vec<String>>(); | ||||
|         (pkgs_before, pkgs_after) | ||||
|     }) | ||||
|   static _PKGS: OnceLock<(Vec<String>, Vec<String>)> = OnceLock::new(); | ||||
|   _PKGS.get_or_init(|| { | ||||
|     let pkgs_before = | ||||
|       store::query_packages(std::path::Path::new(get_deriv_query_old())) | ||||
|         .unwrap() | ||||
|         .into_iter() | ||||
|         .map(|(_, name)| name) | ||||
|         .collect::<Vec<String>>(); | ||||
|     let pkgs_after = | ||||
|       store::query_packages(std::path::Path::new(get_deriv_query())) | ||||
|         .unwrap() | ||||
|         .into_iter() | ||||
|         .map(|(_, name)| name) | ||||
|         .collect::<Vec<String>>(); | ||||
|     (pkgs_before, pkgs_after) | ||||
|   }) | ||||
| } | ||||
| 
 | ||||
| pub fn get_pkg_diff() -> &'static PackageDiff<'static> { | ||||
|     static _PKG_DIFF: OnceLock<PackageDiff> = OnceLock::new(); | ||||
|     _PKG_DIFF.get_or_init(|| { | ||||
|         let (pkgs_before, pkgs_after) = get_packages(); | ||||
|         PackageDiff::new(pkgs_before, pkgs_after) | ||||
|     }) | ||||
|   static _PKG_DIFF: OnceLock<PackageDiff> = OnceLock::new(); | ||||
|   _PKG_DIFF.get_or_init(|| { | ||||
|     let (pkgs_before, pkgs_after) = get_packages(); | ||||
|     PackageDiff::new(pkgs_before, pkgs_after) | ||||
|   }) | ||||
| } | ||||
| 
 | ||||
| /// prints the old and new NixOs system used for benchmarking
 | ||||
| ///
 | ||||
| /// is used to give information about the old and new system
 | ||||
| pub fn print_used_nixos_systems() { | ||||
|     let old = get_deriv_query_old(); | ||||
|     let new = get_deriv_query(); | ||||
|     println!("old system used {:?}", old); | ||||
|     println!("new system used {:?}", new); | ||||
|   let old = get_deriv_query_old(); | ||||
|   let new = get_deriv_query(); | ||||
|   println!("old system used {:?}", old); | ||||
|   println!("new system used {:?}", new); | ||||
| } | ||||
|  |  | |||
							
								
								
									
										129
									
								
								benches/print.rs
									
										
									
									
									
								
							
							
						
						
									
										129
									
								
								benches/print.rs
									
										
									
									
									
								
							|  | @ -1,86 +1,97 @@ | |||
| mod common; | ||||
| 
 | ||||
| use std::{fs::File, os::fd::AsRawFd}; | ||||
| use std::{ | ||||
|   fs::File, | ||||
|   os::fd::AsRawFd, | ||||
| }; | ||||
| 
 | ||||
| use common::{get_pkg_diff, print_used_nixos_systems}; | ||||
| use criterion::{Criterion, black_box, criterion_group, criterion_main}; | ||||
| use dixlib::print; | ||||
| use common::{ | ||||
|   get_pkg_diff, | ||||
|   print_used_nixos_systems, | ||||
| }; | ||||
| use criterion::{ | ||||
|   Criterion, | ||||
|   black_box, | ||||
|   criterion_group, | ||||
|   criterion_main, | ||||
| }; | ||||
| use dix::print; | ||||
| 
 | ||||
| /// reroutes stdout and stderr to the null device before
 | ||||
| /// executing `f`
 | ||||
| fn suppress_output<F: FnOnce()>(f: F) { | ||||
|     let stdout = std::io::stdout(); | ||||
|     let stderr = std::io::stderr(); | ||||
|   let stdout = std::io::stdout(); | ||||
|   let stderr = std::io::stderr(); | ||||
| 
 | ||||
|     // Save original FDs
 | ||||
|     let orig_stdout_fd = stdout.as_raw_fd(); | ||||
|     let orig_stderr_fd = stderr.as_raw_fd(); | ||||
|   // Save original FDs
 | ||||
|   let orig_stdout_fd = stdout.as_raw_fd(); | ||||
|   let orig_stderr_fd = stderr.as_raw_fd(); | ||||
| 
 | ||||
|     // Open /dev/null and get its FD
 | ||||
|     let devnull = File::create("/dev/null").unwrap(); | ||||
|     let null_fd = devnull.as_raw_fd(); | ||||
|   // Open /dev/null and get its FD
 | ||||
|   let devnull = File::create("/dev/null").unwrap(); | ||||
|   let null_fd = devnull.as_raw_fd(); | ||||
| 
 | ||||
|     // Redirect stdout and stderr to /dev/null
 | ||||
|     let _ = unsafe { libc::dup2(null_fd, orig_stdout_fd) }; | ||||
|     let _ = unsafe { libc::dup2(null_fd, orig_stderr_fd) }; | ||||
|   // Redirect stdout and stderr to /dev/null
 | ||||
|   let _ = unsafe { libc::dup2(null_fd, orig_stdout_fd) }; | ||||
|   let _ = unsafe { libc::dup2(null_fd, orig_stderr_fd) }; | ||||
| 
 | ||||
|     f(); | ||||
|   f(); | ||||
| 
 | ||||
|     let _ = unsafe { libc::dup2(orig_stdout_fd, 1) }; | ||||
|     let _ = unsafe { libc::dup2(orig_stderr_fd, 2) }; | ||||
|   let _ = unsafe { libc::dup2(orig_stdout_fd, 1) }; | ||||
|   let _ = unsafe { libc::dup2(orig_stderr_fd, 2) }; | ||||
| } | ||||
| 
 | ||||
| pub fn bench_print_added(c: &mut Criterion) { | ||||
|     print_used_nixos_systems(); | ||||
|     let diff = get_pkg_diff(); | ||||
|     c.bench_function("print_added", |b| { | ||||
|         b.iter(|| { | ||||
|             suppress_output(|| { | ||||
|                 print::print_added( | ||||
|                     black_box(&diff.added), | ||||
|                     black_box(&diff.pkg_to_versions_post), | ||||
|                     30, | ||||
|                 ); | ||||
|             }); | ||||
|         }); | ||||
|   print_used_nixos_systems(); | ||||
|   let diff = get_pkg_diff(); | ||||
|   c.bench_function("print_added", |b| { | ||||
|     b.iter(|| { | ||||
|       suppress_output(|| { | ||||
|         print::print_added( | ||||
|           black_box(&diff.added), | ||||
|           black_box(&diff.pkg_to_versions_post), | ||||
|           30, | ||||
|         ); | ||||
|       }); | ||||
|     }); | ||||
|   }); | ||||
| } | ||||
| pub fn bench_print_removed(c: &mut Criterion) { | ||||
|     print_used_nixos_systems(); | ||||
|     let diff = get_pkg_diff(); | ||||
|     c.bench_function("print_removed", |b| { | ||||
|         b.iter(|| { | ||||
|             suppress_output(|| { | ||||
|                 print::print_removed( | ||||
|                     black_box(&diff.removed), | ||||
|                     black_box(&diff.pkg_to_versions_pre), | ||||
|                     30, | ||||
|                 ); | ||||
|             }); | ||||
|         }); | ||||
|   print_used_nixos_systems(); | ||||
|   let diff = get_pkg_diff(); | ||||
|   c.bench_function("print_removed", |b| { | ||||
|     b.iter(|| { | ||||
|       suppress_output(|| { | ||||
|         print::print_removed( | ||||
|           black_box(&diff.removed), | ||||
|           black_box(&diff.pkg_to_versions_pre), | ||||
|           30, | ||||
|         ); | ||||
|       }); | ||||
|     }); | ||||
|   }); | ||||
| } | ||||
| pub fn bench_print_changed(c: &mut Criterion) { | ||||
|     print_used_nixos_systems(); | ||||
|     let diff = get_pkg_diff(); | ||||
|     c.bench_function("print_changed", |b| { | ||||
|         b.iter(|| { | ||||
|             suppress_output(|| { | ||||
|                 print::print_changes( | ||||
|                     black_box(&diff.changed), | ||||
|                     black_box(&diff.pkg_to_versions_pre), | ||||
|                     black_box(&diff.pkg_to_versions_post), | ||||
|                     30, | ||||
|                 ); | ||||
|             }); | ||||
|         }); | ||||
|   print_used_nixos_systems(); | ||||
|   let diff = get_pkg_diff(); | ||||
|   c.bench_function("print_changed", |b| { | ||||
|     b.iter(|| { | ||||
|       suppress_output(|| { | ||||
|         print::print_changes( | ||||
|           black_box(&diff.changed), | ||||
|           black_box(&diff.pkg_to_versions_pre), | ||||
|           black_box(&diff.pkg_to_versions_post), | ||||
|           30, | ||||
|         ); | ||||
|       }); | ||||
|     }); | ||||
|   }); | ||||
| } | ||||
| 
 | ||||
| criterion_group!( | ||||
|     benches, | ||||
|     bench_print_added, | ||||
|     bench_print_removed, | ||||
|     bench_print_changed | ||||
|   benches, | ||||
|   bench_print_added, | ||||
|   bench_print_removed, | ||||
|   bench_print_changed | ||||
| ); | ||||
| criterion_main!(benches); | ||||
|  |  | |||
|  | @ -1,6 +1,11 @@ | |||
| mod common; | ||||
| use criterion::{Criterion, black_box, criterion_group, criterion_main}; | ||||
| use dixlib::store; | ||||
| use criterion::{ | ||||
|   Criterion, | ||||
|   black_box, | ||||
|   criterion_group, | ||||
|   criterion_main, | ||||
| }; | ||||
| use dix::store; | ||||
| 
 | ||||
| // basic benchmarks using the current system
 | ||||
| //
 | ||||
|  | @ -12,25 +17,27 @@ use dixlib::store; | |||
| // db to benchmark instead to make the results comparable
 | ||||
| 
 | ||||
| pub fn bench_get_packages(c: &mut Criterion) { | ||||
|     c.bench_function("get_packages", |b| { | ||||
|         b.iter(|| store::get_packages(black_box(common::get_deriv_query()))); | ||||
|     }); | ||||
|   c.bench_function("get_packages", |b| { | ||||
|     b.iter(|| store::query_depdendents(black_box(common::get_deriv_query()))); | ||||
|   }); | ||||
| } | ||||
| pub fn bench_get_closure_size(c: &mut Criterion) { | ||||
|     c.bench_function("get_closure_size", |b| { | ||||
|         b.iter(|| store::get_closure_size(black_box(common::get_deriv_query()))); | ||||
|     }); | ||||
|   c.bench_function("get_closure_size", |b| { | ||||
|     b.iter(|| store::query_closure_size(black_box(common::get_deriv_query()))); | ||||
|   }); | ||||
| } | ||||
| pub fn bench_get_dependency_graph(c: &mut Criterion) { | ||||
|     c.bench_function("get_dependency_graph", |b| { | ||||
|         b.iter(|| store::get_dependency_graph(black_box(common::get_deriv_query()))); | ||||
|   c.bench_function("get_dependency_graph", |b| { | ||||
|     b.iter(|| { | ||||
|       store::query_dependency_graph(black_box(common::get_deriv_query())) | ||||
|     }); | ||||
|   }); | ||||
| } | ||||
| 
 | ||||
| criterion_group!( | ||||
|     benches, | ||||
|     bench_get_packages, | ||||
|     bench_get_closure_size, | ||||
|     bench_get_dependency_graph | ||||
|   benches, | ||||
|   bench_get_packages, | ||||
|   bench_get_closure_size, | ||||
|   bench_get_dependency_graph | ||||
| ); | ||||
| criterion_main!(benches); | ||||
|  |  | |||
|  | @ -1,14 +1,19 @@ | |||
| mod common; | ||||
| 
 | ||||
| use common::get_packages; | ||||
| use criterion::{Criterion, black_box, criterion_group, criterion_main}; | ||||
| use dixlib::util::PackageDiff; | ||||
| use criterion::{ | ||||
|   Criterion, | ||||
|   black_box, | ||||
|   criterion_group, | ||||
|   criterion_main, | ||||
| }; | ||||
| use dix::util::PackageDiff; | ||||
| 
 | ||||
| pub fn bench_package_diff(c: &mut Criterion) { | ||||
|     let (pkgs_before, pkgs_after) = get_packages(); | ||||
|     c.bench_function("PackageDiff::new", |b| { | ||||
|         b.iter(|| PackageDiff::new(black_box(pkgs_before), black_box(pkgs_after))); | ||||
|     }); | ||||
|   let (pkgs_before, pkgs_after) = get_packages(); | ||||
|   c.bench_function("PackageDiff::new", |b| { | ||||
|     b.iter(|| PackageDiff::new(black_box(pkgs_before), black_box(pkgs_after))); | ||||
|   }); | ||||
| } | ||||
| 
 | ||||
| criterion_group!(benches, bench_package_diff); | ||||
|  |  | |||
							
								
								
									
										178
									
								
								src/error.rs
									
										
									
									
									
								
							
							
						
						
									
										178
									
								
								src/error.rs
									
										
									
									
									
								
							|  | @ -3,121 +3,131 @@ use thiserror::Error; | |||
| /// Application errors with thiserror
 | ||||
| #[derive(Debug, Error)] | ||||
| pub enum AppError { | ||||
|     #[error("Command failed: {command} {args:?} - {message}")] | ||||
|     CommandFailed { | ||||
|         command: String, | ||||
|         args: Vec<String>, | ||||
|         message: String, | ||||
|     }, | ||||
|   #[error("Command failed: {command} {args:?} - {message}")] | ||||
|   CommandFailed { | ||||
|     command: String, | ||||
|     args:    Vec<String>, | ||||
|     message: String, | ||||
|   }, | ||||
| 
 | ||||
|     #[error("Failed to decode command output from {context}: {source}")] | ||||
|     CommandOutputError { | ||||
|         source: std::str::Utf8Error, | ||||
|         context: String, | ||||
|     }, | ||||
|   #[error("Failed to decode command output from {context}: {source}")] | ||||
|   CommandOutputError { | ||||
|     source:  std::str::Utf8Error, | ||||
|     context: String, | ||||
|   }, | ||||
| 
 | ||||
|     #[error("Failed to parse data in {context}: {message}")] | ||||
|     ParseError { | ||||
|         message: String, | ||||
|         context: String, | ||||
|         #[source] | ||||
|         source: Option<Box<dyn std::error::Error + Send + Sync>>, | ||||
|     }, | ||||
|   #[error("Failed to parse data in {context}: {message}")] | ||||
|   ParseError { | ||||
|     message: String, | ||||
|     context: String, | ||||
|     #[source] | ||||
|     source:  Option<Box<dyn std::error::Error + Send + Sync>>, | ||||
|   }, | ||||
| 
 | ||||
|     #[error("Regex error in {context}: {source}")] | ||||
|     RegexError { | ||||
|         source: regex::Error, | ||||
|         context: String, | ||||
|     }, | ||||
|   #[error("Regex error in {context}: {source}")] | ||||
|   RegexError { | ||||
|     source:  regex::Error, | ||||
|     context: String, | ||||
|   }, | ||||
| 
 | ||||
|     #[error("IO error in {context}: {source}")] | ||||
|     IoError { | ||||
|         source: std::io::Error, | ||||
|         context: String, | ||||
|     }, | ||||
|   #[error("IO error in {context}: {source}")] | ||||
|   IoError { | ||||
|     source:  std::io::Error, | ||||
|     context: String, | ||||
|   }, | ||||
| 
 | ||||
|     #[error("Database error: {source}")] | ||||
|     DatabaseError { source: rusqlite::Error }, | ||||
|   #[error("Database error: {source}")] | ||||
|   DatabaseError { source: rusqlite::Error }, | ||||
| } | ||||
| 
 | ||||
| // Implement From traits to support the ? operator
 | ||||
| impl From<std::io::Error> for AppError { | ||||
|     fn from(source: std::io::Error) -> Self { | ||||
|         Self::IoError { | ||||
|             source, | ||||
|             context: "unknown context".into(), | ||||
|         } | ||||
|   fn from(source: std::io::Error) -> Self { | ||||
|     Self::IoError { | ||||
|       source, | ||||
|       context: "unknown context".into(), | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| impl From<std::str::Utf8Error> for AppError { | ||||
|     fn from(source: std::str::Utf8Error) -> Self { | ||||
|         Self::CommandOutputError { | ||||
|             source, | ||||
|             context: "command output".into(), | ||||
|         } | ||||
|   fn from(source: std::str::Utf8Error) -> Self { | ||||
|     Self::CommandOutputError { | ||||
|       source, | ||||
|       context: "command output".into(), | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| impl From<rusqlite::Error> for AppError { | ||||
|     fn from(source: rusqlite::Error) -> Self { | ||||
|         Self::DatabaseError { source } | ||||
|     } | ||||
|   fn from(source: rusqlite::Error) -> Self { | ||||
|     Self::DatabaseError { source } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| impl From<regex::Error> for AppError { | ||||
|     fn from(source: regex::Error) -> Self { | ||||
|         Self::RegexError { | ||||
|             source, | ||||
|             context: "regex operation".into(), | ||||
|         } | ||||
|   fn from(source: regex::Error) -> Self { | ||||
|     Self::RegexError { | ||||
|       source, | ||||
|       context: "regex operation".into(), | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| impl AppError { | ||||
|     /// Create a command failure error with context
 | ||||
|     pub fn command_failed<S: Into<String>>(command: S, args: &[&str], message: S) -> Self { | ||||
|         Self::CommandFailed { | ||||
|             command: command.into(), | ||||
|             args: args.iter().map(|&s| s.to_string()).collect(), | ||||
|             message: message.into(), | ||||
|         } | ||||
|   /// Create a command failure error with context
 | ||||
|   pub fn command_failed<S: Into<String>>( | ||||
|     command: S, | ||||
|     args: &[&str], | ||||
|     message: S, | ||||
|   ) -> Self { | ||||
|     Self::CommandFailed { | ||||
|       command: command.into(), | ||||
|       args:    args.iter().map(|&s| s.to_string()).collect(), | ||||
|       message: message.into(), | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|     /// Create a parse error with context
 | ||||
|     pub fn parse_error<S: Into<String>, C: Into<String>>( | ||||
|         message: S, | ||||
|         context: C, | ||||
|         source: Option<Box<dyn std::error::Error + Send + Sync>>, | ||||
|     ) -> Self { | ||||
|         Self::ParseError { | ||||
|             message: message.into(), | ||||
|             context: context.into(), | ||||
|             source, | ||||
|         } | ||||
|   /// Create a parse error with context
 | ||||
|   pub fn parse_error<S: Into<String>, C: Into<String>>( | ||||
|     message: S, | ||||
|     context: C, | ||||
|     source: Option<Box<dyn std::error::Error + Send + Sync>>, | ||||
|   ) -> Self { | ||||
|     Self::ParseError { | ||||
|       message: message.into(), | ||||
|       context: context.into(), | ||||
|       source, | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|     /// Create an IO error with context
 | ||||
|     pub fn io_error<C: Into<String>>(source: std::io::Error, context: C) -> Self { | ||||
|         Self::IoError { | ||||
|             source, | ||||
|             context: context.into(), | ||||
|         } | ||||
|   /// Create an IO error with context
 | ||||
|   pub fn io_error<C: Into<String>>(source: std::io::Error, context: C) -> Self { | ||||
|     Self::IoError { | ||||
|       source, | ||||
|       context: context.into(), | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|     /// Create a regex error with context
 | ||||
|     pub fn regex_error<C: Into<String>>(source: regex::Error, context: C) -> Self { | ||||
|         Self::RegexError { | ||||
|             source, | ||||
|             context: context.into(), | ||||
|         } | ||||
|   /// Create a regex error with context
 | ||||
|   pub fn regex_error<C: Into<String>>( | ||||
|     source: regex::Error, | ||||
|     context: C, | ||||
|   ) -> Self { | ||||
|     Self::RegexError { | ||||
|       source, | ||||
|       context: context.into(), | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|     /// Create a command output error with context
 | ||||
|     pub fn command_output_error<C: Into<String>>(source: std::str::Utf8Error, context: C) -> Self { | ||||
|         Self::CommandOutputError { | ||||
|             source, | ||||
|             context: context.into(), | ||||
|         } | ||||
|   /// Create a command output error with context
 | ||||
|   pub fn command_output_error<C: Into<String>>( | ||||
|     source: std::str::Utf8Error, | ||||
|     context: C, | ||||
|   ) -> Self { | ||||
|     Self::CommandOutputError { | ||||
|       source, | ||||
|       context: context.into(), | ||||
|     } | ||||
|   } | ||||
| } | ||||
|  |  | |||
							
								
								
									
										357
									
								
								src/main.rs
									
										
									
									
									
								
							
							
						
						
									
										357
									
								
								src/main.rs
									
										
									
									
									
								
							|  | @ -1,12 +1,18 @@ | |||
| use clap::Parser; | ||||
| use core::str; | ||||
| use dixlib::print; | ||||
| use dixlib::store; | ||||
| use dixlib::util::PackageDiff; | ||||
| use log::{debug, error}; | ||||
| use std::{ | ||||
|     collections::{HashMap, HashSet}, | ||||
|     thread, | ||||
|   collections::HashSet, | ||||
|   thread, | ||||
| }; | ||||
| 
 | ||||
| use clap::Parser; | ||||
| use dixlib::{ | ||||
|   print, | ||||
|   store, | ||||
|   util::PackageDiff, | ||||
| }; | ||||
| use log::{ | ||||
|   debug, | ||||
|   error, | ||||
| }; | ||||
| use yansi::Paint; | ||||
| 
 | ||||
|  | @ -16,199 +22,204 @@ use yansi::Paint; | |||
| #[command(about = "Diff Nix stuff", long_about = None)] | ||||
| #[command(version, about, long_about = None)] | ||||
| struct Args { | ||||
|     path: std::path::PathBuf, | ||||
|     path2: std::path::PathBuf, | ||||
|   path:  std::path::PathBuf, | ||||
|   path2: std::path::PathBuf, | ||||
| 
 | ||||
|     /// Print the whole store paths
 | ||||
|     #[arg(short, long)] | ||||
|     paths: bool, | ||||
|   /// Print the whole store paths
 | ||||
|   #[arg(short, long)] | ||||
|   paths: bool, | ||||
| 
 | ||||
|     /// Print the closure size
 | ||||
|     #[arg(long, short)] | ||||
|     closure_size: bool, | ||||
|   /// Print the closure size
 | ||||
|   #[arg(long, short)] | ||||
|   closure_size: bool, | ||||
| 
 | ||||
|     /// Verbosity level: -v for debug, -vv for trace
 | ||||
|     #[arg(short, long, action = clap::ArgAction::Count)] | ||||
|     verbose: u8, | ||||
|   /// Verbosity level: -v for debug, -vv for trace
 | ||||
|   #[arg(short, long, action = clap::ArgAction::Count)] | ||||
|   verbose: u8, | ||||
| 
 | ||||
|     /// Silence all output except errors
 | ||||
|     #[arg(short, long)] | ||||
|     quiet: bool, | ||||
|   /// Silence all output except errors
 | ||||
|   #[arg(short, long)] | ||||
|   quiet: bool, | ||||
| } | ||||
| 
 | ||||
| #[derive(Debug, Clone)] | ||||
| struct Package<'a> { | ||||
|     name: &'a str, | ||||
|     versions: HashSet<&'a str>, | ||||
|     /// Save if a package is a dependency of another package
 | ||||
|     is_dep: bool, | ||||
|   name:     &'a str, | ||||
|   versions: HashSet<&'a str>, | ||||
|   /// Save if a package is a dependency of another package
 | ||||
|   is_dep:   bool, | ||||
| } | ||||
| 
 | ||||
| impl<'a> Package<'a> { | ||||
|     fn new(name: &'a str, version: &'a str, is_dep: bool) -> Self { | ||||
|         let mut versions = HashSet::new(); | ||||
|         versions.insert(version); | ||||
|         Self { | ||||
|             name, | ||||
|             versions, | ||||
|             is_dep, | ||||
|         } | ||||
|   fn new(name: &'a str, version: &'a str, is_dep: bool) -> Self { | ||||
|     let mut versions = HashSet::new(); | ||||
|     versions.insert(version); | ||||
|     Self { | ||||
|       name, | ||||
|       versions, | ||||
|       is_dep, | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|     fn add_version(&mut self, version: &'a str) { | ||||
|         self.versions.insert(version); | ||||
|     } | ||||
|   fn add_version(&mut self, version: &'a str) { | ||||
|     self.versions.insert(version); | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| #[allow(clippy::cognitive_complexity, clippy::too_many_lines)] | ||||
| fn main() { | ||||
|     let args = Args::parse(); | ||||
|   let args = Args::parse(); | ||||
| 
 | ||||
|     // Configure logger based on verbosity flags and environment variables
 | ||||
|     // Respects RUST_LOG environment variable if present.
 | ||||
|     // XXX:We can also dedicate a specific env variable for this tool, if we want to.
 | ||||
|     let env = env_logger::Env::default().filter_or( | ||||
|         "RUST_LOG", | ||||
|         if args.quiet { | ||||
|             "error" | ||||
|         } else { | ||||
|             match args.verbose { | ||||
|                 0 => "info", | ||||
|                 1 => "debug", | ||||
|                 _ => "trace", | ||||
|             } | ||||
|         }, | ||||
|     ); | ||||
| 
 | ||||
|     // Build and initialize the logger
 | ||||
|     env_logger::Builder::from_env(env) | ||||
|         .format_timestamp(Some(env_logger::fmt::TimestampPrecision::Seconds)) | ||||
|         .init(); | ||||
| 
 | ||||
|     // handles to the threads collecting closure size information
 | ||||
|     // We do this as early as possible because nix is slow.
 | ||||
|     let closure_size_handles = if args.closure_size { | ||||
|         debug!("Calculating closure sizes in background"); | ||||
|         let path = args.path.clone(); | ||||
|         let path2 = args.path2.clone(); | ||||
|         Some(( | ||||
|             thread::spawn(move || store::get_closure_size(&path)), | ||||
|             thread::spawn(move || store::get_closure_size(&path2)), | ||||
|         )) | ||||
|   // Configure logger based on verbosity flags and environment variables
 | ||||
|   // Respects RUST_LOG environment variable if present.
 | ||||
|   // XXX:We can also dedicate a specific env variable for this tool, if we want
 | ||||
|   // to.
 | ||||
|   let env = env_logger::Env::default().filter_or( | ||||
|     "RUST_LOG", | ||||
|     if args.quiet { | ||||
|       "error" | ||||
|     } else { | ||||
|         None | ||||
|     }; | ||||
|       match args.verbose { | ||||
|         0 => "info", | ||||
|         1 => "debug", | ||||
|         _ => "trace", | ||||
|       } | ||||
|     }, | ||||
|   ); | ||||
| 
 | ||||
|     // Get package lists and handle potential errors
 | ||||
|     let package_list_pre = match store::get_packages(&args.path) { | ||||
|         Ok(packages) => { | ||||
|             debug!("Found {} packages in first closure", packages.len()); | ||||
|             packages.into_iter().map(|(_, path)| path).collect() | ||||
|         } | ||||
|         Err(e) => { | ||||
|             error!( | ||||
|                 "Error getting packages from path {}: {}", | ||||
|                 args.path.display(), | ||||
|                 e | ||||
|             ); | ||||
|             eprintln!( | ||||
|                 "Error getting packages from path {}: {}", | ||||
|                 args.path.display(), | ||||
|                 e | ||||
|             ); | ||||
|             Vec::new() | ||||
|         } | ||||
|     }; | ||||
|   // Build and initialize the logger
 | ||||
|   env_logger::Builder::from_env(env) | ||||
|     .format_timestamp(Some(env_logger::fmt::TimestampPrecision::Seconds)) | ||||
|     .init(); | ||||
| 
 | ||||
|     let package_list_post = match store::get_packages(&args.path2) { | ||||
|         Ok(packages) => { | ||||
|             debug!("Found {} packages in second closure", packages.len()); | ||||
|             packages.into_iter().map(|(_, path)| path).collect() | ||||
|         } | ||||
|         Err(e) => { | ||||
|             error!( | ||||
|                 "Error getting packages from path {}: {}", | ||||
|                 args.path2.display(), | ||||
|                 e | ||||
|             ); | ||||
|             eprintln!( | ||||
|                 "Error getting packages from path {}: {}", | ||||
|                 args.path2.display(), | ||||
|                 e | ||||
|             ); | ||||
|             Vec::new() | ||||
|         } | ||||
|     }; | ||||
|   // handles to the threads collecting closure size information
 | ||||
|   // We do this as early as possible because nix is slow.
 | ||||
|   let closure_size_handles = if args.closure_size { | ||||
|     debug!("Calculating closure sizes in background"); | ||||
|     let path = args.path.clone(); | ||||
|     let path2 = args.path2.clone(); | ||||
|     Some(( | ||||
|       thread::spawn(move || store::get_closure_size(&path)), | ||||
|       thread::spawn(move || store::get_closure_size(&path2)), | ||||
|     )) | ||||
|   } else { | ||||
|     None | ||||
|   }; | ||||
| 
 | ||||
|     let PackageDiff { | ||||
|         pkg_to_versions_pre: pre, | ||||
|         pkg_to_versions_post: post, | ||||
|         pre_keys: _, | ||||
|         post_keys: _, | ||||
|         added, | ||||
|         removed, | ||||
|         changed, | ||||
|     } = PackageDiff::new(&package_list_pre, &package_list_post); | ||||
|   // Get package lists and handle potential errors
 | ||||
|   let package_list_pre = match store::query_packages(&args.path) { | ||||
|     Ok(packages) => { | ||||
|       debug!("Found {} packages in first closure", packages.len()); | ||||
|       packages.into_iter().map(|(_, path)| path).collect() | ||||
|     }, | ||||
|     Err(e) => { | ||||
|       error!( | ||||
|         "Error getting packages from path {}: {}", | ||||
|         args.path.display(), | ||||
|         e | ||||
|       ); | ||||
|       eprintln!( | ||||
|         "Error getting packages from path {}: {}", | ||||
|         args.path.display(), | ||||
|         e | ||||
|       ); | ||||
|       Vec::new() | ||||
|     }, | ||||
|   }; | ||||
| 
 | ||||
|     debug!("Added packages: {}", added.len()); | ||||
|     debug!("Removed packages: {}", removed.len()); | ||||
|     debug!( | ||||
|         "Changed packages: {}", | ||||
|         changed | ||||
|             .iter() | ||||
|             .filter(|p| !p.is_empty() | ||||
|                 && match (pre.get(*p), post.get(*p)) { | ||||
|                     (Some(ver_pre), Some(ver_post)) => ver_pre != ver_post, | ||||
|                     _ => false, | ||||
|                 }) | ||||
|             .count() | ||||
|     ); | ||||
|   let package_list_post = match store::query_packages(&args.path2) { | ||||
|     Ok(packages) => { | ||||
|       debug!("Found {} packages in second closure", packages.len()); | ||||
|       packages.into_iter().map(|(_, path)| path).collect() | ||||
|     }, | ||||
|     Err(e) => { | ||||
|       error!( | ||||
|         "Error getting packages from path {}: {}", | ||||
|         args.path2.display(), | ||||
|         e | ||||
|       ); | ||||
|       eprintln!( | ||||
|         "Error getting packages from path {}: {}", | ||||
|         args.path2.display(), | ||||
|         e | ||||
|       ); | ||||
|       Vec::new() | ||||
|     }, | ||||
|   }; | ||||
| 
 | ||||
|     println!("Difference between the two generations:"); | ||||
|     println!(); | ||||
|   let PackageDiff { | ||||
|     pkg_to_versions_pre: pre, | ||||
|     pkg_to_versions_post: post, | ||||
|     pre_keys: _, | ||||
|     post_keys: _, | ||||
|     added, | ||||
|     removed, | ||||
|     changed, | ||||
|   } = PackageDiff::new(&package_list_pre, &package_list_post); | ||||
| 
 | ||||
|     let width_changes = changed | ||||
|         .iter() | ||||
|         .filter(|&&p| match (pre.get(p), post.get(p)) { | ||||
|             (Some(version_pre), Some(version_post)) => version_pre != version_post, | ||||
|   debug!("Added packages: {}", added.len()); | ||||
|   debug!("Removed packages: {}", removed.len()); | ||||
|   debug!( | ||||
|     "Changed packages: {}", | ||||
|     changed | ||||
|       .iter() | ||||
|       .filter(|p| { | ||||
|         !p.is_empty() | ||||
|           && match (pre.get(*p), post.get(*p)) { | ||||
|             (Some(ver_pre), Some(ver_post)) => ver_pre != ver_post, | ||||
|             _ => false, | ||||
|         }); | ||||
|           } | ||||
|       }) | ||||
|       .count() | ||||
|   ); | ||||
| 
 | ||||
|     let col_width = added | ||||
|         .iter() | ||||
|         .chain(removed.iter()) | ||||
|         .chain(width_changes) | ||||
|         .map(|p| p.len()) | ||||
|         .max() | ||||
|         .unwrap_or_default(); | ||||
|   println!("Difference between the two generations:"); | ||||
|   println!(); | ||||
| 
 | ||||
|     println!("<<< {}", args.path.to_string_lossy()); | ||||
|     println!(">>> {}", args.path2.to_string_lossy()); | ||||
|     print::print_added(&added, &post, col_width); | ||||
|     print::print_removed(&removed, &pre, col_width); | ||||
|     print::print_changes(&changed, &pre, &post, col_width); | ||||
| 
 | ||||
|     if let Some((pre_handle, post_handle)) = closure_size_handles { | ||||
|         match (pre_handle.join(), post_handle.join()) { | ||||
|             (Ok(Ok(pre_size)), Ok(Ok(post_size))) => { | ||||
|                 let pre_size = pre_size / 1024 / 1024; | ||||
|                 let post_size = post_size / 1024 / 1024; | ||||
|                 debug!("Pre closure size: {pre_size} MiB"); | ||||
|                 debug!("Post closure size: {post_size} MiB"); | ||||
| 
 | ||||
|                 println!("{}", "Closure Size:".underline().bold()); | ||||
|                 println!("Before: {pre_size} MiB"); | ||||
|                 println!("After: {post_size} MiB"); | ||||
|                 println!("Difference: {} MiB", post_size - pre_size); | ||||
|             } | ||||
|             (Ok(Err(e)), _) | (_, Ok(Err(e))) => { | ||||
|                 error!("Error getting closure size: {e}"); | ||||
|                 eprintln!("Error getting closure size: {e}"); | ||||
|             } | ||||
|             _ => { | ||||
|                 error!("Failed to get closure size information due to a thread error"); | ||||
|                 eprintln!("Error: Failed to get closure size information due to a thread error"); | ||||
|             } | ||||
|         } | ||||
|   let width_changes = changed.iter().filter(|&&p| { | ||||
|     match (pre.get(p), post.get(p)) { | ||||
|       (Some(version_pre), Some(version_post)) => version_pre != version_post, | ||||
|       _ => false, | ||||
|     } | ||||
|   }); | ||||
| 
 | ||||
|   let col_width = added | ||||
|     .iter() | ||||
|     .chain(removed.iter()) | ||||
|     .chain(width_changes) | ||||
|     .map(|p| p.len()) | ||||
|     .max() | ||||
|     .unwrap_or_default(); | ||||
| 
 | ||||
|   println!("<<< {}", args.path.to_string_lossy()); | ||||
|   println!(">>> {}", args.path2.to_string_lossy()); | ||||
|   print::print_added(&added, &post, col_width); | ||||
|   print::print_removed(&removed, &pre, col_width); | ||||
|   print::print_changes(&changed, &pre, &post, col_width); | ||||
| 
 | ||||
|   if let Some((pre_handle, post_handle)) = closure_size_handles { | ||||
|     match (pre_handle.join(), post_handle.join()) { | ||||
|       (Ok(Ok(pre_size)), Ok(Ok(post_size))) => { | ||||
|         let pre_size = pre_size / 1024 / 1024; | ||||
|         let post_size = post_size / 1024 / 1024; | ||||
|         debug!("Pre closure size: {pre_size} MiB"); | ||||
|         debug!("Post closure size: {post_size} MiB"); | ||||
| 
 | ||||
|         println!("{}", "Closure Size:".underline().bold()); | ||||
|         println!("Before: {pre_size} MiB"); | ||||
|         println!("After: {post_size} MiB"); | ||||
|         println!("Difference: {} MiB", post_size - pre_size); | ||||
|       }, | ||||
|       (Ok(Err(e)), _) | (_, Ok(Err(e))) => { | ||||
|         error!("Error getting closure size: {e}"); | ||||
|         eprintln!("Error getting closure size: {e}"); | ||||
|       }, | ||||
|       _ => { | ||||
|         error!("Failed to get closure size information due to a thread error"); | ||||
|         eprintln!( | ||||
|           "Error: Failed to get closure size information due to a thread error" | ||||
|         ); | ||||
|       }, | ||||
|     } | ||||
|   } | ||||
| } | ||||
|  |  | |||
							
								
								
									
										308
									
								
								src/print.rs
									
										
									
									
									
								
							
							
						
						
									
										308
									
								
								src/print.rs
									
										
									
									
									
								
							|  | @ -1,10 +1,14 @@ | |||
| use core::str; | ||||
| use regex::Regex; | ||||
| use std::{ | ||||
|     collections::{HashMap, HashSet}, | ||||
|     string::ToString, | ||||
|     sync::OnceLock, | ||||
|   collections::{ | ||||
|     HashMap, | ||||
|     HashSet, | ||||
|   }, | ||||
|   string::ToString, | ||||
|   sync::OnceLock, | ||||
| }; | ||||
| 
 | ||||
| use regex::Regex; | ||||
| use yansi::Paint; | ||||
| 
 | ||||
| /// diffs two strings character by character, and returns a tuple of strings
 | ||||
|  | @ -12,179 +16,191 @@ use yansi::Paint; | |||
| ///
 | ||||
| /// # Returns:
 | ||||
| ///
 | ||||
| /// * (String, String) - The differing chars being red in the left, and green in the right one.
 | ||||
| /// * (String, String) - The differing chars being red in the left, and green in
 | ||||
| ///   the right one.
 | ||||
| fn diff_versions(left: &str, right: &str) -> (String, String) { | ||||
|     let mut prev = "\x1b[33m".to_string(); | ||||
|     let mut post = "\x1b[33m".to_string(); | ||||
|   let mut prev = "\x1b[33m".to_string(); | ||||
|   let mut post = "\x1b[33m".to_string(); | ||||
| 
 | ||||
|     // We only have to filter the left once, since we stop if the left one is empty.
 | ||||
|     // We do this to display things like -man, -dev properly.
 | ||||
|     let matches = name_regex().captures(left); | ||||
|     let mut suffix = String::new(); | ||||
|   // We only have to filter the left once, since we stop if the left one is
 | ||||
|   // empty. We do this to display things like -man, -dev properly.
 | ||||
|   let matches = name_regex().captures(left); | ||||
|   let mut suffix = String::new(); | ||||
| 
 | ||||
|     if let Some(m) = matches { | ||||
|         let tmp = m.get(0).map_or("", |m| m.as_str()); | ||||
|         suffix.push_str(tmp); | ||||
|   if let Some(m) = matches { | ||||
|     let tmp = m.get(0).map_or("", |m| m.as_str()); | ||||
|     suffix.push_str(tmp); | ||||
|   } | ||||
|   // string without the suffix
 | ||||
|   let filtered_left = &left[..left.len() - suffix.len()]; | ||||
|   let filtered_right = &right[..right.len() - suffix.len()]; | ||||
| 
 | ||||
|   for diff in diff::chars(filtered_left, filtered_right) { | ||||
|     match diff { | ||||
|       diff::Result::Both(l, _) => { | ||||
|         let string_to_push = format!("{l}"); | ||||
|         prev.push_str(&string_to_push); | ||||
|         post.push_str(&string_to_push); | ||||
|       }, | ||||
|       diff::Result::Left(l) => { | ||||
|         let string_to_push = format!("\x1b[1;91m{l}"); | ||||
|         prev.push_str(&string_to_push); | ||||
|       }, | ||||
| 
 | ||||
|       diff::Result::Right(r) => { | ||||
|         let string_to_push = format!("\x1b[1;92m{r}"); | ||||
|         post.push_str(&string_to_push); | ||||
|       }, | ||||
|     } | ||||
|     // string without the suffix
 | ||||
|     let filtered_left = &left[..left.len() - suffix.len()]; | ||||
|     let filtered_right = &right[..right.len() - suffix.len()]; | ||||
|   } | ||||
| 
 | ||||
|     for diff in diff::chars(filtered_left, filtered_right) { | ||||
|         match diff { | ||||
|             diff::Result::Both(l, _) => { | ||||
|                 let string_to_push = format!("{l}"); | ||||
|                 prev.push_str(&string_to_push); | ||||
|                 post.push_str(&string_to_push); | ||||
|             } | ||||
|             diff::Result::Left(l) => { | ||||
|                 let string_to_push = format!("\x1b[1;91m{l}"); | ||||
|                 prev.push_str(&string_to_push); | ||||
|             } | ||||
|   // push removed suffix
 | ||||
|   prev.push_str(&format!("\x1b[33m{}", &suffix)); | ||||
|   post.push_str(&format!("\x1b[33m{}", &suffix)); | ||||
| 
 | ||||
|             diff::Result::Right(r) => { | ||||
|                 let string_to_push = format!("\x1b[1;92m{r}"); | ||||
|                 post.push_str(&string_to_push); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|   // reset
 | ||||
|   prev.push_str("\x1b[0m"); | ||||
|   post.push_str("\x1b[0m"); | ||||
| 
 | ||||
|     // push removed suffix
 | ||||
|     prev.push_str(&format!("\x1b[33m{}", &suffix)); | ||||
|     post.push_str(&format!("\x1b[33m{}", &suffix)); | ||||
| 
 | ||||
|     //reset
 | ||||
|     prev.push_str("\x1b[0m"); | ||||
|     post.push_str("\x1b[0m"); | ||||
| 
 | ||||
|     (prev, post) | ||||
|   (prev, post) | ||||
| } | ||||
| 
 | ||||
| /// print the packages added between two closures.
 | ||||
| pub fn print_added(set: &HashSet<&str>, post: &HashMap<&str, HashSet<&str>>, col_width: usize) { | ||||
|     println!("{}", "Packages added:".underline().bold()); | ||||
| pub fn print_added( | ||||
|   set: &HashSet<&str>, | ||||
|   post: &HashMap<&str, HashSet<&str>>, | ||||
|   col_width: usize, | ||||
| ) { | ||||
|   println!("{}", "Packages added:".underline().bold()); | ||||
| 
 | ||||
|     // Use sorted outpu
 | ||||
|     let mut sorted: Vec<_> = set | ||||
|         .iter() | ||||
|         .filter_map(|p| post.get(p).map(|ver| (*p, ver))) | ||||
|         .collect(); | ||||
|   // Use sorted outpu
 | ||||
|   let mut sorted: Vec<_> = set | ||||
|     .iter() | ||||
|     .filter_map(|p| post.get(p).map(|ver| (*p, ver))) | ||||
|     .collect(); | ||||
| 
 | ||||
|     // Sort by package name for consistent output
 | ||||
|     sorted.sort_by(|(a, _), (b, _)| a.cmp(b)); | ||||
|   // Sort by package name for consistent output
 | ||||
|   sorted.sort_by(|(a, _), (b, _)| a.cmp(b)); | ||||
| 
 | ||||
|     for (p, ver) in sorted { | ||||
|         let mut version_vec = ver.iter().copied().collect::<Vec<_>>(); | ||||
|         version_vec.sort_unstable(); | ||||
|         let version_str = version_vec.join(", "); | ||||
|         println!( | ||||
|             "[{}] {:col_width$} \x1b[33m{}\x1b[0m", | ||||
|             "A:".green().bold(), | ||||
|             p, | ||||
|             version_str | ||||
|         ); | ||||
|     } | ||||
|   for (p, ver) in sorted { | ||||
|     let mut version_vec = ver.iter().copied().collect::<Vec<_>>(); | ||||
|     version_vec.sort_unstable(); | ||||
|     let version_str = version_vec.join(", "); | ||||
|     println!( | ||||
|       "[{}] {:col_width$} \x1b[33m{}\x1b[0m", | ||||
|       "A:".green().bold(), | ||||
|       p, | ||||
|       version_str | ||||
|     ); | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| /// print the packages removed between two closures.
 | ||||
| pub fn print_removed(set: &HashSet<&str>, pre: &HashMap<&str, HashSet<&str>>, col_width: usize) { | ||||
|     println!("{}", "Packages removed:".underline().bold()); | ||||
| pub fn print_removed( | ||||
|   set: &HashSet<&str>, | ||||
|   pre: &HashMap<&str, HashSet<&str>>, | ||||
|   col_width: usize, | ||||
| ) { | ||||
|   println!("{}", "Packages removed:".underline().bold()); | ||||
| 
 | ||||
|     // Use sorted output for more predictable and readable results
 | ||||
|     let mut sorted: Vec<_> = set | ||||
|         .iter() | ||||
|         .filter_map(|p| pre.get(p).map(|ver| (*p, ver))) | ||||
|         .collect(); | ||||
|   // Use sorted output for more predictable and readable results
 | ||||
|   let mut sorted: Vec<_> = set | ||||
|     .iter() | ||||
|     .filter_map(|p| pre.get(p).map(|ver| (*p, ver))) | ||||
|     .collect(); | ||||
| 
 | ||||
|     // Sort by package name for consistent output
 | ||||
|     sorted.sort_by(|(a, _), (b, _)| a.cmp(b)); | ||||
|   // Sort by package name for consistent output
 | ||||
|   sorted.sort_by(|(a, _), (b, _)| a.cmp(b)); | ||||
| 
 | ||||
|     for (p, ver) in sorted { | ||||
|         let mut version_vec = ver.iter().copied().collect::<Vec<_>>(); | ||||
|         version_vec.sort_unstable(); | ||||
|         let version_str = version_vec.join(", "); | ||||
|         println!( | ||||
|             "[{}] {:col_width$} \x1b[33m{}\x1b[0m", | ||||
|             "R:".red().bold(), | ||||
|             p, | ||||
|             version_str | ||||
|         ); | ||||
|     } | ||||
|   for (p, ver) in sorted { | ||||
|     let mut version_vec = ver.iter().copied().collect::<Vec<_>>(); | ||||
|     version_vec.sort_unstable(); | ||||
|     let version_str = version_vec.join(", "); | ||||
|     println!( | ||||
|       "[{}] {:col_width$} \x1b[33m{}\x1b[0m", | ||||
|       "R:".red().bold(), | ||||
|       p, | ||||
|       version_str | ||||
|     ); | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| pub fn print_changes( | ||||
|     set: &HashSet<&str>, | ||||
|     pre: &HashMap<&str, HashSet<&str>>, | ||||
|     post: &HashMap<&str, HashSet<&str>>, | ||||
|     col_width: usize, | ||||
|   set: &HashSet<&str>, | ||||
|   pre: &HashMap<&str, HashSet<&str>>, | ||||
|   post: &HashMap<&str, HashSet<&str>>, | ||||
|   col_width: usize, | ||||
| ) { | ||||
|     println!("{}", "Version changes:".underline().bold()); | ||||
|   println!("{}", "Versions changed:".underline().bold()); | ||||
| 
 | ||||
|     // Use sorted output for more predictable and readable results
 | ||||
|     let mut changes = Vec::new(); | ||||
|   // Use sorted output for more predictable and readable results
 | ||||
|   let mut changes = Vec::new(); | ||||
| 
 | ||||
|     for p in set.iter().filter(|p| !p.is_empty()) { | ||||
|         if let (Some(ver_pre), Some(ver_post)) = (pre.get(p), post.get(p)) { | ||||
|             if ver_pre != ver_post { | ||||
|                 changes.push((*p, ver_pre, ver_post)); | ||||
|             } | ||||
|         } | ||||
|   for p in set.iter().filter(|p| !p.is_empty()) { | ||||
|     if let (Some(ver_pre), Some(ver_post)) = (pre.get(p), post.get(p)) { | ||||
|       if ver_pre != ver_post { | ||||
|         changes.push((*p, ver_pre, ver_post)); | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   // Sort by package name for consistent output
 | ||||
|   changes.sort_by(|(a, ..), (b, ..)| a.cmp(b)); | ||||
| 
 | ||||
|   for (p, ver_pre, ver_post) in changes { | ||||
|     let mut version_vec_pre = | ||||
|       ver_pre.difference(ver_post).copied().collect::<Vec<_>>(); | ||||
|     let mut version_vec_post = | ||||
|       ver_post.difference(ver_pre).copied().collect::<Vec<_>>(); | ||||
| 
 | ||||
|     version_vec_pre.sort_unstable(); | ||||
|     version_vec_post.sort_unstable(); | ||||
| 
 | ||||
|     let mut diffed_pre: String; | ||||
|     let diffed_post: String; | ||||
| 
 | ||||
|     if version_vec_pre.len() == version_vec_post.len() { | ||||
|       let mut diff_pre: Vec<String> = vec![]; | ||||
|       let mut diff_post: Vec<String> = vec![]; | ||||
| 
 | ||||
|       for (pre, post) in version_vec_pre.iter().zip(version_vec_post.iter()) { | ||||
|         let (a, b) = diff_versions(pre, post); | ||||
|         diff_pre.push(a); | ||||
|         diff_post.push(b); | ||||
|       } | ||||
|       diffed_pre = diff_pre.join(", "); | ||||
|       diffed_post = diff_post.join(", "); | ||||
|     } else { | ||||
|       let version_str_pre = version_vec_pre.join(", "); | ||||
|       let version_str_post = version_vec_post.join(", "); | ||||
|       (diffed_pre, diffed_post) = | ||||
|         diff_versions(&version_str_pre, &version_str_post); | ||||
|     } | ||||
| 
 | ||||
|     // Sort by package name for consistent output
 | ||||
|     changes.sort_by(|(a, _, _), (b, _, _)| a.cmp(b)); | ||||
| 
 | ||||
|     for (p, ver_pre, ver_post) in changes { | ||||
|         let mut version_vec_pre = ver_pre.difference(ver_post).copied().collect::<Vec<_>>(); | ||||
|         let mut version_vec_post = ver_post.difference(ver_pre).copied().collect::<Vec<_>>(); | ||||
| 
 | ||||
|         version_vec_pre.sort_unstable(); | ||||
|         version_vec_post.sort_unstable(); | ||||
| 
 | ||||
|         let mut diffed_pre: String; | ||||
|         let diffed_post: String; | ||||
| 
 | ||||
|         if version_vec_pre.len() == version_vec_post.len() { | ||||
|             let mut diff_pre: Vec<String> = vec![]; | ||||
|             let mut diff_post: Vec<String> = vec![]; | ||||
| 
 | ||||
|             for (pre, post) in version_vec_pre.iter().zip(version_vec_post.iter()) { | ||||
|                 let (a, b) = diff_versions(pre, post); | ||||
|                 diff_pre.push(a); | ||||
|                 diff_post.push(b); | ||||
|             } | ||||
|             diffed_pre = diff_pre.join(", "); | ||||
|             diffed_post = diff_post.join(", "); | ||||
|         } else { | ||||
|             let version_str_pre = version_vec_pre.join(", "); | ||||
|             let version_str_post = version_vec_post.join(", "); | ||||
|             (diffed_pre, diffed_post) = diff_versions(&version_str_pre, &version_str_post); | ||||
|         } | ||||
| 
 | ||||
|         // push a space to the diffed_pre, if it is non-empty, we do this here and not in the println
 | ||||
|         // in order to properly align the ±.
 | ||||
|         if !version_vec_pre.is_empty() { | ||||
|             let mut tmp = " ".to_string(); | ||||
|             tmp.push_str(&diffed_pre); | ||||
|             diffed_pre = tmp; | ||||
|         } | ||||
| 
 | ||||
|         println!( | ||||
|             "[{}] {:col_width$}{} \x1b[0m\u{00B1}\x1b[0m {}", | ||||
|             "C:".bold().bright_yellow(), | ||||
|             p, | ||||
|             diffed_pre, | ||||
|             diffed_post | ||||
|         ); | ||||
|     // push a space to the diffed_pre, if it is non-empty, we do this here and
 | ||||
|     // not in the println in order to properly align the ±.
 | ||||
|     if !version_vec_pre.is_empty() { | ||||
|       let mut tmp = " ".to_string(); | ||||
|       tmp.push_str(&diffed_pre); | ||||
|       diffed_pre = tmp; | ||||
|     } | ||||
| 
 | ||||
|     println!( | ||||
|       "[{}] {:col_width$}{} \x1b[0m\u{00B1}\x1b[0m {}", | ||||
|       "C:".bold().bright_yellow(), | ||||
|       p, | ||||
|       diffed_pre, | ||||
|       diffed_post | ||||
|     ); | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| // Returns a reference to the compiled regex pattern.
 | ||||
| // The regex is compiled only once.
 | ||||
| fn name_regex() -> &'static Regex { | ||||
|     static REGEX: OnceLock<Regex> = OnceLock::new(); | ||||
|     REGEX.get_or_init(|| { | ||||
|         Regex::new(r"(-man|-lib|-doc|-dev|-out|-terminfo)") | ||||
|             .expect("Failed to compile regex pattern for name") | ||||
|     }) | ||||
|   static REGEX: OnceLock<Regex> = OnceLock::new(); | ||||
|   REGEX.get_or_init(|| { | ||||
|     Regex::new(r"(-man|-lib|-doc|-dev|-out|-terminfo)") | ||||
|       .expect("Failed to compile regex pattern for name") | ||||
|   }) | ||||
| } | ||||
|  |  | |||
							
								
								
									
										252
									
								
								src/store.rs
									
										
									
									
									
								
							
							
						
						
									
										252
									
								
								src/store.rs
									
										
									
									
									
								
							|  | @ -1,115 +1,167 @@ | |||
| use std::collections::HashMap; | ||||
| use std::{ | ||||
|   path::{ | ||||
|     Path, | ||||
|     PathBuf, | ||||
|   }, | ||||
|   result, | ||||
| }; | ||||
| 
 | ||||
| use crate::error::AppError; | ||||
| use anyhow::{ | ||||
|   Context as _, | ||||
|   Result, | ||||
| }; | ||||
| use derive_more::Deref; | ||||
| use ref_cast::RefCast; | ||||
| use rusqlite::Connection; | ||||
| use rustc_hash::{ | ||||
|   FxBuildHasher, | ||||
|   FxHashMap, | ||||
| }; | ||||
| 
 | ||||
| // Use type alias for Result with our custom error type
 | ||||
| type Result<T> = std::result::Result<T, AppError>; | ||||
| macro_rules! path_to_str { | ||||
|   ($path:ident) => { | ||||
|     let $path = $path.canonicalize().with_context(|| { | ||||
|       format!( | ||||
|         "failed to canonicalize path '{path}'", | ||||
|         path = $path.display(), | ||||
|       ) | ||||
|     })?; | ||||
| 
 | ||||
| const DATABASE_URL: &str = "/nix/var/nix/db/db.sqlite"; | ||||
| 
 | ||||
| const QUERY_PKGS: &str = " | ||||
| WITH RECURSIVE | ||||
| 	graph(p) AS ( | ||||
| 		SELECT id 
 | ||||
| 		FROM ValidPaths | ||||
| 		WHERE path = ? | ||||
| 	UNION | ||||
| 		SELECT reference FROM Refs | ||||
| 		JOIN graph ON referrer = p | ||||
| 	) | ||||
| SELECT id, path from graph | ||||
| JOIN ValidPaths ON id = p; | ||||
| ";
 | ||||
| 
 | ||||
| const QUERY_CLOSURE_SIZE: &str = " | ||||
| WITH RECURSIVE | ||||
| 	graph(p) AS ( | ||||
| 		SELECT id 
 | ||||
| 		FROM ValidPaths | ||||
| 		WHERE path = ? | ||||
| 	UNION | ||||
| 		SELECT reference FROM Refs | ||||
| 		JOIN graph ON referrer = p | ||||
| 	) | ||||
| SELECT SUM(narSize) as sum from graph | ||||
| JOIN ValidPaths ON p = id; | ||||
| ";
 | ||||
| 
 | ||||
| const QUERY_DEPENDENCY_GRAPH: &str = " | ||||
| WITH RECURSIVE | ||||
| 	graph(p, c) AS ( | ||||
| 		SELECT id as par, reference as chd 
 | ||||
| 		FROM ValidPaths | ||||
| 		JOIN Refs ON referrer = id | ||||
| 		WHERE path = ? | ||||
| 	UNION | ||||
| 		SELECT referrer as par, reference as chd FROM Refs | ||||
| 		JOIN graph ON referrer = c | ||||
| 	) | ||||
| SELECT p, c from graph; | ||||
| ";
 | ||||
| 
 | ||||
| /// executes a query on the nix db directly
 | ||||
| /// to gather all derivations that the derivation given by the path
 | ||||
| /// depends on
 | ||||
| ///
 | ||||
| /// The ids of the derivations in the database are returned as well, since these
 | ||||
| /// can be used to later convert nodes (represented by the the ids) of the
 | ||||
| /// dependency graph to actual paths
 | ||||
| ///
 | ||||
| /// in the future, we might wan't to switch to async
 | ||||
| pub fn get_packages(path: &std::path::Path) -> Result<Vec<(i64, String)>> { | ||||
|     // resolve symlinks and convert to a string
 | ||||
|     let p: String = path.canonicalize()?.to_string_lossy().into_owned(); | ||||
|     let conn = Connection::open(DATABASE_URL)?; | ||||
| 
 | ||||
|     let mut stmt = conn.prepare_cached(QUERY_PKGS)?; | ||||
|     let queried_pkgs: std::result::Result<Vec<(i64, String)>, _> = stmt | ||||
|         .query_map([p], |row| Ok((row.get(0)?, row.get(1)?)))? | ||||
|         .collect(); | ||||
|     Ok(queried_pkgs?) | ||||
|     let $path = $path.to_str().with_context(|| { | ||||
|       format!( | ||||
|         "failed to convert path '{path}' to valid unicode", | ||||
|         path = $path.display(), | ||||
|       ) | ||||
|     })?; | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| /// executes a query on the nix db directly
 | ||||
| /// to get the total closure size of the derivation
 | ||||
| /// by summing up the nar size of all derivations
 | ||||
| /// depending on the derivation
 | ||||
| ///
 | ||||
| /// in the future, we might wan't to switch to async
 | ||||
| pub fn get_closure_size(path: &std::path::Path) -> Result<i64> { | ||||
|     // resolve symlinks and convert to a string
 | ||||
|     let p: String = path.canonicalize()?.to_string_lossy().into_owned(); | ||||
|     let conn = Connection::open(DATABASE_URL)?; | ||||
| #[derive(Deref, Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||||
| pub struct DerivationId(i64); | ||||
| 
 | ||||
|     let mut stmt = conn.prepare_cached(QUERY_CLOSURE_SIZE)?; | ||||
|     let queried_sum = stmt.query_row([p], |row| row.get(0))?; | ||||
|     Ok(queried_sum) | ||||
| #[expect(clippy::module_name_repetitions)] | ||||
| #[derive(RefCast, Deref, Debug, PartialEq, Eq)] | ||||
| #[repr(transparent)] | ||||
| pub struct StorePath(Path); | ||||
| 
 | ||||
| #[expect(clippy::module_name_repetitions)] | ||||
| #[derive(Deref, Debug, Clone, PartialEq, Eq)] | ||||
| pub struct StorePathBuf(PathBuf); | ||||
| 
 | ||||
| /// Connects to the Nix database.
 | ||||
| pub fn connect() -> Result<Connection> { | ||||
|   const DATABASE_PATH: &str = "/nix/var/nix/db/db.sqlite"; | ||||
| 
 | ||||
|   Connection::open(DATABASE_PATH).with_context(|| { | ||||
|     format!("failed to connect to Nix database at {DATABASE_PATH}") | ||||
|   }) | ||||
| } | ||||
| 
 | ||||
| /// returns the complete dependency graph of
 | ||||
| /// of the derivation as an adjacency list. The nodes are
 | ||||
| /// represented by the DB ids
 | ||||
| /// Gathers all derivations that the given store path depends on.
 | ||||
| pub fn query_depdendents( | ||||
|   connection: &mut Connection, | ||||
|   path: &StorePath, | ||||
| ) -> Result<Vec<(DerivationId, StorePathBuf)>> { | ||||
|   const QUERY: &str = " | ||||
|     WITH RECURSIVE | ||||
|       graph(p) AS ( | ||||
|         SELECT id 
 | ||||
|         FROM ValidPaths | ||||
|         WHERE path = ? | ||||
|       UNION | ||||
|         SELECT reference FROM Refs | ||||
|         JOIN graph ON referrer = p | ||||
|       ) | ||||
|     SELECT id, path from graph | ||||
|     JOIN ValidPaths ON id = p; | ||||
|   ";
 | ||||
| 
 | ||||
|   path_to_str!(path); | ||||
| 
 | ||||
|   let packages: result::Result<Vec<(DerivationId, StorePathBuf)>, _> = | ||||
|     connection | ||||
|       .prepare_cached(QUERY)? | ||||
|       .query_map([path], |row| { | ||||
|         Ok(( | ||||
|           DerivationId(row.get(0)?), | ||||
|           StorePathBuf(row.get::<_, String>(1)?.into()), | ||||
|         )) | ||||
|       })? | ||||
|       .collect(); | ||||
| 
 | ||||
|   Ok(packages?) | ||||
| } | ||||
| 
 | ||||
| /// Gets the total closure size of the given store path by summing up the nar
 | ||||
| /// size of all depdendent derivations.
 | ||||
| pub fn query_closure_size( | ||||
|   connection: &mut Connection, | ||||
|   path: &StorePath, | ||||
| ) -> Result<usize> { | ||||
|   const QUERY: &str = " | ||||
|     WITH RECURSIVE | ||||
|       graph(p) AS ( | ||||
|         SELECT id 
 | ||||
|         FROM ValidPaths | ||||
|         WHERE path = ? | ||||
|       UNION | ||||
|         SELECT reference FROM Refs | ||||
|         JOIN graph ON referrer = p | ||||
|       ) | ||||
|     SELECT SUM(narSize) as sum from graph | ||||
|     JOIN ValidPaths ON p = id; | ||||
|   ";
 | ||||
| 
 | ||||
|   path_to_str!(path); | ||||
| 
 | ||||
|   let closure_size = connection | ||||
|     .prepare_cached(QUERY)? | ||||
|     .query_row([path], |row| row.get(0))?; | ||||
| 
 | ||||
|   Ok(closure_size) | ||||
| } | ||||
| 
 | ||||
| /// Gathers the complete dependency graph of of the store path as an adjacency
 | ||||
| /// list.
 | ||||
| ///
 | ||||
| /// We might want to collect the paths in the graph directly as
 | ||||
| /// well in the future, depending on how much we use them
 | ||||
| /// in the operations on the graph
 | ||||
| ///
 | ||||
| /// The mapping from id to graph can be obtained by using [``get_packages``]
 | ||||
| pub fn get_dependency_graph(path: &std::path::Path) -> Result<HashMap<i64, Vec<i64>>> { | ||||
|     // resolve symlinks and convert to a string
 | ||||
|     let p: String = path.canonicalize()?.to_string_lossy().into_owned(); | ||||
|     let conn = Connection::open(DATABASE_URL)?; | ||||
| /// in the operations on the graph.
 | ||||
| pub fn query_dependency_graph( | ||||
|   connection: &mut Connection, | ||||
|   path: &StorePath, | ||||
| ) -> Result<FxHashMap<DerivationId, Vec<DerivationId>>> { | ||||
|   const QUERY: &str = " | ||||
|     WITH RECURSIVE | ||||
|       graph(p, c) AS ( | ||||
|         SELECT id as par, reference as chd 
 | ||||
|         FROM ValidPaths | ||||
|         JOIN Refs ON referrer = id | ||||
|         WHERE path = ? | ||||
|       UNION | ||||
|         SELECT referrer as par, reference as chd FROM Refs | ||||
|         JOIN graph ON referrer = c | ||||
|       ) | ||||
|     SELECT p, c from graph; | ||||
|   ";
 | ||||
| 
 | ||||
|     let mut stmt = conn.prepare_cached(QUERY_DEPENDENCY_GRAPH)?; | ||||
|     let mut adj = HashMap::<i64, Vec<i64>>::new(); | ||||
|     let queried_edges = | ||||
|         stmt.query_map([p], |row| Ok::<(i64, i64), _>((row.get(0)?, row.get(1)?)))?; | ||||
|     for row in queried_edges { | ||||
|         let (from, to) = row?; | ||||
|         adj.entry(from).or_default().push(to); | ||||
|         adj.entry(to).or_default(); | ||||
|     } | ||||
|   path_to_str!(path); | ||||
| 
 | ||||
|     Ok(adj) | ||||
|   let mut adj = | ||||
|     FxHashMap::<DerivationId, Vec<DerivationId>>::with_hasher(FxBuildHasher); | ||||
| 
 | ||||
|   let mut statement = connection.prepare_cached(QUERY)?; | ||||
| 
 | ||||
|   let edges = statement.query_map([path], |row| { | ||||
|     Ok((DerivationId(row.get(0)?), DerivationId(row.get(1)?))) | ||||
|   })?; | ||||
| 
 | ||||
|   for row in edges { | ||||
|     let (from, to) = row?; | ||||
| 
 | ||||
|     adj.entry(from).or_default().push(to); | ||||
|     adj.entry(to).or_default(); | ||||
|   } | ||||
| 
 | ||||
|   Ok(adj) | ||||
| } | ||||
|  |  | |||
							
								
								
									
										347
									
								
								src/util.rs
									
										
									
									
									
								
							
							
						
						
									
										347
									
								
								src/util.rs
									
										
									
									
									
								
							|  | @ -1,13 +1,17 @@ | |||
| use std::{ | ||||
|     cmp::Ordering, | ||||
|     collections::{HashMap, HashSet}, | ||||
|     sync::OnceLock, | ||||
|   cmp::Ordering, | ||||
|   collections::{ | ||||
|     HashMap, | ||||
|     HashSet, | ||||
|   }, | ||||
|   sync::OnceLock, | ||||
| }; | ||||
| 
 | ||||
| use crate::error::AppError; | ||||
| use log::debug; | ||||
| use regex::Regex; | ||||
| 
 | ||||
| use crate::error::AppError; | ||||
| 
 | ||||
| // Use type alias for Result with our custom error type
 | ||||
| type Result<T> = std::result::Result<T, AppError>; | ||||
| 
 | ||||
|  | @ -15,81 +19,87 @@ use std::string::ToString; | |||
| 
 | ||||
| #[derive(Eq, PartialEq, Debug)] | ||||
| enum VersionComponent { | ||||
|     Number(u64), | ||||
|     Text(String), | ||||
|   Number(u64), | ||||
|   Text(String), | ||||
| } | ||||
| 
 | ||||
| impl std::cmp::Ord for VersionComponent { | ||||
|     fn cmp(&self, other: &Self) -> Ordering { | ||||
|         use VersionComponent::{Number, Text}; | ||||
|         match (self, other) { | ||||
|             (Number(x), Number(y)) => x.cmp(y), | ||||
|             (Text(x), Text(y)) => match (x.as_str(), y.as_str()) { | ||||
|                 ("pre", _) => Ordering::Less, | ||||
|                 (_, "pre") => Ordering::Greater, | ||||
|                 _ => x.cmp(y), | ||||
|             }, | ||||
|             (Text(_), Number(_)) => Ordering::Less, | ||||
|             (Number(_), Text(_)) => Ordering::Greater, | ||||
|   fn cmp(&self, other: &Self) -> Ordering { | ||||
|     use VersionComponent::{ | ||||
|       Number, | ||||
|       Text, | ||||
|     }; | ||||
|     match (self, other) { | ||||
|       (Number(x), Number(y)) => x.cmp(y), | ||||
|       (Text(x), Text(y)) => { | ||||
|         match (x.as_str(), y.as_str()) { | ||||
|           ("pre", _) => Ordering::Less, | ||||
|           (_, "pre") => Ordering::Greater, | ||||
|           _ => x.cmp(y), | ||||
|         } | ||||
|       }, | ||||
|       (Text(_), Number(_)) => Ordering::Less, | ||||
|       (Number(_), Text(_)) => Ordering::Greater, | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| impl PartialOrd for VersionComponent { | ||||
|     fn partial_cmp(&self, other: &Self) -> Option<Ordering> { | ||||
|         Some(self.cmp(other)) | ||||
|     } | ||||
|   fn partial_cmp(&self, other: &Self) -> Option<Ordering> { | ||||
|     Some(self.cmp(other)) | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| // takes a version string and outputs the different components
 | ||||
| //
 | ||||
| // a component is delimited by '-' or '.' and consists of just digits or letters
 | ||||
| struct VersionComponentIterator<'a> { | ||||
|     v: &'a [u8], | ||||
|     pos: usize, | ||||
|   v:   &'a [u8], | ||||
|   pos: usize, | ||||
| } | ||||
| 
 | ||||
| impl<'a> VersionComponentIterator<'a> { | ||||
|     pub fn new<I: Into<&'a str>>(v: I) -> Self { | ||||
|         Self { | ||||
|             v: v.into().as_bytes(), | ||||
|             pos: 0, | ||||
|         } | ||||
|   pub fn new<I: Into<&'a str>>(v: I) -> Self { | ||||
|     Self { | ||||
|       v:   v.into().as_bytes(), | ||||
|       pos: 0, | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| impl Iterator for VersionComponentIterator<'_> { | ||||
|     type Item = VersionComponent; | ||||
|   type Item = VersionComponent; | ||||
| 
 | ||||
|     fn next(&mut self) -> Option<Self::Item> { | ||||
|         // skip all '-' and '.' in the beginning
 | ||||
|         while let Some(b'.' | b'-') = self.v.get(self.pos) { | ||||
|             self.pos += 1; | ||||
|         } | ||||
| 
 | ||||
|         // get the next character and decide if it is a digit or char
 | ||||
|         let c = self.v.get(self.pos)?; | ||||
|         let is_digit = c.is_ascii_digit(); | ||||
|         // based on this collect characters after this into the component
 | ||||
|         let component_len = self.v[self.pos..] | ||||
|             .iter() | ||||
|             .copied() | ||||
|             .take_while(|&c| c.is_ascii_digit() == is_digit && c != b'.' && c != b'-') | ||||
|             .count(); | ||||
|         let component = | ||||
|             String::from_utf8_lossy(&self.v[self.pos..(self.pos + component_len)]).into_owned(); | ||||
| 
 | ||||
|         // remember what chars we used
 | ||||
|         self.pos += component_len; | ||||
| 
 | ||||
|         if component.is_empty() { | ||||
|             None | ||||
|         } else if is_digit { | ||||
|             component.parse::<u64>().ok().map(VersionComponent::Number) | ||||
|         } else { | ||||
|             Some(VersionComponent::Text(component)) | ||||
|         } | ||||
|   fn next(&mut self) -> Option<Self::Item> { | ||||
|     // skip all '-' and '.' in the beginning
 | ||||
|     while let Some(b'.' | b'-') = self.v.get(self.pos) { | ||||
|       self.pos += 1; | ||||
|     } | ||||
| 
 | ||||
|     // get the next character and decide if it is a digit or char
 | ||||
|     let c = self.v.get(self.pos)?; | ||||
|     let is_digit = c.is_ascii_digit(); | ||||
|     // based on this collect characters after this into the component
 | ||||
|     let component_len = self.v[self.pos..] | ||||
|       .iter() | ||||
|       .copied() | ||||
|       .take_while(|&c| c.is_ascii_digit() == is_digit && c != b'.' && c != b'-') | ||||
|       .count(); | ||||
|     let component = | ||||
|       String::from_utf8_lossy(&self.v[self.pos..(self.pos + component_len)]) | ||||
|         .into_owned(); | ||||
| 
 | ||||
|     // remember what chars we used
 | ||||
|     self.pos += component_len; | ||||
| 
 | ||||
|     if component.is_empty() { | ||||
|       None | ||||
|     } else if is_digit { | ||||
|       component.parse::<u64>().ok().map(VersionComponent::Number) | ||||
|     } else { | ||||
|       Some(VersionComponent::Text(component)) | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| /// Compares two strings of package versions, and figures out the greater one.
 | ||||
|  | @ -98,149 +108,154 @@ impl Iterator for VersionComponentIterator<'_> { | |||
| ///
 | ||||
| /// * Ordering
 | ||||
| pub fn compare_versions(a: &str, b: &str) -> Ordering { | ||||
|     let iter_a = VersionComponentIterator::new(a); | ||||
|     let iter_b = VersionComponentIterator::new(b); | ||||
|   let iter_a = VersionComponentIterator::new(a); | ||||
|   let iter_b = VersionComponentIterator::new(b); | ||||
| 
 | ||||
|     iter_a.cmp(iter_b) | ||||
|   iter_a.cmp(iter_b) | ||||
| } | ||||
| 
 | ||||
| /// Parses a nix store path to extract the packages name and version
 | ||||
| ///
 | ||||
| /// This function first drops the inputs first 44 chars, since that is exactly the length of the /nix/store/... prefix. Then it matches that against our store path regex.
 | ||||
| /// This function first drops the inputs first 44 chars, since that is exactly
 | ||||
| /// the length of the /nix/store/... prefix. Then it matches that against our
 | ||||
| /// store path regex.
 | ||||
| ///
 | ||||
| /// # Returns
 | ||||
| ///
 | ||||
| /// * Result<(&'a str, &'a str)> - The Package's name and version, or an error if
 | ||||
| ///   one or both cannot be retrieved.
 | ||||
| /// * Result<(&'a str, &'a str)> - The Package's name and version, or an error
 | ||||
| ///   if one or both cannot be retrieved.
 | ||||
| pub fn get_version<'a>(pack: impl Into<&'a str>) -> Result<(&'a str, &'a str)> { | ||||
|     let path = pack.into(); | ||||
|   let path = pack.into(); | ||||
| 
 | ||||
|     // We can strip the path since it _always_ follows the format
 | ||||
|     // /nix/store/<...>-<program_name>-......
 | ||||
|     // This part is exactly 44 chars long, so we just remove it.
 | ||||
|     let stripped_path = &path[44..]; | ||||
|     debug!("Stripped path: {stripped_path}"); | ||||
|   // We can strip the path since it _always_ follows the format
 | ||||
|   // /nix/store/<...>-<program_name>-......
 | ||||
|   // This part is exactly 44 chars long, so we just remove it.
 | ||||
|   let stripped_path = &path[44..]; | ||||
|   debug!("Stripped path: {stripped_path}"); | ||||
| 
 | ||||
|     // Match the regex against the input
 | ||||
|     if let Some(cap) = store_path_regex().captures(stripped_path) { | ||||
|         // Handle potential missing captures safely
 | ||||
|         let name = cap.get(1).map_or("", |m| m.as_str()); | ||||
|         let mut version = cap.get(2).map_or("<none>", |m| m.as_str()); | ||||
|   // Match the regex against the input
 | ||||
|   if let Some(cap) = store_path_regex().captures(stripped_path) { | ||||
|     // Handle potential missing captures safely
 | ||||
|     let name = cap.get(1).map_or("", |m| m.as_str()); | ||||
|     let mut version = cap.get(2).map_or("<none>", |m| m.as_str()); | ||||
| 
 | ||||
|         if version.starts_with('-') { | ||||
|             version = &version[1..]; | ||||
|         } | ||||
| 
 | ||||
|         if name.is_empty() { | ||||
|             return Err(AppError::ParseError { | ||||
|                 message: format!("Failed to extract name from path: {path}"), | ||||
|                 context: "get_version".to_string(), | ||||
|                 source: None, | ||||
|             }); | ||||
|         } | ||||
| 
 | ||||
|         return Ok((name, version)); | ||||
|     if version.starts_with('-') { | ||||
|       version = &version[1..]; | ||||
|     } | ||||
| 
 | ||||
|     Err(AppError::ParseError { | ||||
|         message: format!("Path does not match expected nix store format: {path}"), | ||||
|     if name.is_empty() { | ||||
|       return Err(AppError::ParseError { | ||||
|         message: format!("Failed to extract name from path: {path}"), | ||||
|         context: "get_version".to_string(), | ||||
|         source: None, | ||||
|     }) | ||||
|         source:  None, | ||||
|       }); | ||||
|     } | ||||
| 
 | ||||
|     return Ok((name, version)); | ||||
|   } | ||||
| 
 | ||||
|   Err(AppError::ParseError { | ||||
|     message: format!("Path does not match expected nix store format: {path}"), | ||||
|     context: "get_version".to_string(), | ||||
|     source:  None, | ||||
|   }) | ||||
| } | ||||
| 
 | ||||
| // Returns a reference to the compiled regex pattern.
 | ||||
| // The regex is compiled only once.
 | ||||
| pub fn store_path_regex() -> &'static Regex { | ||||
|     static REGEX: OnceLock<Regex> = OnceLock::new(); | ||||
|     REGEX.get_or_init(|| { | ||||
|         Regex::new(r"(.+?)(-([0-9].*?))?$") | ||||
|             .expect("Failed to compile regex pattern for nix store paths") | ||||
|     }) | ||||
|   static REGEX: OnceLock<Regex> = OnceLock::new(); | ||||
|   REGEX.get_or_init(|| { | ||||
|     Regex::new(r"(.+?)(-([0-9].*?))?$") | ||||
|       .expect("Failed to compile regex pattern for nix store paths") | ||||
|   }) | ||||
| } | ||||
| 
 | ||||
| // TODO: move this somewhere else, this does not really
 | ||||
| // belong into this file
 | ||||
| pub struct PackageDiff<'a> { | ||||
|     pub pkg_to_versions_pre: HashMap<&'a str, HashSet<&'a str>>, | ||||
|     pub pkg_to_versions_post: HashMap<&'a str, HashSet<&'a str>>, | ||||
|     pub pre_keys: HashSet<&'a str>, | ||||
|     pub post_keys: HashSet<&'a str>, | ||||
|     pub added: HashSet<&'a str>, | ||||
|     pub removed: HashSet<&'a str>, | ||||
|     pub changed: HashSet<&'a str>, | ||||
|   pub pkg_to_versions_pre:  HashMap<&'a str, HashSet<&'a str>>, | ||||
|   pub pkg_to_versions_post: HashMap<&'a str, HashSet<&'a str>>, | ||||
|   pub pre_keys:             HashSet<&'a str>, | ||||
|   pub post_keys:            HashSet<&'a str>, | ||||
|   pub added:                HashSet<&'a str>, | ||||
|   pub removed:              HashSet<&'a str>, | ||||
|   pub changed:              HashSet<&'a str>, | ||||
| } | ||||
| 
 | ||||
| impl<'a> PackageDiff<'a> { | ||||
|     pub fn new<S: AsRef<str> + 'a>(pkgs_pre: &'a [S], pkgs_post: &'a [S]) -> Self { | ||||
|         // Map from packages of the first closure to their version
 | ||||
|         let mut pre = HashMap::<&str, HashSet<&str>>::new(); | ||||
|         let mut post = HashMap::<&str, HashSet<&str>>::new(); | ||||
|   pub fn new<S: AsRef<str> + 'a>( | ||||
|     pkgs_pre: &'a [S], | ||||
|     pkgs_post: &'a [S], | ||||
|   ) -> Self { | ||||
|     // Map from packages of the first closure to their version
 | ||||
|     let mut pre = HashMap::<&str, HashSet<&str>>::new(); | ||||
|     let mut post = HashMap::<&str, HashSet<&str>>::new(); | ||||
| 
 | ||||
|         for p in pkgs_pre { | ||||
|             match get_version(p.as_ref()) { | ||||
|                 Ok((name, version)) => { | ||||
|                     pre.entry(name).or_default().insert(version); | ||||
|                 } | ||||
|                 Err(e) => { | ||||
|                     debug!("Error parsing package version: {e}"); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         for p in pkgs_post { | ||||
|             match get_version(p.as_ref()) { | ||||
|                 Ok((name, version)) => { | ||||
|                     post.entry(name).or_default().insert(version); | ||||
|                 } | ||||
|                 Err(e) => { | ||||
|                     debug!("Error parsing package version: {e}"); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         // Compare the package names of both versions
 | ||||
|         let pre_keys: HashSet<&str> = pre.keys().copied().collect(); | ||||
|         let post_keys: HashSet<&str> = post.keys().copied().collect(); | ||||
| 
 | ||||
|         // Difference gives us added and removed packages
 | ||||
|         let added: HashSet<&str> = &post_keys - &pre_keys; | ||||
| 
 | ||||
|         let removed: HashSet<&str> = &pre_keys - &post_keys; | ||||
|         // Get the intersection of the package names for version changes
 | ||||
|         let changed: HashSet<&str> = &pre_keys & &post_keys; | ||||
|         Self { | ||||
|             pkg_to_versions_pre: pre, | ||||
|             pkg_to_versions_post: post, | ||||
|             pre_keys, | ||||
|             post_keys, | ||||
|             added, | ||||
|             removed, | ||||
|             changed, | ||||
|         } | ||||
|     for p in pkgs_pre { | ||||
|       match get_version(p.as_ref()) { | ||||
|         Ok((name, version)) => { | ||||
|           pre.entry(name).or_default().insert(version); | ||||
|         }, | ||||
|         Err(e) => { | ||||
|           debug!("Error parsing package version: {e}"); | ||||
|         }, | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
|     for p in pkgs_post { | ||||
|       match get_version(p.as_ref()) { | ||||
|         Ok((name, version)) => { | ||||
|           post.entry(name).or_default().insert(version); | ||||
|         }, | ||||
|         Err(e) => { | ||||
|           debug!("Error parsing package version: {e}"); | ||||
|         }, | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
|     // Compare the package names of both versions
 | ||||
|     let pre_keys: HashSet<&str> = pre.keys().copied().collect(); | ||||
|     let post_keys: HashSet<&str> = post.keys().copied().collect(); | ||||
| 
 | ||||
|     // Difference gives us added and removed packages
 | ||||
|     let added: HashSet<&str> = &post_keys - &pre_keys; | ||||
| 
 | ||||
|     let removed: HashSet<&str> = &pre_keys - &post_keys; | ||||
|     // Get the intersection of the package names for version changes
 | ||||
|     let changed: HashSet<&str> = &pre_keys & &post_keys; | ||||
|     Self { | ||||
|       pkg_to_versions_pre: pre, | ||||
|       pkg_to_versions_post: post, | ||||
|       pre_keys, | ||||
|       post_keys, | ||||
|       added, | ||||
|       removed, | ||||
|       changed, | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| mod test { | ||||
| 
 | ||||
|     #[test] | ||||
|     fn test_version_component_iter() { | ||||
|         use super::VersionComponent::{Number, Text}; | ||||
|         use crate::util::VersionComponentIterator; | ||||
|         let v = "132.1.2test234-1-man----.--.......---------..---"; | ||||
|   #[test] | ||||
|   fn test_version_component_iter() { | ||||
|     use super::VersionComponent::{ | ||||
|       Number, | ||||
|       Text, | ||||
|     }; | ||||
|     use crate::util::VersionComponentIterator; | ||||
|     let v = "132.1.2test234-1-man----.--.......---------..---"; | ||||
| 
 | ||||
|         let comp: Vec<_> = VersionComponentIterator::new(v).collect(); | ||||
|         assert_eq!( | ||||
|             comp, | ||||
|             [ | ||||
|                 Number(132), | ||||
|                 Number(1), | ||||
|                 Number(2), | ||||
|                 Text("test".into()), | ||||
|                 Number(234), | ||||
|                 Number(1), | ||||
|                 Text("man".into()) | ||||
|             ] | ||||
|         ); | ||||
|     } | ||||
|     let comp: Vec<_> = VersionComponentIterator::new(v).collect(); | ||||
|     assert_eq!(comp, [ | ||||
|       Number(132), | ||||
|       Number(1), | ||||
|       Number(2), | ||||
|       Text("test".into()), | ||||
|       Number(234), | ||||
|       Number(1), | ||||
|       Text("man".into()) | ||||
|     ]); | ||||
|   } | ||||
| } | ||||
|  |  | |||
		Loading…
	
	Add table
		Add a link
		
	
		Reference in a new issue
	
	 bloxx12
							bloxx12