Mirror of https://github.com/RGBCube/dix, synced 2025-10-31 00:42:44 +00:00

feat: refactor store.rs

parent 531fa0278f
commit db09147da6

14 changed files with 1168 additions and 839 deletions
							
								
								
									
.gitignore | 7 (vendored)

@@ -1,9 +1,2 @@
 /.direnv
 /target
-
-
-# Added by cargo
-#
-# already existing elements were commented out
-
-#/target
							
								
								
									
.rustfmt.toml | 30 (new file)

@@ -0,0 +1,30 @@
+# Taken from https://github.com/cull-os/carcass.
+# Modified to have 2 space indents and 80 line width.
+
+# float_literal_trailing_zero  = "Always" # TODO: Warning for some reason?
+condense_wildcard_suffixes   = true
+doc_comment_code_block_width = 80
+edition                      = "2024"             # Keep in sync with Cargo.toml.
+enum_discrim_align_threshold = 60
+force_explicit_abi           = false
+force_multiline_blocks       = true
+format_code_in_doc_comments  = true
+format_macro_matchers        = true
+format_strings               = true
+group_imports                = "StdExternalCrate"
+hex_literal_case             = "Upper"
+imports_granularity          = "Crate"
+imports_layout               = "Vertical"
+inline_attribute_width       = 60
+match_block_trailing_comma   = true
+max_width                    = 80
+newline_style                = "Unix"
+normalize_comments           = true
+normalize_doc_attributes     = true
+overflow_delimited_expr      = true
+struct_field_align_threshold = 60
+tab_spaces                   = 2
+unstable_features            = true
+use_field_init_shorthand     = true
+use_try_shorthand            = true
+wrap_comments                = true
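For orientation, here is a small hand-written sketch (not part of the commit) of what Rust code looks like under these settings: group_imports = "StdExternalCrate" together with imports_granularity = "Crate" and imports_layout = "Vertical" yields one import item per line, grouped std / external / crate; tab_spaces = 2 gives two-space indents; and struct_field_align_threshold aligns field types. The Example struct and main function below are hypothetical.

// Hypothetical snippet, formatted roughly the way the .rustfmt.toml above dictates.
use std::{
  collections::HashMap,
  path::PathBuf,
};

use regex::Regex;

struct Example {
  name:     String,
  versions: HashMap<String, PathBuf>,
}

fn main() {
  let example = Example {
    name:     "dix".to_owned(),
    versions: HashMap::new(),
  };
  let digits = Regex::new(r"\d+").expect("valid regex");
  println!(
    "{} tracks {} paths, version-like: {}",
    example.name,
    example.versions.len(),
    digits.is_match("0.1.0")
  );
}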
							
								
								
									
.taplo.toml | 15 (new file)

@@ -0,0 +1,15 @@
+# Taken from https://github.com/cull-os/carcass.
+
+[formatting]
+align_entries         = true
+column_width          = 100
+compact_arrays        = false
+reorder_inline_tables = true
+reorder_keys          = true
+
+[[rule]]
+include = [ "**/Cargo.toml" ]
+keys    = [ "package" ]
+
+[rule.formatting]
+reorder_keys = false
							
								
								
									
Cargo.lock | 79 (generated)

@@ -61,6 +61,12 @@ dependencies = [
  "windows-sys",
 ]
 
+[[package]]
+name = "anyhow"
+version = "1.0.98"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
+
 [[package]]
 name = "atty"
 version = "0.2.14"
@@ -174,6 +180,15 @@ version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
 
+[[package]]
+name = "convert_case"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
+dependencies = [
+ "unicode-segmentation",
+]
+
 [[package]]
 name = "criterion"
 version = "0.3.6"
@@ -256,6 +271,28 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "derive_more"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
+dependencies = [
+ "derive_more-impl",
+]
+
+[[package]]
+name = "derive_more-impl"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
+dependencies = [
+ "convert_case",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
+]
+
 [[package]]
 name = "diff"
 version = "0.1.13"
@@ -266,14 +303,18 @@ checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
 name = "dix"
 version = "0.1.0"
 dependencies = [
+ "anyhow",
  "clap 4.5.37",
  "criterion",
+ "derive_more",
  "diff",
  "env_logger",
  "libc",
  "log",
+ "ref-cast",
  "regex",
  "rusqlite",
+ "rustc-hash",
  "thiserror",
  "yansi",
 ]
@@ -562,6 +603,26 @@ dependencies = [
  "crossbeam-utils",
 ]
 
+[[package]]
+name = "ref-cast"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf"
+dependencies = [
+ "ref-cast-impl",
+]
+
+[[package]]
+name = "ref-cast-impl"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "regex"
 version = "1.11.1"
@@ -605,6 +666,12 @@ dependencies = [
 "smallvec",
 ]
 
+[[package]]
+name = "rustc-hash"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+
 [[package]]
 name = "rustversion"
 version = "1.0.20"
@@ -742,12 +809,24 @@ version = "1.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
 
+[[package]]
+name = "unicode-segmentation"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
+
 [[package]]
 name = "unicode-width"
 version = "0.1.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
 
+[[package]]
+name = "unicode-xid"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
+
 [[package]]
 name = "utf8parse"
 version = "0.2.2"
							
								
								
									
Cargo.toml | 128

@@ -1,39 +1,119 @@
 [package]
-name = "dix"
+name    = "dix"
 version = "0.1.0"
 edition = "2024"
 
-[[bin]]
-name = "dix"
-path = "src/main.rs"
-
-[lib]
-name = "dixlib"
-path = "src/lib.rs"
-
-
 [dependencies]
-clap = { version = "4.5.37", features = ["derive"] }
-regex = "1.11.1"
-yansi = "1.0.1"
-thiserror = "2.0.12"
-log = "0.4.20"
+anyhow     = "1.0.98"
+clap       = { version = "4.5.37", features = [ "derive" ] }
+derive_more = { version = "2.0.1", features = ["full"] }
+diff       = "0.1.13"
 env_logger = "0.11.3"
-rusqlite = { version = "0.35.0", features = ["bundled"] }
-diff = "0.1.13"
+log        = "0.4.20"
+ref-cast = "1.0.24"
+regex      = "1.11.1"
+rusqlite   = { version = "0.35.0", features = [ "bundled" ] }
+rustc-hash = "2.1.1"
+thiserror  = "2.0.12"
+yansi      = "1.0.1"
 
 [dev-dependencies]
 criterion = "0.3"
-libc = "0.2"
+libc      = "0.2"
 
 [[bench]]
-name = "store"
-harness=false
+harness = false
+name    = "store"
 
 [[bench]]
-name = "print"
-harness=false
+harness = false
+name    = "print"
 
 [[bench]]
-name = "util"
-harness=false
+harness = false
+name    = "util"
+
+[lints.clippy]
+pedantic = { level = "warn", priority = -1 }
+
+blanket_clippy_restriction_lints = "allow"
+restriction                      = { level = "warn", priority = -1 }
+
+alloc_instead_of_core             = "allow"
+allow_attributes_without_reason   = "allow"
+arbitrary_source_item_ordering    = "allow"
+arithmetic_side_effects           = "allow"
+as_conversions                    = "allow"
+as_pointer_underscore             = "allow"
+as_underscore                     = "allow"
+big_endian_bytes                  = "allow"
+clone_on_ref_ptr                  = "allow"
+dbg_macro                         = "allow"
+disallowed_script_idents          = "allow"
+else_if_without_else              = "allow"
+error_impl_error                  = "allow"
+exhaustive_enums                  = "allow"
+exhaustive_structs                = "allow"
+expect_used                       = "allow"
+field_scoped_visibility_modifiers = "allow"
+float_arithmetic                  = "allow"
+host_endian_bytes                 = "allow"
+impl_trait_in_params              = "allow"
+implicit_return                   = "allow"
+indexing_slicing                  = "allow"
+inline_asm_x86_intel_syntax       = "allow"
+integer_division                  = "allow"
+integer_division_remainder_used   = "allow"
+large_include_file                = "allow"
+let_underscore_must_use           = "allow"
+let_underscore_untyped            = "allow"
+little_endian_bytes               = "allow"
+map_err_ignore                    = "allow"
+match_same_arms                   = "allow"
+missing_assert_message            = "allow"
+missing_docs_in_private_items     = "allow"
+missing_errors_doc                = "allow"
+missing_inline_in_public_items    = "allow"
+missing_panics_doc                = "allow"
+missing_trait_methods             = "allow"
+mod_module_files                  = "allow"
+multiple_inherent_impl            = "allow"
+mutex_atomic                      = "allow"
+mutex_integer                     = "allow"
+new_without_default               = "allow"
+non_ascii_literal                 = "allow"
+panic                             = "allow"
+panic_in_result_fn                = "allow"
+partial_pub_fields                = "allow"
+print_stderr                      = "allow"
+print_stdout                      = "allow"
+pub_use                           = "allow"
+pub_with_shorthand                = "allow"
+pub_without_shorthand             = "allow"
+question_mark_used                = "allow"
+ref_patterns                      = "allow"
+renamed_function_params           = "allow"
+same_name_method                  = "allow"
+semicolon_outside_block           = "allow"
+separated_literal_suffix          = "allow"
+shadow_reuse                      = "allow"
+shadow_same                       = "allow"
+shadow_unrelated                  = "allow"
+single_call_fn                    = "allow"
+single_char_lifetime_names        = "allow"
+single_match_else                 = "allow"
+std_instead_of_alloc              = "allow"
+std_instead_of_core               = "allow"
+string_add                        = "allow"
+string_slice                      = "allow"
+todo                              = "allow"
+too_many_lines                    = "allow"
+try_err                           = "allow"
+unimplemented                     = "allow"
+unnecessary_safety_comment        = "allow"
+unnecessary_safety_doc            = "allow"
+unreachable                       = "allow"
+unwrap_in_result                  = "allow"
+unwrap_used                       = "allow"
+use_debug                         = "allow"
+wildcard_enum_match_arm           = "allow"
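A note on the new [lints.clippy] table (supported by Cargo since 1.74): pedantic and restriction are enabled at warn level with priority = -1, which lets the individual allow entries after them take precedence, so the net effect is pedantic plus a hand-picked subset of restriction lints. Attribute-level overrides in the source still win over all of this; the function below is a hypothetical illustration, not code from the commit.

// Hypothetical example of overriding the Cargo.toml lint levels per item.
// `clippy::unwrap_used` is allowed crate-wide by the table above, but it can
// be re-enabled locally where an unwrap would be a real bug.
#[warn(clippy::unwrap_used)]
fn parse_port(raw: &str) -> u16 {
  raw.parse().unwrap_or(8080)
}

fn main() {
  println!("port = {}", parse_port("1234"));
}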
benches/common.rs

@@ -1,89 +1,94 @@
 use std::{
-    env,
-    fs::{self, DirEntry},
-    path::PathBuf,
-    sync::OnceLock,
+  env,
+  fs,
+  path::PathBuf,
+  sync::OnceLock,
 };
 
-use dixlib::{store, util::PackageDiff};
+use dix::{
+  store,
+  util::PackageDiff,
+};
 
 /// tries to get the path of the oldest nixos system derivation
 /// this function is pretty hacky and only used so that
 /// you don't have to specify a specific derivation to
 /// run the benchmarks
 fn get_oldest_nixos_system() -> Option<PathBuf> {
-    let profile_dir = fs::read_dir("/nix/var/nix/profiles").ok()?;
+  let profile_dir = fs::read_dir("/nix/var/nix/profiles").ok()?;
 
-    let files = profile_dir.filter_map(Result::ok).filter_map(|entry| {
-        entry
-            .file_type()
-            .ok()
-            .and_then(|f| f.is_symlink().then_some(entry.path()))
-    });
+  let files = profile_dir.filter_map(Result::ok).filter_map(|entry| {
+    entry
+      .file_type()
+      .ok()
+      .and_then(|f| f.is_symlink().then_some(entry.path()))
+  });
 
-    files.min_by_key(|path| {
-        // extract all digits from the file name and use that as key
-        let p = path.as_os_str().to_str().unwrap_or_default();
-        let digits: String = p.chars().filter(|c| c.is_ascii_digit()).collect();
-        // if we are not able to produce a key (e.g. because the path does not contain digits)
-        // we put it last
-        digits.parse::<u32>().unwrap_or(u32::MAX)
-    })
+  files.min_by_key(|path| {
+    // extract all digits from the file name and use that as key
+    let p = path.as_os_str().to_str().unwrap_or_default();
+    let digits: String = p.chars().filter(|c| c.is_ascii_digit()).collect();
+    // if we are not able to produce a key (e.g. because the path does not
+    // contain digits) we put it last
+    digits.parse::<u32>().unwrap_or(u32::MAX)
+  })
 }
 
 pub fn get_deriv_query() -> &'static PathBuf {
-    static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new();
-    _QUERY_DERIV.get_or_init(|| {
-        let path = PathBuf::from(
-            env::var("DIX_BENCH_NEW_SYSTEM")
-                .unwrap_or_else(|_| "/run/current-system/system".into()),
-        );
-        path
-    })
+  static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new();
+  _QUERY_DERIV.get_or_init(|| {
+    let path = PathBuf::from(
+      env::var("DIX_BENCH_NEW_SYSTEM")
+        .unwrap_or_else(|_| "/run/current-system/system".into()),
+    );
+    path
+  })
 }
 pub fn get_deriv_query_old() -> &'static PathBuf {
-    static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new();
-    _QUERY_DERIV.get_or_init(|| {
-        let path = env::var("DIX_BENCH_OLD_SYSTEM")
-            .ok()
-            .map(PathBuf::from)
-            .or(get_oldest_nixos_system())
-            .unwrap_or_else(|| PathBuf::from("/run/current-system/system"));
-        path
-    })
+  static _QUERY_DERIV: OnceLock<PathBuf> = OnceLock::new();
+  _QUERY_DERIV.get_or_init(|| {
+    let path = env::var("DIX_BENCH_OLD_SYSTEM")
+      .ok()
+      .map(PathBuf::from)
+      .or(get_oldest_nixos_system())
+      .unwrap_or_else(|| PathBuf::from("/run/current-system/system"));
+    path
+  })
 }
 
 pub fn get_packages() -> &'static (Vec<String>, Vec<String>) {
-    static _PKGS: OnceLock<(Vec<String>, Vec<String>)> = OnceLock::new();
-    _PKGS.get_or_init(|| {
-        let pkgs_before = store::get_packages(std::path::Path::new(get_deriv_query_old()))
-            .unwrap()
-            .into_iter()
-            .map(|(_, name)| name)
-            .collect::<Vec<String>>();
-        let pkgs_after = store::get_packages(std::path::Path::new(get_deriv_query()))
-            .unwrap()
-            .into_iter()
-            .map(|(_, name)| name)
-            .collect::<Vec<String>>();
-        (pkgs_before, pkgs_after)
-    })
+  static _PKGS: OnceLock<(Vec<String>, Vec<String>)> = OnceLock::new();
+  _PKGS.get_or_init(|| {
+    let pkgs_before =
+      store::query_packages(std::path::Path::new(get_deriv_query_old()))
+        .unwrap()
+        .into_iter()
+        .map(|(_, name)| name)
+        .collect::<Vec<String>>();
+    let pkgs_after =
+      store::query_packages(std::path::Path::new(get_deriv_query()))
+        .unwrap()
+        .into_iter()
+        .map(|(_, name)| name)
+        .collect::<Vec<String>>();
+    (pkgs_before, pkgs_after)
+  })
 }
 
 pub fn get_pkg_diff() -> &'static PackageDiff<'static> {
-    static _PKG_DIFF: OnceLock<PackageDiff> = OnceLock::new();
-    _PKG_DIFF.get_or_init(|| {
-        let (pkgs_before, pkgs_after) = get_packages();
-        PackageDiff::new(pkgs_before, pkgs_after)
-    })
+  static _PKG_DIFF: OnceLock<PackageDiff> = OnceLock::new();
+  _PKG_DIFF.get_or_init(|| {
+    let (pkgs_before, pkgs_after) = get_packages();
+    PackageDiff::new(pkgs_before, pkgs_after)
+  })
 }
 
 /// prints the old and new NixOs system used for benchmarking
 ///
 /// is used to give information about the old and new system
 pub fn print_used_nixos_systems() {
-    let old = get_deriv_query_old();
-    let new = get_deriv_query();
-    println!("old system used {:?}", old);
-    println!("new system used {:?}", new);
+  let old = get_deriv_query_old();
+  let new = get_deriv_query();
+  println!("old system used {:?}", old);
+  println!("new system used {:?}", new);
 }
							
								
								
									
benches/print.rs | 129

@@ -1,86 +1,97 @@
 mod common;
 
-use std::{fs::File, os::fd::AsRawFd};
+use std::{
+  fs::File,
+  os::fd::AsRawFd,
+};
 
-use common::{get_pkg_diff, print_used_nixos_systems};
-use criterion::{Criterion, black_box, criterion_group, criterion_main};
-use dixlib::print;
+use common::{
+  get_pkg_diff,
+  print_used_nixos_systems,
+};
+use criterion::{
+  Criterion,
+  black_box,
+  criterion_group,
+  criterion_main,
+};
+use dix::print;
 
 /// reroutes stdout and stderr to the null device before
 /// executing `f`
 fn suppress_output<F: FnOnce()>(f: F) {
-    let stdout = std::io::stdout();
-    let stderr = std::io::stderr();
+  let stdout = std::io::stdout();
+  let stderr = std::io::stderr();
 
-    // Save original FDs
-    let orig_stdout_fd = stdout.as_raw_fd();
-    let orig_stderr_fd = stderr.as_raw_fd();
+  // Save original FDs
+  let orig_stdout_fd = stdout.as_raw_fd();
+  let orig_stderr_fd = stderr.as_raw_fd();
 
-    // Open /dev/null and get its FD
-    let devnull = File::create("/dev/null").unwrap();
-    let null_fd = devnull.as_raw_fd();
+  // Open /dev/null and get its FD
+  let devnull = File::create("/dev/null").unwrap();
+  let null_fd = devnull.as_raw_fd();
 
-    // Redirect stdout and stderr to /dev/null
-    let _ = unsafe { libc::dup2(null_fd, orig_stdout_fd) };
-    let _ = unsafe { libc::dup2(null_fd, orig_stderr_fd) };
+  // Redirect stdout and stderr to /dev/null
+  let _ = unsafe { libc::dup2(null_fd, orig_stdout_fd) };
+  let _ = unsafe { libc::dup2(null_fd, orig_stderr_fd) };
 
-    f();
+  f();
 
-    let _ = unsafe { libc::dup2(orig_stdout_fd, 1) };
-    let _ = unsafe { libc::dup2(orig_stderr_fd, 2) };
+  let _ = unsafe { libc::dup2(orig_stdout_fd, 1) };
+  let _ = unsafe { libc::dup2(orig_stderr_fd, 2) };
 }
 
 pub fn bench_print_added(c: &mut Criterion) {
-    print_used_nixos_systems();
-    let diff = get_pkg_diff();
-    c.bench_function("print_added", |b| {
-        b.iter(|| {
-            suppress_output(|| {
-                print::print_added(
-                    black_box(&diff.added),
-                    black_box(&diff.pkg_to_versions_post),
-                    30,
-                );
-            });
-        });
+  print_used_nixos_systems();
+  let diff = get_pkg_diff();
+  c.bench_function("print_added", |b| {
+    b.iter(|| {
+      suppress_output(|| {
+        print::print_added(
+          black_box(&diff.added),
+          black_box(&diff.pkg_to_versions_post),
+          30,
+        );
+      });
     });
+  });
 }
 pub fn bench_print_removed(c: &mut Criterion) {
-    print_used_nixos_systems();
-    let diff = get_pkg_diff();
-    c.bench_function("print_removed", |b| {
-        b.iter(|| {
-            suppress_output(|| {
-                print::print_removed(
-                    black_box(&diff.removed),
-                    black_box(&diff.pkg_to_versions_pre),
-                    30,
-                );
-            });
-        });
+  print_used_nixos_systems();
+  let diff = get_pkg_diff();
+  c.bench_function("print_removed", |b| {
+    b.iter(|| {
+      suppress_output(|| {
+        print::print_removed(
+          black_box(&diff.removed),
+          black_box(&diff.pkg_to_versions_pre),
+          30,
+        );
+      });
     });
+  });
 }
 pub fn bench_print_changed(c: &mut Criterion) {
-    print_used_nixos_systems();
-    let diff = get_pkg_diff();
-    c.bench_function("print_changed", |b| {
-        b.iter(|| {
-            suppress_output(|| {
-                print::print_changes(
-                    black_box(&diff.changed),
-                    black_box(&diff.pkg_to_versions_pre),
-                    black_box(&diff.pkg_to_versions_post),
-                    30,
-                );
-            });
-        });
+  print_used_nixos_systems();
+  let diff = get_pkg_diff();
+  c.bench_function("print_changed", |b| {
+    b.iter(|| {
+      suppress_output(|| {
+        print::print_changes(
+          black_box(&diff.changed),
+          black_box(&diff.pkg_to_versions_pre),
+          black_box(&diff.pkg_to_versions_post),
+          30,
+        );
+      });
     });
+  });
 }
 
 criterion_group!(
-    benches,
-    bench_print_added,
-    bench_print_removed,
-    bench_print_changed
+  benches,
+  bench_print_added,
+  bench_print_removed,
+  bench_print_changed
 );
 criterion_main!(benches);
benches/store.rs

@@ -1,6 +1,11 @@
 mod common;
-use criterion::{Criterion, black_box, criterion_group, criterion_main};
-use dixlib::store;
+use criterion::{
+  Criterion,
+  black_box,
+  criterion_group,
+  criterion_main,
+};
+use dix::store;
 
 // basic benchmarks using the current system
 //
@@ -12,25 +17,27 @@ use dixlib::store;
 // db to benchmark instead to make the results comparable
 
 pub fn bench_get_packages(c: &mut Criterion) {
-    c.bench_function("get_packages", |b| {
-        b.iter(|| store::get_packages(black_box(common::get_deriv_query())));
-    });
+  c.bench_function("get_packages", |b| {
+    b.iter(|| store::query_depdendents(black_box(common::get_deriv_query())));
+  });
 }
 pub fn bench_get_closure_size(c: &mut Criterion) {
-    c.bench_function("get_closure_size", |b| {
-        b.iter(|| store::get_closure_size(black_box(common::get_deriv_query())));
-    });
+  c.bench_function("get_closure_size", |b| {
+    b.iter(|| store::query_closure_size(black_box(common::get_deriv_query())));
+  });
 }
 pub fn bench_get_dependency_graph(c: &mut Criterion) {
-    c.bench_function("get_dependency_graph", |b| {
-        b.iter(|| store::get_dependency_graph(black_box(common::get_deriv_query())));
+  c.bench_function("get_dependency_graph", |b| {
+    b.iter(|| {
+      store::query_dependency_graph(black_box(common::get_deriv_query()))
     });
+  });
 }
 
 criterion_group!(
-    benches,
-    bench_get_packages,
-    bench_get_closure_size,
-    bench_get_dependency_graph
+  benches,
+  bench_get_packages,
+  bench_get_closure_size,
+  bench_get_dependency_graph
 );
 criterion_main!(benches);
benches/util.rs

@@ -1,14 +1,19 @@
 mod common;
 
 use common::get_packages;
-use criterion::{Criterion, black_box, criterion_group, criterion_main};
-use dixlib::util::PackageDiff;
+use criterion::{
+  Criterion,
+  black_box,
+  criterion_group,
+  criterion_main,
+};
+use dix::util::PackageDiff;
 
 pub fn bench_package_diff(c: &mut Criterion) {
-    let (pkgs_before, pkgs_after) = get_packages();
-    c.bench_function("PackageDiff::new", |b| {
-        b.iter(|| PackageDiff::new(black_box(pkgs_before), black_box(pkgs_after)));
-    });
+  let (pkgs_before, pkgs_after) = get_packages();
+  c.bench_function("PackageDiff::new", |b| {
+    b.iter(|| PackageDiff::new(black_box(pkgs_before), black_box(pkgs_after)));
+  });
 }
 
 criterion_group!(benches, bench_package_diff);
							
								
								
									
src/error.rs | 178

@@ -3,121 +3,131 @@ use thiserror::Error;
 /// Application errors with thiserror
 #[derive(Debug, Error)]
 pub enum AppError {
-    #[error("Command failed: {command} {args:?} - {message}")]
-    CommandFailed {
-        command: String,
-        args: Vec<String>,
-        message: String,
-    },
+  #[error("Command failed: {command} {args:?} - {message}")]
+  CommandFailed {
+    command: String,
+    args:    Vec<String>,
+    message: String,
+  },
 
-    #[error("Failed to decode command output from {context}: {source}")]
-    CommandOutputError {
-        source: std::str::Utf8Error,
-        context: String,
-    },
+  #[error("Failed to decode command output from {context}: {source}")]
+  CommandOutputError {
+    source:  std::str::Utf8Error,
+    context: String,
+  },
 
-    #[error("Failed to parse data in {context}: {message}")]
-    ParseError {
-        message: String,
-        context: String,
-        #[source]
-        source: Option<Box<dyn std::error::Error + Send + Sync>>,
-    },
+  #[error("Failed to parse data in {context}: {message}")]
+  ParseError {
+    message: String,
+    context: String,
+    #[source]
+    source:  Option<Box<dyn std::error::Error + Send + Sync>>,
+  },
 
-    #[error("Regex error in {context}: {source}")]
-    RegexError {
-        source: regex::Error,
-        context: String,
-    },
+  #[error("Regex error in {context}: {source}")]
+  RegexError {
+    source:  regex::Error,
+    context: String,
+  },
 
-    #[error("IO error in {context}: {source}")]
-    IoError {
-        source: std::io::Error,
-        context: String,
-    },
+  #[error("IO error in {context}: {source}")]
+  IoError {
+    source:  std::io::Error,
+    context: String,
+  },
 
-    #[error("Database error: {source}")]
-    DatabaseError { source: rusqlite::Error },
+  #[error("Database error: {source}")]
+  DatabaseError { source: rusqlite::Error },
 }
 
 // Implement From traits to support the ? operator
 impl From<std::io::Error> for AppError {
-    fn from(source: std::io::Error) -> Self {
-        Self::IoError {
-            source,
-            context: "unknown context".into(),
-        }
+  fn from(source: std::io::Error) -> Self {
+    Self::IoError {
+      source,
+      context: "unknown context".into(),
     }
+  }
 }
 
 impl From<std::str::Utf8Error> for AppError {
-    fn from(source: std::str::Utf8Error) -> Self {
-        Self::CommandOutputError {
-            source,
-            context: "command output".into(),
-        }
+  fn from(source: std::str::Utf8Error) -> Self {
+    Self::CommandOutputError {
+      source,
+      context: "command output".into(),
     }
+  }
 }
 
 impl From<rusqlite::Error> for AppError {
-    fn from(source: rusqlite::Error) -> Self {
-        Self::DatabaseError { source }
-    }
+  fn from(source: rusqlite::Error) -> Self {
+    Self::DatabaseError { source }
+  }
 }
 
 impl From<regex::Error> for AppError {
-    fn from(source: regex::Error) -> Self {
-        Self::RegexError {
-            source,
-            context: "regex operation".into(),
-        }
+  fn from(source: regex::Error) -> Self {
+    Self::RegexError {
+      source,
+      context: "regex operation".into(),
    }
+  }
 }
 
 impl AppError {
-    /// Create a command failure error with context
-    pub fn command_failed<S: Into<String>>(command: S, args: &[&str], message: S) -> Self {
-        Self::CommandFailed {
-            command: command.into(),
-            args: args.iter().map(|&s| s.to_string()).collect(),
-            message: message.into(),
-        }
+  /// Create a command failure error with context
+  pub fn command_failed<S: Into<String>>(
+    command: S,
+    args: &[&str],
+    message: S,
+  ) -> Self {
+    Self::CommandFailed {
+      command: command.into(),
+      args:    args.iter().map(|&s| s.to_string()).collect(),
+      message: message.into(),
     }
+  }
 
-    /// Create a parse error with context
-    pub fn parse_error<S: Into<String>, C: Into<String>>(
-        message: S,
-        context: C,
-        source: Option<Box<dyn std::error::Error + Send + Sync>>,
-    ) -> Self {
-        Self::ParseError {
-            message: message.into(),
-            context: context.into(),
-            source,
-        }
+  /// Create a parse error with context
+  pub fn parse_error<S: Into<String>, C: Into<String>>(
+    message: S,
+    context: C,
+    source: Option<Box<dyn std::error::Error + Send + Sync>>,
+  ) -> Self {
+    Self::ParseError {
+      message: message.into(),
+      context: context.into(),
+      source,
    }
+  }
 
-    /// Create an IO error with context
-    pub fn io_error<C: Into<String>>(source: std::io::Error, context: C) -> Self {
-        Self::IoError {
-            source,
-            context: context.into(),
-        }
+  /// Create an IO error with context
+  pub fn io_error<C: Into<String>>(source: std::io::Error, context: C) -> Self {
+    Self::IoError {
+      source,
+      context: context.into(),
    }
+  }
 
-    /// Create a regex error with context
-    pub fn regex_error<C: Into<String>>(source: regex::Error, context: C) -> Self {
-        Self::RegexError {
-            source,
-            context: context.into(),
-        }
+  /// Create a regex error with context
+  pub fn regex_error<C: Into<String>>(
+    source: regex::Error,
+    context: C,
+  ) -> Self {
+    Self::RegexError {
+      source,
+      context: context.into(),
    }
+  }
 
-    /// Create a command output error with context
-    pub fn command_output_error<C: Into<String>>(source: std::str::Utf8Error, context: C) -> Self {
-        Self::CommandOutputError {
-            source,
-            context: context.into(),
-        }
+  /// Create a command output error with context
+  pub fn command_output_error<C: Into<String>>(
+    source: std::str::Utf8Error,
+    context: C,
+  ) -> Self {
+    Self::CommandOutputError {
+      source,
+      context: context.into(),
    }
+  }
 }
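The From impls above are what let call sites bubble errors up with ? and get an AppError back. A minimal sketch of that pattern under stated assumptions: the helper function and the SQL query (including the ValidPaths table name) are hypothetical and not taken from the crate.

use rusqlite::Connection;

// Hypothetical helper: the `?` on both rusqlite calls converts
// rusqlite::Error into AppError via the From impl shown above.
fn count_valid_paths(db_path: &str) -> Result<i64, AppError> {
  let conn = Connection::open(db_path)?;
  let count = conn.query_row("SELECT COUNT(*) FROM ValidPaths", [], |row| {
    row.get(0)
  })?;
  Ok(count)
}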
							
								
								
									
										357
									
								
								src/main.rs
									
										
									
									
									
								
							
							
						
						
									
										357
									
								
								src/main.rs
									
										
									
									
									
								
							|  | @ -1,12 +1,18 @@ | ||||||
| use clap::Parser; |  | ||||||
| use core::str; | use core::str; | ||||||
| use dixlib::print; |  | ||||||
| use dixlib::store; |  | ||||||
| use dixlib::util::PackageDiff; |  | ||||||
| use log::{debug, error}; |  | ||||||
| use std::{ | use std::{ | ||||||
|     collections::{HashMap, HashSet}, |   collections::HashSet, | ||||||
|     thread, |   thread, | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | use clap::Parser; | ||||||
|  | use dixlib::{ | ||||||
|  |   print, | ||||||
|  |   store, | ||||||
|  |   util::PackageDiff, | ||||||
|  | }; | ||||||
|  | use log::{ | ||||||
|  |   debug, | ||||||
|  |   error, | ||||||
| }; | }; | ||||||
| use yansi::Paint; | use yansi::Paint; | ||||||
| 
 | 
 | ||||||
|  | @ -16,199 +22,204 @@ use yansi::Paint; | ||||||
| #[command(about = "Diff Nix stuff", long_about = None)] | #[command(about = "Diff Nix stuff", long_about = None)] | ||||||
| #[command(version, about, long_about = None)] | #[command(version, about, long_about = None)] | ||||||
| struct Args { | struct Args { | ||||||
|     path: std::path::PathBuf, |   path:  std::path::PathBuf, | ||||||
|     path2: std::path::PathBuf, |   path2: std::path::PathBuf, | ||||||
| 
 | 
 | ||||||
|     /// Print the whole store paths
 |   /// Print the whole store paths
 | ||||||
|     #[arg(short, long)] |   #[arg(short, long)] | ||||||
|     paths: bool, |   paths: bool, | ||||||
| 
 | 
 | ||||||
|     /// Print the closure size
 |   /// Print the closure size
 | ||||||
|     #[arg(long, short)] |   #[arg(long, short)] | ||||||
|     closure_size: bool, |   closure_size: bool, | ||||||
| 
 | 
 | ||||||
|     /// Verbosity level: -v for debug, -vv for trace
 |   /// Verbosity level: -v for debug, -vv for trace
 | ||||||
|     #[arg(short, long, action = clap::ArgAction::Count)] |   #[arg(short, long, action = clap::ArgAction::Count)] | ||||||
|     verbose: u8, |   verbose: u8, | ||||||
| 
 | 
 | ||||||
|     /// Silence all output except errors
 |   /// Silence all output except errors
 | ||||||
|     #[arg(short, long)] |   #[arg(short, long)] | ||||||
|     quiet: bool, |   quiet: bool, | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| #[derive(Debug, Clone)] | #[derive(Debug, Clone)] | ||||||
| struct Package<'a> { | struct Package<'a> { | ||||||
|     name: &'a str, |   name:     &'a str, | ||||||
|     versions: HashSet<&'a str>, |   versions: HashSet<&'a str>, | ||||||
|     /// Save if a package is a dependency of another package
 |   /// Save if a package is a dependency of another package
 | ||||||
|     is_dep: bool, |   is_dep:   bool, | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| impl<'a> Package<'a> { | impl<'a> Package<'a> { | ||||||
|     fn new(name: &'a str, version: &'a str, is_dep: bool) -> Self { |   fn new(name: &'a str, version: &'a str, is_dep: bool) -> Self { | ||||||
|         let mut versions = HashSet::new(); |     let mut versions = HashSet::new(); | ||||||
|         versions.insert(version); |     versions.insert(version); | ||||||
|         Self { |     Self { | ||||||
|             name, |       name, | ||||||
|             versions, |       versions, | ||||||
|             is_dep, |       is_dep, | ||||||
|         } |  | ||||||
|     } |     } | ||||||
|  |   } | ||||||
| 
 | 
 | ||||||
|     fn add_version(&mut self, version: &'a str) { |   fn add_version(&mut self, version: &'a str) { | ||||||
|         self.versions.insert(version); |     self.versions.insert(version); | ||||||
|     } |   } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| #[allow(clippy::cognitive_complexity, clippy::too_many_lines)] | #[allow(clippy::cognitive_complexity, clippy::too_many_lines)] | ||||||
| fn main() { | fn main() { | ||||||
|     let args = Args::parse(); |   let args = Args::parse(); | ||||||
| 
 | 
 | ||||||
|     // Configure logger based on verbosity flags and environment variables
 |   // Configure logger based on verbosity flags and environment variables
 | ||||||
|     // Respects RUST_LOG environment variable if present.
 |   // Respects RUST_LOG environment variable if present.
 | ||||||
|     // XXX:We can also dedicate a specific env variable for this tool, if we want to.
 |   // XXX:We can also dedicate a specific env variable for this tool, if we want
 | ||||||
|     let env = env_logger::Env::default().filter_or( |   // to.
 | ||||||
|         "RUST_LOG", |   let env = env_logger::Env::default().filter_or( | ||||||
|         if args.quiet { |     "RUST_LOG", | ||||||
|             "error" |     if args.quiet { | ||||||
|         } else { |       "error" | ||||||
|             match args.verbose { |  | ||||||
|                 0 => "info", |  | ||||||
|                 1 => "debug", |  | ||||||
|                 _ => "trace", |  | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|     ); |  | ||||||
| 
 |  | ||||||
|     // Build and initialize the logger
 |  | ||||||
|     env_logger::Builder::from_env(env) |  | ||||||
|         .format_timestamp(Some(env_logger::fmt::TimestampPrecision::Seconds)) |  | ||||||
|         .init(); |  | ||||||
| 
 |  | ||||||
|     // handles to the threads collecting closure size information
 |  | ||||||
|     // We do this as early as possible because nix is slow.
 |  | ||||||
|     let closure_size_handles = if args.closure_size { |  | ||||||
|         debug!("Calculating closure sizes in background"); |  | ||||||
|         let path = args.path.clone(); |  | ||||||
|         let path2 = args.path2.clone(); |  | ||||||
|         Some(( |  | ||||||
|             thread::spawn(move || store::get_closure_size(&path)), |  | ||||||
|             thread::spawn(move || store::get_closure_size(&path2)), |  | ||||||
|         )) |  | ||||||
|     } else { |     } else { | ||||||
|         None |       match args.verbose { | ||||||
|     }; |         0 => "info", | ||||||
|  |         1 => "debug", | ||||||
|  |         _ => "trace", | ||||||
|  |       } | ||||||
|  |     }, | ||||||
|  |   ); | ||||||
| 
 | 
 | ||||||
|     // Get package lists and handle potential errors
 |   // Build and initialize the logger
 | ||||||
|     let package_list_pre = match store::get_packages(&args.path) { |   env_logger::Builder::from_env(env) | ||||||
|         Ok(packages) => { |     .format_timestamp(Some(env_logger::fmt::TimestampPrecision::Seconds)) | ||||||
|             debug!("Found {} packages in first closure", packages.len()); |     .init(); | ||||||
|             packages.into_iter().map(|(_, path)| path).collect() |  | ||||||
|         } |  | ||||||
|         Err(e) => { |  | ||||||
|             error!( |  | ||||||
|                 "Error getting packages from path {}: {}", |  | ||||||
|                 args.path.display(), |  | ||||||
|                 e |  | ||||||
|             ); |  | ||||||
|             eprintln!( |  | ||||||
|                 "Error getting packages from path {}: {}", |  | ||||||
|                 args.path.display(), |  | ||||||
|                 e |  | ||||||
|             ); |  | ||||||
|             Vec::new() |  | ||||||
|         } |  | ||||||
|     }; |  | ||||||
| 
 | 
 | ||||||
|     let package_list_post = match store::get_packages(&args.path2) { |   // handles to the threads collecting closure size information
 | ||||||
|         Ok(packages) => { |   // We do this as early as possible because nix is slow.
 | ||||||
|             debug!("Found {} packages in second closure", packages.len()); |   let closure_size_handles = if args.closure_size { | ||||||
|             packages.into_iter().map(|(_, path)| path).collect() |     debug!("Calculating closure sizes in background"); | ||||||
|         } |     let path = args.path.clone(); | ||||||
|         Err(e) => { |     let path2 = args.path2.clone(); | ||||||
|             error!( |     Some(( | ||||||
|                 "Error getting packages from path {}: {}", |       thread::spawn(move || store::get_closure_size(&path)), | ||||||
|                 args.path2.display(), |       thread::spawn(move || store::get_closure_size(&path2)), | ||||||
|                 e |     )) | ||||||
|             ); |   } else { | ||||||
|             eprintln!( |     None | ||||||
|                 "Error getting packages from path {}: {}", |   }; | ||||||
|                 args.path2.display(), |  | ||||||
|                 e |  | ||||||
|             ); |  | ||||||
|             Vec::new() |  | ||||||
|         } |  | ||||||
|     }; |  | ||||||
| 
 | 
 | ||||||
|     let PackageDiff { |   // Get package lists and handle potential errors
 | ||||||
|         pkg_to_versions_pre: pre, |   let package_list_pre = match store::query_packages(&args.path) { | ||||||
|         pkg_to_versions_post: post, |     Ok(packages) => { | ||||||
|         pre_keys: _, |       debug!("Found {} packages in first closure", packages.len()); | ||||||
|         post_keys: _, |       packages.into_iter().map(|(_, path)| path).collect() | ||||||
|         added, |     }, | ||||||
|         removed, |     Err(e) => { | ||||||
|         changed, |       error!( | ||||||
|     } = PackageDiff::new(&package_list_pre, &package_list_post); |         "Error getting packages from path {}: {}", | ||||||
|  |         args.path.display(), | ||||||
|  |         e | ||||||
|  |       ); | ||||||
|  |       eprintln!( | ||||||
|  |         "Error getting packages from path {}: {}", | ||||||
|  |         args.path.display(), | ||||||
|  |         e | ||||||
|  |       ); | ||||||
|  |       Vec::new() | ||||||
|  |     }, | ||||||
|  |   }; | ||||||
| 
 | 
 | ||||||
|     debug!("Added packages: {}", added.len()); |   let package_list_post = match store::query_packages(&args.path2) { | ||||||
|     debug!("Removed packages: {}", removed.len()); |     Ok(packages) => { | ||||||
|     debug!( |       debug!("Found {} packages in second closure", packages.len()); | ||||||
|         "Changed packages: {}", |       packages.into_iter().map(|(_, path)| path).collect() | ||||||
|         changed |     }, | ||||||
    Err(e) => {
      error!(
        "Error getting packages from path {}: {}",
        args.path2.display(),
        e
      );
      eprintln!(
        "Error getting packages from path {}: {}",
        args.path2.display(),
        e
      );
      Vec::new()
    },
  };

  let PackageDiff {
    pkg_to_versions_pre: pre,
    pkg_to_versions_post: post,
    pre_keys: _,
    post_keys: _,
    added,
    removed,
    changed,
  } = PackageDiff::new(&package_list_pre, &package_list_post);

  debug!("Added packages: {}", added.len());
  debug!("Removed packages: {}", removed.len());
  debug!(
    "Changed packages: {}",
    changed
      .iter()
      .filter(|p| {
        !p.is_empty()
          && match (pre.get(*p), post.get(*p)) {
            (Some(ver_pre), Some(ver_post)) => ver_pre != ver_post,
            _ => false,
          }
      })
      .count()
  );

  println!("Difference between the two generations:");
  println!();

  let width_changes = changed.iter().filter(|&&p| {
    match (pre.get(p), post.get(p)) {
      (Some(version_pre), Some(version_post)) => version_pre != version_post,
      _ => false,
    }
  });

  let col_width = added
    .iter()
    .chain(removed.iter())
    .chain(width_changes)
    .map(|p| p.len())
    .max()
    .unwrap_or_default();

  println!("<<< {}", args.path.to_string_lossy());
  println!(">>> {}", args.path2.to_string_lossy());
  print::print_added(&added, &post, col_width);
  print::print_removed(&removed, &pre, col_width);
  print::print_changes(&changed, &pre, &post, col_width);

  if let Some((pre_handle, post_handle)) = closure_size_handles {
    match (pre_handle.join(), post_handle.join()) {
      (Ok(Ok(pre_size)), Ok(Ok(post_size))) => {
        let pre_size = pre_size / 1024 / 1024;
        let post_size = post_size / 1024 / 1024;
        debug!("Pre closure size: {pre_size} MiB");
        debug!("Post closure size: {post_size} MiB");

        println!("{}", "Closure Size:".underline().bold());
        println!("Before: {pre_size} MiB");
        println!("After: {post_size} MiB");
        println!("Difference: {} MiB", post_size - pre_size);
      },
      (Ok(Err(e)), _) | (_, Ok(Err(e))) => {
        error!("Error getting closure size: {e}");
        eprintln!("Error getting closure size: {e}");
      },
      _ => {
        error!("Failed to get closure size information due to a thread error");
        eprintln!(
          "Error: Failed to get closure size information due to a thread error"
        );
      },
    }
  }
}
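The `col_width` computed above is the length of the longest package name; it is fed into the `{:col_width$}` width specifier so the version column lines up across rows. A tiny standalone sketch of that alignment trick, with made-up package names, not part of this commit:

fn main() {
  let names = ["git", "ripgrep", "home-manager"];

  // Same idea as `col_width` above: pad to the longest name so the
  // version column starts at the same offset on every row.
  let col_width = names.iter().map(|n| n.len()).max().unwrap_or_default();

  for name in names {
    // `{name:col_width$}` left-aligns `name` in a field `col_width` wide.
    println!("[A:] {name:col_width$} 1.0.0");
  }
}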
							
								
								
									
src/print.rs (308 changed lines)

use core::str;
use std::{
  collections::{
    HashMap,
    HashSet,
  },
  string::ToString,
  sync::OnceLock,
};

use regex::Regex;
use yansi::Paint;

/// diffs two strings character by character, and returns a tuple of strings
///
/// # Returns:
///
/// * (String, String) - The differing chars being red in the left, and green in
///   the right one.
fn diff_versions(left: &str, right: &str) -> (String, String) {
  let mut prev = "\x1b[33m".to_string();
  let mut post = "\x1b[33m".to_string();

  // We only have to filter the left once, since we stop if the left one is
  // empty. We do this to display things like -man, -dev properly.
  let matches = name_regex().captures(left);
  let mut suffix = String::new();

  if let Some(m) = matches {
    let tmp = m.get(0).map_or("", |m| m.as_str());
    suffix.push_str(tmp);
  }
  // string without the suffix
  let filtered_left = &left[..left.len() - suffix.len()];
  let filtered_right = &right[..right.len() - suffix.len()];

  for diff in diff::chars(filtered_left, filtered_right) {
    match diff {
      diff::Result::Both(l, _) => {
        let string_to_push = format!("{l}");
        prev.push_str(&string_to_push);
        post.push_str(&string_to_push);
      },
      diff::Result::Left(l) => {
        let string_to_push = format!("\x1b[1;91m{l}");
        prev.push_str(&string_to_push);
      },

      diff::Result::Right(r) => {
        let string_to_push = format!("\x1b[1;92m{r}");
        post.push_str(&string_to_push);
      },
    }
  }

  // push removed suffix
  prev.push_str(&format!("\x1b[33m{}", &suffix));
  post.push_str(&format!("\x1b[33m{}", &suffix));

  // reset
  prev.push_str("\x1b[0m");
  post.push_str("\x1b[0m");

  (prev, post)
}

/// print the packages added between two closures.
pub fn print_added(
  set: &HashSet<&str>,
  post: &HashMap<&str, HashSet<&str>>,
  col_width: usize,
) {
  println!("{}", "Packages added:".underline().bold());

  // Use sorted output for more predictable and readable results
  let mut sorted: Vec<_> = set
    .iter()
    .filter_map(|p| post.get(p).map(|ver| (*p, ver)))
    .collect();

  // Sort by package name for consistent output
  sorted.sort_by(|(a, _), (b, _)| a.cmp(b));

  for (p, ver) in sorted {
    let mut version_vec = ver.iter().copied().collect::<Vec<_>>();
    version_vec.sort_unstable();
    let version_str = version_vec.join(", ");
    println!(
      "[{}] {:col_width$} \x1b[33m{}\x1b[0m",
      "A:".green().bold(),
      p,
      version_str
    );
  }
}

/// print the packages removed between two closures.
pub fn print_removed(
  set: &HashSet<&str>,
  pre: &HashMap<&str, HashSet<&str>>,
  col_width: usize,
) {
  println!("{}", "Packages removed:".underline().bold());

  // Use sorted output for more predictable and readable results
  let mut sorted: Vec<_> = set
    .iter()
    .filter_map(|p| pre.get(p).map(|ver| (*p, ver)))
    .collect();

  // Sort by package name for consistent output
  sorted.sort_by(|(a, _), (b, _)| a.cmp(b));

  for (p, ver) in sorted {
    let mut version_vec = ver.iter().copied().collect::<Vec<_>>();
    version_vec.sort_unstable();
    let version_str = version_vec.join(", ");
    println!(
      "[{}] {:col_width$} \x1b[33m{}\x1b[0m",
      "R:".red().bold(),
      p,
      version_str
    );
  }
}

pub fn print_changes(
  set: &HashSet<&str>,
  pre: &HashMap<&str, HashSet<&str>>,
  post: &HashMap<&str, HashSet<&str>>,
  col_width: usize,
) {
  println!("{}", "Versions changed:".underline().bold());

  // Use sorted output for more predictable and readable results
  let mut changes = Vec::new();

  for p in set.iter().filter(|p| !p.is_empty()) {
    if let (Some(ver_pre), Some(ver_post)) = (pre.get(p), post.get(p)) {
      if ver_pre != ver_post {
        changes.push((*p, ver_pre, ver_post));
      }
    }
  }

  // Sort by package name for consistent output
  changes.sort_by(|(a, ..), (b, ..)| a.cmp(b));

  for (p, ver_pre, ver_post) in changes {
    let mut version_vec_pre =
      ver_pre.difference(ver_post).copied().collect::<Vec<_>>();
    let mut version_vec_post =
      ver_post.difference(ver_pre).copied().collect::<Vec<_>>();

    version_vec_pre.sort_unstable();
    version_vec_post.sort_unstable();

    let mut diffed_pre: String;
    let diffed_post: String;

    if version_vec_pre.len() == version_vec_post.len() {
      let mut diff_pre: Vec<String> = vec![];
      let mut diff_post: Vec<String> = vec![];

      for (pre, post) in version_vec_pre.iter().zip(version_vec_post.iter()) {
        let (a, b) = diff_versions(pre, post);
        diff_pre.push(a);
        diff_post.push(b);
      }
      diffed_pre = diff_pre.join(", ");
      diffed_post = diff_post.join(", ");
    } else {
      let version_str_pre = version_vec_pre.join(", ");
      let version_str_post = version_vec_post.join(", ");
      (diffed_pre, diffed_post) =
        diff_versions(&version_str_pre, &version_str_post);
    }

    // push a space to the diffed_pre, if it is non-empty, we do this here and
    // not in the println in order to properly align the ±.
    if !version_vec_pre.is_empty() {
      let mut tmp = " ".to_string();
      tmp.push_str(&diffed_pre);
      diffed_pre = tmp;
    }

    println!(
      "[{}] {:col_width$}{} \x1b[0m\u{00B1}\x1b[0m {}",
      "C:".bold().bright_yellow(),
      p,
      diffed_pre,
      diffed_post
    );
  }
}

// Returns a reference to the compiled regex pattern.
// The regex is compiled only once.
fn name_regex() -> &'static Regex {
  static REGEX: OnceLock<Regex> = OnceLock::new();
  REGEX.get_or_init(|| {
    Regex::new(r"(-man|-lib|-doc|-dev|-out|-terminfo)")
      .expect("Failed to compile regex pattern for name")
  })
}
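A hypothetical driver for the printers above, not part of this commit: the function name and the package data are made up, and `crate::print` assumes the module is reached from inside the same crate. It shows the shape of the maps `print_added` expects and how the name column is padded.

use std::collections::{HashMap, HashSet};

use crate::print;

fn demo_print_added() {
  // Versions each newly added package ships with (made-up data).
  let mut post: HashMap<&str, HashSet<&str>> = HashMap::new();
  post.insert("git", HashSet::from(["2.44.0"]));
  post.insert("ripgrep", HashSet::from(["14.1.0"]));

  // The added set is keyed by package name; here everything is new.
  let added: HashSet<&str> = post.keys().copied().collect();

  // Pad the name column to the widest entry, as main.rs does.
  let col_width = added.iter().map(|p| p.len()).max().unwrap_or_default();

  print::print_added(&added, &post, col_width);
}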
							
								
								
									
src/store.rs (252 changed lines)

use std::{
  path::{
    Path,
    PathBuf,
  },
  result,
};

use anyhow::{
  Context as _,
  Result,
};
use derive_more::Deref;
use ref_cast::RefCast;
use rusqlite::Connection;
use rustc_hash::{
  FxBuildHasher,
  FxHashMap,
};

macro_rules! path_to_str {
  ($path:ident) => {
    let $path = $path.canonicalize().with_context(|| {
      format!(
        "failed to canonicalize path '{path}'",
        path = $path.display(),
      )
    })?;

    let $path = $path.to_str().with_context(|| {
      format!(
        "failed to convert path '{path}' to valid unicode",
        path = $path.display(),
      )
    })?;
  };
}

#[derive(Deref, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct DerivationId(i64);

#[expect(clippy::module_name_repetitions)]
#[derive(RefCast, Deref, Debug, PartialEq, Eq)]
#[repr(transparent)]
pub struct StorePath(Path);

#[expect(clippy::module_name_repetitions)]
#[derive(Deref, Debug, Clone, PartialEq, Eq)]
pub struct StorePathBuf(PathBuf);

/// Connects to the Nix database.
pub fn connect() -> Result<Connection> {
  const DATABASE_PATH: &str = "/nix/var/nix/db/db.sqlite";

  Connection::open(DATABASE_PATH).with_context(|| {
    format!("failed to connect to Nix database at {DATABASE_PATH}")
  })
}

/// Gathers all derivations that the given store path depends on.
pub fn query_depdendents(
  connection: &mut Connection,
  path: &StorePath,
) -> Result<Vec<(DerivationId, StorePathBuf)>> {
  const QUERY: &str = "
    WITH RECURSIVE
      graph(p) AS (
        SELECT id
        FROM ValidPaths
        WHERE path = ?
      UNION
        SELECT reference FROM Refs
        JOIN graph ON referrer = p
      )
    SELECT id, path from graph
    JOIN ValidPaths ON id = p;
  ";

  path_to_str!(path);

  let packages: result::Result<Vec<(DerivationId, StorePathBuf)>, _> =
    connection
      .prepare_cached(QUERY)?
      .query_map([path], |row| {
        Ok((
          DerivationId(row.get(0)?),
          StorePathBuf(row.get::<_, String>(1)?.into()),
        ))
      })?
      .collect();

  Ok(packages?)
}

/// Gets the total closure size of the given store path by summing up the nar
/// size of all dependent derivations.
pub fn query_closure_size(
  connection: &mut Connection,
  path: &StorePath,
) -> Result<usize> {
  const QUERY: &str = "
    WITH RECURSIVE
      graph(p) AS (
        SELECT id
        FROM ValidPaths
        WHERE path = ?
      UNION
        SELECT reference FROM Refs
        JOIN graph ON referrer = p
      )
    SELECT SUM(narSize) as sum from graph
    JOIN ValidPaths ON p = id;
  ";

  path_to_str!(path);

  let closure_size = connection
    .prepare_cached(QUERY)?
    .query_row([path], |row| row.get(0))?;

  Ok(closure_size)
}

/// Gathers the complete dependency graph of the store path as an adjacency
/// list.
///
/// We might want to collect the paths in the graph directly as
/// well in the future, depending on how much we use them
/// in the operations on the graph.
pub fn query_dependency_graph(
  connection: &mut Connection,
  path: &StorePath,
) -> Result<FxHashMap<DerivationId, Vec<DerivationId>>> {
  const QUERY: &str = "
    WITH RECURSIVE
      graph(p, c) AS (
        SELECT id as par, reference as chd
        FROM ValidPaths
        JOIN Refs ON referrer = id
        WHERE path = ?
      UNION
        SELECT referrer as par, reference as chd FROM Refs
        JOIN graph ON referrer = c
      )
    SELECT p, c from graph;
  ";

  path_to_str!(path);

  let mut adj =
    FxHashMap::<DerivationId, Vec<DerivationId>>::with_hasher(FxBuildHasher);

  let mut statement = connection.prepare_cached(QUERY)?;

  let edges = statement.query_map([path], |row| {
    Ok((DerivationId(row.get(0)?), DerivationId(row.get(1)?)))
  })?;

  for row in edges {
    let (from, to) = row?;

    adj.entry(from).or_default().push(to);
    adj.entry(to).or_default();
  }

  Ok(adj)
}
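A minimal usage sketch of the refactored store API, not part of this commit: the helper name, the "/run/current-system" path, and the `crate::store` import are assumptions made for illustration. `StorePath::ref_cast` works because `StorePath` is a `#[repr(transparent)]` wrapper around `Path` deriving `RefCast`.

use std::path::Path;

use anyhow::Result;
use ref_cast::RefCast;

use crate::store::{self, StorePath};

fn current_system_closure_mib() -> Result<usize> {
  let mut connection = store::connect()?;
  let path = StorePath::ref_cast(Path::new("/run/current-system"));

  // narSize is summed in bytes; divide twice by 1024 for MiB.
  Ok(store::query_closure_size(&mut connection, path)? / 1024 / 1024)
}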
							
								
								
									
src/util.rs (347 changed lines)

use std::{
  cmp::Ordering,
  collections::{
    HashMap,
    HashSet,
  },
  sync::OnceLock,
};

use log::debug;
use regex::Regex;

use crate::error::AppError;

// Use type alias for Result with our custom error type
type Result<T> = std::result::Result<T, AppError>;

use std::string::ToString;

#[derive(Eq, PartialEq, Debug)]
enum VersionComponent {
  Number(u64),
  Text(String),
}

impl std::cmp::Ord for VersionComponent {
  fn cmp(&self, other: &Self) -> Ordering {
    use VersionComponent::{
      Number,
      Text,
    };
    match (self, other) {
      (Number(x), Number(y)) => x.cmp(y),
      (Text(x), Text(y)) => {
        match (x.as_str(), y.as_str()) {
          ("pre", _) => Ordering::Less,
          (_, "pre") => Ordering::Greater,
          _ => x.cmp(y),
        }
      },
      (Text(_), Number(_)) => Ordering::Less,
      (Number(_), Text(_)) => Ordering::Greater,
    }
  }
}

impl PartialOrd for VersionComponent {
  fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
    Some(self.cmp(other))
  }
}

// takes a version string and outputs the different components
//
// a component is delimited by '-' or '.' and consists of just digits or letters
struct VersionComponentIterator<'a> {
  v:   &'a [u8],
  pos: usize,
}

impl<'a> VersionComponentIterator<'a> {
  pub fn new<I: Into<&'a str>>(v: I) -> Self {
    Self {
      v:   v.into().as_bytes(),
      pos: 0,
    }
  }
}

impl Iterator for VersionComponentIterator<'_> {
  type Item = VersionComponent;

  fn next(&mut self) -> Option<Self::Item> {
    // skip all '-' and '.' in the beginning
    while let Some(b'.' | b'-') = self.v.get(self.pos) {
      self.pos += 1;
    }

    // get the next character and decide if it is a digit or char
    let c = self.v.get(self.pos)?;
    let is_digit = c.is_ascii_digit();
    // based on this collect characters after this into the component
    let component_len = self.v[self.pos..]
      .iter()
      .copied()
      .take_while(|&c| c.is_ascii_digit() == is_digit && c != b'.' && c != b'-')
      .count();
    let component =
      String::from_utf8_lossy(&self.v[self.pos..(self.pos + component_len)])
        .into_owned();

    // remember what chars we used
    self.pos += component_len;

    if component.is_empty() {
      None
    } else if is_digit {
      component.parse::<u64>().ok().map(VersionComponent::Number)
    } else {
      Some(VersionComponent::Text(component))
    }
  }
}

/// Compares two strings of package versions, and figures out the greater one.
///
/// * Ordering
pub fn compare_versions(a: &str, b: &str) -> Ordering {
  let iter_a = VersionComponentIterator::new(a);
  let iter_b = VersionComponentIterator::new(b);

  iter_a.cmp(iter_b)
}

/// Parses a nix store path to extract the package's name and version
///
/// This function first drops the input's first 44 chars, since that is exactly
/// the length of the /nix/store/... prefix. Then it matches that against our
/// store path regex.
///
/// # Returns
///
/// * Result<(&'a str, &'a str)> - The Package's name and version, or an error
///   if one or both cannot be retrieved.
pub fn get_version<'a>(pack: impl Into<&'a str>) -> Result<(&'a str, &'a str)> {
  let path = pack.into();

  // We can strip the path since it _always_ follows the format
  // /nix/store/<...>-<program_name>-......
  // This part is exactly 44 chars long, so we just remove it.
  let stripped_path = &path[44..];
  debug!("Stripped path: {stripped_path}");

  // Match the regex against the input
  if let Some(cap) = store_path_regex().captures(stripped_path) {
    // Handle potential missing captures safely
    let name = cap.get(1).map_or("", |m| m.as_str());
    let mut version = cap.get(2).map_or("<none>", |m| m.as_str());

    if version.starts_with('-') {
      version = &version[1..];
    }

    if name.is_empty() {
      return Err(AppError::ParseError {
        message: format!("Failed to extract name from path: {path}"),
        context: "get_version".to_string(),
        source:  None,
      });
    }

    return Ok((name, version));
  }

  Err(AppError::ParseError {
    message: format!("Path does not match expected nix store format: {path}"),
    context: "get_version".to_string(),
    source:  None,
  })
}

// Returns a reference to the compiled regex pattern.
// The regex is compiled only once.
pub fn store_path_regex() -> &'static Regex {
  static REGEX: OnceLock<Regex> = OnceLock::new();
  REGEX.get_or_init(|| {
    Regex::new(r"(.+?)(-([0-9].*?))?$")
      .expect("Failed to compile regex pattern for nix store paths")
  })
}

// TODO: move this somewhere else, this does not really
// belong into this file
pub struct PackageDiff<'a> {
  pub pkg_to_versions_pre:  HashMap<&'a str, HashSet<&'a str>>,
  pub pkg_to_versions_post: HashMap<&'a str, HashSet<&'a str>>,
  pub pre_keys:             HashSet<&'a str>,
  pub post_keys:            HashSet<&'a str>,
  pub added:                HashSet<&'a str>,
  pub removed:              HashSet<&'a str>,
  pub changed:              HashSet<&'a str>,
}

impl<'a> PackageDiff<'a> {
  pub fn new<S: AsRef<str> + 'a>(
    pkgs_pre: &'a [S],
    pkgs_post: &'a [S],
  ) -> Self {
    // Map from packages of the first closure to their version
    let mut pre = HashMap::<&str, HashSet<&str>>::new();
    let mut post = HashMap::<&str, HashSet<&str>>::new();

    for p in pkgs_pre {
      match get_version(p.as_ref()) {
        Ok((name, version)) => {
          pre.entry(name).or_default().insert(version);
        },
        Err(e) => {
          debug!("Error parsing package version: {e}");
        },
      }
    }

    for p in pkgs_post {
      match get_version(p.as_ref()) {
        Ok((name, version)) => {
          post.entry(name).or_default().insert(version);
        },
        Err(e) => {
          debug!("Error parsing package version: {e}");
        },
      }
    }

    // Compare the package names of both versions
    let pre_keys: HashSet<&str> = pre.keys().copied().collect();
    let post_keys: HashSet<&str> = post.keys().copied().collect();

    // Difference gives us added and removed packages
    let added: HashSet<&str> = &post_keys - &pre_keys;

    let removed: HashSet<&str> = &pre_keys - &post_keys;
    // Get the intersection of the package names for version changes
    let changed: HashSet<&str> = &pre_keys & &post_keys;
    Self {
      pkg_to_versions_pre: pre,
      pkg_to_versions_post: post,
      pre_keys,
      post_keys,
      added,
      removed,
      changed,
    }
  }
}

mod test {

  #[test]
  fn test_version_component_iter() {
    use super::VersionComponent::{
      Number,
      Text,
    };
    use crate::util::VersionComponentIterator;
    let v = "132.1.2test234-1-man----.--.......---------..---";

    let comp: Vec<_> = VersionComponentIterator::new(v).collect();
    assert_eq!(comp, [
      Number(132),
      Number(1),
      Number(2),
      Text("test".into()),
      Number(234),
      Number(1),
      Text("man".into())
    ]);
  }
}
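A small illustrative test for the version comparison and path parsing above, not part of this commit: the module name, the package choices, and the 32-character placeholder hash are made up (real store paths carry a Nix base-32 hash of that length).

#[cfg(test)]
mod ordering_examples {
  use std::cmp::Ordering;

  use crate::util::{compare_versions, get_version};

  #[test]
  fn numeric_components_compare_numerically() {
    // "10" parses as Number(10), so it sorts after Number(9) instead of
    // falling into a lexicographic "1" < "9" comparison.
    assert_eq!(compare_versions("1.10", "1.9"), Ordering::Greater);
    // A "pre" component sorts before any other textual component.
    assert_eq!(compare_versions("2.0pre", "2.0a"), Ordering::Less);
  }

  #[test]
  fn store_path_splits_into_name_and_version() {
    // 11 chars of "/nix/store/" + 32-char hash + "-" = the 44 chars stripped.
    let path = "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-ripgrep-14.1.0";
    assert_eq!(get_version(path).unwrap(), ("ripgrep", "14.1.0"));
  }
}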