diff --git a/Userland/Utilities/unzip.cpp b/Userland/Utilities/unzip.cpp
index 5d1ed31d5f..e839cae594 100644
--- a/Userland/Utilities/unzip.cpp
+++ b/Userland/Utilities/unzip.cpp
@@ -77,13 +77,11 @@ static bool unpack_zip_member(Archive::ZipMember zip_member, bool quiet)
 ErrorOr<int> serenity_main(Main::Arguments arguments)
 {
     char const* path;
-    int map_size_limit = 32 * MiB;
     bool quiet { false };
     String output_directory_path;
     Vector<StringView> file_filters;
 
     Core::ArgsParser args_parser;
-    args_parser.add_option(map_size_limit, "Maximum chunk size to map", "map-size-limit", 0, "size");
     args_parser.add_option(output_directory_path, "Directory to receive the archive content", "output-directory", 'd', "path");
     args_parser.add_option(quiet, "Be less verbose", "quiet", 'q');
     args_parser.add_positional_argument(path, "File to unzip", "path", Core::ArgsParser::Required::Yes);
@@ -97,13 +95,6 @@ ErrorOr<int> serenity_main(Main::Arguments arguments)
     // FIXME: Map file chunk-by-chunk once we have mmap() with offset.
     // This will require mapping some parts then unmapping them repeatedly,
     // but it would be significantly faster and less syscall heavy than seek()/read() at every read.
-    if (st.st_size >= map_size_limit) {
-        warnln("unzip warning: Refusing to map file since it is larger than {}, pass '--map-size-limit {}' to get around this",
-            human_readable_size(map_size_limit),
-            round_up_to_power_of_two(st.st_size, 16));
-        return 1;
-    }
-
     RefPtr<Core::MappedFile> mapped_file;
     ReadonlyBytes input_bytes;
     if (st.st_size > 0) {