1
1
require 'json'
2
- require 'digest/md5'
3
2
require 'parallel'
3
+ require 'zlib'
4
4
5
5
#
6
6
# Handles storage of module metadata on disk. A base metadata file is always included - this was added to ensure a much
@@ -127,9 +127,8 @@ def load_cache_from_file_store
127
127
}
128
128
end
129
129
130
- # This method uses a per-file MD5 cache to avoid recalculating checksums for files that have not changed.
131
- # It loads the cache, checks each file's mtime and size, and only recalculates the MD5 if needed.
132
- # The overall checksum is a hash of all per-file MD5s concatenated together.
130
+ # This method uses a per-file CRC32 cache to avoid recalculating checksums for files that have not changed.
131
+ # It loads the cache, checks each file's mtime and size, and only recalculates the CRC32 if needed; the overall checksum is the CRC32 of all per-file CRC32s concatenated together.
133
132
#
134
133
# @return [Boolean]
135
134
def self . valid_checksum?
@@ -140,69 +139,69 @@ def self.valid_checksum?
140
139
# Gather all files from the specified directories
141
140
files = Dir . glob ( [ modules_dir , lib_dir , local_modules_dir ] ) . select { |f | File . file? ( f ) } . sort
142
141
143
- # Path to the per-file MD5 cache
144
- cache_file = File . join ( Msf ::Config . config_directory , 'store' , 'md5_cache .json' )
142
+ # Path to the per-file CRC32 cache
143
+ cache_file = File . join ( Msf ::Config . config_directory , 'store' , 'crc32_cache .json' )
145
144
# Load the cache if it exists, otherwise start with an empty hash
146
145
per_file_cache = File . exist? ( cache_file ) ? JSON . parse ( File . read ( cache_file ) ) : { }
147
146
148
- # Calculate per-file MD5s in parallel, only recalculating if mtime/size changed
149
- file_md5s_with_metadata = Parallel . map ( files , in_threads : Etc . nprocessors * 2 ) do |file |
147
+ # Calculate per-file CRC32s in parallel, only recalculating if mtime/size changed
148
+ file_crc32s_with_metadata = Parallel . map ( files , in_threads : Etc . nprocessors * 2 ) do |file |
150
149
# Get file metadata (size and last modified time)
151
150
file_metadata = File . stat ( file )
152
151
cache_entry = per_file_cache [ file ]
153
- # Use cached MD5 if mtime and size match, otherwise recalculate
152
+ # Use cached CRC32 if mtime and size match, otherwise recalculate
154
153
if cache_entry && cache_entry [ 'mtime' ] == file_metadata . mtime . to_i && cache_entry [ 'size' ] == file_metadata . size
155
- md5 = cache_entry [ 'md5 ' ]
154
+ crc32 = cache_entry [ 'crc32 ' ]
156
155
else
157
- md5 = Digest :: MD5 . file ( file ) . hexdigest
156
+ crc32 = Zlib . crc32 ( File . read ( file ) ) . to_s ( 16 )
158
157
end
159
158
# Return file and its metadata for later aggregation
160
159
[ file , {
161
- 'md5 ' => md5 ,
160
+ 'crc32 ' => crc32 ,
162
161
'mtime' => file_metadata . mtime . to_i ,
163
162
'size' => file_metadata . size
164
163
} ]
165
164
end
166
165
167
166
# Build the updated_cache hash from the results
168
- updated_cache = file_md5s_with_metadata . to_h
169
- file_md5s = file_md5s_with_metadata . map { |_ , meta | meta [ 'md5 ' ] }
167
+ updated_cache = file_crc32s_with_metadata . to_h
168
+ file_crc32s = file_crc32s_with_metadata . map { |_ , meta | meta [ 'crc32 ' ] }
170
169
171
170
# Ensure the directory for the cache file exists before writing
172
171
FileUtils . mkdir_p ( File . dirname ( cache_file ) )
173
172
# Save the updated per-file cache to disk
174
173
File . write ( cache_file , JSON . pretty_generate ( updated_cache ) )
175
174
176
- # Combine all per-file MD5s into a single string and hash it for the overall checksum
177
- overall_md5 = Digest :: MD5 . hexdigest ( file_md5s . join )
178
- @current_checksum = overall_md5
175
+ # Combine all per-file CRC32s into a single string and hash it for the overall checksum
176
+ overall_crc32 = Zlib . crc32 ( file_crc32s . join ) . to_s ( 16 )
177
+ @current_checksum = overall_crc32
179
178
180
179
@cache_store_path = File . join ( Msf ::Config . config_directory , "store" , CacheMetaDataFile )
181
180
cache_db_path = File . join ( Msf ::Config . install_root , "db" , CacheMetaDataFile )
182
181
183
- # If the cache file does not exist, copy the db cache and update the md5 value
182
+ # If the cache file does not exist, copy the db cache and update the crc32 value
184
183
unless File . exist? ( @cache_store_path )
185
184
FileUtils . mkdir_p ( File . dirname ( @cache_store_path ) )
186
185
FileUtils . cp ( cache_db_path , @cache_store_path )
187
- # Update the md5 value in the copied file
186
+ # Update the crc32 value in the copied file
188
187
cache_content = JSON . parse ( File . read ( @cache_store_path ) )
189
188
cache_content [ 'checksum' ] ||= { }
190
- cache_content [ 'checksum' ] [ 'md5 ' ] = @current_checksum
189
+ cache_content [ 'checksum' ] [ 'crc32 ' ] = @current_checksum
191
190
File . write ( @cache_store_path , JSON . pretty_generate ( cache_content ) )
192
191
end
193
192
194
193
cache_content = JSON . parse ( File . read ( @cache_store_path ) )
195
- cached_sha = cache_content . dig ( 'checksum' , 'md5 ' )
194
+ cached_crc32 = cache_content . dig ( 'checksum' , 'crc32 ' )
196
195
197
196
# Return true if the current checksum matches the cached one, otherwise return false
198
- @current_checksum == cached_sha
197
+ @current_checksum == cached_crc32
199
198
end
200
199
201
# Update the cache checksum file with the current crc32 checksum of the module paths.
#
# Persists the checksum captured in @current_checksum (set by +valid_checksum?+)
# to @cache_store_path, replacing any existing file. The JSON layout
# ({'checksum' => {'crc32' => ...}}) must match what +valid_checksum?+ reads
# back via +dig('checksum', 'crc32')+.
#
# @return [Integer] the number of bytes written to the cache file
def self.update_cache_checksum
  updated_cache_content = { 'checksum' => { 'crc32' => @current_checksum } }
  # Ensure the store directory exists before writing — mirrors the mkdir_p
  # guard used everywhere else the store path is written. Without it, a
  # standalone call before the directory exists raises Errno::ENOENT.
  FileUtils.mkdir_p(File.dirname(@cache_store_path))
  # Remove any stale file so the new content starts from a clean file.
  FileUtils.rm_f(@cache_store_path)
  File.write(@cache_store_path, JSON.pretty_generate(updated_cache_content))
end
0 commit comments