11# SPDX-License-Identifier: MIT
2- import re , logging , sys , os , stat , shutil , struct , subprocess , zlib , time
2+ import re , logging , sys , os , stat , shutil , struct , subprocess , zlib , time , hashlib , lzma
33from ctypes import *
44
55if sys .platform == 'darwin' :
@@ -131,11 +131,54 @@ def input_prompt(*args):
131131 logging .info (f"INPUT: { val !r} " )
132132 return val
133133
class PBZX:
    """Streaming reader for Apple's pbzx chunked-XZ container format.

    Wraps *istream* (positioned at the 'pbzx' magic) and exposes a
    ``read(size)`` interface that yields the decompressed payload,
    expecting *osize* decompressed bytes in total.
    """

    def __init__(self, istream, osize):
        self.istream = istream
        self.osize = osize       # total decompressed size expected
        self.buf = b""           # decompressed-but-unconsumed data
        self.p = 0               # read cursor into self.buf
        self.total_read = 0      # decompressed bytes produced so far

        # Container header: 4-byte magic + big-endian u64 block size
        # (the block size is not needed for sequential reading).
        magic, _blocksize = struct.unpack(">4sQ", istream.read(12))
        assert magic == b"pbzx"

    def read(self, size):
        """Return up to *size* decompressed bytes (short only at end of data)."""
        # Fast path: the request fits in the buffered leftovers.
        if len(self.buf) - self.p >= size:
            out = self.buf[self.p:self.p + size]
            self.p += len(out)
            return out

        # Slow path: keep the unconsumed tail and append freshly decoded chunks.
        parts = [self.buf[self.p:]]
        self.p = 0
        have = len(parts[0])

        while have < size and self.total_read < self.osize:
            chunk_hdr = self.istream.read(16)
            if not chunk_hdr:
                raise Exception("End of compressed data but more expected")

            raw_len, stored_len = struct.unpack(">QQ", chunk_hdr)
            chunk = self.istream.read(stored_len)
            # A chunk whose stored length equals its raw length is kept
            # verbatim; anything else is an XZ stream.
            if raw_len != stored_len:
                chunk = lzma.decompress(chunk, format=lzma.FORMAT_XZ)
            parts.append(chunk)
            have += len(chunk)
            self.total_read += len(chunk)

        self.buf = b"".join(parts)
        out = self.buf[self.p:self.p + size]
        self.p += len(out)
        return out
173+
134174class PackageInstaller :
135175 def __init__ (self ):
136176 self .verbose = "-v" in sys .argv
137177 self .printed_progress = False
138178
179+ def path (self , path ):
180+ return path
181+
139182 def flush_progress (self ):
140183 if self .ucache and self .ucache .flush_progress ():
141184 self .printed_progress = False
@@ -145,8 +188,10 @@ def flush_progress(self):
145188 self .printed_progress = False
146189
147190 def extract (self , src , dest ):
148- logging .info (f" { src } -> { dest } /" )
149- self .pkg .extract (src , dest )
191+ dest_path = os .path .join (dest , src )
192+ dest_dir = os .path .split (dest_path )[0 ]
193+ os .makedirs (dest_dir , exist_ok = True )
194+ self .extract_file (src , dest_path )
150195
151196 def fdcopy (self , sfd , dfd , size = None ):
152197 BLOCK = 16 * 1024 * 1024
@@ -171,10 +216,24 @@ def fdcopy(self, sfd, dfd, size=None):
171216 sys .stdout .write ("\033 [3G100.00% " )
172217 sys .stdout .flush ()
173218
219+ def copy_recompress (self , src , path ):
220+ # For BXDIFF50 stuff in OTA images
221+ bxstream = self .pkg .open (src )
222+ assert bxstream .read (8 ) == b"BXDIFF50"
223+ bxstream .read (8 )
224+ size , csize , zxsize = struct .unpack ("<3Q" , bxstream .read (24 ))
225+ assert csize == 0
226+ sha1 = bxstream .read (20 )
227+ istream = PBZX (bxstream , size )
228+ self .stream_compress (istream , size , path , sha1 = sha1 )
229+
174230 def copy_compress (self , src , path ):
175231 info = self .pkg .getinfo (src )
176232 size = info .file_size
177233 istream = self .pkg .open (src )
234+ self .stream_compress (istream , size , path , crc = info .CRC )
235+
236+ def stream_compress (self , istream , size , path , crc = None , sha1 = None ):
178237 with open (path , 'wb' ):
179238 pass
180239 num_chunks = (size + CHUNK_SIZE - 1 ) // CHUNK_SIZE
@@ -212,20 +271,39 @@ def copy_compress(self, src, path):
212271 "66706D630C000000" + "" .join (f"{ ((size >> 8 * i ) & 0xff ):02x} " for i in range (8 )),
213272 path ], check = True )
214273 os .chflags (path , stat .UF_COMPRESSED )
215- crc = 0
216- with open (path , 'rb' ) as result_file :
217- while 1 :
218- data = result_file .read (CHUNK_SIZE )
219- if len (data ) == 0 :
220- break
221- crc = zlib .crc32 (data , crc )
222- if crc != info .CRC :
223- raise Exception ('Internal error: failed to compress file: crc mismatch' )
274+
275+ if sha1 is not None :
276+ sha = hashlib .sha1 ()
277+ with open (path , 'rb' ) as result_file :
278+ while 1 :
279+ data = result_file .read (CHUNK_SIZE )
280+ if len (data ) == 0 :
281+ break
282+ sha .update (data )
283+ if sha .digest () != sha1 :
284+ raise Exception ('Internal error: failed to recompress file: SHA1 mismatch' )
285+ elif crc is not None :
286+ calc_crc = 0
287+ with open (path , 'rb' ) as result_file :
288+ while 1 :
289+ data = result_file .read (CHUNK_SIZE )
290+ if len (data ) == 0 :
291+ break
292+ calc_crc = zlib .crc32 (data , calc_crc )
293+ if crc != calc_crc :
294+ raise Exception ('Internal error: failed to compress file: crc mismatch' )
295+ else :
296+ raise Exception ("No checksum available" )
224297
225298 sys .stdout .write ("\033 [3G100.00% " )
226299 sys .stdout .flush ()
227300
228- def extract_file (self , src , dest , optional = True ):
301+ def extract_file (self , src , dest , optional = False ):
302+ src = self .path (src )
303+ self ._extract_file (src , dest , optional )
304+
305+ def _extract_file (self , src , dest , optional = False ):
306+ logging .info (f" { src } -> { dest } " )
229307 try :
230308 info = self .pkg .getinfo (src )
231309 with self .pkg .open (src ) as sfd , \
@@ -235,10 +313,12 @@ def extract_file(self, src, dest, optional=True):
235313 except KeyError :
236314 if not optional :
237315 raise
316+ logging .info (f" (SKIPPED)" )
238317 if self .verbose :
239318 self .flush_progress ()
240319
241320 def extract_tree (self , src , dest ):
321+ src = self .path (src )
242322 if src [- 1 ] != "/" :
243323 src += "/"
244324 logging .info (f" { src } * -> { dest } " )
@@ -264,7 +344,7 @@ def extract_tree(self, src, dest):
264344 os .unlink (destpath )
265345 os .symlink (link , destpath )
266346 else :
267- self .extract_file (name , destpath )
347+ self ._extract_file (name , destpath )
268348
269349 if self .verbose :
270350 self .flush_progress ()
0 commit comments