@@ -3910,10 +3910,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
39103910 return first_two + headerdata
39113911
39123912
3913- def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
3913+ def ReadFileHeaderDataWithContent(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
39143914 if(not hasattr(fp, "read")):
39153915 return False
39163916 delimiter = formatspecs['format_delimiter']
3917+ fheaderstart = fp.tell()
39173918 if(__use_new_style__):
39183919 HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
39193920 else:
@@ -3927,15 +3928,41 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
39273928 fcs = HeaderOut[-2].lower()
39283929 fccs = HeaderOut[-1].lower()
39293930 fsize = int(HeaderOut[7], 16)
3930- fcompression = HeaderOut[14]
3931- fcsize = int(HeaderOut[15], 16)
3932- fseeknextfile = HeaderOut[26]
3933- fjsontype = HeaderOut[27]
3934- fjsonlen = int(HeaderOut[28], 16)
3935- fjsonsize = int(HeaderOut[29], 16)
3936- fjsonchecksumtype = HeaderOut[30]
3937- fjsonchecksum = HeaderOut[31]
3938- fjsoncontent = {}
3931+ fcompression = HeaderOut[17]
3932+ fcsize = int(HeaderOut[18], 16)
3933+ fseeknextfile = HeaderOut[28]
3934+ fjsontype = HeaderOut[29]
3935+ fjsonlen = int(HeaderOut[30], 16)
3936+ fjsonsize = int(HeaderOut[31], 16)
3937+ fjsonchecksumtype = HeaderOut[32]
3938+ fjsonchecksum = HeaderOut[33]
3939+ fextrasize = int(HeaderOut[34], 16)
3940+ fextrafields = int(HeaderOut[35], 16)
3941+ fextrafieldslist = []
3942+ extrastart = 36
3943+ extraend = extrastart + fextrafields
3944+ while(extrastart < extraend):
3945+ fextrafieldslist.append(HeaderOut[extrastart])
3946+ extrastart = extrastart + 1
3947+ fvendorfieldslist = []
3948+            fvendorfields = 0
3949+ if((len(HeaderOut) - 4)>extraend):
3950+ extrastart = extraend
3951+ extraend = len(HeaderOut) - 4
3952+ while(extrastart < extraend):
3953+ fvendorfieldslist.append(HeaderOut[extrastart])
3954+ extrastart = extrastart + 1
3955+ fvendorfields = fvendorfields + 1
3956+ if(fextrafields==1):
3957+ try:
3958+ fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
3959+ fextrafields = len(fextrafieldslist)
3960+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
3961+ try:
3962+ fextrafieldslist = json.loads(fextrafieldslist[0])
3963+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
3964+ pass
3965+ fjstart = fp.tell()
39393966 if(fjsontype=="json"):
39403967 fjsoncontent = {}
39413968 fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4002,31 +4029,37 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
40024029 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
40034030 pass
40044031 fp.seek(len(delimiter), 1)
4032+ fjend = fp.tell() - 1
40054033 jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
40064034 if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
40074035 VerbosePrintOut("File JSON Data Checksum Error with file " +
40084036 fname + " at offset " + str(fheaderstart))
40094037 VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
40104038 return False
4011- fp.seek(len(delimiter), 1)
4039+ fcs = HeaderOut[-2].lower()
4040+ fccs = HeaderOut[-1].lower()
40124041 newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
4013- HeaderOut.append(fjsoncontent)
40144042 if(fcs != newfcs and not skipchecksum):
40154043 VerbosePrintOut("File Header Checksum Error with file " +
40164044 fname + " at offset " + str(fheaderstart))
40174045 VerbosePrintOut("'" + fcs + "' != " + "'" + newfcs + "'")
40184046 return False
4047+ fhend = fp.tell() - 1
4048+ fcontentstart = fp.tell()
40194049 fcontents = MkTempFile()
4050+ pyhascontents = False
40204051 if(fsize > 0 and not listonly):
40214052 if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
40224053 fcontents.write(fp.read(fsize))
40234054 else:
40244055 fcontents.write(fp.read(fcsize))
4056+ pyhascontents = True
40254057 elif(fsize > 0 and listonly):
40264058 if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
40274059 fp.seek(fsize, 1)
40284060 else:
40294061 fp.seek(fcsize, 1)
4062+ pyhascontents = False
40304063 fcontents.seek(0, 0)
40314064 newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
40324065 fcontents.seek(0, 0)
@@ -4040,12 +4073,15 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
40404073 else:
40414074 fcontents.seek(0, 0)
40424075 if(uncompress):
4043- cfcontents = UncompressFileAlt(fcontents, formatspecs)
4076+ cfcontents = UncompressFileAlt(
4077+ fcontents, formatspecs)
40444078 cfcontents.seek(0, 0)
40454079 fcontents = MkTempFile()
40464080 shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
40474081 cfcontents.close()
40484082 fcontents.seek(0, 0)
4083+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
4084+ fcontentend = fp.tell()
40494085 if(re.findall("^\\+([0-9]+)", fseeknextfile)):
40504086 fseeknextasnum = int(fseeknextfile.replace("+", ""))
40514087 if(abs(fseeknextasnum) == 0):
@@ -4063,6 +4099,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
40634099 fp.seek(fseeknextasnum, 0)
40644100 else:
40654101 return False
4102+ fcontents.seek(0, 0)
4103+ if(not contentasfile):
4104+ fcontents = fcontents.read()
40664105 HeaderOut.append(fcontents)
40674106 return HeaderOut
40684107
@@ -4511,7 +4550,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
45114550 return outlist
45124551
45134552
4514- def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
4553+ def ReadFileDataWithContent(fp, filestart=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
45154554 if(not hasattr(fp, "read")):
45164555 return False
45174556 delimiter = formatspecs['format_delimiter']
@@ -4549,8 +4588,8 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
45494588 "'" + newfcs + "'")
45504589 return False
45514590 fnumfiles = int(inheader[8], 16)
4552- outfseeknextfile = inheaderdata [9]
4553- fjsonsize = int(inheaderdata [12], 16)
4591+ outfseeknextfile = inheader [9]
4592+ fjsonsize = int(inheader [12], 16)
45544593 fjsonchecksumtype = inheader[13]
45554594 fjsonchecksum = inheader[14]
45564595 fp.read(fjsonsize)
@@ -4575,7 +4614,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
45754614 countnum = 0
45764615 flist = []
45774616 while(countnum < fnumfiles):
4578- HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
4617+ HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
45794618 if(len(HeaderOut) == 0):
45804619 break
45814620 flist.append(HeaderOut)
0 commit comments