@@ -8036,6 +8036,126 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
80368036 fp.close()
80378037 return True
80388038
def AppendReadInFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, uncompress=True, skipchecksum=False, seektoend=False):
    """Read an existing archive back into the in-memory entry list used by the Append* writers.

    Thin wrapper around ReadInFileWithContentToList. The compression/checksum
    parameters exist for signature parity with the other Append* helpers and
    are not forwarded. Previously the body referenced uncompress, skipchecksum
    and seektoend without defining them (NameError on every call); they are
    now real keyword parameters with conservative defaults, which is
    backward-compatible for all existing callers.
    """
    return ReadInFileWithContentToList(infile, "auto", 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8041+
def AppendReadInMultipleFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, fmttype="auto", uncompress=True, skipchecksum=False, seektoend=False):
    """Read a multi-entry archive back into the in-memory entry list used by the Append* writers.

    Thin wrapper around ReadInMultipleFileWithContentToList. The
    compression/checksum parameters exist for signature parity with the
    other Append* helpers and are not forwarded. Previously the body
    referenced fmttype, uncompress, skipchecksum and seektoend without
    defining them (NameError on every call); they are now real keyword
    parameters with conservative defaults, which is backward-compatible
    for all existing callers.
    """
    return ReadInMultipleFileWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8044+
def AppendReadInMultipleFilesWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, fmttype="auto", uncompress=True, skipchecksum=False, seektoend=False):
    """Read several archives back into the in-memory entry list used by the Append* writers.

    Thin wrapper around ReadInMultipleFilesWithContentToList. The
    compression/checksum parameters exist for signature parity with the
    other Append* helpers and are not forwarded. Previously the body
    referenced fmttype, uncompress, skipchecksum and seektoend without
    defining them (NameError on every call); they are now real keyword
    parameters with conservative defaults, which is backward-compatible
    for all existing callers.
    """
    return ReadInMultipleFilesWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8047+
def AppendReadInFileWithContent(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False):
    """Re-serialize the entries of an existing archive *infile* into the writable stream *fp*.

    Reads every entry of *infile* (decoded with *insaltkey*), writes a fresh
    archive header followed by each entry's header/content (encoded with
    *outsaltkey*), flushing and fsyncing after each write where the stream
    supports it.

    Returns *fp* on success, or False when *fp* is not writable.
    """
    if(not hasattr(fp, "write")):
        return False
    # checksumtype[0]/[1] are used for the new archive header below; the
    # remaining entries drive the per-entry checksums on the read side.
    # NOTE(review): checksumtype[3] is passed twice here — presumably the
    # third slot should be checksumtype[4]; confirm against the other
    # Append*WithContent helpers before changing.
    GetDirList = AppendReadInFileWithContentToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, insaltkey, verbose)
    numfiles = int(len(GetDirList))
    # (Removed an unused local that formatted numfiles as lowercase hex;
    # AppendFileHeader takes the plain integer count.)
    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
    try:
        # Best-effort durability: flush Python buffers and, where available,
        # force the OS to write the file to disk.
        fp.flush()
        if(hasattr(os, "sync")):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        # In-memory/stream objects may not support fileno()/fsync; ignore.
        pass
    for curfname in GetDirList:
        tmpoutlist = curfname['fheaders']
        # Each entry carries its own checksum algorithm names from the source
        # archive; reuse them so the re-written entry matches the original.
        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, outsaltkey)
        try:
            fp.flush()
            if(hasattr(os, "sync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
    return fp
8071+
def AppendReadInFileWithContentToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
    """Re-serialize the entries of *infiles* into *outfile*.

    *outfile* may be "-" (write to stdout), None (return the archive as
    bytes), an open file-like object, a URL matching
    __upload_proto_support__ (upload the result), or a filesystem path.

    Returns the file object when *returnfp* is True, the archive bytes when
    *outfile* is None, False on a PermissionError opening the path, and
    True otherwise.
    """
    # Resolve the concrete format spec when given the multi-format dict:
    # for a real output path, guess the format from the file extension
    # (trying the inner extension too, e.g. ".tar" of ".tar.gz").
    if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
        get_in_ext = os.path.splitext(outfile)
        # NOTE(review): this lookup uses the global __file_format_multi_dict__
        # rather than the caller-supplied formatspecs — confirm intentional.
        tmpfmt = GetKeyByFormatExtension(get_in_ext[1], formatspecs=__file_format_multi_dict__)
        if(tmpfmt is None and get_in_ext[1]!=""):
            get_in_ext = os.path.splitext(get_in_ext[0])
            tmpfmt = GetKeyByFormatExtension(get_in_ext[0], formatspecs=__file_format_multi_dict__)
        if(tmpfmt is None):
            # Unknown extension: fall back to the library default format.
            fmttype = __file_format_default__
            formatspecs = formatspecs[fmttype]
        else:
            fmttype = tmpfmt
            formatspecs = formatspecs[tmpfmt]
    elif(IsNestedDict(formatspecs) and fmttype in formatspecs):
        formatspecs = formatspecs[fmttype]
    elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
        fmttype = __file_format_default__
        formatspecs = formatspecs[fmttype]
    # For a real path: normalize it and remove any pre-existing file so the
    # archive is written from scratch (unlink failure is ignored).
    if(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write")):
        outfile = RemoveWindowsPath(outfile)
        if(os.path.exists(outfile)):
            try:
                os.unlink(outfile)
            except OSError:
                pass
    # Choose the destination stream.
    if(outfile == "-" or outfile is None):
        # stdout / in-memory result: build in a temp file first.
        verbose = False
        fp = MkTempFile()
    elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
        # Caller-supplied file-like object: write into it directly.
        fp = outfile
    elif(re.findall(__upload_proto_support__, outfile)):
        # Upload URL: build in a temp file, upload after writing.
        fp = MkTempFile()
    else:
        fbasename = os.path.splitext(outfile)[0]
        fextname = os.path.splitext(outfile)[1]
        # A compressed-container extension implies whole-file compression.
        if(not compresswholefile and fextname in outextlistwd):
            compresswholefile = True
        try:
            fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
        except PermissionError:
            return False
    AppendReadInFileWithContent(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, insaltkey, outsaltkey, verbose)
    # Streams we built ourselves (or were handed) get compressed in place
    # and flushed; fsync is best-effort and skipped for non-file streams.
    if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
        fp = CompressOpenFileAlt(
            fp, compression, compressionlevel, compressionuselist, formatspecs)
        try:
            fp.flush()
            if(hasattr(os, "sync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
    if(outfile == "-"):
        # Copy the finished archive to stdout in buffered chunks.
        fp.seek(0, 0)
        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif(outfile is None):
        # Return the archive contents as an in-memory value.
        fp.seek(0, 0)
        outvar = fp.read()
        fp.close()
        return outvar
    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
        # Upload target: compress, rewind, and push to the remote location.
        fp = CompressOpenFileAlt(
            fp, compression, compressionlevel, compressionuselist, formatspecs)
        fp.seek(0, 0)
        upload_file_to_internet_file(fp, outfile)
    if(returnfp):
        # Hand the rewound stream back to the caller instead of closing it.
        fp.seek(0, 0)
        return fp
    else:
        fp.close()
        return True
8143+
def AppendReadInFileWithContentToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
    """Append several input archives into one output, chaining the writes.

    Each archive in *infiles* is appended in order; after every successful
    step the returned file object becomes the destination for the next one,
    so all entries accumulate in a single output stream.

    Returns the open file object when *returnfp* is True, True on success
    otherwise, or a falsy value when any step fails.
    """
    sources = infiles if isinstance(infiles, list) else [infiles]
    result = False
    for current_source in sources:
        # Always request the file object back (returnfp=True) so it can be
        # threaded into the next iteration as the output target.
        result = AppendReadInFileWithContentToOutFile(
            current_source, outfile, fmttype, compression, compresswholefile,
            compressionlevel, compressionuselist, extradata, jsondata,
            checksumtype, formatspecs, insaltkey, outsaltkey, verbose, True)
        if not result:
            break
        outfile = result
    if result and not returnfp:
        # Caller did not ask for the stream; close it and report success.
        result.close()
        return True
    return result
8158+
80398159def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
80408160 if(IsNestedDict(formatspecs) and fmttype=="auto" and
80418161 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
0 commit comments