|
73 | 73 | except ImportError: |
74 | 74 | import json |
75 | 75 |
|
| 76 | +testyaml = False |
| 77 | +try: |
| 78 | + import oyaml as yaml |
| 79 | + testyaml = True |
| 80 | +except ImportError: |
| 81 | + try: |
| 82 | + import yaml |
| 83 | + testyaml = True |
| 84 | + except ImportError: |
| 85 | + testyaml = False |
| 86 | + |
76 | 87 | try: |
77 | 88 | import configparser |
78 | 89 | except ImportError: |
@@ -2620,7 +2631,7 @@ def _load_all_members_spooled(self): |
2620 | 2631 | scanned_leading = 0 # for tolerant header scan |
2621 | 2632 |
|
2622 | 2633 | while True: |
2623 | | - data = self.file.read(1 << 20) # 1 MiB blocks |
|  | 2634 | +            data = self.file.read(__filebuff_size__)  # configurable chunk size (default 1 MiB) |
2624 | 2635 | if not data: |
2625 | 2636 | if d is not None: |
2626 | 2637 | self._spool.write(d.flush()) |
@@ -2778,7 +2789,7 @@ def write(self, data): |
2778 | 2789 |
|
2779 | 2790 | # Buffer and compress in chunks to limit memory |
2780 | 2791 | self._write_buf += data |
2781 | | - if len(self._write_buf) >= (1 << 20): # 1 MiB threshold |
|  | 2792 | +        if len(self._write_buf) >= (__filebuff_size__):  # configurable flush threshold (default 1 MiB) |
2782 | 2793 | chunk = self._compressor.compress(bytes(self._write_buf)) |
2783 | 2794 | if chunk: |
2784 | 2795 | self.file.write(chunk) |
@@ -3083,7 +3094,7 @@ def _load_all_members_spooled(self): |
3083 | 3094 |
|
3084 | 3095 | self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold) |
3085 | 3096 |
|
3086 | | - CHUNK = 1 << 20 |
| 3097 | + CHUNK = __filebuff_size__ |
3087 | 3098 | pending = b"" |
3088 | 3099 | d = None |
3089 | 3100 | absolute_offset = 0 |
@@ -3246,7 +3257,7 @@ def write(self, data): |
3246 | 3257 |
|
3247 | 3258 | # Stage and compress in chunks |
3248 | 3259 | self._write_buf += data |
3249 | | - if len(self._write_buf) >= (1 << 20): # 1 MiB threshold |
|  | 3260 | +        if len(self._write_buf) >= (__filebuff_size__):  # configurable flush threshold (default 1 MiB) |
3250 | 3261 | out = self._compressor.compress(bytes(self._write_buf)) |
3251 | 3262 | if out: |
3252 | 3263 | self.file.write(out) |
@@ -3699,7 +3710,7 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__ |
3699 | 3710 | if CheckSumSupport(algo_key, hashlib_guaranteed): |
3700 | 3711 | h = hashlib.new(algo_key) |
3701 | 3712 | while True: |
3702 | | - chunk = inbytes.read(1 << 20) |
| 3713 | + chunk = inbytes.read(__filebuff_size__) |
3703 | 3714 | if not chunk: |
3704 | 3715 | break |
3705 | 3716 | if not isinstance(chunk, (bytes, bytearray, memoryview)): |
@@ -4151,6 +4162,28 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck |
4151 | 4162 | fprejsoncontent = "" |
4152 | 4163 | fjsonrawcontent = fprejsoncontent |
4153 | 4164 | fjsoncontent = {} |
| 4165 | + elif(testyaml and fjsontype == "yaml"): |
| 4166 | + fjsoncontent = {} |
| 4167 | + fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
| 4168 | + if (fjsonsize > 0): |
| 4169 | + try: |
| 4170 | + # try base64 → utf-8 → YAML |
| 4171 | + fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8") |
| 4172 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4173 | + except (binascii.Error, UnicodeDecodeError, yaml.YAMLError): |
| 4174 | + try: |
| 4175 | + # fall back to treating the bytes as plain text YAML |
| 4176 | + fjsonrawcontent = fprejsoncontent |
| 4177 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4178 | + except (UnicodeDecodeError, yaml.YAMLError): |
| 4179 | + # final fallback: empty |
| 4180 | + fprejsoncontent = "" |
| 4181 | + fjsonrawcontent = fprejsoncontent |
| 4182 | + fjsoncontent = {} |
| 4183 | + else: |
| 4184 | + fprejsoncontent = "" |
| 4185 | + fjsonrawcontent = fprejsoncontent |
| 4186 | + fjsoncontent = {} |
4154 | 4187 | elif(fjsontype=="list"): |
4155 | 4188 | fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
4156 | 4189 | flisttmp = MkTempFile() |
@@ -4324,6 +4357,28 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, |
4324 | 4357 | fprejsoncontent = "" |
4325 | 4358 | fjsonrawcontent = fprejsoncontent |
4326 | 4359 | fjsoncontent = {} |
| 4360 | + elif(testyaml and fjsontype == "yaml"): |
| 4361 | + fjsoncontent = {} |
| 4362 | + fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
| 4363 | + if (fjsonsize > 0): |
| 4364 | + try: |
| 4365 | + # try base64 → utf-8 → YAML |
| 4366 | + fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8") |
| 4367 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4368 | + except (binascii.Error, UnicodeDecodeError, yaml.YAMLError): |
| 4369 | + try: |
| 4370 | + # fall back to treating the bytes as plain text YAML |
| 4371 | + fjsonrawcontent = fprejsoncontent |
| 4372 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4373 | + except (UnicodeDecodeError, yaml.YAMLError): |
| 4374 | + # final fallback: empty |
| 4375 | + fprejsoncontent = "" |
| 4376 | + fjsonrawcontent = fprejsoncontent |
| 4377 | + fjsoncontent = {} |
| 4378 | + else: |
| 4379 | + fprejsoncontent = "" |
| 4380 | + fjsonrawcontent = fprejsoncontent |
| 4381 | + fjsoncontent = {} |
4327 | 4382 | elif(fjsontype=="list"): |
4328 | 4383 | fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
4329 | 4384 | flisttmp = MkTempFile() |
@@ -4510,6 +4565,28 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, |
4510 | 4565 | fprejsoncontent = "" |
4511 | 4566 | fjsonrawcontent = fprejsoncontent |
4512 | 4567 | fjsoncontent = {} |
| 4568 | + elif(testyaml and fjsontype == "yaml"): |
| 4569 | + fjsoncontent = {} |
| 4570 | + fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
| 4571 | + if (fjsonsize > 0): |
| 4572 | + try: |
| 4573 | + # try base64 → utf-8 → YAML |
| 4574 | + fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8") |
| 4575 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4576 | + except (binascii.Error, UnicodeDecodeError, yaml.YAMLError): |
| 4577 | + try: |
| 4578 | + # fall back to treating the bytes as plain text YAML |
| 4579 | + fjsonrawcontent = fprejsoncontent |
| 4580 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4581 | + except (UnicodeDecodeError, yaml.YAMLError): |
| 4582 | + # final fallback: empty |
| 4583 | + fprejsoncontent = "" |
| 4584 | + fjsonrawcontent = fprejsoncontent |
| 4585 | + fjsoncontent = {} |
| 4586 | + else: |
| 4587 | + fprejsoncontent = "" |
| 4588 | + fjsonrawcontent = fprejsoncontent |
| 4589 | + fjsoncontent = {} |
4513 | 4590 | elif(fjsontype=="list"): |
4514 | 4591 | fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
4515 | 4592 | flisttmp = MkTempFile() |
@@ -8766,7 +8843,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw): |
8766 | 8843 |
|
8767 | 8844 | # ========= copy helpers ========= |
8768 | 8845 |
|
8769 | | -def fast_copy(infp, outfp, bufsize=1 << 20): |
| 8846 | +def fast_copy(infp, outfp, bufsize=__filebuff_size__): |
8770 | 8847 | """ |
8771 | 8848 | Efficient copy from any readable file-like to any writable file-like. |
8772 | 8849 | Uses readinto() when available to avoid extra allocations. |
@@ -8810,7 +8887,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__): |
8810 | 8887 | shutil.copyfileobj(fp, outfp, length=chunk_size) |
8811 | 8888 |
|
8812 | 8889 |
|
8813 | | -def copy_opaque(src, dst, bufsize=1 << 20, grow_step=64 << 20): |
| 8890 | +def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20): |
8814 | 8891 | """ |
8815 | 8892 | Copy opaque bytes from 'src' (any readable file-like) to 'dst' |
8816 | 8893 | (your mmap-backed FileLikeAdapter or any writable file-like). |
|
0 commit comments