-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathfix_unicode_json.py
More file actions
116 lines (91 loc) · 3.93 KB
/
fix_unicode_json.py
File metadata and controls
116 lines (91 loc) · 3.93 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
#!/usr/bin/env python3
"""
JSON Stream Fixer & Recovery Tool
This script recovers valid JSON objects from a malformed file (e.g., multiple top-level
objects, trailing commas, or BOM artifacts) and saves them as a standard JSON array.
Arguments and Options:
Positional Arguments:
input_file Path to the malformed JSON file.
Optional Arguments:
--output-file, -o Path for the fixed output. (Default: '<input>_fixed.json')
--indent, -i Number of spaces for indentation. (Default: 2)
--quiet, -q Suppress periodic progress messages.
Usage Examples:
Basic recovery:
$ python fix_unicode_json.py broken_data.json
Specify output and compact format:
$ python fix_unicode_json.py broken_data.json -o recovered.json --indent 0
"""
import json
import argparse
import time
import sys
from pathlib import Path
class JSONFixer:
    """Recover valid JSON objects from a malformed file and save them as one JSON array.

    Handles files with multiple top-level objects, stray commas between
    objects, and a leading UTF-8 BOM.
    """

    def __init__(self, input_path, output_path=None, indent=2, quiet=False):
        """Configure the fixer.

        Args:
            input_path: Path to the malformed JSON file.
            output_path: Destination for the fixed output; defaults to
                "<input stem>_fixed.json" next to the input file.
            indent: Indentation passed through to ``json.dump``.
            quiet: If True, suppress periodic progress messages.
        """
        self.input_path = Path(input_path)
        self.output_path = (
            Path(output_path)
            if output_path
            else self.input_path.with_name(f"{self.input_path.stem}_fixed.json")
        )
        self.indent = indent
        self.quiet = quiet

    def fix(self):
        """Scan the input file, recover every parseable JSON value, write the array.

        Returns:
            True on success (output file written), False if the input file is
            missing or an unexpected error occurred.
        """
        if not self.input_path.exists():
            print(f"❌ Error: File not found: {self.input_path}")
            return False
        print(f"🚀 Starting JSON fix: {self.input_path.name}")
        start_time = time.time()
        decoder = json.JSONDecoder()
        objects = []
        try:
            # utf-8-sig handles files with or without BOM automatically
            with self.input_path.open(encoding="utf-8-sig") as f:
                text = f.read()
            # Track an absolute position and let raw_decode(text, pos) resume
            # in place. The previous implementation re-sliced the remaining
            # text on every iteration (text = text[idx:].lstrip()), copying
            # the tail each time — O(n^2) for large files — and compared
            # slice-relative indices against a stale length of the original
            # string.
            pos = 0
            text_len = len(text)
            count = 0
            while pos < text_len:
                # Skip whitespace/newlines without copying the string.
                while pos < text_len and text[pos].isspace():
                    pos += 1
                if pos >= text_len:
                    break
                try:
                    # Attempt to extract the next valid JSON value; raw_decode
                    # returns the value and the index just past it.
                    obj, pos = decoder.raw_decode(text, pos)
                    objects.append(obj)
                    count += 1
                    if not self.quiet and count % 100 == 0:
                        print(f"⏳ Parsed {count} objects...")
                except json.JSONDecodeError:
                    # If decoding fails, skip ahead past the next comma and
                    # retry from there; give up when no comma remains.
                    comma_idx = text.find(',', pos)
                    if comma_idx == -1:
                        break
                    pos = comma_idx + 1
            # Save the recovered objects as a proper JSON array
            with self.output_path.open("w", encoding="utf-8") as f:
                json.dump(objects, f, ensure_ascii=False, indent=self.indent)
            end_time = time.time()
            print(f"✅ Done! Recovered {len(objects)} objects.")
            print(f"🕒 Total time: {end_time - start_time:.2f} seconds")
            print(f"📄 Output file: {self.output_path}")
            return True
        except Exception as e:
            print(f"❌ Unexpected error: {e}")
            return False
def main():
    """CLI entry point: parse arguments, run the fixer, exit with status 1 on failure."""
    parser = argparse.ArgumentParser(
        description="Recover valid JSON objects from malformed files.")
    parser.add_argument("input_file", help="Path to the input JSON file")
    parser.add_argument("--output-file", "-o", help="Path for the output file")
    parser.add_argument("--indent", "-i", type=int, default=2,
                        help="Indentation for output JSON")
    parser.add_argument("--quiet", "-q", action="store_true",
                        help="Minimize console output")
    args = parser.parse_args()

    # Build the fixer straight from the parsed namespace and run it.
    succeeded = JSONFixer(
        args.input_file,
        output_path=args.output_file,
        indent=args.indent,
        quiet=args.quiet,
    ).fix()
    if not succeeded:
        sys.exit(1)
# Run the CLI only when this file is executed as a script, not when imported.
if __name__ == "__main__":
    main()