-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathdelete_x.py
More file actions
97 lines (86 loc) · 3.08 KB
/
delete_x.py
File metadata and controls
97 lines (86 loc) · 3.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import requests
from concurrent.futures import ThreadPoolExecutor, as_completed
import argparse
import sys
def parse_args():
    """Build and parse the command-line arguments for the deletion script.

    Returns:
        argparse.Namespace with attributes: base_url (str), files (bool),
        batches (bool), threads (int).
    """
    cli = argparse.ArgumentParser(
        description="Delete all files and/or batches from the API."
    )
    cli.add_argument(
        "--base-url",
        type=str,
        default="http://localhost:5000/v1",
        help="Base URL for the API (default: %(default)s)",
    )
    # Action flags: at least one must be supplied for the script to do anything.
    cli.add_argument("--files", action="store_true", help="Delete all files")
    cli.add_argument("--batches", action="store_true", help="Delete all batches")
    cli.add_argument(
        "--threads",
        type=int,
        default=5,
        help="Number of threads for file deletion (default: %(default)s)",
    )
    return cli.parse_args()
def delete_file(file_id, base_url):
    """Delete a single file by ID via the API.

    Args:
        file_id: Identifier of the file to delete.
        base_url: API base URL, e.g. "http://localhost:5000/v1".

    Returns:
        Tuple (success: bool, file_id) so callers can tally outcomes.
    """
    try:
        # timeout= prevents a hung server from stalling this worker
        # thread forever (requests has no default timeout).
        response = requests.delete(f"{base_url}/files/{file_id}", timeout=30)
        response.raise_for_status()  # Raise an exception for bad status codes
        print(f"✅ Deleted file {file_id}")
        return True, file_id
    except requests.exceptions.RequestException as e:
        # Covers connection errors, timeouts, and HTTP error statuses.
        print(f"❌ Failed to delete file {file_id}: {e}")
        return False, file_id
def delete_all_files(base_url, threads):
    """Delete every file known to the API, using a thread pool.

    Args:
        base_url: API base URL, e.g. "http://localhost:5000/v1".
        threads: Number of worker threads for concurrent deletions.

    Raises:
        requests.exceptions.RequestException: if the initial listing fails.
    """
    # Get all files; fail loudly on a bad listing response instead of a
    # confusing KeyError from the JSON body.
    response = requests.get(f"{base_url}/files", timeout=30)
    response.raise_for_status()
    uploaded_file_ids = [file['id'] for file in response.json()['data']]
    print(f"✅ Found {len(uploaded_file_ids)} files")
    if uploaded_file_ids:
        successful_deletions = 0
        failed_deletions = 0
        with ThreadPoolExecutor(max_workers=threads) as executor:
            # Submit all deletion tasks
            future_to_file_id = {
                executor.submit(delete_file, file_id, base_url): file_id
                for file_id in uploaded_file_ids
            }
            # Process completed tasks as they finish (any order).
            for future in as_completed(future_to_file_id):
                success, file_id = future.result()
                if success:
                    successful_deletions += 1
                else:
                    failed_deletions += 1
        print("\n📊 Deletion Summary:")
        print(f"   ✅ Successfully deleted: {successful_deletions} files")
        print(f"   ❌ Failed to delete: {failed_deletions} files")
    else:
        print("No files to delete")
def delete_all_batches(base_url):
    """Delete every batch known to the API, re-listing until none remain.

    The API listing may be paginated, so we loop: list, delete all returned
    batches, and repeat until the listing comes back empty.

    Args:
        base_url: API base URL, e.g. "http://localhost:5000/v1".

    Raises:
        requests.exceptions.RequestException: on any failed request. Without
        this check a silently failing DELETE would make the loop spin forever
        on the same batches.
    """
    while True:
        response = requests.get(f"{base_url}/batches", timeout=30)
        response.raise_for_status()
        batches = response.json()['data']
        print(f"✅ Found {len(batches)} batches")
        if len(batches) == 0:
            break
        for batch in batches:
            response = requests.delete(f"{base_url}/batches/{batch['id']}", timeout=30)
            response.raise_for_status()
            print(f"✅ Deleted batch {batch['id']}")
if __name__ == "__main__":
    args = parse_args()
    # Require at least one action flag; exit cleanly (status 0) otherwise.
    if not (args.files or args.batches):
        print("Nothing to do. Use --files and/or --batches to specify what to delete.")
        sys.exit(0)
    if args.files:
        delete_all_files(args.base_url, args.threads)
    if args.batches:
        delete_all_batches(args.base_url)