-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathstandardize_tags.py
More file actions
147 lines (120 loc) · 3.74 KB
/
standardize_tags.py
File metadata and controls
147 lines (120 loc) · 3.74 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
#!/usr/bin/env python3
"""
Script to standardize tags in TOML files across the tasks directory.
Standardization rules:
Round 1 (completed):
- string -> strings
- array -> collections
- arrays -> collections
- alog -> algo (typo fix)
- bits_operation -> bits-operation
- bits -> bits-operation
- hash_map -> hash-maps
- hashmap -> hash-maps
- algorithms -> algo
Round 2 (current):
- graph -> graphs
- graphs -> graphs (keep as-is)
- map -> hash_maps
- hash-maps -> hash_maps
- dict -> hash_maps
- set -> sets
- sets -> sets (keep as-is)
- date -> date_time
- date-time -> date_time
- prefix-scan -> prefix_scan
- two-pointers -> two_pointers
- bits-operation -> bits
- linear-scan -> linear_scan
- sliding_window -> sliding_window (keep as-is, already has underscore)
"""
import os
import re
from pathlib import Path
# Tag replacement mapping: legacy tag -> standardized tag.
TAG_REPLACEMENTS = {
    # Round 1 (already applied)
    "string": "strings",
    "array": "collections",
    "arrays": "collections",
    "alog": "algo",  # Typo fix
    "algorithms": "algo",
    # Round-1 spellings re-pointed at their final round-2 forms, so files
    # that never received the round-1 pass still normalize in one run.
    # (Round 1 mapped hash_map/hashmap -> hash-maps and
    # bits_operation -> bits-operation; round 2 renames those again below.)
    "hash_map": "hash_maps",
    "hashmap": "hash_maps",
    "bits_operation": "bits",
    # Round 2 (new changes)
    "graph": "graphs",
    "map": "hash_maps",
    "hash-maps": "hash_maps",
    "dict": "hash_maps",
    "set": "sets",
    "date": "date_time",
    "date-time": "date_time",
    "prefix-scan": "prefix_scan",
    "two-pointers": "two_pointers",
    "bits-operation": "bits",
    "linear-scan": "linear_scan",
}
def standardize_tags_in_line(line):
    """Standardize tags in a single line containing a tags declaration.

    Returns the line unchanged unless it contains a single-line
    ``tags = [...]`` declaration; otherwise each quoted tag is mapped
    through TAG_REPLACEMENTS, duplicates are dropped (first occurrence
    wins), and the rebuilt declaration is spliced back in place.
    """
    # Quick reject: only lines whose first token is "tags" can declare tags.
    if not line.lstrip().startswith("tags"):
        return line
    # Locate the tags array; bail out on multi-line arrays or odd syntax.
    match = re.search(r"tags\s*=\s*\[(.*?)\]", line)
    if not match:
        return line
    # Map each quoted tag through the replacement table (identity if absent).
    tags = [TAG_REPLACEMENTS.get(tag, tag)
            for tag in re.findall(r'"([^"]+)"', match.group(1))]
    # De-duplicate while keeping first-seen order (dicts preserve insertion order).
    unique_tags = dict.fromkeys(tags)
    tags_str = ", ".join(f'"{tag}"' for tag in unique_tags)
    # Splice over the matched span only, so the leading indentation, any
    # trailing text after the "]" (e.g. a comment), and the original line
    # ending all survive.  The previous version rebuilt the entire line and
    # unconditionally appended "\n", which dropped trailing comments,
    # turned "\r\n" endings into "\n", and added a newline to a final line
    # that had none.
    return f"{line[:match.start()]}tags = [{tags_str}]{line[match.end():]}"
def process_toml_file(file_path):
    """Standardize the tags declarations in one TOML file.

    Returns True when the file contents changed and were written back,
    False when nothing changed or any error occurred (the error is
    printed, not raised).
    """
    try:
        with open(file_path, "r", encoding="utf-8") as handle:
            original = handle.readlines()
        updated = [standardize_tags_in_line(text) for text in original]
        # Only touch the file on disk when at least one line changed.
        if updated == original:
            return False
        with open(file_path, "w", encoding="utf-8") as handle:
            handle.writelines(updated)
        return True
    except Exception as e:
        print(f"Error processing {file_path}: {e}")
        return False
def main():
    """Locate every TOML file under the tasks directory and standardize its tags."""
    tasks_dir = Path("tasks")
    if not tasks_dir.exists():
        print(f"Error: {tasks_dir} directory not found")
        return
    # Recursively collect every *.toml under tasks/.
    toml_files = list(tasks_dir.rglob("*.toml"))
    print(f"Found {len(toml_files)} TOML files")
    print("Processing files...")
    modified_count = 0
    for path in toml_files:
        if not process_toml_file(path):
            continue
        modified_count += 1
        print(f" Modified: {path}")
    print(f"\nDone! Modified {modified_count} out of {len(toml_files)} files")
if __name__ == "__main__":
main()