-
Notifications
You must be signed in to change notification settings - Fork 2.9k
Expand file tree
/
Copy pathutils.py
More file actions
152 lines (123 loc) · 4.85 KB
/
utils.py
File metadata and controls
152 lines (123 loc) · 4.85 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
import json
import re
import pprint
import requests
def update_contributors():
    """
    Update contributors' task counts and sort them by contribution.

    This function:
    - Reads support_model.md, support_model_amd.md, and support_model_Ascend.md
      and extracts task lines (lines crediting a contributor with '@name')
    - Counts contribution points (2 for LoRA / fine-tuning tasks, 1 for
      regular tasks)
    - Adds fixed bonus points for special contributors
    - Sorts contributors by total points (descending)
    - Writes the updated data back to contributors.json
    - Prints each contributor's info line

    Returns:
        dict: Updated contributors mapping (name -> record), sorted by
        'task_num' in descending order.
    """
    # These files contain Chinese text; read/write them explicitly as UTF-8
    # instead of relying on the platform default encoding.
    with open('./support_model.md', 'r', encoding='utf-8') as f:
        readme = f.read()
    with open('./support_model_amd.md', 'r', encoding='utf-8') as f:
        readme_amd = f.read()
    with open('./support_model_Ascend.md', 'r', encoding='utf-8') as f:
        readme_ascend = f.read()
    with open('./contributors.json', 'r', encoding='utf-8') as f:
        contributors = json.load(f)

    # Reset task counts before recounting from scratch.
    for record in contributors.values():
        record['task_num'] = 0

    # Extract task lines: any line that credits a contributor with '@'.
    # NOTE(review): the main file intentionally drops its last '@' line
    # ([:-1]) — presumably a footer/credits line; confirm against the file.
    tasks = [line for line in readme.split('\n') if '@' in line][:-1]
    tasks_amd = [line for line in readme_amd.split('\n') if '@' in line]
    tasks_ascend = [line for line in readme_ascend.split('\n') if '@' in line]
    all_tasks = tasks + tasks_amd + tasks_ascend

    # Count points: LoRA / fine-tuning tasks are worth 2, regular tasks 1.
    for task in all_tasks:
        # Everything after the '@' is treated as the contributor name, so the
        # task line must end with '@name'.
        name = task.split('@')[1]
        if name not in contributors:
            continue
        # Case-insensitive match for "lora": the previous code compared
        # case-sensitively ("Lora" in task) despite claiming otherwise,
        # which missed the common spellings "LoRA" and "lora".
        if 'lora' in task.lower() or '微调' in task:
            contributors[name]['task_num'] += 2
        else:
            contributors[name]['task_num'] += 1

    # Fixed bonus points for long-term special contributors.
    special_contributors = {
        '不要葱姜蒜': 300,
        'Logan Zou': 300,
        '刘十一': 150,
    }
    for name, points in special_contributors.items():
        if name in contributors:
            contributors[name]['task_num'] += points

    # Sort by contribution points, highest first.
    contributors = dict(
        sorted(contributors.items(), key=lambda x: x[1]['task_num'], reverse=True)
    )

    # Persist the updated ranking (ensure_ascii=False keeps Chinese readable).
    with open('./contributors.json', 'w', encoding='utf-8') as f:
        json.dump(contributors, f, indent=4, ensure_ascii=False)

    # Print one markdown bullet per contributor, in ranked order.
    for record in contributors.values():
        print(f'- {record["info"]}')
    return contributors
def calculate_docker_hours():
    """
    Calculate and display Docker runtime hours from the CodeWithGPU API.

    Fetches image data for the Datawhale account, sums runtime hours across
    all containers, and prints a summary table sorted by runtime (descending).

    Returns:
        tuple: (docker_list, total_hours) where docker_list is a list of
        dicts with 'uuid' (short id) and 'runtime_hour', and total_hours is
        the sum of all reported runtime hours.
    """
    url = "https://www.codewithgpu.com/api/v1/image/home/Datawhale?page_index=1&page_size=100&username=Datawhale"
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9",
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
    }
    total_hours = 0
    docker_list = []
    # Bound the request so a stalled server cannot hang the script forever.
    response = requests.get(url, headers=headers, timeout=30)
    if response.status_code == 200:
        data = response.json()
        if data['code'] == "Success":
            item_list = data['data']['list']
            for item in item_list:
                uuid = item['uuid']
                runtime_hour = item['runtime_hour']
                # Skip containers the API reports without a runtime figure.
                if runtime_hour is not None:
                    total_hours += runtime_hour
                    docker_list.append({
                        # Keep only the short id after the last '/'.
                        "uuid": uuid.split("/")[-1],
                        "runtime_hour": runtime_hour
                    })
        else:
            print(f"Error: {data['message']}")
    else:
        # Previously a non-200 response failed silently; surface it so the
        # empty summary below is explainable.
        print(f"Error: HTTP {response.status_code} from CodeWithGPU API")
    print(f"\n{'='*60}")
    print(f"{' DOCKER RUNTIME SUMMARY ':^60}")
    print(f"{'='*60}")
    print(f"{'Total Containers:':<20} {len(docker_list):>10}")
    print(f"{'Total Runtime:':<20} {total_hours:>10.1f} hours")
    if docker_list:
        print(f"{'='*60}")
        print(f"{' DOCKER CONTAINERS (Sorted by Runtime) ':^60}")
        print(f"{'='*60}")
        print(f"{'Rank':<6} {'UUID':<35} {'Runtime (hours)':>15}")
        print(f"{'-'*6}-{'-'*35}-{'-'*15}")
        docker_list = sorted(docker_list, key=lambda x: x['runtime_hour'], reverse=True)
        for i, item in enumerate(docker_list, 1):
            print(f"{i:<6} {item['uuid']:<35} {item['runtime_hour']:>15.1f}")
        print(f"{'='*60}")
    else:
        print("No Docker containers found.")
    return docker_list, total_hours
# Usage example
if __name__ == "__main__":
    # Run as a script: refresh the contributor rankings first, then report
    # Docker runtime statistics. Each function prints its own output and
    # update_contributors() rewrites ./contributors.json as a side effect.
    update_contributors()
    calculate_docker_hours()