Tim77777767
committed on
Commit
·
4346c95
1
Parent(s):
1ef0893
Anpassungen der binchanger/checker und bin angepasst
Browse files- binchanger.py +39 -5
- binchecker.py +2 -2
- pytorch_model.bin +2 -2
binchanger.py
CHANGED
@@ -3,27 +3,61 @@ import os
|
|
3 |
|
4 |
# --- Configuration ---
|
5 |
# Pfad zur Eingabe-BIN-Datei
|
6 |
-
input_checkpoint_path = "./
|
7 |
# Pfad zur Ausgabe-BIN-Datei (wo die geänderte Version gespeichert wird)
|
8 |
-
output_checkpoint_path = "./
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9 |
|
10 |
# --- Check, ob die Eingabedatei existiert ---
|
11 |
if not os.path.exists(input_checkpoint_path):
|
12 |
-
print(f"Fehler: Eingabedatei nicht gefunden unter {input_checkpoint_path}. Bitte den Pfad korrigieren.")
|
13 |
else:
|
14 |
# --- Checkpoint laden ---
|
15 |
state_dict = torch.load(input_checkpoint_path, map_location="cpu")
|
16 |
|
17 |
# --- Layer-Namen ändern und neues State Dict erstellen ---
|
18 |
new_state_dict = {}
|
|
|
|
|
|
|
|
|
19 |
for old_key, value in state_dict.items():
|
20 |
-
if old_key
|
|
|
|
|
|
|
|
|
|
|
|
|
21 |
new_key = old_key.replace('decode_head.', 'segformer_head.', 1)
|
22 |
new_state_dict[new_key] = value
|
|
|
23 |
else:
|
|
|
24 |
new_state_dict[old_key] = value
|
|
|
25 |
|
26 |
# --- Geändertes State Dict speichern ---
|
27 |
torch.save(new_state_dict, output_checkpoint_path)
|
28 |
|
29 |
-
print(f"Fertig! Die umbenannte Datei wurde gespeichert unter: {output_checkpoint_path}
|
|
|
|
|
|
|
|
|
|
3 |
|
4 |
# --- Configuration ---
|
5 |
# Pfad zur Eingabe-BIN-Datei
|
6 |
+
input_checkpoint_path = "./pytorch_model_oldLayerNames.bin"
|
7 |
# Pfad zur Ausgabe-BIN-Datei (wo die geänderte Version gespeichert wird)
|
8 |
+
output_checkpoint_path = "./pytorch_model_renamed_final.bin"
|
9 |
+
|
10 |
+
# Define the layers that *must* be named 'segmentation_head'
|
11 |
+
# These are the specific layers you identified
|
12 |
+
specific_segmentation_head_layers = [
|
13 |
+
'decode_head.conv_seg.bias',
|
14 |
+
'decode_head.conv_seg.weight',
|
15 |
+
'decode_head.convs.0.conv.bias',
|
16 |
+
'decode_head.convs.0.conv.weight',
|
17 |
+
'decode_head.convs.1.conv.bias',
|
18 |
+
'decode_head.convs.1.conv.weight',
|
19 |
+
'decode_head.convs.2.conv.bias',
|
20 |
+
'decode_head.convs.2.conv.weight',
|
21 |
+
'decode_head.convs.3.conv.bias',
|
22 |
+
'decode_head.convs.3.conv.weight',
|
23 |
+
'decode_head.fusion_conv.conv.bias',
|
24 |
+
'decode_head.fusion_conv.conv.weight'
|
25 |
+
]
|
26 |
|
27 |
# --- Check, ob die Eingabedatei existiert ---
|
28 |
if not os.path.exists(input_checkpoint_path):
|
29 |
+
print(f"Fehler: Eingabedatei nicht gefunden unter {input_checkpoint_path}. Bitte den Pfad korrigieren. ❌")
|
30 |
else:
|
31 |
# --- Checkpoint laden ---
|
32 |
state_dict = torch.load(input_checkpoint_path, map_location="cpu")
|
33 |
|
34 |
# --- Layer-Namen ändern und neues State Dict erstellen ---
|
35 |
new_state_dict = {}
|
36 |
+
renamed_count_segmentation = 0
|
37 |
+
renamed_count_segformer = 0
|
38 |
+
skipped_count = 0
|
39 |
+
|
40 |
for old_key, value in state_dict.items():
|
41 |
+
if old_key in specific_segmentation_head_layers:
|
42 |
+
# These specific layers get 'segmentation_head.' prefix
|
43 |
+
new_key = old_key.replace('decode_head.', 'segmentation_head.', 1)
|
44 |
+
new_state_dict[new_key] = value
|
45 |
+
renamed_count_segmentation += 1
|
46 |
+
elif old_key.startswith('decode_head.'):
|
47 |
+
# All other layers starting with 'decode_head.' get 'segformer_head.' prefix
|
48 |
new_key = old_key.replace('decode_head.', 'segformer_head.', 1)
|
49 |
new_state_dict[new_key] = value
|
50 |
+
renamed_count_segformer += 1
|
51 |
else:
|
52 |
+
# Keep other layers as they are (e.g., backbone layers)
|
53 |
new_state_dict[old_key] = value
|
54 |
+
skipped_count += 1
|
55 |
|
56 |
# --- Geändertes State Dict speichern ---
|
57 |
torch.save(new_state_dict, output_checkpoint_path)
|
58 |
|
59 |
+
print(f"✅ Fertig! Die umbenannte Datei wurde gespeichert unter: {output_checkpoint_path}")
|
60 |
+
print(f"Zusammenfassung der Umbenennungen:")
|
61 |
+
print(f" - '{renamed_count_segmentation}' Layer von 'decode_head.' zu 'segmentation_head.' umbenannt.")
|
62 |
+
print(f" - '{renamed_count_segformer}' Layer von 'decode_head.' zu 'segformer_head.' umbenannt.")
|
63 |
+
print(f" - '{skipped_count}' Layer behielten ihren ursprünglichen Namen (z.B. Backbone).")
|
binchecker.py
CHANGED
@@ -2,10 +2,10 @@ import torch
|
|
2 |
import os # Import the os module for path manipulation
|
3 |
|
4 |
# Define the output file name
|
5 |
-
output_file = '
|
6 |
|
7 |
# Pfad zu deiner .bin Datei
|
8 |
-
checkpoint_path = "./
|
9 |
|
10 |
# Check if the checkpoint file exists
|
11 |
if not os.path.exists(checkpoint_path):
|
|
|
2 |
import os # Import the os module for path manipulation
|
3 |
|
4 |
# Define the output file name
|
5 |
+
output_file = 'layer_names_renamed_final_output.txt'
|
6 |
|
7 |
# Pfad zu deiner .bin Datei
|
8 |
+
checkpoint_path = "./pytorch_model_renamed_final.bin"
|
9 |
|
10 |
# Check if the checkpoint file exists
|
11 |
if not os.path.exists(checkpoint_path):
|
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
-
oid sha256:
|
3 |
-
size
|
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:3ba1746d460b19f3e96cbb256a30fe2c947c609d0144f18895baf3beaa40ab7a
|
3 |
+
size 328240287
|