import torch
import os # Import the os module for path manipulation

# Define the output file name
output_file = 'layer_names_renamed_final_output.txt'

# Path to your .bin checkpoint file
checkpoint_path = "./pytorch_model_renamed_final.bin"

# Check if the checkpoint file exists
if not os.path.exists(checkpoint_path):
    print(f"Error: Checkpoint file not found at {checkpoint_path}. Please update the path.")
else:
    # Open the output file in write mode ('w')
    with open(output_file, 'w') as f:
        # Load the checkpoint (state dict)
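        # Note: recent PyTorch releases default torch.load to weights_only=True,
        # which restricts loading to tensors and other safe types; that is
        # sufficient for a plain state dict like this one, so no extra argument
        # is passed here.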
        state_dict = torch.load(checkpoint_path, map_location="cpu")

        # Write all layer names to the file
        f.write("Alle Layer-Namen im Checkpoint:\n")
        for name in state_dict.keys():
            f.write(name + "\n")

        # Optional: write the number of layers to the file
        f.write(f"\nInsgesamt {len(state_dict)} Layer im Checkpoint.\n")

    print(f"Layer names successfully saved to {output_file}")