import argparse
import os

import torch

from models.exaonepath import EXAONEPathV1p5Downstream
from utils.constants import CLASS_NAMES
from tokens import HF_TOKEN
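# NOTE: HF_TOKEN is assumed to come from a local tokens.py holding the Hugging Face
# access token; it is only used by the (commented-out) from_pretrained path below.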


def infer(model, input_file):
    """Run the model on one whole-slide image (.svs path) and print per-class probabilities."""
    print("Processing", input_file, "...")
    probs = model(input_file)
    result_str = "Result -- " + " / ".join(
        [f"{name}: {probs[i].item():.4f}" for i, name in enumerate(CLASS_NAMES)]
    )
    print(result_str + "\n")


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Inference")
    parser.add_argument('--svs_path', type=str, default='./samples/wsis/1/1.svs',
                        help="Path to the .svs file (currently unused; inference iterates over --svs_dir)")
    parser.add_argument('--svs_dir', type=str, default='./samples_CRC',
                        help="Directory of .svs files to run inference on")
    args = parser.parse_args()
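    # Example invocation (script name assumed; adjust to the actual file name):
    #   python infer.py --svs_dir ./samples_CRC
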
    hf_token = HF_TOKEN  # only needed when loading weights from the Hugging Face Hub
    # model = EXAONEPathV1p5Downstream.from_pretrained("LGAI-EXAONE/EXAONE-Path-1.5", use_auth_token=hf_token)
    model = EXAONEPathV1p5Downstream(num_sampled_patch=16384)

    # Alternative checkpoint-loading paths, kept commented out for reference:
    # qwe = torch.load('./pytorch_model_ori.bin')
    # aaa = model.load_state_dict(qwe, strict=False)
    # hw_w = torch.load('/mnt/shared/shared_medical/shared/hi.choi/MOM/logs_eval_25/closebench2/ours/BIOMARKER_SMC_SMC/CRCSensor/CRCSensor_exaone_mom3_MOM_batch_8_lr0.00003_wd0.1_do0.1/_s100/s_0_checkpoint.pt', map_location='cpu')
    # new_state_dict = {}
    # for k, v in hw_w.items():
    #     if k.startswith("_orig_mod."):
    #         new_k = k.replace("_orig_mod.", "agg_model.", 1)
    #     else:
    #         new_k = k
    #     new_state_dict[new_k] = v
    # load_result = model.load_state_dict(new_state_dict, strict=False)

    # Load the released checkpoint; map to CPU first so this also works on CPU-only machines.
    model.load_state_dict(torch.load('./pytorch_model.bin', map_location='cpu'))
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model.to(device)
    model.eval()
    model.feature_extractor = torch.compile(model.feature_extractor)
    model.agg_model = torch.compile(model.agg_model)
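    # torch.compile defers compilation until the first forward pass, so the first
    # slide will typically take noticeably longer than the rest.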

    for svs_name in os.listdir(args.svs_dir):
        infer(model, os.path.join(args.svs_dir, svs_name))