"""Custom model with a relative import, used to demonstrate the bug."""

import torch
import torch.nn as nn

from transformers import PreTrainedModel
from transformers.modeling_outputs import BaseModelOutput

from .another_module import custom_function
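# NOTE: `another_module.py` is assumed to live next to this file in the same
# repository; its contents are not shown here. Any tensor-in/tensor-out helper
# would satisfy this import; a minimal sketch (hypothetical, not the original
# module) could be:
#
#     def custom_function(hidden_states):
#         return hidden_states * 2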


class CustomModel(PreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.embeddings = nn.Embedding(config.vocab_size, config.hidden_size)
        self.layer = nn.Linear(config.hidden_size, config.hidden_size)

    def forward(self, input_ids=None, **kwargs):
        # Embed the inputs, pass them through the helper pulled in via the
        # relative import, then project back to the hidden size.
        embeddings = self.embeddings(input_ids)
        output = custom_function(embeddings)
        hidden_states = self.layer(output)
        return BaseModelOutput(last_hidden_state=hidden_states)
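
# Example usage (assumed, not part of the original repro file): a custom model
# file like this is typically loaded as remote code, e.g. something like
#
#     from transformers import AutoModel
#
#     model = AutoModel.from_pretrained("path/to/repo", trust_remote_code=True)
#
# which is the code path that has to resolve the relative import above.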