@@ -73,11 +73,21 @@ class StableDiffusionModelHijack:
             name = os.path.splitext(filename)[0]
 
             data = torch.load(path)
-            param_dict = data['string_to_param']
-            if hasattr(param_dict, '_parameters'):
-                param_dict = getattr(param_dict, '_parameters')  # fix for torch 1.12.1 loading saved file from torch 1.11
-            assert len(param_dict) == 1, 'embedding file has multiple terms in it'
-            emb = next(iter(param_dict.items()))[1]
+
+            # textual inversion embeddings
+            if 'string_to_param' in data:
+                param_dict = data['string_to_param']
+                if hasattr(param_dict, '_parameters'):
+                    param_dict = getattr(param_dict, '_parameters')  # fix for torch 1.12.1 loading saved file from torch 1.11
+                assert len(param_dict) == 1, 'embedding file has multiple terms in it'
+                emb = next(iter(param_dict.items()))[1]
+            # diffuser concepts
+            elif type(data) == dict and type(next(iter(data.values()))) == torch.Tensor:
+                assert len(data.keys()) == 1, 'embedding file has multiple terms in it'
+
+                emb = next(iter(data.values()))
+                if len(emb.shape) == 1:
+                    emb = emb.unsqueeze(0)
+
             self.word_embeddings[name] = emb.detach()
             self.word_embeddings_checksums[name] = f'{const_hash(emb.reshape(-1))&0xffff:04x}'
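
For context, the hunk replaces the single textual-inversion load path with a two-way branch: embedding files written by the textual inversion scripts keep their vectors under a 'string_to_param' dict, while diffusers-style concept files are a plain one-entry dict mapping the placeholder token to a tensor (sometimes 1-D, hence the unsqueeze). Below is a minimal standalone sketch of that detection logic, not taken from the patch itself; the helper name load_embedding_tensor and the file names are made up for illustration, and only torch is assumed.

    # Sketch only: mirrors the branch added in the patch, outside the webui codebase.
    import torch


    def load_embedding_tensor(path):
        data = torch.load(path, map_location="cpu")

        if 'string_to_param' in data:
            # textual inversion checkpoint: {'string_to_param': {'*': tensor}}
            param_dict = data['string_to_param']
            if hasattr(param_dict, '_parameters'):
                param_dict = getattr(param_dict, '_parameters')  # torch 1.12.1 loading a 1.11 file
            assert len(param_dict) == 1, 'embedding file has multiple terms in it'
            emb = next(iter(param_dict.items()))[1]
        elif type(data) == dict and type(next(iter(data.values()))) == torch.Tensor:
            # diffusers concept: {'<token>': tensor}, possibly 1-D
            assert len(data.keys()) == 1, 'embedding file has multiple terms in it'
            emb = next(iter(data.values()))
            if len(emb.shape) == 1:
                emb = emb.unsqueeze(0)
        else:
            raise ValueError(f'unrecognised embedding format in {path}')

        return emb.detach()


    if __name__ == '__main__':
        # Fabricate both formats to exercise the detection logic.
        torch.save({'string_to_param': {'*': torch.randn(1, 768)}}, 'ti_style.pt')
        torch.save({'<my-concept>': torch.randn(768)}, 'concept_style.bin')

        for fn in ('ti_style.pt', 'concept_style.bin'):
            emb = load_embedding_tensor(fn)
            print(fn, tuple(emb.shape))  # both normalise to shape (1, 768)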