rename logging from textual inversion to not confuse it with global logging module

This commit is contained in:
AUTOMATIC1111 2024-04-21 07:41:28 +03:00
parent d74fc56fa5
commit 9bcfb92a00
3 changed files with 3 additions and 3 deletions
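Why the rename matters, in a minimal Python sketch (illustration only, not part of the commit; the directory and settings values below are hypothetical):

import logging                                            # standard-library logging
from modules.textual_inversion import saving_settings     # renamed project-local module

log = logging.getLogger(__name__)

# Hypothetical settings mirroring the save_settings_to_file call in train_hypernetwork below:
settings = {"learn_rate": 0.005, "batch_size": 1}
saving_settings.save_settings_to_file("textual_inversion/logs", settings)
log.info("training settings saved")

Before the rename, the standard-library module and the project-local one were both imported under the name logging, so a call like logging.save_settings_to_file(...) was easy to misread as a standard-library call.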

modules/hypernetworks/hypernetwork.py

@@ -11,7 +11,7 @@ import tqdm
from einops import rearrange, repeat
from ldm.util import default
from modules import devices, sd_models, shared, sd_samplers, hashes, sd_hijack_checkpoint, errors
-from modules.textual_inversion import textual_inversion, logging
+from modules.textual_inversion import textual_inversion, saving_settings
from modules.textual_inversion.learn_schedule import LearnRateScheduler
from torch import einsum
from torch.nn.init import normal_, xavier_normal_, xavier_uniform_, kaiming_normal_, kaiming_uniform_, zeros_
@@ -533,7 +533,7 @@ def train_hypernetwork(id_task, hypernetwork_name: str, learn_rate: float, batch
model_name=checkpoint.model_name, model_hash=checkpoint.shorthash, num_of_dataset_images=len(ds),
**{field: getattr(hypernetwork, field) for field in ['layer_structure', 'activation_func', 'weight_init', 'add_layer_norm', 'use_dropout', ]}
)
-logging.save_settings_to_file(log_directory, {**saved_params, **locals()})
+saving_settings.save_settings_to_file(log_directory, {**saved_params, **locals()})
latent_sampling_method = ds.latent_sampling_method

modules/textual_inversion/textual_inversion.py

@@ -17,7 +17,7 @@ import modules.textual_inversion.dataset
from modules.textual_inversion.learn_schedule import LearnRateScheduler
from modules.textual_inversion.image_embedding import embedding_to_b64, embedding_from_b64, insert_image_data_embed, extract_image_data_embed, caption_image_overlay
-from modules.textual_inversion.logging import save_settings_to_file
+from modules.textual_inversion.saving_settings import save_settings_to_file
TextualInversionTemplate = namedtuple("TextualInversionTemplate", ["name", "path"])