Skip to content

Commit a5121e7

Browse files
committed
fixes for B007
1 parent 550256d commit a5121e7

28 files changed

Lines changed: 57 additions & 62 deletions

‎extensions-builtin/LDSR/ldsr_model_arch.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ def run(model, selected_path, custom_steps, eta):
8888

8989
x_t = None
9090
logs = None
91-
for n in range(n_runs):
91+
for _ in range(n_runs):
9292
if custom_shape is not None:
9393
x_t = torch.randn(1, custom_shape[1], custom_shape[2], custom_shape[3]).to(model.device)
9494
x_t = repeat(x_t, '1 c h w -> b c h w', b=custom_shape[0])

‎extensions-builtin/Lora/lora.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -418,7 +418,7 @@ def infotext_pasted(infotext, params):
418418

419419
added = []
420420

421-
for k, v in params.items():
421+
for k in params:
422422
if not k.startswith("AddNet Model "):
423423
continue
424424

‎extensions-builtin/ScuNET/scripts/scunet_model.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ def load_model(self, path: str):
132132
model = net(in_nc=3, config=[4, 4, 4, 4, 4, 4, 4], dim=64)
133133
model.load_state_dict(torch.load(filename), strict=True)
134134
model.eval()
135-
for k, v in model.named_parameters():
135+
for _, v in model.named_parameters():
136136
v.requires_grad = False
137137
model = model.to(device)
138138

‎extensions-builtin/SwinIR/swinir_model_arch.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -848,7 +848,7 @@ def flops(self):
848848
H, W = self.patches_resolution
849849
flops += H * W * 3 * self.embed_dim * 9
850850
flops += self.patch_embed.flops()
851-
for i, layer in enumerate(self.layers):
851+
for layer in self.layers:
852852
flops += layer.flops()
853853
flops += H * W * 3 * self.embed_dim * self.embed_dim
854854
flops += self.upsample.flops()

‎extensions-builtin/SwinIR/swinir_model_arch_v2.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1001,7 +1001,7 @@ def flops(self):
10011001
H, W = self.patches_resolution
10021002
flops += H * W * 3 * self.embed_dim * 9
10031003
flops += self.patch_embed.flops()
1004-
for i, layer in enumerate(self.layers):
1004+
for layer in self.layers:
10051005
flops += layer.flops()
10061006
flops += H * W * 3 * self.embed_dim * self.embed_dim
10071007
flops += self.upsample.flops()

‎modules/codeformer_model.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ def restore(self, np_image, w=None):
9494
self.face_helper.get_face_landmarks_5(only_center_face=False, resize=640, eye_dist_threshold=5)
9595
self.face_helper.align_warp_face()
9696

97-
for idx, cropped_face in enumerate(self.face_helper.cropped_faces):
97+
for cropped_face in self.face_helper.cropped_faces:
9898
cropped_face_t = img2tensor(cropped_face / 255., bgr2rgb=True, float32=True)
9999
normalize(cropped_face_t, (0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True)
100100
cropped_face_t = cropped_face_t.unsqueeze(0).to(devices.device_codeformer)

‎modules/esrgan_model.py‎

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,7 @@ def mod2normal(state_dict):
1616
# this code is copied from https://github.com/victorca25/iNNfer
1717
if 'conv_first.weight' in state_dict:
1818
crt_net = {}
19-
items = []
20-
for k, v in state_dict.items():
21-
items.append(k)
19+
items = list(state_dict)
2220

2321
crt_net['model.0.weight'] = state_dict['conv_first.weight']
2422
crt_net['model.0.bias'] = state_dict['conv_first.bias']
@@ -52,9 +50,7 @@ def resrgan2normal(state_dict, nb=23):
5250
if "conv_first.weight" in state_dict and "body.0.rdb1.conv1.weight" in state_dict:
5351
re8x = 0
5452
crt_net = {}
55-
items = []
56-
for k, v in state_dict.items():
57-
items.append(k)
53+
items = list(state_dict)
5854

5955
crt_net['model.0.weight'] = state_dict['conv_first.weight']
6056
crt_net['model.0.bias'] = state_dict['conv_first.bias']

‎modules/extra_networks.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ def deactivate(p, extra_network_data):
9191
"""call deactivate for extra networks in extra_network_data in specified order, then call
9292
deactivate for all remaining registered networks"""
9393

94-
for extra_network_name, extra_network_args in extra_network_data.items():
94+
for extra_network_name in extra_network_data:
9595
extra_network = extra_network_registry.get(extra_network_name, None)
9696
if extra_network is None:
9797
continue

‎modules/generation_parameters_copypaste.py‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -247,7 +247,7 @@ def parse_generation_parameters(x: str):
247247
lines.append(lastline)
248248
lastline = ''
249249

250-
for i, line in enumerate(lines):
250+
for line in lines:
251251
line = line.strip()
252252
if line.startswith("Negative prompt:"):
253253
done_with_prompt = True

‎modules/hypernetworks/hypernetwork.py‎

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -177,34 +177,34 @@ def __init__(self, name=None, enable_sizes=None, layer_structure=None, activatio
177177

178178
def weights(self):
179179
res = []
180-
for k, layers in self.layers.items():
180+
for layers in self.layers.values():
181181
for layer in layers:
182182
res += layer.parameters()
183183
return res
184184

185185
def train(self, mode=True):
186-
for k, layers in self.layers.items():
186+
for layers in self.layers.values():
187187
for layer in layers:
188188
layer.train(mode=mode)
189189
for param in layer.parameters():
190190
param.requires_grad = mode
191191

192192
def to(self, device):
193-
for k, layers in self.layers.items():
193+
for layers in self.layers.values():
194194
for layer in layers:
195195
layer.to(device)
196196

197197
return self
198198

199199
def set_multiplier(self, multiplier):
200-
for k, layers in self.layers.items():
200+
for layers in self.layers.values():
201201
for layer in layers:
202202
layer.multiplier = multiplier
203203

204204
return self
205205

206206
def eval(self):
207-
for k, layers in self.layers.items():
207+
for layers in self.layers.values():
208208
for layer in layers:
209209
layer.eval()
210210
for param in layer.parameters():
@@ -619,7 +619,7 @@ def train_hypernetwork(id_task, hypernetwork_name, learn_rate, batch_size, gradi
619619
try:
620620
sd_hijack_checkpoint.add()
621621

622-
for i in range((steps-initial_step) * gradient_step):
622+
for _ in range((steps-initial_step) * gradient_step):
623623
if scheduler.finished:
624624
break
625625
if shared.state.interrupted:

0 commit comments

Comments (0)