
Commit 563291e

Enforce all pyflake lint rules (#6033)
* Enforce F821 undefined-name
* Enforce all pyflake lint rules
1 parent 6c0377f commit 563291e

9 files changed, +12 -18 lines changed


api_server/routes/internal/internal_routes.py

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ async def get_logs(request):
             return web.json_response("".join([(l["t"] + " - " + l["m"]) for l in app.logger.get_logs()]))

         @self.routes.get('/logs/raw')
-        async def get_logs(request):
+        async def get_raw_logs(request):
            self.terminal_service.update_size()
            return web.json_response({
                "entries": list(app.logger.get_logs()),

comfy/controlnet.py

Lines changed: 1 addition & 1 deletion
@@ -821,7 +821,7 @@ def load_t2i_adapter(t2i_data, model_options={}): #TODO: model_options
         for i in range(4):
             for j in range(2):
                 prefix_replace["adapter.body.{}.resnets.{}.".format(i, j)] = "body.{}.".format(i * 2 + j)
-            prefix_replace["adapter.body.{}.".format(i, j)] = "body.{}.".format(i * 2)
+            prefix_replace["adapter.body.{}.".format(i, )] = "body.{}.".format(i * 2)
         prefix_replace["adapter."] = ""
         t2i_data = comfy.utils.state_dict_prefix_replace(t2i_data, prefix_replace)
     keys = t2i_data.keys()
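
The dropped second argument addresses Pyflakes' str.format checks: the template "adapter.body.{}." has a single placeholder, so passing both i and j left one positional argument that could never be used (ruff reports this in its F52x .format group). A small illustration with made-up values:

    # Illustration only. Python silently ignores the extra argument, so both
    # calls return "adapter.body.3.", but the linter flags the unused j.
    i, j = 3, 1
    key_flagged = "adapter.body.{}.".format(i, j)  # one placeholder, two arguments
    key_clean = "adapter.body.{}.".format(i)       # one placeholder, one argument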

comfy/ldm/modules/sub_quadratic_attention.py

Lines changed: 0 additions & 1 deletion
@@ -22,7 +22,6 @@
 from typing import Optional, NamedTuple, List
 from typing_extensions import Protocol

-from torch import Tensor
 from typing import List

 from comfy import model_management

comfy/patcher_extension.py

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@ def execute(self, *args, **kwargs):
     def _create_next_executor(self) -> 'WrapperExecutor':
         new_idx = self.idx + 1
         if new_idx > len(self.wrappers):
-            raise Exception(f"Wrapper idx exceeded available wrappers; something went very wrong.")
+            raise Exception("Wrapper idx exceeded available wrappers; something went very wrong.")
         if self.class_obj is None:
             return WrapperExecutor.new_executor(self.original, self.wrappers, new_idx)
         return WrapperExecutor.new_class_executor(self.original, self.class_obj, self.wrappers, new_idx)
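
This change, like the similar ones in execution.py and server.py below, fixes an f-string that contains no placeholders (ruff/Pyflakes rule F541): the f prefix has no effect, so it is dropped. A short illustration:

    # F541: the f prefix is pointless when the string interpolates nothing.
    msg_flagged = f"Wrapper idx exceeded available wrappers"  # flagged by F541
    msg_clean = "Wrapper idx exceeded available wrappers"      # plain string, no warning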

comfy_extras/nodes_clip_sdxl.py

Lines changed: 3 additions & 2 deletions
@@ -22,14 +22,15 @@ class CLIPTextEncodeSDXL:
     @classmethod
     def INPUT_TYPES(s):
         return {"required": {
+            "clip": ("CLIP", ),
             "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
             "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
             "crop_w": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}),
             "crop_h": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}),
             "target_width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
             "target_height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}),
-            "text_g": ("STRING", {"multiline": True, "dynamicPrompts": True}), "clip": ("CLIP", ),
-            "text_l": ("STRING", {"multiline": True, "dynamicPrompts": True}), "clip": ("CLIP", ),
+            "text_g": ("STRING", {"multiline": True, "dynamicPrompts": True}),
+            "text_l": ("STRING", {"multiline": True, "dynamicPrompts": True}),
             }}
     RETURN_TYPES = ("CONDITIONING",)
     FUNCTION = "encode"

execution.py

Lines changed: 1 addition & 1 deletion
@@ -760,7 +760,7 @@ def validate_prompt(prompt):
         if 'class_type' not in prompt[x]:
             error = {
                 "type": "invalid_prompt",
-                "message": f"Cannot execute because a node is missing the class_type property.",
+                "message": "Cannot execute because a node is missing the class_type property.",
                 "details": f"Node ID '#{x}'",
                 "extra_info": {}
             }

notebooks/comfyui_colab.ipynb

Lines changed: 0 additions & 6 deletions
@@ -237,11 +237,7 @@
     "source": [
     "!npm install -g localtunnel\n",
     "\n",
-    "import subprocess\n",
     "import threading\n",
-    "import time\n",
-    "import socket\n",
-    "import urllib.request\n",
     "\n",
     "def iframe_thread(port):\n",
     "  while True:\n",
@@ -288,8 +284,6 @@
     "outputs": [],
     "source": [
     "import threading\n",
-    "import time\n",
-    "import socket\n",
     "def iframe_thread(port):\n",
     "  while True:\n",
     "    time.sleep(0.5)\n",

ruff.toml

Lines changed: 3 additions & 3 deletions
@@ -4,7 +4,7 @@ lint.ignore = ["ALL"]
 # Enable specific rules
 lint.select = [
   "S307", # suspicious-eval-usage
-  "F401", # unused-import
-  "F841", # unused-local-variable
-  "F821", # undefined-name
+  # The "F" series in Ruff stands for "Pyflakes" rules, which catch various Python syntax errors and undefined names.
+  # See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f
+  "F",
 ]
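
Selecting "F" enables the entire Pyflakes family instead of only the three rules that were listed individually, so those per-rule entries become redundant. Assuming ruff is installed, the check can be run from the repository root, where this ruff.toml is picked up automatically:

    # Lint the whole tree with the configured rules.
    ruff check .

    # Or run only the Pyflakes rules, regardless of the config.
    ruff check . --select F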

server.py

Lines changed: 2 additions & 2 deletions
@@ -584,7 +584,7 @@ async def get_history(request):
             return web.json_response(self.prompt_queue.get_history(max_items=max_items))

         @routes.get("/history/{prompt_id}")
-        async def get_history(request):
+        async def get_history_prompt_id(request):
             prompt_id = request.match_info.get("prompt_id", None)
             return web.json_response(self.prompt_queue.get_history(prompt_id=prompt_id))

@@ -831,7 +831,7 @@ def trigger_on_prompt(self, json_data):
             try:
                 json_data = handler(json_data)
             except Exception:
-                logging.warning(f"[ERROR] An error occurred during the on_prompt_handler processing")
+                logging.warning("[ERROR] An error occurred during the on_prompt_handler processing")
                 logging.warning(traceback.format_exc())

         return json_data
