Skip to content

Commit ba68e83

Browse files
authored
convert nodes_cond.py to V3 schema (#9719)
1 parent dcb8834 commit ba68e83

File tree

1 file changed

+48
-29
lines changed

1 file changed

+48
-29
lines changed

comfy_extras/nodes_cond.py

Lines changed: 48 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,25 @@
1+
from typing_extensions import override
12

3+
from comfy_api.latest import ComfyExtension, io
24

3-
class CLIPTextEncodeControlnet:
4-
@classmethod
5-
def INPUT_TYPES(s):
6-
return {"required": {"clip": ("CLIP", ), "conditioning": ("CONDITIONING", ), "text": ("STRING", {"multiline": True, "dynamicPrompts": True})}}
7-
RETURN_TYPES = ("CONDITIONING",)
8-
FUNCTION = "encode"
95

10-
CATEGORY = "_for_testing/conditioning"
6+
class CLIPTextEncodeControlnet(io.ComfyNode):
7+
@classmethod
8+
def define_schema(cls) -> io.Schema:
9+
return io.Schema(
10+
node_id="CLIPTextEncodeControlnet",
11+
category="_for_testing/conditioning",
12+
inputs=[
13+
io.Clip.Input("clip"),
14+
io.Conditioning.Input("conditioning"),
15+
io.String.Input("text", multiline=True, dynamic_prompts=True),
16+
],
17+
outputs=[io.Conditioning.Output()],
18+
is_experimental=True,
19+
)
1120

12-
def encode(self, clip, conditioning, text):
21+
@classmethod
22+
def execute(cls, clip, conditioning, text) -> io.NodeOutput:
1323
tokens = clip.tokenize(text)
1424
cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True)
1525
c = []
@@ -18,32 +28,41 @@ def encode(self, clip, conditioning, text):
1828
n[1]['cross_attn_controlnet'] = cond
1929
n[1]['pooled_output_controlnet'] = pooled
2030
c.append(n)
21-
return (c, )
31+
return io.NodeOutput(c)
32+
33+
class T5TokenizerOptions(io.ComfyNode):
    """Clone a CLIP object and set minimum padding / minimum length tokenizer
    options for every supported T5 tokenizer family."""

    @classmethod
    def define_schema(cls) -> io.Schema:
        # Both integer knobs share identical bounds, so build them from one spec.
        int_spec = dict(default=0, min=0, max=10000, step=1)
        return io.Schema(
            node_id="T5TokenizerOptions",
            category="_for_testing/conditioning",
            inputs=[
                io.Clip.Input("clip"),
                io.Int.Input("min_padding", **int_spec),
                io.Int.Input("min_length", **int_spec),
            ],
            outputs=[io.Clip.Output()],
            is_experimental=True,
        )

    @classmethod
    def execute(cls, clip, min_padding, min_length) -> io.NodeOutput:
        """Return a cloned CLIP with per-family tokenizer options applied.

        The incoming ``clip`` is never mutated; all options are set on the clone.
        """
        configured = clip.clone()
        for family in ("t5xxl", "pile_t5xl", "t5base", "mt5xl", "umt5xxl"):
            # Padding first, then length — preserves the original call order.
            configured.set_tokenizer_option("{}_min_padding".format(family), min_padding)
            configured.set_tokenizer_option("{}_min_length".format(family), min_length)
        return io.NodeOutput(configured)
56+
57+
58+
class CondExtension(ComfyExtension):
    """Extension bundle that exposes this module's conditioning nodes."""

    @override
    async def get_node_list(self) -> list[type[io.ComfyNode]]:
        # Keep the registration order stable: text-encode node first, then
        # the tokenizer-options node.
        nodes: list[type[io.ComfyNode]] = [
            CLIPTextEncodeControlnet,
            T5TokenizerOptions,
        ]
        return nodes
65+
4566

46-
NODE_CLASS_MAPPINGS = {
47-
"CLIPTextEncodeControlnet": CLIPTextEncodeControlnet,
48-
"T5TokenizerOptions": T5TokenizerOptions,
49-
}
67+
async def comfy_entrypoint() -> CondExtension:
    """Module entrypoint: hand the loader a fresh instance of this extension."""
    extension = CondExtension()
    return extension

0 commit comments

Comments
 (0)