# nodes.py (forked from A4P7J1N7M05OT/ComfyUI-PixelOE-Wrapper)
import os

import cv2
import numpy as np
import torch

from .PixelOE.pixeloe import pixelize

script_directory = os.path.dirname(os.path.abspath(__file__))


class PixelOE:
    """ComfyUI wrapper node that pixelizes an image with PixelOE's pixelize()."""

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "img": ("IMAGE",),
                "mode": (["contrast", "center", "k-centroid", "bicubic", "nearest"],),
                "target_size": ("INT", {
                    "default": 256,
                    "min": 0,
                    "max": 4096,
                    "step": 1,
                    "display": "number"
                }),
                "patch_size": ("INT", {
                    "default": 6,
                    "min": 0,
                    "max": 4096,
                    "step": 1,
                    "display": "number"
                }),
                "thickness": ("INT", {
                    "default": 1,
                    "min": 0,
                    "max": 4096,
                    "step": 1,
                    "display": "number"
                }),
                "color_matching": ("BOOLEAN", {
                    "default": False
                }),
                "contrast": ("FLOAT", {
                    "default": 1.0,
                    "min": 0.0,
                    "max": 10.0,
                    "step": 0.01,
                    "round": 0.001,
                    "display": "number"
                }),
                "saturation": ("FLOAT", {
                    "default": 1.0,
                    "min": 0.0,
                    "max": 10.0,
                    "step": 0.01,
                    "round": 0.001,
                    "display": "number"
                }),
                "colors": ("INT", {
                    "default": 256,
                    "min": 1,
                    "max": 256,
                    "step": 1,
                    "display": "number"
                }),
                "no_upscale": ("BOOLEAN", {
                    "default": False
                }),
                "no_downscale": ("BOOLEAN", {
                    "default": False
                }),
            },
        }

    RETURN_TYPES = ("IMAGE",)
    RETURN_NAMES = ("image",)
    FUNCTION = "process"
    CATEGORY = "Pixelize"

    def process(self, img, mode, target_size, patch_size, thickness, color_matching,
                contrast, saturation, colors, no_upscale, no_downscale):
        # ComfyUI IMAGE tensors are float32 [B, H, W, C] in 0..1, RGB order.
        # Drop the batch dimension (a single-image batch is assumed) and convert
        # to the uint8 BGR layout expected by OpenCV and pixelize().
        # (.cpu() guards against tensors that live on the GPU.)
        img = img.squeeze().cpu().numpy()
        img = (img * 255).astype(np.uint8)
        img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)

        # Run the PixelOE pixelization with the node's parameters.
        img_pix = pixelize(img, mode, target_size, patch_size, thickness, color_matching,
                           contrast, saturation, colors, no_upscale, no_downscale)

        # Convert back to RGB float32 in 0..1 and restore the batch dimension
        # so downstream ComfyUI nodes receive a standard IMAGE tensor.
        img_pix = cv2.cvtColor(img_pix, cv2.COLOR_BGR2RGB)
        img_pix_t = np.array(img_pix).astype(np.float32) / 255.0
        img_pix_t = torch.from_numpy(img_pix_t)[None,]
        return (img_pix_t,)


NODE_CLASS_MAPPINGS = {
    "PixelOE": PixelOE,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "PixelOE": "PixelOE",
}
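
# Note: a minimal sketch (an assumption about typical usage, not part of this
# file) of how a ComfyUI custom-node package's __init__.py commonly re-exports
# these mappings so ComfyUI can discover the node at startup:
#
#     from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
#     __all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"]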