{
"last_node_id": 71,
"last_link_id": 274,
"nodes": [
{
"id": 62,
"type": "Note",
"pos": {
"0": -101,
"1": 107,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 304.94696044921875,
"1": 114.46440887451172
},
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Do not enter overly complex prompt words, which will cause serious degradation of image performance. You can use emotional and atmospheric cues to improve the picture quality."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 59,
"type": "Note",
"pos": {
"0": 491,
"1": 201,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 249.3325653076172,
"1": 104.7717514038086
},
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"1920x1024 1728x1152 1152x1728 1280x1664 1440x1440"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 30,
"type": "CLIPTextEncodeSD3",
"pos": {
"0": 478,
"1": -323,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 258.8465881347656,
"1": 200
},
"flags": {
"collapsed": false
},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 258
},
{
"name": "clip_g",
"type": "STRING",
"link": 222,
"widget": {
"name": "clip_g"
}
},
{
"name": "clip_l",
"type": "STRING",
"link": 223,
"widget": {
"name": "clip_l"
}
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
236
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncodeSD3"
},
"widgets_values": [
"ugly,bad hand, cartoon,3d,anime,render,noise,chaos,pollution,(blurred background:0.5)",
"ugly,bad hand, cartoon,3d,anime,render,noise,chaos,pollution,(blurred background:0.5)",
"",
"none",
true,
true,
true
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": {
"0": 480,
"1": 33,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 266.8973388671875,
"1": 116.21234893798828
},
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
243
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [
1280,
1664,
1
]
},
{
"id": 32,
"type": "CLIPTextEncodeSD3",
"pos": {
"0": 475,
"1": -544,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 262.4820556640625,
"1": 190
},
"flags": {
"collapsed": false
},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 257
},
{
"name": "clip_g",
"type": "STRING",
"link": 122,
"widget": {
"name": "clip_g"
}
},
{
"name": "clip_l",
"type": "STRING",
"link": 123,
"widget": {
"name": "clip_l"
}
},
{
"name": "t5xxl",
"type": "STRING",
"link": 152,
"widget": {
"name": "t5xxl"
}
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
237
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncodeSD3"
},
"widgets_values": [
" In the foreground, there is a ceramic figurine of a meditating person in a light blue-green color, sitting on a dark base. Next to it is a clear glass vase with water, containing a single white flower and some green leaves. The vase is tied with a white ribbon.Fashion photography style.\n\n",
" In the foreground, there is a ceramic figurine of a meditating person in a light blue-green color, sitting on a dark base. Next to it is a clear glass vase with water, containing a single white flower and some green leaves. The vase is tied with a white ribbon.Fashion photography style.\n\n",
" In the foreground, there is a ceramic figurine of a meditating person in a light blue-green color, sitting on a dark base. Next to it is a clear glass vase with water, containing a single white flower and some green leaves. The vase is tied with a white ribbon.Fashion photography style.\n\n",
"empty_prompt",
true,
true,
true
]
},
{
"id": 70,
"type": "ModelSamplingSD3",
"pos": {
"0": 447,
"1": -715,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 315,
"1": 58
},
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 271
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
273
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3"
},
"widgets_values": [
5
]
},
{
"id": 10,
"type": "TripleCLIPLoader",
"pos": {
"0": -98,
"1": -265,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 521.9664916992188,
"1": 120.35124206542969
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
257,
258
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "TripleCLIPLoader"
},
"widgets_values": [
"bokeh_clip_g.safetensors",
"bokeh_clip_l.safetensors",
"t5xxl_fp16.safetensors"
]
},
{
"id": 13,
"type": "CheckpointLoaderSimple",
"pos": {
"0": -95,
"1": -414,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 510.9742431640625,
"1": 107.2224349975586
},
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
271
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
23
],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"bokeh_8steps_final.safetensors"
]
},
{
"id": 18,
"type": "VAEDecode",
"pos": {
"0": 1091,
"1": -413,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 200.854736328125,
"1": 50.05826187133789
},
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 22
},
{
"name": "vae",
"type": "VAE",
"link": 23
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
274
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 71,
"type": "PreviewImage",
"pos": {
"0": 1332,
"1": -504,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 373.8262634277344,
"1": 537.5194702148438
},
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 274
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
}
},
{
"id": 12,
"type": "KSampler",
"pos": {
"0": 771,
"1": -418,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 279.5604553222656,
"1": 258
},
"flags": {
"collapsed": false
},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 273
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 237
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 236
},
{
"name": "latent_image",
"type": "LATENT",
"link": 243
},
{
"name": "seed",
"type": "INT",
"link": 21,
"widget": {
"name": "seed"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
22
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
127330995621170,
"randomize",
10,
1.8,
"euler_ancestral",
"sgm_uniform",
1
]
},
{
"id": 52,
"type": "PrimitiveNode",
"pos": {
"0": 218,
"1": -94,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 210,
"1": 151.54025268554688
},
"flags": {
"collapsed": false
},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"links": [
222,
223
],
"slot_index": 0,
"widget": {
"name": "clip_g"
}
}
],
"title": "Negative_prompt",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
"ugly,bad hand, cartoon,3d,anime,render,noise,chaos,pollution,(blurred background:0.5)"
]
},
{
"id": 20,
"type": "PrimitiveNode",
"pos": {
"0": -102,
"1": -94,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 306.11773681640625,
"1": 152.6992950439453
},
"flags": {
"collapsed": false
},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"links": [
122,
123,
152
],
"slot_index": 0,
"widget": {
"name": "clip_g"
}
}
],
"title": "Positive_prompt",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
" In the foreground, there is a ceramic figurine of a meditating person in a light blue-green color, sitting on a dark base. Next to it is a clear glass vase with water, containing a single white flower and some green leaves. The vase is tied with a white ribbon.Fashion photography style.\n\n"
]
},
{
"id": 17,
"type": "PrimitiveNode",
"pos": {
"0": 504,
"1": -74,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
"9": 0
},
"size": {
"0": 263.0819396972656,
"1": 82
},
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [
21
],
"slot_index": 0,
"widget": {
"name": "seed"
}
}
],
"title": "seed",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
127330995621170,
"randomize"
]
}
],
"links": [
[
21,
17,
0,
12,
4,
"INT"
],
[
22,
12,
0,
18,
0,
"LATENT"
],
[
23,
13,
2,
18,
1,
"VAE"
],
[
122,
20,
0,
32,
1,
"STRING"
],
[
123,
20,
0,
32,
2,
"STRING"
],
[
152,
20,
0,
32,
3,
"STRING"
],
[
222,
52,
0,
30,
1,
"STRING"
],
[
223,
52,
0,
30,
2,
"STRING"
],
[
236,
30,
0,
12,
2,
"CONDITIONING"
],
[
237,
32,
0,
12,
1,
"CONDITIONING"
],
[
243,
5,
0,
12,
3,
"LATENT"
],
[
257,
10,
0,
32,
0,
"CLIP"
],
[
258,
10,
0,
30,
0,
"CLIP"
],
[
271,
13,
0,
70,
0,
"MODEL"
],
[
273,
70,
0,
12,
0,
"MODEL"
],
[
274,
18,
0,
71,
0,
"IMAGE"
]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.2839025177495067,
"offset": [
-230.23742339362215,
580.7003743952966
]
}
},
"version": 0.4
}