Update app.py

app.py CHANGED
@@ -50,22 +50,7 @@ MAX_SEED = np.iinfo(np.int32).max
 TMP_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tmp')
 os.makedirs(TMP_DIR, exist_ok=True)
 
-controlnet = ControlNetModel.from_pretrained(
-    "xinsir/controlnet-scribble-sdxl-1.0",
-    torch_dtype=torch.float16
-)
-vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
-
-pipe = StableDiffusionXLControlNetPipeline.from_pretrained(
-    "sd-community/sdxl-flash",
-    controlnet=controlnet,
-    vae=vae,
-    torch_dtype=torch.float16,
-    # scheduler=eulera_scheduler,
-)
-pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
 
-pipe.to('cuda')
 
 def start_session(req: gr.Request):
     user_dir = os.path.join(TMP_DIR, str(req.session_hash))
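The module-level ControlNet/SDXL-Flash setup removed here is rebuilt inside the __main__ block in the final hunk, so the heavy models are only constructed when the app is actually launched. A minimal sketch of that deferral pattern (the guard function is illustrative and not part of the app code):

# Hypothetical sketch: build heavy pipelines at launch time, not at import time.
pipe_control = None  # assigned inside the __main__ block, as the final hunk does

def require_control_pipe():
    # Fail loudly if a handler runs before the pipeline has been constructed.
    if pipe_control is None:
        raise RuntimeError("pipe_control is not initialized; launch the app via __main__")
    return pipe_control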
@@ -78,7 +63,7 @@ def end_session(req: gr.Request):
 
 
 def preprocess_image(image: Image.Image,
-
+                     prompt: str,
                      negative_prompt: str = "",
                      num_steps: int = 25,
                      guidance_scale: float = 5,
@@ -92,21 +77,23 @@ def preprocess_image(image: Image.Image,
     Returns:
         Image.Image: The preprocessed image.
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if prompt is not None:
+        width, height = image['composite'].size
+        ratio = np.sqrt(1024. * 1024. / (width * height))
+        new_width, new_height = int(width * ratio), int(height * ratio)
+        image = image['composite'].resize((new_width, new_height))
+
+        image = pipe(
+            prompt=prompt,
+            negative_prompt=negative_prompt,
+            image=image,
+            num_inference_steps=num_steps,
+            controlnet_conditioning_scale=controlnet_conditioning_scale,
+            guidance_scale=guidance_scale,
+            width=new_width,
+            height=new_height).images[0]
+        print(type(image))
+
     processed_image = pipeline.preprocess_image(image)
     return processed_image
 
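The new branch treats image as a Gradio ImageEditor payload (a dict whose 'composite' entry holds the drawn sketch), rescales it so its area is roughly 1024x1024 pixels while keeping the aspect ratio, and runs it through the scribble ControlNet pipeline before TRELLIS preprocessing. A small sketch of the rescaling step in isolation (the helper name and default are illustrative, not from the app):

import numpy as np
from PIL import Image

def rescale_to_megapixel(img: Image.Image, target_area: int = 1024 * 1024) -> Image.Image:
    # Scale so width * height is approximately target_area, preserving aspect ratio.
    width, height = img.size
    ratio = np.sqrt(target_area / (width * height))
    return img.resize((int(width * ratio), int(height * ratio)))

Note that SDXL pipelines generally expect width and height to be multiples of 8; the hunk above passes the rescaled dimensions through unchanged.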
@@ -481,8 +468,26 @@ with gr.Blocks(delete_cache=(600, 600), js=js_func) as demo:
 if __name__ == "__main__":
     pipeline = TrellisImageTo3DPipeline.from_pretrained("JeffreyXiang/TRELLIS-image-large")
     pipeline.cuda()
-
-
-
-
+
+    #scribble controlnet
+    controlnet = ControlNetModel.from_pretrained(
+        "xinsir/controlnet-scribble-sdxl-1.0",
+        torch_dtype=torch.float16
+    )
+    vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
+
+    pipe_control = StableDiffusionXLControlNetPipeline.from_pretrained(
+        "sd-community/sdxl-flash",
+        controlnet=controlnet,
+        vae=vae,
+        torch_dtype=torch.float16,
+        # scheduler=eulera_scheduler,
+    )
+    pipe_control.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe_control.scheduler.config)
+    pipe_control.cuda()
+
+    # try:
+    #     pipeline.preprocess_image(Image.fromarray(np.zeros((512, 512, 3), dtype=np.uint8))) # Preload rembg
+    # except:
+    #     pass
     demo.launch()
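The last hunk assembles the scribble ControlNet stack alongside the TRELLIS pipeline at launch. A self-contained sketch of an equivalent setup and one call (an approximation of the diff above, not the exact app code: the diff's pipe_control.cuda() is written here as .to("cuda"), the usual diffusers idiom, and the conditioning scale and prompt are illustrative values):

import torch
from diffusers import (
    AutoencoderKL,
    ControlNetModel,
    EulerAncestralDiscreteScheduler,
    StableDiffusionXLControlNetPipeline,
)
from PIL import Image

# Scribble ControlNet + SDXL-Flash, mirroring the setup added in the diff.
controlnet = ControlNetModel.from_pretrained(
    "xinsir/controlnet-scribble-sdxl-1.0", torch_dtype=torch.float16
)
vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
pipe_control = StableDiffusionXLControlNetPipeline.from_pretrained(
    "sd-community/sdxl-flash",
    controlnet=controlnet,
    vae=vae,
    torch_dtype=torch.float16,
)
pipe_control.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe_control.scheduler.config)
pipe_control.to("cuda")

# One call, roughly what preprocess_image now does when a prompt is supplied.
sketch = Image.open("sketch.png").convert("RGB")  # placeholder input path
result = pipe_control(
    prompt="a small wooden chair, product photo",  # example prompt
    negative_prompt="",
    image=sketch,
    num_inference_steps=25,               # matches the app's num_steps default
    controlnet_conditioning_scale=0.85,   # illustrative; the app passes this in
    guidance_scale=5,
    width=1024,
    height=1024,
).images[0]
result.save("generated.png")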