import gradio as gr
from layers import BilinearUpSampling2D
import matplotlib.pyplot as plt
import numpy as np
from huggingface_hub import from_pretrained_keras
from einops import repeat
# BilinearUpSampling2D is a custom layer required to deserialize the saved model;
# the training loss is not needed for inference, so it is mapped to None and the
# model is loaded with compile=False.
custom_objects = {'BilinearUpSampling2D': BilinearUpSampling2D, 'depth_loss_function': None}

print('Loading model...')
model = from_pretrained_keras("keras-io/monocular-depth-estimation", custom_objects=custom_objects, compile=False)
print('Successfully loaded model...')

import importlib
import utils

# Reload utils so local edits are picked up without restarting the app (development convenience).
importlib.reload(utils)
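# `utils` is a separate module in this Space and is not shown here. The sketch below is
# only an assumption about the interface that infer() relies on: load_images() returns a
# float batch of shape (n, 480, 640, 3) scaled to [0, 1], and predict() returns depth maps
# at half the input resolution, shape (n, 240, 320, 1). These underscore-prefixed helpers
# are hypothetical reference stubs and are never called by the app.
def _sketch_load_images(images):
    # Scale pixels to [0, 1] and stack into a batch the model can consume.
    return np.stack([np.asarray(im, dtype=np.float32) / 255.0 for im in images], axis=0)

def _sketch_predict(model, inputs):
    # Run the network; the decoder is assumed to output depth at half the input resolution.
    return model.predict(inputs)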
def layer_over_image(raw_image, mask):
    # Black out every pixel that falls outside the boolean depth mask.
    # Work on a copy so the caller's image is not modified in place.
    out_image = raw_image.copy()
    out_image[:, :, 0] = raw_image[:, :, 0] * mask
    out_image[:, :, 1] = raw_image[:, :, 1] * mask
    out_image[:, :, 2] = raw_image[:, :, 2] * mask
    return out_image
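# Illustrative sanity check (not part of the app's logic): masking a 2x2 white image
# with a diagonal boolean mask should keep only the diagonal pixels.
_demo_image = np.full((2, 2, 3), 255, dtype=np.uint8)
_demo_mask = np.array([[True, False], [False, True]])
assert (layer_over_image(_demo_image, _demo_mask)[:, :, 0] == 255 * _demo_mask).all()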
def infer(image, min_th, max_th):
    # Preprocess the image and predict a depth map (returned at half the input resolution).
    inputs = utils.load_images([image])
    outputs = utils.predict(model, inputs)

    # Normalize the depth map to [0, 1] and render it with the 'plasma' colormap.
    plasma = plt.get_cmap('plasma')
    rescaled = outputs[0][:, :, 0]
    rescaled = rescaled - np.min(rescaled)
    rescaled = rescaled / np.max(rescaled)
    im_heat = plasma(rescaled)[:, :, :3]

    # Upsample the depth map 2x (nearest neighbour) so the masks match the input image size,
    # then build boolean masks for the user-selected depth band.
    filt_base = repeat(rescaled, "h w -> (h 2) (w 2)")
    filt_arr_min = filt_base > min_th / 100
    filt_arr_max = filt_base < max_th / 100

    # Heatmap restricted to the selected band; kept for the commented-out
    # "Filtered Image" output below, currently unused.
    filt_arr = filt_arr_min * filt_base * filt_arr_max
    im_heat_filt = plasma(filt_arr)[:, :, :3]

    # Black out everything outside the [min_th, max_th] depth band.
    if min_th > 0 or max_th < 100:
        image_emph = layer_over_image(image, filt_arr_min & filt_arr_max)
    else:
        image_emph = image

    return im_heat, image_emph
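# Quick local smoke test (assumes Pillow is available and that the example image from the
# examples list below exists in the repo). Left commented out so it does not run on every
# Space startup:
#
#   from PIL import Image
#   test_img = np.array(Image.open("examples/00015_colors.png").convert("RGB").resize((640, 480)))
#   heat, emph = infer(test_img, 7, 85)
#   print(heat.shape, emph.shape)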
# def detr(im):
#     return im

gr_input = [
    gr.inputs.Image(label="Image", type="numpy", shape=(640, 480)),
    gr.inputs.Slider(minimum=0, maximum=100, step=0.5, default=0, label="Minimum Threshold"),
    gr.inputs.Slider(minimum=0, maximum=100, step=0.5, default=100, label="Maximum Threshold"),
]

gr_output = [
    gr.outputs.Image(type="pil", label="Depth HeatMap"),
    # gr.outputs.Image(type="pil", label="Filtered Image"),
    # gr.outputs.Image(type="pil", label="Before"),
    gr.outputs.Image(type="pil", label="Important Areas"),
]
iface = gr.Interface(
    fn=infer,
    title="Monocular Depth Filter",
    description="Uses the Keras monocular depth estimation model to estimate the depth of each region in an image. The user sets minimum and maximum depth thresholds, and the app shows only the parts of the image that fall inside that range.",
    inputs=gr_input,
    outputs=gr_output,
    examples=[
        ["examples/00015_colors.png", 7, 85],
        ["examples/car.JPG", 0, 30],
        ["examples/dash.jpg", 10, 57.5],
    ],
)

iface.launch()
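# Note: the gr.inputs / gr.outputs namespaces were removed in newer Gradio releases. If the
# Space is upgraded past Gradio 3.x, an equivalent interface (a sketch, not tested against
# any specific version) would look roughly like the commented block below; the fixed
# `shape=` argument is gone, so any resizing to 640x480 has to happen inside infer()/utils:
#
#   iface = gr.Interface(
#       fn=infer,
#       title="Monocular Depth Filter",
#       inputs=[
#           gr.Image(label="Image", type="numpy"),
#           gr.Slider(minimum=0, maximum=100, step=0.5, value=0, label="Minimum Threshold"),
#           gr.Slider(minimum=0, maximum=100, step=0.5, value=100, label="Maximum Threshold"),
#       ],
#       outputs=[
#           gr.Image(type="pil", label="Depth HeatMap"),
#           gr.Image(type="pil", label="Important Areas"),
#       ],
#       examples=[["examples/00015_colors.png", 7, 85]],
#   )
#   iface.launch()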