import gradio as gr
import cv2
import numpy as np


# Compare two images and highlight the regions where they differ
def compare_images(mockup, ui_screenshot, check_text, check_color, check_spacing):
    # Gradio passes None when an image slot is left empty
    if mockup is None or ui_screenshot is None:
        raise gr.Error("Please upload both a mockup and a UI screenshot.")

    # Convert images to numpy arrays (Gradio supplies RGB uint8 arrays)
    mockup_array = np.array(mockup)
    ui_screenshot_array = np.array(ui_screenshot)

    # Resize both images to the same dimensions if they differ
    if mockup_array.shape[:2] != ui_screenshot_array.shape[:2]:
        height = max(mockup_array.shape[0], ui_screenshot_array.shape[0])
        width = max(mockup_array.shape[1], ui_screenshot_array.shape[1])
        mockup_array = cv2.resize(mockup_array, (width, height))
        ui_screenshot_array = cv2.resize(ui_screenshot_array, (width, height))

    # Convert from Gradio's RGB ordering to OpenCV's BGR for the difference step
    mockup_bgr = cv2.cvtColor(mockup_array, cv2.COLOR_RGB2BGR)
    ui_screenshot_bgr = cv2.cvtColor(ui_screenshot_array, cv2.COLOR_RGB2BGR)

    # Compute the absolute per-pixel difference between the two images
    difference = cv2.absdiff(mockup_bgr, ui_screenshot_bgr)

    # Threshold the grayscale difference to get a binary mask of changed pixels
    _, thresh = cv2.threshold(cv2.cvtColor(difference, cv2.COLOR_BGR2GRAY), 30, 255, cv2.THRESH_BINARY)

    # Find the contours of the differing regions
    contours, _ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

    # Draw the differences on a copy of the UI screenshot (kept in RGB for display)
    highlighted_image = ui_screenshot_array.copy()

    # Colors for the different checks (RGB, since the output image is RGB)
    colors = {
        "text": (255, 0, 0),     # Red for text
        "color": (0, 255, 0),    # Green for color
        "spacing": (0, 0, 255),  # Blue for spacing
    }

    # Draw a bounding box around each difference; when several checks are enabled,
    # the boxes overlap, so the last enabled color is the one that remains visible
    for contour in contours:
        # Calculate the bounding box for the contour
        x, y, w, h = cv2.boundingRect(contour)
        if check_text:
            cv2.rectangle(highlighted_image, (x, y), (x + w, y + h), colors["text"], 2)
        if check_color:
            cv2.rectangle(highlighted_image, (x, y), (x + w, y + h), colors["color"], 2)
        if check_spacing:
            cv2.rectangle(highlighted_image, (x, y), (x + w, y + h), colors["spacing"], 2)

    return highlighted_image
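
# Optional sanity check -- a minimal sketch, not part of the original Space.
# It builds two synthetic images that differ in one square region and runs
# compare_images() without the Gradio UI. The helper name and test data are
# illustrative assumptions.
def _smoke_test():
    base = np.zeros((200, 200, 3), dtype=np.uint8)
    changed = base.copy()
    changed[50:100, 50:100] = (255, 255, 255)  # introduce an obvious difference
    result = compare_images(base, changed, True, False, False)
    assert result.shape == changed.shape  # output matches the screenshot's size
    return result
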
# Create the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Spot The Difference Game")
    gr.Markdown("Upload a mockup and a UI screenshot to find the differences!")
    gr.Markdown("Important! Please upload screenshots with the same resolution for accurate results.")

    with gr.Row():
        mockup = gr.Image(label="Mockup Image")
        ui_screenshot = gr.Image(label="UI Screenshot")

    with gr.Row():
        check_text = gr.Checkbox(label="Check Text", value=True)
        check_color = gr.Checkbox(label="Check Color", value=True)
        check_spacing = gr.Checkbox(label="Check Spacing", value=True)

    with gr.Row():
        compare_button = gr.Button("Compare Images")
        highlighted_image = gr.Image(label="Highlighted Differences")

    # Run the comparison when the button is clicked
    compare_button.click(
        fn=compare_images,
        inputs=[mockup, ui_screenshot, check_text, check_color, check_spacing],
        outputs=highlighted_image,
    )

# Launch the Gradio app
demo.launch(show_error=True)
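
# Running locally -- a sketch under assumptions, since the Space's requirements
# file is not shown here. The imports above suggest something like:
#
#   pip install gradio numpy opencv-python-headless
#   python app.py
#
# (opencv-python-headless is assumed for headless/server environments; plain
# opencv-python also works on a desktop. The filename app.py is an assumption.)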