# Hugging Face Space by mahesh1209 — app.py (commit 36ecdd3, verified)
import gradio as gr
import numpy as np
import matplotlib.pyplot as plt
def optimize_and_plot(lr=0.01, steps=50):
    """Run gradient descent on a convex and a non-convex function and plot both paths.

    Parameters
    ----------
    lr : float
        Learning rate (step size) for gradient descent.
    steps : int or float
        Number of descent iterations. Coerced to ``int`` because Gradio
        sliders may deliver floats, and ``range()`` requires an integer.

    Returns
    -------
    matplotlib.figure.Figure
        A 1x2 figure: convex descent path (left), non-convex descent
        path (right).
    """
    # gr.Slider can emit a float; range(steps) below needs an int.
    steps = int(steps)

    def convex_fn(x):
        return x ** 2

    def nonconvex_fn(x):
        return x ** 4 - 3 * x ** 3 + 2

    def grad_convex(x):
        return 2 * x

    def grad_nonconvex(x):
        return 4 * x ** 3 - 9 * x ** 2

    def gradient_descent(f, grad_f, x0):
        # Record every iterate so the full descent path can be scattered
        # on top of the function curve.
        x_vals, y_vals = [x0], [f(x0)]
        x = x0
        for _ in range(steps):
            x -= lr * grad_f(x)
            x_vals.append(x)
            y_vals.append(f(x))
        return x_vals, y_vals

    x_c, y_c = gradient_descent(convex_fn, grad_convex, x0=5)
    x_nc, y_nc = gradient_descent(nonconvex_fn, grad_nonconvex, x0=2)

    fig, axs = plt.subplots(1, 2, figsize=(10, 4))
    x = np.linspace(-1, 6, 100)  # covers both start points (5 and 2)
    axs[0].plot(x, convex_fn(x), label='Convex Function')
    axs[0].scatter(x_c, y_c, c='red', s=10, label='Descent Path')
    axs[0].set_title('Convex Optimization')
    axs[0].legend()
    axs[1].plot(x, nonconvex_fn(x), label='Non-Convex Function')
    axs[1].scatter(x_nc, y_nc, c='purple', s=10, label='Descent Path')
    axs[1].set_title('Non-Convex Optimization')
    axs[1].legend()
    plt.tight_layout()
    return fig
# Gradio UI: two sliders control the learning rate and iteration count;
# optimize_and_plot returns a matplotlib Figure rendered by gr.Plot.
demo = gr.Interface(
    fn=optimize_and_plot,
    inputs=[
        gr.Slider(0.001, 0.1, value=0.01, label="Learning Rate"),
        # step=1 keeps the iteration count an integer value.
        gr.Slider(10, 100, value=50, step=1, label="Steps"),
    ],
    outputs=gr.Plot(),
    title="Convex vs Non-Convex Optimization",
    description="Visualize gradient descent on convex and non-convex functions.",
)

# Launch only when executed as a script so the module stays importable
# (e.g. for tests) without starting a web server as a side effect.
if __name__ == "__main__":
    demo.launch()