Commit b684c32 · update
1 Parent(s): c7cb493
app.py CHANGED
@@ -10,11 +10,12 @@ observed_points = []
 generated_curves = []
 generated_configurations = []
 history = {}
+rank = []
 surrogate_model = FTPFN()
 
 # Function to generate curves
 def generate_curves(num_curves, max_length):
-    global generated_curves, generated_configurations, history
+    global generated_curves, generated_configurations, history, rank
     reset_optimization()
 
     configurations, curves = sample_curves(num_hyperparameters=num_curves,
@@ -35,6 +36,8 @@ def generate_curves(num_curves, max_length):
 
     generated_curves = torch.FloatTensor(curves)
     generated_configurations = torch.FloatTensor(configurations)
+    rank = list(range(num_curves))
+
     for i in range(num_curves):
         history[i] = 0
     return fig, None, hyperparam_display
@@ -106,6 +109,9 @@ def bayesian_optimization_step(num_curves, max_length):
         next_point = np.random.randint(0, num_curves)
     else:
         next_point = next_observed_point(history, generated_curves, generated_configurations)
+
+    rank.remove(next_point)
+    rank.append(next_point)
 
     if next_point in history:
         history[next_point] += 1
@@ -120,22 +126,29 @@ def bayesian_optimization_step(num_curves, max_length):
     # Plot the updated curves with uncertainty and observed points
     fig, ax = plt.subplots()
 
-    for
-
-
-
-        ax.fill_between(x, q05[i], q95[i], alpha=0.3, color=color)
-
-    for curve_id, epoch in history.items():
+    for r in rank:
+        curve_id = r
+        epoch = history[curve_id]
+        curve = curves[curve_id]
         color = generated_configurations[curve_id].numpy().tolist()
-
-
-
+
+        # Plot the full curve with reduced opacity
+        ax.plot(np.arange(1, len(curves[0]) + 1), curve, alpha=0.1, color=color)
+
+        # Plot the uncertainty region
+        x = np.arange(max(history[curve_id], 1), len(q05[curve_id]) + max(history[curve_id], 1))
+        ax.fill_between(x, q05[curve_id], q95[curve_id], alpha=0.3, color=color)
+
+        # Plot observed points and lines for the fully observed part of the curve
+        ax.plot(np.arange(1, epoch + 1), curve[:epoch], 'ro', color=color)
+        ax.plot(np.arange(1, epoch + 1), curve[:epoch], color=color)
+
     plt.xlim(0, len(curves[0]) + 1)
     plt.ylim(0, 1)
-    plt.title(f"
+    plt.title(f"Iteration {len(observed_points)}: Observing curve {next_point+1} at step {history[next_point]}")
     plt.xlabel("t")
     plt.ylabel("y")
+
     return fig
 
 # Reset function for Bayesian optimization
@@ -143,6 +156,7 @@ def reset_optimization():
     global observed_points, generated_curves, generated_configurations, history
    observed_points = []
    history = {}
+    rank = []
     for i in range(len(generated_curves)):
         history[i] = 0
     return None
@@ -160,11 +174,12 @@ with gr.Blocks() as demo:
     gr.HTML(scroll_script)
 
     # Add a title
-    gr.Markdown("
+    gr.Markdown("<h1 style=\"text-align: center;\">ifBO: In-context Freeze-Thaw Bayesian Optimization</h1>")
     gr.Markdown("Paper: [https://arxiv.org/pdf/2404.16795](https://arxiv.org/pdf/2404.16795)")
+    gr.Markdown("Code: [http://github.com/automl/ifBO](http://github.com/automl/ifBO)")
 
     # First section for curve generation
-    gr.Markdown("
+    gr.Markdown("## Curve Generation")
     gr.Markdown("""
     To generate' a set of synthetic curves (according to the proposed curve prior, see section 4.1), please specify the number of curves and the maximum length of each curve, then click 'Generate'.
     """)
@@ -180,7 +195,7 @@ with gr.Blocks() as demo:
     curve_plot = gr.Plot()
 
     # Separate section for Bayesian optimization
-    gr.Markdown("
+    gr.Markdown("## Bayesian Optimization")
     gr.Markdown("""
     After generating the curves, click 'One Step with ifBO' to take an optimization step.
     During this step, **ifBO** will select the next point to evaluate based on the previously observed points, using the MFPI-random acquisition function (see section 4.2).