{
"title": "Lasso Regression Mastery: 100 MCQs",
"description": "A comprehensive set of 100 multiple-choice questions designed to teach and test your understanding of Lasso Regression, from basic linear regression concepts to advanced feature selection, sparsity, regularization, and real-world scenarios.",
"questions": [
{
"id": 1,
"questionText": "What is the main goal of Lasso Regression?",
"options": [
"To predict categories",
"To find a straight-line relationship and perform feature selection",
"To compress the data",
"To find clusters in data"
],
"correctAnswerIndex": 1,
"explanation": "Lasso Regression adds an L1 penalty, which can shrink some coefficients to exactly zero, performing feature selection while modeling relationships."
},
{
"id": 2,
"questionText": "Lasso Regression differs from Ridge Regression because it:",
"options": [
"Uses L1 penalty instead of L2",
"Does not require standardization",
"Cannot handle multicollinearity",
"Always increases training error"
],
"correctAnswerIndex": 0,
"explanation": "Lasso uses L1 regularization, which can shrink some coefficients to zero, unlike Ridge which uses L2 and shrinks all coefficients."
},
{
"id": 3,
"questionText": "Scenario: You have a dataset with 20 features, some irrelevant. Which regression is suitable for automatic feature selection?",
"options": [
"Ridge Regression",
"Linear Regression",
"Lasso Regression",
"Polynomial Regression"
],
"correctAnswerIndex": 2,
"explanation": "Lasso’s L1 penalty can zero out irrelevant features, performing automatic feature selection."
},
{
"id": 4,
"questionText": "In Lasso Regression, increasing the alpha parameter will:",
"options": [
"Ignore correlated features",
"Shrink more coefficients to zero",
"Decrease training error and increase test error",
"Remove the intercept automatically"
],
"correctAnswerIndex": 1,
"explanation": "Higher alpha strengthens the L1 penalty, increasing sparsity and setting more coefficients exactly to zero."
},
{
"id": 5,
"questionText": "Why is feature standardization important in Lasso Regression?",
"options": [
"It removes noise automatically",
"Alpha is irrelevant",
"Intercept is ignored otherwise",
"L1 penalty depends on coefficient magnitude, which depends on feature scale"
],
"correctAnswerIndex": 3,
"explanation": "Without standardization, features with large scales dominate the penalty, leading to unfair coefficient shrinkage."
},
{
"id": 6,
"questionText": "Scenario: Two features are highly correlated. Lasso Regression may:",
"options": [
"Select one feature and zero the other",
"Ignore both",
"Shrink both equally like Ridge",
"Fail to converge"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty tends to pick one correlated feature and set the other to zero, performing feature selection."
},
{
"id": 7,
"questionText": "Scenario: Lasso Regression applied with alpha=0. Result?",
"options": [
"Removes multicollinearity",
"Performs feature selection",
"Reduces variance",
"Behaves like Linear Regression"
],
"correctAnswerIndex": 3,
"explanation": "Alpha=0 removes the L1 penalty, reducing Lasso to standard Linear Regression."
},
{
"id": 8,
"questionText": "Scenario: Lasso Regression applied to polynomial features with high degree. Main effect?",
"options": [
"Eliminates low-degree terms automatically",
"Increases training error only",
"Removes intercept",
"Controls overfitting by zeroing some coefficients"
],
"correctAnswerIndex": 3,
"explanation": "Lasso shrinks some high-degree term coefficients to zero, reducing overfitting."
},
{
"id": 9,
"questionText": "Scenario: Lasso Regression applied on dataset with missing values. Action?",
"options": [
"Remove alpha",
"Impute missing values before Lasso",
"L1 penalty handles missing automatically",
"Ignore missing values"
],
"correctAnswerIndex": 1,
"explanation": "Lasso requires complete data; missing values should be imputed or removed first."
},
{
"id": 10,
"questionText": "Scenario: Lasso Regression applied on standardized dataset. Alpha too high. Result?",
"options": [
"Intercept removed automatically",
"Coefficients remain unchanged",
"Low training error, high variance",
"Many coefficients set to zero, potential underfitting"
],
"correctAnswerIndex": 3,
"explanation": "Excessive regularization shrinks many coefficients to zero, increasing bias."
},
{
"id": 11,
"questionText": "Scenario: Lasso vs Ridge on dataset with irrelevant features. Observation?",
"options": [
"Ridge eliminates features; Lasso does not",
"Ridge shrinks coefficients but keeps them non-zero; Lasso may zero irrelevant features",
"Both produce identical results",
"Alpha irrelevant"
],
"correctAnswerIndex": 1,
"explanation": "Lasso performs feature selection while Ridge shrinks coefficients but retains all features."
},
{
"id": 12,
"questionText": "Scenario: Lasso Regression applied to time-series data with lag features. Standardization importance?",
"options": [
"Ensures fair penalty for all lag features",
"Alpha irrelevant",
"Intercept ignored otherwise",
"Training error minimized automatically"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty affects coefficients fairly only if features are standardized."
},
{
"id": 13,
"questionText": "Scenario: Lasso Regression applied to one-hot encoded categorical variables. Concern?",
"options": [
"May zero some dummy variables while keeping others",
"Intercept removed automatically",
"No need for scaling",
"Alpha irrelevant"
],
"correctAnswerIndex": 0,
"explanation": "Lasso may select one dummy feature and set others to zero, performing feature selection."
},
{
"id": 14,
"questionText": "Scenario: Lasso Regression applied to high-dimensional dataset (features > samples). Advantage?",
"options": [
"Training error minimal",
"Can produce sparse model, eliminating irrelevant features",
"Cannot converge",
"Removes intercept"
],
"correctAnswerIndex": 1,
"explanation": "L1 penalty creates sparsity, which is helpful in high-dimensional problems."
},
{
"id": 15,
"questionText": "Scenario: Lasso Regression applied with cross-validation. Purpose of cross-validation?",
"options": [
"Eliminate correlated features",
"Always minimize training error",
"Select optimal alpha to balance bias-variance tradeoff",
"Ignore feature scaling"
],
"correctAnswerIndex": 2,
"explanation": "Cross-validation identifies alpha that minimizes validation/test error for better generalization."
},
{
"id": 16,
"questionText": "Scenario: Lasso Regression applied to noisy dataset. Observed smaller coefficients than Linear Regression. Why?",
"options": [
"Alpha zero",
"Noise ignored automatically",
"L1 penalty shrinks some coefficients to zero, reducing variance",
"Intercept removed"
],
"correctAnswerIndex": 2,
"explanation": "Regularization reduces sensitivity to noise, shrinking coefficients and improving generalization."
},
{
"id": 17,
"questionText": "Scenario: Lasso Regression vs Elastic Net. Main difference?",
"options": [
"Elastic Net ignores feature selection",
"Lasso uses L2",
"Elastic Net uses L1 only",
"Elastic Net combines L1 and L2, useful when correlated features exist"
],
"correctAnswerIndex": 3,
"explanation": "Elastic Net combines L1 and L2 penalties, balancing feature selection and coefficient shrinkage."
},
{
"id": 18,
"questionText": "Scenario: Lasso Regression applied to correlated features. Observation?",
"options": [
"Shrinks coefficients equally",
"Tends to select one and zero others",
"Fails to converge",
"Removes intercept"
],
"correctAnswerIndex": 1,
"explanation": "L1 penalty promotes sparsity, often selecting one feature from correlated groups."
},
{
"id": 19,
"questionText": "Scenario: Lasso Regression applied with very small alpha. Effect?",
"options": [
"High sparsity",
"Fails to converge",
"Minimal regularization, behaves like Linear Regression",
"Intercept removed"
],
"correctAnswerIndex": 2,
"explanation": "Small alpha means weak L1 penalty; model behaves like Linear Regression with minimal feature selection."
},
{
"id": 20,
"questionText": "Scenario: Lasso Regression applied to polynomial features. High-degree coefficients are large. Solution?",
"options": [
"Increase alpha to shrink some coefficients to zero",
"Ignore intercept",
"Decrease alpha",
"Remove low-degree terms"
],
"correctAnswerIndex": 0,
"explanation": "Higher alpha reduces overfitting by zeroing some large coefficients from high-degree terms."
},
{
"id": 21,
"questionText": "Scenario: Lasso Regression applied to dataset with irrelevant features. Test error high. Solution?",
"options": [
"Ignore irrelevant features",
"Increase alpha excessively",
"Alpha tuning via cross-validation, or consider Elastic Net",
"Decrease alpha to zero"
],
"correctAnswerIndex": 2,
"explanation": "Cross-validation or Elastic Net can select relevant features and improve generalization."
},
{
"id": 22,
"questionText": "Scenario: Lasso Regression applied to dataset with standardized features. Correlated features coefficients?",
"options": [
"One selected, others may be zero",
"Fails to converge",
"All coefficients equal",
"Intercept removed automatically"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty often selects one correlated feature and zeros others."
},
{
"id": 23,
"questionText": "Scenario: Lasso Regression applied on large dataset. Why standardize features?",
"options": [
"Data shrunk automatically",
"Alpha irrelevant",
"Intercept removed otherwise",
"L1 penalty treats all features fairly only if scaled"
],
"correctAnswerIndex": 3,
"explanation": "Without standardization, large-scale features dominate the penalty."
},
{
"id": 24,
"questionText": "Scenario: Lasso Regression applied to dataset with missing values. Action?",
"options": [
"Remove alpha",
"Ignore missing values",
"Impute missing values first",
"L1 handles missing automatically"
],
"correctAnswerIndex": 2,
"explanation": "Lasso cannot handle missing values; imputation or removal is required."
},
{
"id": 25,
"questionText": "Scenario: Lasso Regression applied with high alpha. Many coefficients zero. Risk?",
"options": [
"Intercept ignored",
"Underfitting due to excessive sparsity",
"Overfitting",
"Training error minimized"
],
"correctAnswerIndex": 1,
"explanation": "Too high alpha increases bias and underfits the data."
},
{
"id": 26,
"questionText": "Scenario: Lasso Regression applied to dataset with 100 features, many irrelevant. Observation: only 20 features have non-zero coefficients. Reason?",
"options": [
"Alpha too small",
"Training error minimized",
"Intercept removed",
"L1 penalty induces sparsity, zeroing irrelevant features"
],
"correctAnswerIndex": 3,
"explanation": "Lasso’s L1 penalty shrinks some coefficients to exactly zero, effectively performing feature selection."
},
{
"id": 27,
"questionText": "Scenario: Lasso Regression applied to highly correlated features. Observation: model selects some features and zeros others. Advantage?",
"options": [
"Intercept removed automatically",
"All features retained",
"Reduces multicollinearity impact and produces sparse model",
"Increases variance"
],
"correctAnswerIndex": 2,
"explanation": "Lasso selects one feature from a correlated group, reducing multicollinearity and producing a simpler model."
},
{
"id": 28,
"questionText": "Scenario: Lasso Regression applied with cross-validation on dataset with noisy features. Best practice?",
"options": [
"Always use alpha=1",
"Select alpha minimizing validation/test error",
"Remove polynomial terms",
"Ignore feature scaling"
],
"correctAnswerIndex": 1,
"explanation": "Cross-validation finds alpha that balances bias and variance, improving generalization on noisy data."
},
{
"id": 29,
"questionText": "Scenario: Lasso Regression applied to polynomial regression of degree 8. Observation: some high-degree term coefficients zeroed. Why?",
"options": [
"Intercept removed",
"Training error minimized",
"Polynomial terms ignored automatically",
"L1 penalty shrinks less important coefficients to zero, controlling overfitting"
],
"correctAnswerIndex": 3,
"explanation": "Lasso penalizes large coefficients, eliminating less important high-degree terms to prevent overfitting."
},
{
"id": 30,
"questionText": "Scenario: Lasso Regression applied to dataset with one-hot encoded categorical features. Observation: some dummy variables zeroed. Effect?",
"options": [
"Training error increases",
"All features retained",
"Automatic feature selection among categorical levels",
"Intercept removed"
],
"correctAnswerIndex": 2,
"explanation": "Lasso can zero some dummy variables, selecting the most predictive categories."
},
{
"id": 31,
"questionText": "Scenario: Lasso Regression applied to dataset with features in different units. Observation: large coefficients for small-scale features. Cause?",
"options": [
"Data uncorrelated",
"Intercept removed",
"Alpha too low",
"L1 penalty is unfair without standardization"
],
"correctAnswerIndex": 3,
"explanation": "Without standardization, features with small scales are penalized less, leading to larger coefficients."
},
{
"id": 32,
"questionText": "Scenario: Lasso Regression applied to time-series lag features. Standardization importance?",
"options": [
"Intercept ignored",
"Training error minimized automatically",
"Alpha irrelevant",
"Ensures fair L1 penalty across features"
],
"correctAnswerIndex": 3,
"explanation": "Standardization ensures all features contribute fairly to the penalty."
},
{
"id": 33,
"questionText": "Scenario: Lasso Regression applied with alpha too small. Observation?",
"options": [
"Fails to converge",
"Many coefficients zero",
"Minimal sparsity, behaves like Linear Regression",
"Intercept removed"
],
"correctAnswerIndex": 2,
"explanation": "Small alpha provides weak L1 penalty; few or no coefficients are zeroed."
},
{
"id": 34,
"questionText": "Scenario: Lasso Regression applied to dataset with missing values. Observation: model cannot train. Solution?",
"options": [
"Increase alpha",
"Ignore missing values",
"Impute missing values before Lasso",
"Decrease alpha"
],
"correctAnswerIndex": 2,
"explanation": "Lasso requires complete data; missing values must be imputed or removed before training."
},
{
"id": 35,
"questionText": "Scenario: Lasso Regression applied to dataset with highly noisy features. Observation: fewer non-zero coefficients than Ridge. Why?",
"options": [
"Alpha irrelevant",
"Intercept removed",
"L1 penalty zeroes some coefficients, reducing variance",
"Training error minimized"
],
"correctAnswerIndex": 2,
"explanation": "Lasso induces sparsity by zeroing less important coefficients, effectively reducing variance."
},
{
"id": 36,
"questionText": "Scenario: Lasso Regression applied to dataset with polynomial features of degree 10. Observation: high-degree terms zeroed. Effect?",
"options": [
"Intercept removed",
"Training error minimized",
"Low-degree terms ignored",
"Controls overfitting by eliminating less important terms"
],
"correctAnswerIndex": 3,
"explanation": "Lasso shrinks high-degree coefficients to zero, reducing overfitting and model complexity."
},
{
"id": 37,
"questionText": "Scenario: Lasso Regression applied to dataset with 1000 features, alpha tuned via cross-validation. Observation: 200 features non-zero. Interpretation?",
"options": [
"Intercept removed",
"Model underfits",
"Optimal alpha balances sparsity and generalization",
"Training error high"
],
"correctAnswerIndex": 2,
"explanation": "Cross-validation selects alpha that keeps predictive features while zeroing irrelevant ones."
},
{
"id": 38,
"questionText": "Scenario: Lasso Regression vs Ridge on dataset with correlated features. Observation?",
"options": [
"Ridge eliminates features; Lasso does not",
"Lasso selects some features and zeros others; Ridge shrinks all coefficients",
"Alpha irrelevant",
"Both produce sparse models"
],
"correctAnswerIndex": 1,
"explanation": "Lasso promotes sparsity and feature selection; Ridge keeps all correlated features."
},
{
"id": 39,
"questionText": "Scenario: Lasso Regression applied to dataset with irrelevant features. Test error high. Recommended solution?",
"options": [
"Increase alpha excessively",
"Ignore irrelevant features",
"Decrease alpha to zero",
"Adjust alpha via cross-validation or use Elastic Net"
],
"correctAnswerIndex": 3,
"explanation": "Cross-validation tuning or Elastic Net can remove irrelevant features while balancing regularization."
},
{
"id": 40,
"questionText": "Scenario: Lasso Regression applied on standardized features. Observation: correlated feature coefficients? ",
"options": [
"Intercept removed",
"One selected, others zeroed",
"Fail to converge",
"All coefficients equal"
],
"correctAnswerIndex": 1,
"explanation": "L1 penalty selects one correlated feature and zeros the others."
},
{
"id": 41,
"questionText": "Scenario: Lasso Regression applied to high-dimensional dataset (features > samples). Advantage?",
"options": [
"Produces sparse model, eliminating irrelevant features",
"Intercept removed",
"Cannot converge",
"Training error minimal"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty shrinks unimportant coefficients to zero, useful in high-dimensional problems."
},
{
"id": 42,
"questionText": "Scenario: Lasso Regression applied with very high alpha. Observation?",
"options": [
"High variance",
"Intercept ignored",
"Many coefficients zeroed, possible underfitting",
"Training error minimized"
],
"correctAnswerIndex": 2,
"explanation": "Excessive regularization increases bias, underfitting the model."
},
{
"id": 43,
"questionText": "Scenario: Lasso Regression applied with cross-validation. Purpose?",
"options": [
"Always minimize training error",
"Ignore standardization",
"Select alpha that minimizes validation/test error",
"Remove correlated features"
],
"correctAnswerIndex": 2,
"explanation": "Cross-validation identifies alpha that optimally balances bias and variance."
},
{
"id": 44,
"questionText": "Scenario: Lasso Regression applied to polynomial regression. Observation: low-degree term coefficients non-zero, high-degree zero. Effect?",
"options": [
"Low-degree terms ignored",
"Training error minimized",
"Reduces overfitting by removing less important high-degree terms",
"Intercept removed"
],
"correctAnswerIndex": 2,
"explanation": "Lasso controls overfitting by penalizing and zeroing high-degree terms."
},
{
"id": 45,
"questionText": "Scenario: Lasso Regression applied to dataset with missing values. Recommended practice?",
"options": [
"Increase alpha",
"Impute missing values before training",
"Ignore missing values",
"Decrease alpha"
],
"correctAnswerIndex": 1,
"explanation": "Lasso cannot handle missing data; it must be imputed or cleaned first."
},
{
"id": 46,
"questionText": "Scenario: Lasso Regression applied on dataset with noisy features. Observation: fewer non-zero coefficients than Ridge. Why?",
"options": [
"Intercept removed",
"Training error minimized",
"L1 penalty zeroes some coefficients, reducing variance",
"Alpha irrelevant"
],
"correctAnswerIndex": 2,
"explanation": "Lasso induces sparsity, setting some coefficients to zero, which reduces variance in noisy datasets."
},
{
"id": 47,
"questionText": "Scenario: Lasso Regression applied to dataset with one-hot encoded features. Observation: some dummy variables zeroed. Effect?",
"options": [
"Intercept removed",
"Feature selection among categories",
"All features retained",
"Training error increased"
],
"correctAnswerIndex": 1,
"explanation": "Lasso may zero less predictive categories, retaining only important levels."
},
{
"id": 48,
"questionText": "Scenario: Lasso Regression applied to polynomial features of degree 12. Observation: high-degree coefficients zeroed. Effect?",
"options": [
"Reduces overfitting and model complexity",
"Intercept removed",
"Low-degree terms ignored",
"Training error minimized"
],
"correctAnswerIndex": 0,
"explanation": "High-degree coefficients are penalized, reducing model complexity and overfitting."
},
{
"id": 49,
"questionText": "Scenario: Lasso Regression applied to dataset with highly correlated features. Observation: some features zeroed. Advantage?",
"options": [
"Reduces multicollinearity and simplifies model",
"Fails to converge",
"Increases variance",
"Intercept removed"
],
"correctAnswerIndex": 0,
"explanation": "Lasso selects one feature from correlated group, producing a simpler, less collinear model."
},
{
"id": 50,
"questionText": "Scenario: Lasso Regression applied with small alpha. Observation?",
"options": [
"Minimal feature selection; behaves like Linear Regression",
"Fails to converge",
"Many coefficients zeroed",
"Intercept removed"
],
"correctAnswerIndex": 0,
"explanation": "Small alpha provides weak L1 penalty; the model retains most features with minimal sparsity."
},
{
"id": 51,
"questionText": "Scenario: Lasso Regression applied to a medical dataset with 500 features. Many coefficients zeroed. Interpretation?",
"options": [
"Training error is zero",
"L1 penalty removed irrelevant features, simplifying the model",
"Model underfits due to small dataset",
"Intercept removed automatically"
],
"correctAnswerIndex": 1,
"explanation": "Lasso shrinks less important features to zero, keeping only predictive variables, which is useful in high-dimensional data."
},
{
"id": 52,
"questionText": "Scenario: Lasso Regression applied with correlated features. Observation: only one feature from each correlated group selected. Advantage?",
"options": [
"Reduces multicollinearity and produces sparse solution",
"All features retained",
"Intercept removed",
"Model overfits"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty selects one feature and zeros others, reducing multicollinearity and simplifying interpretation."
},
{
"id": 53,
"questionText": "Scenario: Lasso Regression applied with cross-validation. Optimal alpha minimizes:",
"options": [
"Validation/test error",
"Training error",
"Intercept value",
"Number of non-zero features only"
],
"correctAnswerIndex": 0,
"explanation": "Cross-validation selects the alpha that balances bias-variance and gives best generalization on unseen data."
},
{
"id": 54,
"questionText": "Scenario: Lasso Regression applied on polynomial regression of degree 12. Observation: high-degree terms zeroed. Effect?",
"options": [
"Low-degree terms ignored",
"Reduces overfitting by eliminating less important terms",
"Training error minimized",
"Intercept removed"
],
"correctAnswerIndex": 1,
"explanation": "Lasso penalizes high-degree coefficients, zeroing unimportant terms to prevent overfitting."
},
{
"id": 55,
"questionText": "Scenario: Lasso Regression applied to time-series dataset with lag features. Importance of feature standardization?",
"options": [
"Alpha irrelevant",
"Training error minimized automatically",
"Ensures fair L1 penalty across all lag features",
"Intercept ignored"
],
"correctAnswerIndex": 2,
"explanation": "Standardization ensures features on different scales are penalized fairly."
},
{
"id": 56,
"questionText": "Scenario: Lasso Regression applied to dataset with one-hot encoded categorical features. Observation: some dummy variables zeroed. Effect?",
"options": [
"Intercept removed",
"Training error increased",
"Automatic feature selection among categories",
"All features retained"
],
"correctAnswerIndex": 2,
"explanation": "Lasso may zero less predictive categories, keeping only important levels."
},
{
"id": 57,
"questionText": "Scenario: Lasso Regression applied to high-dimensional dataset (features > samples). Advantage?",
"options": [
"Intercept removed",
"Cannot converge",
"Produces sparse model, eliminating irrelevant features",
"Training error minimal"
],
"correctAnswerIndex": 2,
"explanation": "L1 penalty zeros unimportant coefficients, which is effective in high-dimensional settings."
},
{
"id": 58,
"questionText": "Scenario: Lasso Regression applied with alpha too high. Observation?",
"options": [
"Training error minimized",
"High variance",
"Many coefficients zeroed, potential underfitting",
"Intercept ignored"
],
"correctAnswerIndex": 2,
"explanation": "Excessive regularization increases bias, underfitting the model."
},
{
"id": 59,
"questionText": "Scenario: Lasso Regression vs Ridge Regression. Observation: Lasso zeros some coefficients, Ridge does not. Implication?",
"options": [
"Both behave identically",
"Ridge produces sparse model",
"Lasso performs feature selection; Ridge does not",
"Alpha irrelevant"
],
"correctAnswerIndex": 2,
"explanation": "Lasso can zero out coefficients, effectively performing feature selection; Ridge shrinks coefficients but keeps all features."
},
{
"id": 60,
"questionText": "Scenario: Lasso Regression applied to dataset with noisy features. Observation: fewer non-zero coefficients than Ridge. Why?",
"options": [
"Intercept removed",
"Alpha irrelevant",
"L1 penalty zeroes less important coefficients, reducing variance",
"Training error minimized"
],
"correctAnswerIndex": 2,
"explanation": "Lasso induces sparsity and reduces variance by eliminating less predictive/noisy features."
},
{
"id": 61,
"questionText": "Scenario: Lasso Regression applied on dataset with highly correlated features. Observation: some features zeroed. Advantage?",
"options": [
"Increases variance",
"Fails to converge",
"Intercept removed",
"Reduces multicollinearity and simplifies model"
],
"correctAnswerIndex": 3,
"explanation": "Lasso selects one feature from correlated group, producing a simpler, less collinear model."
},
{
"id": 62,
"questionText": "Scenario: Lasso Regression applied to dataset with missing values. Observation: model cannot train. Recommended solution?",
"options": [
"Impute missing values before training",
"Increase alpha",
"Decrease alpha",
"Ignore missing values"
],
"correctAnswerIndex": 0,
"explanation": "Lasso requires complete data; missing values must be imputed or removed."
},
{
"id": 63,
"questionText": "Scenario: Lasso Regression applied to polynomial features of degree 10. Observation: high-degree term coefficients zeroed. Effect?",
"options": [
"Training error minimized",
"Low-degree terms ignored",
"Reduces overfitting and model complexity",
"Intercept removed"
],
"correctAnswerIndex": 2,
"explanation": "Lasso shrinks unimportant high-degree coefficients to zero, preventing overfitting."
},
{
"id": 64,
"questionText": "Scenario: Lasso Regression applied to a dataset with many irrelevant features. Observation: test error high. Recommended solution?",
"options": [
"Increase alpha excessively",
"Decrease alpha to zero",
"Ignore irrelevant features",
"Adjust alpha via cross-validation or use Elastic Net"
],
"correctAnswerIndex": 3,
"explanation": "Cross-validation tuning or Elastic Net helps remove irrelevant features and improves generalization."
},
{
"id": 65,
"questionText": "Scenario: Lasso Regression applied to standardized features. Observation: correlated feature coefficients?",
"options": [
"One selected, others zeroed",
"All coefficients equal",
"Intercept removed",
"Fails to converge"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty selects one correlated feature and zeros the others."
},
{
"id": 66,
"questionText": "Scenario: Lasso Regression applied to high-dimensional data (features > samples). Observation: model converges with sparse solution. Why?",
"options": [
"L1 penalty promotes sparsity, retaining only important features",
"Training error minimized",
"Intercept removed",
"Model underfits"
],
"correctAnswerIndex": 0,
"explanation": "Lasso shrinks less important coefficients to zero, allowing sparse solution even in high-dimensional datasets."
},
{
"id": 67,
"questionText": "Scenario: Lasso Regression applied to noisy features. Observation: model zeroed several noisy features. Effect?",
"options": [
"Reduces variance and improves generalization",
"Intercept removed",
"Training error minimized",
"Alpha irrelevant"
],
"correctAnswerIndex": 0,
"explanation": "Lasso zeros noisy features, reducing variance and improving generalization."
},
{
"id": 68,
"questionText": "Scenario: Lasso Regression applied to polynomial features. Observation: only low-degree coefficients retained. Reason?",
"options": [
"High-degree coefficients penalized and zeroed by L1",
"Training error minimized",
"Intercept removed",
"Low-degree terms ignored"
],
"correctAnswerIndex": 0,
"explanation": "Lasso penalizes high-degree coefficients, shrinking unimportant terms to zero."
},
{
"id": 69,
"questionText": "Scenario: Lasso Regression applied to dataset with one-hot encoded categorical features. Observation: some levels zeroed. Effect?",
"options": [
"Feature selection among categories",
"All levels retained",
"Intercept removed",
"Training error increased"
],
"correctAnswerIndex": 0,
"explanation": "Lasso may zero less predictive categories, retaining only important levels."
},
{
"id": 70,
"questionText": "Scenario: Lasso Regression applied with alpha too small. Observation?",
"options": [
"Few coefficients zeroed; model behaves like Linear Regression",
"Many coefficients zeroed",
"Intercept removed",
"Fails to converge"
],
"correctAnswerIndex": 0,
"explanation": "Small alpha provides weak L1 penalty; model retains most features with minimal sparsity."
},
{
"id": 71,
"questionText": "Scenario: Lasso Regression applied to dataset with highly correlated features. Observation: some features zeroed. Effect?",
"options": [
"Simplifies model and reduces multicollinearity",
"Increases variance",
"Intercept removed",
"Fails to converge"
],
"correctAnswerIndex": 0,
"explanation": "Lasso selects one feature from correlated groups, simplifying the model and reducing multicollinearity."
},
{
"id": 72,
"questionText": "Scenario: Lasso Regression applied with cross-validation on dataset with noisy features. Observation: alpha selected?",
"options": [
"Minimizes validation/test error, balances sparsity and generalization",
"Maximizes training error",
"Removes correlated features automatically",
"Intercept ignored"
],
"correctAnswerIndex": 0,
"explanation": "Cross-validation selects alpha for optimal bias-variance tradeoff."
},
{
"id": 73,
"questionText": "Scenario: Lasso Regression applied on polynomial features. Observation: high-degree coefficients zeroed. Reason?",
"options": [
"L1 penalty shrinks less important terms to zero",
"Intercept removed",
"Training error minimized",
"Low-degree terms ignored"
],
"correctAnswerIndex": 0,
"explanation": "Lasso controls overfitting by penalizing and zeroing high-degree terms."
},
{
"id": 74,
"questionText": "Scenario: Lasso Regression applied to high-dimensional dataset. Observation: sparse solution. Advantage?",
"options": [
"Improves interpretability and reduces overfitting",
"Training error minimized",
"Intercept removed",
"Model underfits always"
],
"correctAnswerIndex": 0,
"explanation": "Sparse solution keeps only important features, enhancing interpretability and generalization."
},
{
"id": 75,
"questionText": "Scenario: Lasso Regression applied with standardized features. Observation: correlated features handled by sparsity. Benefit?",
"options": [
"Simpler model with reduced multicollinearity",
"Training error minimized",
"Intercept removed",
"Fails to converge"
],
"correctAnswerIndex": 0,
"explanation": "Lasso selects only one feature from correlated groups, simplifying the model."
},
{
"id": 76,
"questionText": "Scenario: Lasso Regression applied to a genomics dataset with 20,000 features and 500 samples. Observation: only 150 features non-zero. Benefit?",
"options": [
"Reduces dimensionality and improves interpretability",
"Training error minimized",
"Intercept removed",
"All features retained"
],
"correctAnswerIndex": 0,
"explanation": "Lasso’s sparsity selects only the most predictive genes, reducing dimensionality and simplifying interpretation."
},
{
"id": 77,
"questionText": "Scenario: Lasso Regression applied on dataset with highly correlated financial indicators. Observation: only one indicator from each group retained. Advantage?",
"options": [
"Reduces multicollinearity and simplifies model",
"Increases variance",
"All features retained",
"Intercept removed"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty selects one feature from correlated groups, producing a simpler, less collinear model."
},
{
"id": 78,
"questionText": "Scenario: Lasso Regression applied with cross-validation. Observation: optimal alpha very high. Effect?",
"options": [
"Many coefficients zeroed, risk of underfitting",
"All coefficients retained",
"Intercept removed",
"Training error minimized"
],
"correctAnswerIndex": 0,
"explanation": "High alpha increases regularization, shrinking many coefficients to zero and potentially underfitting the model."
},
{
"id": 79,
"questionText": "Scenario: Lasso Regression applied to polynomial regression (degree 15). Observation: high-degree coefficients zeroed. Benefit?",
"options": [
"Controls overfitting and reduces model complexity",
"Training error minimized",
"Intercept removed",
"Low-degree terms ignored"
],
"correctAnswerIndex": 0,
"explanation": "Lasso penalizes high-degree terms, shrinking less important coefficients to zero and preventing overfitting."
},
{
"id": 80,
"questionText": "Scenario: Lasso Regression applied to dataset with missing values. Action required?",
"options": [
"Impute missing values before training",
"Ignore missing values",
"Decrease alpha",
"Increase alpha"
],
"correctAnswerIndex": 0,
"explanation": "Lasso cannot handle missing data; it must be imputed or removed first."
},
{
"id": 81,
"questionText": "Scenario: Lasso Regression applied on standardized one-hot encoded categorical features. Observation: some dummy variables zeroed. Effect?",
"options": [
"Feature selection among categories",
"All dummy variables retained",
"Intercept removed",
"Training error increased"
],
"correctAnswerIndex": 0,
"explanation": "Lasso may zero less predictive categories, retaining only important levels."
},
{
"id": 82,
"questionText": "Scenario: Lasso Regression applied to a noisy dataset. Observation: fewer non-zero coefficients than Ridge. Why?",
"options": [
"L1 penalty zeroes less important coefficients, reducing variance",
"Training error minimized",
"Intercept removed",
"Alpha irrelevant"
],
"correctAnswerIndex": 0,
"explanation": "Lasso induces sparsity, setting some coefficients to zero, which reduces variance in noisy datasets."
},
{
"id": 83,
"questionText": "Scenario: Lasso Regression vs Elastic Net. When is Elastic Net preferred?",
"options": [
"When correlated features need to be retained and feature selection is desired",
"Always",
"When features are uncorrelated",
"When alpha=0"
],
"correctAnswerIndex": 0,
"explanation": "Elastic Net combines L1 and L2 penalties, balancing sparsity and correlated feature retention."
},
{
"id": 84,
"questionText": "Scenario: Lasso Regression applied with very small alpha. Observation?",
"options": [
"Minimal feature selection; behaves like Linear Regression",
"Many coefficients zeroed",
"Intercept removed",
"Fails to converge"
],
"correctAnswerIndex": 0,
"explanation": "Small alpha provides weak regularization, retaining most coefficients and minimal sparsity."
},
{
"id": 85,
"questionText": "Scenario: Lasso Regression applied to polynomial regression with high-degree terms dominating. Observation: high-degree terms zeroed. Benefit?",
"options": [
"Reduces overfitting by removing less important terms",
"Training error minimized",
"Intercept removed",
"Low-degree terms ignored"
],
"correctAnswerIndex": 0,
"explanation": "Lasso penalizes high-degree coefficients, reducing overfitting and improving generalization."
},
{
"id": 86,
"questionText": "Scenario: Lasso Regression applied to a dataset with 10,000 features, 1,000 samples. Observation: sparse solution. Advantage?",
"options": [
"Improves interpretability and reduces computational cost",
"Training error minimized",
"Intercept removed",
"Fails to converge"
],
"correctAnswerIndex": 0,
"explanation": "Sparse solution retains only important features, improving interpretability and computational efficiency."
},
{
"id": 87,
"questionText": "Scenario: Lasso Regression applied to dataset with correlated features. Observation: some coefficients zeroed. Effect?",
"options": [
"Simplifies model and reduces multicollinearity",
"Increases variance",
"Intercept removed",
"Training error minimized"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty selects one feature from correlated groups, producing a simpler, more stable model."
},
{
"id": 88,
"questionText": "Scenario: Lasso Regression applied with cross-validation. Observation: alpha selected minimizes:",
"options": [
"Validation/test error",
"Training error",
"Number of features",
"Intercept value"
],
"correctAnswerIndex": 0,
"explanation": "Cross-validation selects alpha to balance bias-variance and achieve optimal generalization."
},
{
"id": 89,
"questionText": "Scenario: Lasso Regression applied on standardized features with highly correlated groups. Observation: some features zeroed. Reason?",
"options": [
"L1 penalty favors sparsity, selecting one feature from each correlated group",
"Intercept removed",
"Training error minimized",
"All coefficients retained"
],
"correctAnswerIndex": 0,
"explanation": "Lasso promotes sparsity, zeroing less important features in correlated groups."
},
{
"id": 90,
"questionText": "Scenario: Lasso Regression applied on dataset with polynomial features. Observation: only low-degree coefficients retained. Why?",
"options": [
"High-degree coefficients penalized and zeroed",
"Training error minimized",
"Intercept removed",
"Low-degree terms ignored"
],
"correctAnswerIndex": 0,
"explanation": "Lasso penalizes less important high-degree coefficients, preventing overfitting."
},
{
"id": 91,
"questionText": "Scenario: Lasso Regression applied to dataset with missing values. Observation: model fails. Action required?",
"options": [
"Impute missing values first",
"Ignore missing values",
"Decrease alpha",
"Increase alpha"
],
"correctAnswerIndex": 0,
"explanation": "Lasso requires complete data; missing values must be imputed or removed."
},
{
"id": 92,
"questionText": "Scenario: Lasso Regression applied with alpha very high. Observation: almost all coefficients zeroed. Risk?",
"options": [
"Underfitting due to excessive sparsity",
"Overfitting",
"Intercept removed",
"Training error minimized"
],
"correctAnswerIndex": 0,
"explanation": "Excessive alpha increases bias and underfits the model."
},
{
"id": 93,
"questionText": "Scenario: Lasso Regression applied to standardized one-hot encoded categorical variables. Observation: some levels zeroed. Effect?",
"options": [
"Simplifies categorical feature selection",
"Intercept removed",
"All levels retained",
"Training error increased"
],
"correctAnswerIndex": 0,
"explanation": "Lasso zeros less predictive categories, keeping only important levels."
},
{
"id": 94,
"questionText": "Scenario: Lasso Regression applied to dataset with 100 correlated features. Observation: only a few selected. Advantage?",
"options": [
"Reduces multicollinearity and simplifies interpretation",
"Increases variance",
"Intercept removed",
"Training error minimized"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty selects a subset of features from correlated groups, reducing complexity."
},
{
"id": 95,
"questionText": "Scenario: Lasso Regression applied to polynomial features of degree 20. Observation: only low-degree terms retained. Reason?",
"options": [
"High-degree terms penalized and zeroed",
"Intercept removed",
"Training error minimized",
"Low-degree terms ignored"
],
"correctAnswerIndex": 0,
"explanation": "L1 penalty zeroes less important high-degree coefficients to control overfitting."
},
{
"id": 96,
"questionText": "Scenario: Lasso Regression applied to high-dimensional sparse data. Observation: sparse solution obtained. Benefit?",
"options": [
"Interpretability and reduced computational cost",
"Training error minimized",
"Intercept removed",
"Model fails to converge"
],
"correctAnswerIndex": 0,
"explanation": "Sparse solution retains only relevant features, improving interpretability and efficiency."
},
{
"id": 97,
"questionText": "Scenario: Lasso Regression applied to dataset with noisy features. Observation: model zeroed some noisy features. Effect?",
"options": [
"Reduces variance and improves generalization",
"Intercept removed",
"Training error minimized",
"All features retained"
],
"correctAnswerIndex": 0,
"explanation": "Lasso removes less predictive/noisy features, reducing variance and improving generalization."
},
{
"id": 98,
"questionText": "Scenario: Lasso Regression applied with cross-validation. Observation: alpha selected reduces validation error. Advantage?",
"options": [
"Balances bias-variance tradeoff and improves generalization",
"Minimizes training error only",
"Removes correlated features automatically",
"Intercept removed"
],
"correctAnswerIndex": 0,
"explanation": "Cross-validation selects alpha that optimally balances bias and variance for unseen data."
},
{
"id": 99,
"questionText": "Scenario: Lasso Regression applied to one-hot encoded dataset with missing values. Recommended action?",
"options": [
"Impute missing values before training",
"Ignore missing values",
"Decrease alpha",
"Increase alpha"
],
"correctAnswerIndex": 0,
"explanation": "Lasso cannot handle missing data; it must be imputed or removed first."
},
{
"id": 100,
"questionText": "Scenario: Lasso Regression applied to dataset with high multicollinearity, noisy features, and many irrelevant variables. Best approach?",
"options": [
"Standardize features, tune alpha via cross-validation, consider Elastic Net if feature selection needed",
"Use Linear Regression",
"Ignore alpha",
"Remove L1 penalty"
],
"correctAnswerIndex": 0,
"explanation": "Standardization, cross-validated Lasso, or Elastic Net handles noise, multicollinearity, and feature selection effectively."
}
]
}