# SVM — train a linear support vector machine on synthetic 2-D data
# and visualize its decision boundary, margins, and support vectors.

# Step 0: Import the necessary libraries

import numpy as np

import matplotlib.pyplot as plt

from sklearn.datasets import make_classification

from sklearn.model_selection import train_test_split

from sklearn.preprocessing import StandardScaler

from sklearn.svm import SVC

 

# Step 1: Dataset

# Step 1: Generate a toy 2-D binary classification dataset.
# flip_y=0.1 injects ~10% label noise so the classes are not perfectly separable.
X, y = make_classification(n_samples=100, n_features=2,
                           n_redundant=0, n_clusters_per_class=1,
                           flip_y=0.1, random_state=0)

# Step 2: Hold out 20% of the samples for testing.
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.2,
                                                    random_state=42)

# Step 3: Standardize features to zero mean / unit variance.
# The scaler is fit on the training split only, to avoid test-set leakage.
scaler = StandardScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)

# Step 4: Linear SVM with default regularization strength C=1.0.
# (The scraped original used curly quotes — kernel=’linear’ — which is a
# SyntaxError; replaced with plain ASCII quotes.)
clf = SVC(kernel='linear', C=1.0)

# Step 5: Train the classifier on the scaled training data.
clf.fit(X_train_scaled, y_train)

 

# Step 6: Plot the decision boundary, the +/-1 margins, and the support vectors.
plt.figure(figsize=(10, 6))

# Plot the training points FIRST so the axis limits reflect the data.
# (The original read get_xlim()/get_ylim() from an empty axes, which returns
# the default (0, 1) range — the evaluation grid then missed most of the
# standardized data and the drawn boundary was wrong/clipped.)
plt.scatter(X_train_scaled[:, 0], X_train_scaled[:, 1],
            c=y_train, cmap=plt.cm.Paired, edgecolors='k')

ax = plt.gca()
xlim = ax.get_xlim()
ylim = ax.get_ylim()

# Build a 30x30 grid over the visible area and evaluate the decision function
# at every grid point. decision_function gives signed distance to the boundary.
xx = np.linspace(xlim[0], xlim[1], 30)
yy = np.linspace(ylim[0], ylim[1], 30)
YY, XX = np.meshgrid(yy, xx)
xy = np.vstack([XX.ravel(), YY.ravel()]).T
Z = clf.decision_function(xy).reshape(XX.shape)

# Contour at level 0 is the decision boundary; levels -1 and +1 are the margins.
# (Curly quotes and en-dashes from the web copy replaced with ASCII ones.)
ax.contour(XX, YY, Z, colors='k', levels=[-1, 0, 1],
           alpha=0.5, linestyles=['--', '-', '--'])

# Circle the support vectors — the points that define the margin.
ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1],
           s=100, linewidth=1, facecolors='none', edgecolors='k')

plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.title('SVM Decision Boundary with Support Vectors')
plt.show()

# (removed web-scrape artifact: "Scroll to Top" page-navigation text)