Contents

Introduction:

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.datasets import make_blobs  # samples_generator was removed from recent scikit-learn
X, y = make_blobs(n_samples=50, centers=2,
                  random_state=0, cluster_std=0.60)
xfit = np.linspace(-1, 3.5)
plt.scatter(X[:, 0], X[:, 1], c=y, s=50, cmap='autumn')
plt.plot([0.6], [2.1], 'x', color='red', markeredgewidth=2, markersize=10)
plt.xlim(-1, 3.5);
## (-1, 3.5)
for m, b in [(1, 0.65), (0.5, 1.6), (-0.2, 2.9)]:
    plt.plot(xfit, m * xfit + b, '-k')
plt.show()

First ideas

Which line is best? Consider the distance between the points and the hyperplane:

* We look for the separating line for which this distance (the margin) is largest.
* Mathematically, for the training data \(({\bf x_i}, y_i)\) (where \(y_i = 1\) marks group 1 and \(y_i = -1\) marks group 2): \(||\beta||^2 \to \min\), subject to \(y_i({\bf \beta^T x_i} + \beta_0) \ge 1\) (\(i = 1, \dots, n\)).
* The points satisfying the equation \(y_i({\bf \beta^T x_i} + \beta_0) = 1\) are the support vectors.
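
The margin width equals \(2/||\beta||\), which is why minimizing \(||\beta||^2\) maximizes the margin. Written out in full, the hard-margin problem reads:

\[
\min_{\beta,\,\beta_0} ||\beta||^2
\quad \text{subject to} \quad
y_i({\bf \beta^T x_i} + \beta_0) \ge 1, \qquad i = 1, \dots, n.
\]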

xfit = np.linspace(-1, 3.5)
plt.scatter(X[:, 0], X[:, 1], c=y, s=50, cmap='autumn')
for m, b, d in [(1, 0.65, 0.33), (0.5, 1.6, 0.55), (-0.2, 2.9, 0.2)]:
    yfit = m * xfit + b
    plt.plot(xfit, yfit, '-k')
    plt.fill_between(xfit, yfit - d, yfit + d, edgecolor='none', color='#AAAAAA', alpha=0.4)
plt.xlim(-1, 3.5);
## (-1, 3.5)
plt.show()

In Python:

from sklearn.svm import SVC # "Support vector classifier"
model = SVC(kernel='linear', C=1E10)
print(model)
## SVC(C=10000000000.0, cache_size=200, class_weight=None, coef0=0.0,
##   decision_function_shape='ovr', degree=3, gamma='auto', kernel='linear',
##   max_iter=-1, probability=False, random_state=None, shrinking=True,
##   tol=0.001, verbose=False)
model.fit(X, y)
## SVC(C=10000000000.0, cache_size=200, class_weight=None, coef0=0.0,
##   decision_function_shape='ovr', degree=3, gamma='auto', kernel='linear',
##   max_iter=-1, probability=False, random_state=None, shrinking=True,
##   tol=0.001, verbose=False)
model.support_vectors_
## array([[0.44359863, 3.11530945],
##        [2.33812285, 3.43116792],
##        [2.06156753, 1.96918596]])
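
The fitted model can then classify a new observation, for example the point marked with the red × in the first figure (a quick sketch; predict is the standard scikit-learn interface):

model.predict([[0.6, 2.1]])  # returns the predicted group label for the new point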

The model depends only on the support vectors: the remaining observations, which are not support vectors, do not influence the fitted boundary.
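
This can be checked directly: refitting on the support vectors alone reproduces the same hyperplane, up to numerical tolerance (a minimal sketch; support_ holds the training-set indices of the support vectors):

sv = model.support_  # indices of the support vectors
model_sv = SVC(kernel='linear', C=1E10).fit(X[sv], y[sv])
print(np.allclose(model.coef_, model_sv.coef_),
      np.allclose(model.intercept_, model_sv.intercept_))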

Soft margins
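
When the two groups overlap, no hyperplane can satisfy every constraint. The soft-margin formulation therefore introduces slack variables \(\xi_i \ge 0\) that let some points violate the margin, with the parameter C setting the price of each violation:

\[
\min_{\beta,\,\beta_0,\,\xi} ||\beta||^2 + C\sum_{i=1}^{n}\xi_i
\quad \text{subject to} \quad
y_i({\bf \beta^T x_i} + \beta_0) \ge 1 - \xi_i, \quad \xi_i \ge 0.
\]

A large C approaches the hard margin above, while a small C allows a wider, more tolerant margin, as the plots below illustrate.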

def plot_svc_decision_function(model, ax=None, plot_support=True):
    """Plot the decision function for a 2D SVC"""
    if ax is None:
        ax = plt.gca()
    xlim = ax.get_xlim()
    ylim = ax.get_ylim()
    
    # create grid to evaluate model
    x = np.linspace(xlim[0], xlim[1], 30)
    y = np.linspace(ylim[0], ylim[1], 30)
    Y, X = np.meshgrid(y, x)
    xy = np.vstack([X.ravel(), Y.ravel()]).T
    P = model.decision_function(xy).reshape(X.shape)
    
    # plot decision boundary and margins
    ax.contour(X, Y, P, colors='k',
               levels=[-1, 0, 1], alpha=0.5,
               linestyles=['--', '-', '--'])
    
    # plot support vectors
    if plot_support:
        ax.scatter(model.support_vectors_[:, 0],
                   model.support_vectors_[:, 1],
                   s=300, linewidth=1, facecolors='none',
                   edgecolors='black')  # explicit edge color keeps hollow markers visible
    ax.set_xlim(xlim)
    ax.set_ylim(ylim)

X, y = make_blobs(n_samples=100, centers=2,
                  random_state=0, cluster_std=0.8)
      
fig, ax = plt.subplots(1, 2, figsize=(16, 6))
fig.subplots_adjust(left=0.0625, right=0.95, wspace=0.1)

for axi, C in zip(ax, [10.0, 0.1]):
    model = SVC(kernel='linear', C=C).fit(X, y)
    axi.scatter(X[:, 0], X[:, 1], c=y, s=50, cmap='autumn')
    plot_svc_decision_function(model, axi)
    axi.set_title('C = {0:.1f}'.format(C), size=14)
plt.show()
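
The effect also shows up in the number of support vectors: the softer margin (smaller C) typically lets more points take part in determining the fit (a small sketch reusing the X, y and SVC from above):

for C in [10.0, 0.1]:
    model = SVC(kernel='linear', C=C).fit(X, y)
    print('C =', C, '-> support vectors:', len(model.support_vectors_))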