# BVH — breast-cancer (Wisconsin diagnostic) classification script
# Standard library
import datetime as dt
import warnings

# Third-party
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
import sklearn.metrics as metrics
import tensorflow as tf
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler, normalize
from sklearn.svm import SVC

# for visualization (Jupyter magic — only valid inside a notebook)
%matplotlib inline

# Silence warnings for cleaner notebook output
warnings.simplefilter(action="ignore")
# =============================================================
def show_data(data):
    """Display a seaborn pairplot of all feature pairs, coloured by diagnosis.

    Parameters
    ----------
    data : pandas.DataFrame
        Cleaned dataset; must contain a 'diagnosis' column used as the hue.

    Side effects: opens a matplotlib window (or renders inline in a notebook).
    """
    sns.pairplot(data, hue="diagnosis")
    plt.show()
# =============================================================
def preprocessing_data():
    """Load, clean, split, standardize the breast-cancer data and fit an SVC.

    Steps: read 'Breast.csv', drop non-predictive columns, report missing
    values, split into train/test, standardize features (fit on train only),
    and train a support-vector classifier.

    Returns
    -------
    tuple
        (svc, X_test, y_test) — the fitted classifier and the held-out,
        scaled test features/labels so callers can evaluate the model.
        (The original returned None; returning these is backward compatible.)
    """
    # --- data collection ---
    # Load the Wisconsin breast-cancer dataset (the original comment
    # incorrectly said "Iris").
    data = pd.read_csv('Breast.csv')
    print(data.head())
    print(data.describe())

    # --- data cleaning ---
    data.drop('id', axis=1, inplace=True)           # id carries no predictive signal
    data.drop('Unnamed: 32', axis=1, inplace=True)  # empty trailing column from the CSV
    print(data.isna().sum())  # checking for missing values

    # Optional visual exploration:
    # show_data(data)

    # --- train/test split ---
    # The original referenced undefined `train`/`test`; create them here.
    # After dropping 'id'/'Unnamed: 32', every remaining column except the
    # label is a feature (the 30 *_mean/*_se/*_worst measurements).
    feature_columns = [col for col in data.columns if col != 'diagnosis']
    train, test = train_test_split(data, test_size=0.25, random_state=42)

    X_train = train[feature_columns]  # training data features
    y_train = train.diagnosis         # output of our training data
    X_test = test[feature_columns]    # test data features
    y_test = test.diagnosis           # test labels

    # --- data transformation ---
    # Standardize: fit the scaler on the training set only, then apply the
    # same transform to the test set to avoid information leakage.
    sc = StandardScaler()
    X_train = sc.fit_transform(X_train)
    X_test = sc.transform(X_test)

    # --- model fitting ---
    # The original called svc.fit on an undefined name; instantiate it first.
    svc = SVC()
    svc.fit(X_train, y_train)
    return svc, X_test, y_test