Parth Nipun Dave (ParthNipunDave), Ahmedabad
# EDA: BloodPressure -- cardinality, distribution, outliers, and relation to Outcome
# (assumes `data` is the Pima diabetes DataFrame loaded in the snippet below)
print('Number of unique values --> ', data['BloodPressure'].nunique())
sns.displot(data['BloodPressure'])
sns.boxplot(data=data, y='BloodPressure')
sns.boxplot(data=data, x='Outcome', y='BloodPressure')

# EDA: Glucose -- same checks as above
print('Number of unique values --> ', data['Glucose'].nunique())
sns.displot(data['Glucose'])
sns.boxplot(data=data, y='Glucose')
sns.boxplot(data=data, x='Outcome', y='Glucose')

# EDA: Pregnancies -- same checks as above
print('Number of unique values --> ', data['Pregnancies'].nunique())
sns.displot(data['Pregnancies'])
sns.boxplot(data=data, y='Pregnancies')
sns.boxplot(data=data, x='Outcome', y='Pregnancies')
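The three snippets above repeat the same per-column inspection. A minimal sketch of a reusable helper, assuming `data` is the Pima diabetes DataFrame loaded below and that seaborn/matplotlib are imported; the function name `explore_column` is hypothetical, not from the original gists:

import seaborn as sns
import matplotlib.pyplot as plt

def explore_column(df, column, target='Outcome'):
    """Print cardinality and draw distribution / outlier plots for one column."""
    print('Number of unique values --> ', df[column].nunique())
    sns.displot(df[column])            # overall distribution
    plt.show()
    fig, axes = plt.subplots(1, 2, figsize=(10, 4))
    sns.boxplot(data=df, y=column, ax=axes[0])            # outliers
    sns.boxplot(data=df, x=target, y=column, ax=axes[1])  # split by label
    plt.show()

for col in ['BloodPressure', 'Glucose', 'Pregnancies']:
    explore_column(data, col)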
# Load the Pima Indians Diabetes dataset and take a first look at it
data = pd.read_csv('../input/pima-indians-diabetes-database/diabetes.csv')
print(data)
print(data.describe())
data.info()  # .info() prints its own summary; wrapping it in print() only adds a stray "None"
print('Number of duplicate values --> ', data.duplicated().sum())
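Since SMOTE is imported in the next snippet, the balance of the binary `Outcome` label is worth checking up front. A short sketch, assuming `data` is the DataFrame loaded above:

# Class balance of the Outcome label (imbalance is presumably why SMOTE is imported below)
print(data['Outcome'].value_counts())
print(data['Outcome'].value_counts(normalize=True))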
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report, recall_score
from imblearn.over_sampling import SMOTE
from keras.layers import Dense, Dropout
from keras.models import Sequential
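The imports above outline a split / oversample / classify workflow on the diabetes data. A minimal sketch of how the scikit-learn and imblearn pieces could fit together; the modelling choices here are illustrative assumptions, not taken from the gists, and the Keras Dense/Dropout/Sequential imports suggest an additional neural-network model that is not sketched:

# Features and label from the diabetes DataFrame loaded earlier
X = data.drop(columns=['Outcome'])
y = data['Outcome']

# Hold out a stratified test set before any resampling
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, stratify=y, random_state=42)

# Oversample only the training split with SMOTE
X_train_res, y_train_res = SMOTE(random_state=42).fit_resample(X_train, y_train)

# Baseline random-forest classifier on the balanced training data
clf = RandomForestClassifier(n_estimators=200, random_state=42)
clf.fit(X_train_res, y_train_res)

y_pred = clf.predict(X_test)
print(classification_report(y_test, y_pred))
print('Recall:', recall_score(y_test, y_pred))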
# Pretrained ResNet50 base (ImageNet weights, no classification head) used as a frozen feature extractor
base_model = ResNet50(input_shape=(255, 255, 3), include_top=False, weights='imagenet', pooling='max')
for layer in base_model.layers:
    layer.trainable = False  # freeze the pretrained weights
# New classification head: a single softmax layer for the 80 animal classes
model = Sequential()
model.add(base_model)
model.add(Dense(80, activation='softmax'))
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.0001),  # `lr` is deprecated in favour of `learning_rate`
              loss='categorical_crossentropy', metrics=['acc'])
model.summary()
# Train on the generators defined in the snippet below
history = model.fit(train_data, validation_data=valid_data, steps_per_epoch=len(train_data), epochs=10)
# Image augmentation plus a 70/30 train/validation split from a single directory
batch_size = 8
data_gen = ImageDataGenerator(validation_split=0.3, zoom_range=0.3, rescale=1./255, horizontal_flip=True)
train_data = data_gen.flow_from_directory('../input/animals-detection-images-dataset/train', target_size=(255, 255), subset='training', shuffle=True, batch_size=batch_size)
valid_data = data_gen.flow_from_directory('../input/animals-detection-images-dataset/train', target_size=(255, 255), subset='validation', shuffle=True, batch_size=batch_size)
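A quick optional sanity check on the generators, using standard Keras DirectoryIterator attributes; these print statements are illustrative, not part of the original gist:

print(len(train_data.class_indices), 'classes found')  # should match the 80-way softmax head
images, labels = next(train_data)                       # one augmented batch
print(images.shape, labels.shape)                       # e.g. (8, 255, 255, 3) and (8, num_classes)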
# Imports for the ResNet50 transfer-learning snippets above
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications import ResNet50
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, GlobalAveragePooling2D
import tensorflow as tf
import matplotlib.pyplot as plt
# Learning curves for the ResNet50 training run above
plt.plot(history.history["loss"], label="train")
plt.plot(history.history["val_loss"], label="val")
plt.title("Training Loss and Validation Loss")
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.legend()
plt.show()
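Since the model was compiled with metrics=['acc'], the same history object also carries accuracy values. A small optional sketch; the history key names depend on the metric string passed to compile:

plt.plot(history.history["acc"], label="train")
plt.plot(history.history["val_acc"], label="val")
plt.title("Training Accuracy and Validation Accuracy")
plt.xlabel("Epoch")
plt.ylabel("Accuracy")
plt.legend()
plt.show()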