TPCV1+2+3+4.ipynb - Colaboratory
DOUAA KHILA 3 ID
TP1
Requirements
import cv2
import os
import imageio
import matplotlib.pyplot as plt
import random
import shutil
import pandas as pd
import numpy as np
from fastai.vision.all import *
# Mount Google Drive (the call that produced the output below)
from google.colab import drive
drive.mount('/content/drive')

Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
Visualize pictures
path = "/content/drive/MyDrive/tpcv/Db2_b"
images = []
# Load every .tif image from the dataset folder (loading loop reconstructed)
for filename in os.listdir(path):
    if filename.lower().endswith(".tif"):
        img = cv2.imread(os.path.join(path, filename))
        if img is not None:
            images.append(img)

def display_images(images):
    plt.figure(figsize=(15, 5))
    for i, img in enumerate(images):
        plt.subplot(1, len(images), i+1)
        plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
        plt.axis('off')
    plt.show()

# Display images
display_images(images[1:6])
# Specify the input folder (assumed to be the Db2_b folder) and the path where you want to save the split dataset
input_folder = "/content/drive/MyDrive/tpcv/Db2_b"
output_folder = "/content/drive/MyDrive/tpcv/data"
train_folder = os.path.join(output_folder, "train")
test_folder = os.path.join(output_folder, "test")
# Define the desired split ratio (e.g., 80% train, 20% test)
split_ratio = 0.8  # 80% train, 20% test
# List all files in the input folder (assumes all files are images)
all_files = os.listdir(input_folder)
random.shuffle(all_files)
split_index = int(len(all_files) * split_ratio)
os.makedirs(train_folder, exist_ok=True)
os.makedirs(test_folder, exist_ok=True)
for f in all_files[:split_index]:
    shutil.copy(os.path.join(input_folder, f), train_folder)
for f in all_files[split_index:]:
    shutil.copy(os.path.join(input_folder, f), test_folder)
dataset summary
import os
import matplotlib.pyplot as plt
from PIL import Image

image_widths, image_heights = [], []
for filename in os.listdir(path):  # collect width/height of every dataset image (loop reconstructed)
    with Image.open(os.path.join(path, filename)) as img:
        image_widths.append(img.size[0])
        image_heights.append(img.size[1])

plt.subplot(1, 2, 1)
plt.hist(image_widths, bins=20, edgecolor='k', alpha=0.7)
plt.title("Image Widths")
plt.xlabel("Width (pixels)")
plt.ylabel("Frequency")

plt.subplot(1, 2, 2)
plt.hist(image_heights, bins=20, edgecolor='k', alpha=0.7)
plt.title("Image Heights")
plt.xlabel("Height (pixels)")
plt.ylabel("Frequency")
plt.tight_layout()
plt.show()
filename label
0 101_6 1
1 107_4 1
2 105_5 1
3 107_7 1
4 109_1 1
filename label
0 107_3 1
1 106_2 1
2 101_2 1
3 105_7 1
4 106_5 1
print('train:', len(train))
print('test:', len(test))
train: 80
test: 79
Binarization of pictures
import os
import cv2
import matplotlib.pyplot as plt
threshold_value = 128
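The convert_to_binary helper used in the next cell does not appear in the export; a minimal sketch, assuming a simple global threshold at threshold_value:

def convert_to_binary(image):
    # Grayscale, then global threshold at threshold_value (assumed behaviour)
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    _, binary = cv2.threshold(gray, threshold_value, 255, cv2.THRESH_BINARY)
    return binary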
import os
import cv2
import matplotlib.pyplot as plt

# Binarize every training image and keep (original, binary) pairs (loop reconstructed; folder assumed)
processed_images = []
for filename in os.listdir(train_folder):
    image = cv2.imread(os.path.join(train_folder, filename))
    if image is None:
        continue
    # Binarize the image using the convert_to_binary function defined above
    binary_image = convert_to_binary(image)
    processed_images.append((image, binary_image))

# Display the original and binarized images for the first three examples
for i in range(min(3, len(processed_images))):
    original_image = cv2.cvtColor(processed_images[i][0], cv2.COLOR_BGR2RGB)
    binary_image = processed_images[i][1]
    plt.figure(figsize=(12, 6))
    plt.subplot(1, 2, 1)
    plt.title('Before Binarization')
    plt.imshow(original_image)
    plt.axis('off')
    plt.subplot(1, 2, 2)
    plt.title('After Binarization')
    plt.imshow(binary_image, cmap='gray')
    plt.axis('off')
    plt.show()
IMG_PATH = "/content/drive/MyDrive/tpcv/data/train/101_7.tif"
imgArray = cv2.imread(IMG_PATH)
plt.imshow(imgArray)
plt.show()
imgArray.shape
(364, 256, 3)
np.random.seed(42)
# data loader
data = ImageDataLoaders.from_folder(
    path,
    train=".",
    valid_pct=0.2,
    item_tfms=RandomResizedCrop(512, min_scale=0.75),
    bs=32,
    batch_tfms=[*aug_transforms(size=256, max_warp=0), Normalize.from_stats(*imagenet_stats)],
    num_workers=0
)
data.show_batch(nrows=3, figsize=(7,8))
convertedArray = cv2.cvtColor(imgArray, cv2.COLOR_BGR2RGB)
plt.subplots(figsize=(15, 10))
plt.imshow(convertedArray)
plt.show()
fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(15, 4))
plt.tight_layout()
plt.show()
plt.xlabel("X")
plt.legend()
plt.show()
TP2
Filters
import cv2
import os

pathB = "/content/drive/MyDrive/tpcv/BONNE"
imagesB = []
# Load every .tif image from the folder (loop reconstructed from the pattern used later in the notebook)
for filename in os.listdir(pathB):
    if filename.lower().endswith(".tif"):
        imagesB.append(cv2.imread(os.path.join(pathB, filename)))

import cv2
import os

pathM = "/content/drive/MyDrive/tpcv/MAUVAISE"
imagesM = []
for filename in os.listdir(pathM):
    if filename.lower().endswith(".tif"):
        imagesM.append(cv2.imread(os.path.join(pathM, filename)))
Bad-quality images (MAUVAISE)
display_images(imagesM)
Good-quality images (BONNE)
display_images(imagesB)
Averaging (mean) filter
(5,5)
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
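The body of these cells was lost in the export; a hedged sketch of how a 5x5 averaging filter can be applied to the loaded images with OpenCV (the target list imagesM is an assumption, and only the kernel size changes for the (3,3) run below):

# Apply a 5x5 averaging (mean) filter to every bad-quality image
mean_filtered = [cv2.blur(img, (5, 5)) for img in imagesM]
display_images(mean_filtered[:5])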
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
(3,3)
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
Gaussian filter
(5,5) 0
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
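Only the imports of the Gaussian cells survived the export; a sketch of the filtering step, where the labels above the cells give the kernel size (5,5) and the sigma values 0, 0.25, 3.5 and 15 being compared (imagesM is again assumed as the input list):

# sigma = 0 lets OpenCV derive sigma from the kernel size; larger sigmas blur more
for sigma in (0, 0.25, 3.5, 15):
    gaussian_filtered = [cv2.GaussianBlur(img, (5, 5), sigma) for img in imagesM]
    display_images(gaussian_filtered[:5])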
(5,5) 0.25
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
(5,5) 3.5
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
(5,5) 15
import cv2
import os
import matplotlib.pyplot as plt

filtered_images = []
# Loop over every image file in the folder
for nom_fichier in os.listdir(pathM):
    chemin_image = os.path.join(pathM, nom_fichier)
    # Check that the file is an image (here, .tif format)
    if nom_fichier.lower().endswith((".tif")):
        # Load the image and apply the Gaussian filter (5x5 kernel, sigma = 15,
        # reconstructed from the heading above)
        image = cv2.imread(chemin_image)
        filtered_images.append(cv2.GaussianBlur(image, (5, 5), 15))

display_images(filtered_images)
Bilateral filter
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
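The bilateral cells also lost their bodies; a sketch with assumed parameters (diameter 9, sigmaColor and sigmaSpace 75), which smooths noise while preserving ridge edges:

# Edge-preserving smoothing; the three parameter values are assumptions, not the notebook's
bilateral_filtered = [cv2.bilateralFilter(img, 9, 75, 75) for img in imagesM]
display_images(bilateral_filtered[:5])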
Median filter
kernel 5
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
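A sketch of the median filtering step; the headings give the kernel sizes 5, 7 and 3 that are compared (the input list imagesM is assumed):

for ksize in (5, 7, 3):
    median_filtered = [cv2.medianBlur(img, ksize) for img in imagesM]
    display_images(median_filtered[:5])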
kernel 7
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
kernel 3
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
import cv2
import os
import matplotlib.pyplot as plt
plt.show()
import cv2
import os
import numpy as np
import matplotlib.pyplot as plt
plt.show()
FOURIER TRANSFORM
import cv2
import numpy as np
import matplotlib.pyplot as plt

# image_gray: grayscale version of the image loaded earlier (spectrum computation reconstructed)
f = np.fft.fft2(image_gray)
fshift = np.fft.fftshift(f)
magnitude_spectrum = 20 * np.log(np.abs(fshift) + 1)

plt.subplot(122)
plt.imshow(magnitude_spectrum, cmap='gray')
plt.title("Magnitude Spectrum (Fourier Transform)")
plt.axis('off')
plt.show()
rows, cols = image_gray.shape
crow,ccol = rows//2 , cols//2
f = np.fft.fft2(image_gray)
fshift = np.fft.fftshift(f)
fshift[crow-30:crow+31, ccol-30:ccol+31] = 0
f_ishift = np.fft.ifftshift(fshift)
img_back = np.fft.ifft2(f_ishift)
img_back = np.real(img_back)
plt.subplot(131),plt.imshow(imaget, cmap = 'gray')
plt.title('Input Image'), plt.xticks([]), plt.yticks([])
plt.subplot(132),plt.imshow(img_back, cmap = 'gray')
plt.title('Image after HPF'), plt.xticks([]), plt.yticks([])
plt.subplot(133),plt.imshow(img_back)
plt.title('Result in JET'), plt.xticks([]), plt.yticks([])
plt.show()
import numpy as np
import matplotlib.pyplot as plt
from skimage.io import imread, imshow
from skimage.color import rgb2hsv, rgb2gray, rgb2yuv
from skimage import color, exposure, transform
from skimage.exposure import equalize_hist
dark_image_grey = rgb2gray(imaget)
plt.figure(num=None, figsize=(8, 6), dpi=80)
plt.imshow(dark_image_grey, cmap='gray');
dark_image_grey_fourier = np.fft.fftshift(np.fft.fft2(dark_image_grey))
plt.figure(num=None, figsize=(8, 6), dpi=80)
plt.imshow(np.log(abs(dark_image_grey_fourier)), cmap='gray');
fourier_masker_ver(imaget, 1)
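fourier_masker_ver is called above but its definition did not survive the export; a hedged sketch of a vertical-frequency masker consistent with this call (the band width and the display layout are assumptions):

def fourier_masker_ver(image, i):
    # Zero out a vertical band of the centred spectrum (sparing the DC term),
    # then reconstruct the image with the inverse FFT
    f = np.fft.fftshift(np.fft.fft2(rgb2gray(image)))
    rows, cols = f.shape
    f[: rows // 2 - 10, cols // 2 - 2 : cols // 2 + 2] = i
    f[rows // 2 + 10 :, cols // 2 - 2 : cols // 2 + 2] = i
    fig, ax = plt.subplots(1, 2, figsize=(12, 6))
    ax[0].imshow(np.log(abs(f)), cmap='gray')
    ax[0].set_title('Masked Fourier Spectrum')
    ax[1].imshow(abs(np.fft.ifft2(np.fft.ifftshift(f))), cmap='gray')
    ax[1].set_title('Reconstructed Image')
    plt.show()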
High-pass filter
import cv2
import numpy as np
import matplotlib.pyplot as plt

# Load the image
image = cv2.imread(path_to_image)
image_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
rows, cols = image_gray.shape
crow, ccol = rows // 2, cols // 2

# Build a mask with 1s in the high-frequency region and 0s in the low-frequency region
mask = np.ones((rows, cols), np.uint8)
mask[crow - 30:crow + 30, ccol - 30:ccol + 30] = 0

# Apply the mask in the frequency domain and transform back (reconstructed)
fshift = np.fft.fftshift(np.fft.fft2(image_gray)) * mask
image_filtered = np.abs(np.fft.ifft2(np.fft.ifftshift(fshift)))
plt.imshow(image_filtered, cmap='gray')
plt.show()
import cv2
import numpy as np
import matplotlib.pyplot as plt

# Load the image
image = cv2.imread(path_to_image)

# Convert the image to grayscale if it is not already grayscale
if len(image.shape) == 3:
    image_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
else:
    image_gray = image

# Apply the low-pass filter by removing the high frequencies (keeping only the low frequencies)
rows, cols = image_gray.shape
crow, ccol = rows // 2, cols // 2  # Centre of the spectrum
mask = np.zeros((rows, cols), np.uint8)
mask[crow - 30:crow + 30, ccol - 30:ccol + 30] = 1  # Keep a square region of low frequencies

# Frequency-domain filtering step (reconstructed)
fshift = np.fft.fftshift(np.fft.fft2(image_gray)) * mask
image_filtered = np.abs(np.fft.ifft2(np.fft.ifftshift(fshift)))

plt.subplot(122)
plt.imshow(image_filtered, cmap='gray')
plt.title("Filtered Image (Low-Pass)")
plt.axis('off')
plt.show()
Laplacian filter
import cv2
import numpy as np
import matplotlib.pyplot as plt

# Load the image
image = cv2.imread(path_to_image)

# Convert the image to grayscale if it is not already grayscale
if len(image.shape) == 3:
    image_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
else:
    image_gray = image

# Apply the Laplacian filter (second-derivative edge detector, call reconstructed)
image_laplacian = np.abs(cv2.Laplacian(image_gray, cv2.CV_64F))

plt.subplot(122)
plt.imshow(image_laplacian, cmap='gray')
plt.title("Filtered Image (Laplacian)")
plt.axis('off')
plt.show()
Gaussian filter
import cv2
import numpy as np
import matplotlib.pyplot as plt

# Load the image
image = cv2.imread(path_to_image)

# Convert the image to grayscale if it is not already grayscale
if len(image.shape) == 3:
    image_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
else:
    image_gray = image

# Apply a Gaussian smoothing filter (kernel size assumed)
image_filtered = cv2.GaussianBlur(image_gray, (5, 5), 0)

plt.subplot(122)
plt.imshow(image_filtered, cmap='gray')
plt.title("Filtered Image (Gaussian)")
plt.axis('off')
plt.show()
compare filters
import cv2
import numpy as np
import matplotlib.pyplot as plt
# Define the threshold values for the high-pass and low-pass filters
high_pass_threshold = 10 # Adjust the threshold for the high-pass filter
low_pass_threshold = 50 # Adjust the threshold for the low-pass filter
plt.subplot(131)
plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
plt.title("Original Image")
plt.axis('off')
plt.subplot(132)
plt.imshow(image_high_pass, cmap='gray')
plt.title("High-Pass Filtered Image")
plt.axis('off')
plt.subplot(133)
plt.imshow(image_low_pass, cmap='gray')
plt.title("Low-Pass Filtered Image")
plt.axis('off')
plt.show()
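The cell above defines the two cutoff values but the filtering itself is missing from the export; a sketch that interprets them as frequency-domain radii (an assumption) to build image_high_pass and image_low_pass:

gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
fshift = np.fft.fftshift(np.fft.fft2(gray))
rows, cols = gray.shape
crow, ccol = rows // 2, cols // 2
y, x = np.ogrid[:rows, :cols]
dist = np.sqrt((y - crow) ** 2 + (x - ccol) ** 2)

# High-pass: suppress frequencies closer than high_pass_threshold to the centre
image_high_pass = np.abs(np.fft.ifft2(np.fft.ifftshift(fshift * (dist > high_pass_threshold))))
# Low-pass: keep only frequencies closer than low_pass_threshold to the centre
image_low_pass = np.abs(np.fft.ifft2(np.fft.ifftshift(fshift * (dist <= low_pass_threshold))))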
Thresholding
import numpy as np
import matplotlib.pyplot as plt
# Assuming you have loaded your image into the variable image
THRESHOLD1 = image.mean()
vertical_robert_filter = np.array([[1,0],[0,-1]])
horizontal_robert_filter = np.array([[0,1],[-1,0]])
vertical_sobel_filter = np.array([[-1,0,1],[-2,0,2],[-1,0,1]])
horizontal_sobel_filter = np.array([[-1,-2,-1],[0,0,0],[1,2,1]])
vertical_prewitt_filter = np.array([[-1,0,1],[-1,0,1],[-1,0,1]])
horizontal_prewitt_filter = np.array([[-1,-1,-1],[0,0,0],[1,1,1]])
vertical prewitt filter:
[[-1 0 1]
 [-1 0 1]
 [-1 0 1]]
horizontal prewitt filter:
[[-1 -1 -1]
 [ 0 0 0]
 [ 1 1 1]]
convolved_img1 = convolve(gray_img,vertical_robert_filter)
convolved_img1 = convolve(convolved_img1,horizontal_robert_filter)
convolved_img2 = convolve(gray_img,vertical_sobel_filter)
convolved_img2 = convolve(convolved_img2,horizontal_sobel_filter)
convolved_img3 = convolve(gray_img,vertical_prewitt_filter)
convolved_img3 = convolve(convolved_img3,horizontal_prewitt_filter)
<ipython-input-214-b9823d611340>:1: DeprecationWarning: Please use `convolve` from the `scipy.ndimage` namespace, the `scipy.ndimag
from scipy.ndimage.filters import convolve
TP3
segmentation
def imageHist(image):
    _, axis = plt.subplots(ncols=2, figsize=(12, 3))
    if (image.ndim == 2):
        # Grayscale image
        axis[0].imshow(image, cmap=plt.get_cmap('gray'))
        axis[1].set_title('Histogram')
        axis[0].set_title('Grayscale Image')
        hist = exposure.histogram(image)
        axis[1].plot(hist[0])
    else:
        # Color image
        axis[0].imshow(image, cmap='gray')
        axis[1].set_title('Histogram')
        axis[0].set_title('Colored Image')
        rgbcolors = ['red', 'green', 'blue']
        for i, mycolor in enumerate(rgbcolors):
            axis[1].plot(exposure.histogram(image[...,i])[0], color=mycolor)
imageHist(image)
import glob
fig, ax = plt.subplots()
plt.imshow(gray_shapes, cmap="gray")
<matplotlib.image.AxesImage at 0x7fe55449f4c0>
fig, ax = plt.subplots()
plt.plot(bin_edges[0:-1], histogram)
plt.title("Grayscale Histogram")
plt.xlabel("grayscale value")
plt.ylabel("pixels")
plt.xlim(0, 1.0)
(0.0, 1.0)
fig, ax = plt.subplots()
plt.imshow(binary_mask, cmap="gray")
<matplotlib.image.AxesImage at 0x7fe55437ecb0>
Automatic thresholding (Otsu)
fig, ax = plt.subplots()
plt.imshow(otsuimg , cmap="gray")
<matplotlib.image.AxesImage at 0x7fe54cbfd780>
(0.0, 1.0)
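The Otsu computation itself is not visible in the export; a minimal sketch, assuming the grayscale image gray_shapes used above:

from skimage.filters import threshold_otsu

t_otsu = threshold_otsu(gray_shapes)   # automatic global threshold
otsuimg = gray_shapes > t_otsu         # binary result displayed above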
Sauvola thresholding
import numpy as np
import cv2
from skimage import io
from skimage.filters import threshold_sauvola
from skimage.color import rgb2gray
from google.colab.patches import cv2_imshow
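The rest of the Sauvola cell fell on the missing part of the page; a sketch of the usual skimage call (window size, k and the image_path variable are assumptions):

image = io.imread(image_path)
gray = rgb2gray(image) if image.ndim == 3 else image
t_sauvola = threshold_sauvola(gray, window_size=25, k=0.2)
binary_sauvola = gray > t_sauvola
cv2_imshow((binary_sauvola * 255).astype(np.uint8))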
Wolf thresholding
import numpy as np
import cv2
from skimage import io
from skimage.filters import threshold_local
from skimage.color import rgb2gray
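skimage has no built-in Wolf threshold (the cell imports threshold_local); a hedged sketch of the Wolf-Jolion threshold built from the local mean and standard deviation (window size, k and image_path are assumptions):

from skimage.util import img_as_float

def threshold_wolf(image, window_size=25, k=0.5):
    image = img_as_float(image)
    # Local mean and standard deviation over a square window
    mean = cv2.boxFilter(image, ddepth=-1, ksize=(window_size, window_size))
    sqmean = cv2.boxFilter(image * image, ddepth=-1, ksize=(window_size, window_size))
    std = np.sqrt(np.maximum(sqmean - mean ** 2, 0))
    R = std.max()    # maximum local standard deviation
    M = image.min()  # minimum grey level of the image
    # Wolf & Jolion threshold surface
    return mean - k * (1 - std / R) * (mean - M)

gray = rgb2gray(io.imread(image_path))
binary_wolf = gray > threshold_wolf(gray)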
(0.0, 1.0)
Niblack thresholding
import numpy as np
import cv2
from skimage import io
from skimage.filters import threshold_niblack
from skimage.color import rgb2gray
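Same pattern for Niblack, which skimage does provide (window size, k and image_path are assumptions):

gray = rgb2gray(io.imread(image_path))
t_niblack = threshold_niblack(gray, window_size=25, k=0.8)
binary_niblack = gray > t_niblack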
(0.0, 1.0)
k-means
k=3
import cv2
import numpy as np
from sklearn.cluster import KMeans
/usr/local/lib/python3.10/dist-packages/sklearn/cluster/_kmeans.py:870: FutureWarning
warnings.warn(
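The clustering code itself is not visible; a sketch of colour-based k-means segmentation with k = 3 (the later runs only change n_clusters to 7 and 22; reusing IMG_PATH from TP1 is an assumption):

img = cv2.imread(IMG_PATH)
pixels = img.reshape(-1, 3).astype(np.float32)  # one row per pixel, BGR values as features

kmeans = KMeans(n_clusters=3, n_init=10, random_state=0).fit(pixels)
# Replace every pixel by the centre of its cluster to obtain the segmented image
segmented = kmeans.cluster_centers_[kmeans.labels_].reshape(img.shape).astype(np.uint8)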
import cv2
import numpy as np
from sklearn.cluster import KMeans
/usr/local/lib/python3.10/dist-packages/sklearn/cluster/_kmeans.py:870: FutureWarning
warnings.warn(
k=7
k=22
Gradient
import cv2
import numpy as np
import matplotlib.pyplot as plt

# image is assumed to be a grayscale uint8 image loaded earlier; the operator calls are reconstructed
sobel_x = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=3)
sobel_y = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=3)
prewitt_kernel_x = np.array([[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]], dtype=np.float32)
prewitt_kernel_y = np.array([[-1, -1, -1], [0, 0, 0], [1, 1, 1]], dtype=np.float32)
prewitt_x = cv2.filter2D(image, cv2.CV_64F, prewitt_kernel_x)
prewitt_y = cv2.filter2D(image, cv2.CV_64F, prewitt_kernel_y)
canny = cv2.Canny(image, 100, 200)  # Canny thresholds assumed

plt.figure(figsize=(12, 8))
plt.subplot(2, 3, 1)
plt.imshow(image, cmap='gray')
plt.title('Original Image')
plt.axis('off')
plt.subplot(2, 3, 2)
plt.imshow(np.abs(sobel_x), cmap='gray')
plt.title('Sobel X')
plt.axis('off')
plt.subplot(2, 3, 3)
plt.imshow(np.abs(sobel_y), cmap='gray')
plt.title('Sobel Y')
plt.axis('off')
plt.subplot(2, 3, 4)
plt.imshow(np.abs(prewitt_x), cmap='gray')
plt.title('Prewitt X')
plt.axis('off')
plt.subplot(2, 3, 5)
plt.imshow(np.abs(prewitt_y), cmap='gray')
plt.title('Prewitt Y')
plt.axis('off')
plt.subplot(2, 3, 6)
plt.imshow(canny, cmap='gray')
plt.title('Canny')
plt.axis('off')
plt.tight_layout()
plt.show()
# Structuring element for the morphological operations (size assumed)
kernel = np.ones((3, 3), np.uint8)

# Erosion
erosion = cv2.erode(canny, kernel, iterations=1)
# Dilation
dilation = cv2.dilate(canny, kernel, iterations=1)
# Detect contours
contours, hierarchy = cv2.findContours(dilation, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
print(contours[0])
[[[138 356]]
 [[137 357]]
 [[136 357]]
 [[135 357]]
 [[134 357]]
 [[134 358]]
 [[134 359]]
 [[134 360]]
 [[134 361]]
 ...
 [[144 361]]
 [[144 360]]
 [[144 359]]
 [[144 358]]
 [[144 357]]
 [[143 357]]
 [[142 356]]
 [[141 356]]
 [[140 356]]
 [[139 356]]]
import cv2
from google.colab.patches import cv2_imshow
import numpy as np
True
#fingerprint
import cv2
from PIL import ImageOps
from google.colab.patches import cv2_imshow
import matplotlib.pyplot as plt
import skimage
from skimage import measure, morphology
from skimage.color import label2rgb
from skimage.measure import regionprops
from PIL import Image, ImageDraw, ImageOps
img = cv2.imread('/content/final_image.png', 0)
img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY)[1]
# Extract Blobs
blobs = img > img.mean()
blobs_labels = measure.label(blobs, background=1)
image_label_overlay = label2rgb(blobs_labels, image=img,bg_label=0)
total_area = 0
counter = 0
average = 0.0
for region in regionprops(blobs_labels):
    if region.area > 70:
        total_area = total_area + region.area
        counter = counter + 1
# Threshold
average = (total_area/counter)
a4_constant = ((average/100.0)*250.0)+100
b = morphology.remove_small_objects(blobs_labels, a4_constant)
plt.imsave('pre_version.png', b)
#signature
import cv2
from google.colab.patches import cv2_imshow
import matplotlib.pyplot as plt
import numpy as np
import skimage
from skimage import measure, morphology
from skimage.color import label2rgb
from skimage.measure import regionprops
img = cv2.imread('/content/final_image.png', 0)
#cv2_imshow(img)
img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY)[1]
# Extract Blobs
blobs = img > img.mean()
blobs_labels = measure.label(blobs, background=1)
image_label_overlay = label2rgb(blobs_labels, image=img)
b = morphology.remove_small_objects(blobs_labels, 230)
plt.imsave('pre_version.png', b)
# read the pre-version
img2 = cv2.imread('pre_version.png', 0)
hsv = cv2.cvtColor(img2, cv2.COLOR_GRAY2BGR)
lower = np.array([0, 0, 80])
upper = np.array([255, 255, 255])
mask = cv2.inRange(hsv, lower, upper)
mask = cv2.threshold(mask, 0, 255, cv2.THRESH_BINARY_INV)[1]
cv2_imshow(mask)
plt.imsave('/content/signature.png',mask)
TP4
cv2_imshow( thresh2)
cv2.waitKey(0)
cv2.destroyAllWindows()
import cv2
import numpy as np
cv2_imshow( result)
cv2_imshow( result)
# Restore the image by filling the non-fingerprint regions with a specified value (e.g., 255 for white)
restored_img = np.full_like(thresh2, 255) # Create a white image with the same size as the original
cv2_imshow( restored_img)
import fingerprint_enhancer # Load the library
img = cv2.imread('/content/drive/MyDrive/tpcv/BONNE/105_5.tif')
out = fingerprint_enhancer.enhance_Fingerprint(img) # enhance the fingerprint image
cv2_imshow( out)
Bifurcation detection
import cv2
import numpy as np
from skimage.morphology import skeletonize

# Cross-shaped structuring element that matches a bifurcation neighbourhood
kernel_bifurcation = np.array([
    [0, 1, 0],
    [1, 1, 1],
    [0, 1, 0]
], dtype=np.uint8)

# Locate bifurcation candidates with a hit-or-miss transform on the thinned fingerprint
# (this step is reconstructed; the original cell body was lost in the export)
skeleton = skeletonize(out > 0).astype(np.uint8)
hitmiss = cv2.morphologyEx(skeleton, cv2.MORPH_HITMISS, kernel_bifurcation)
bifurcation_coords = np.column_stack(np.where(hitmiss > 0))

out = cv2.cvtColor((out > 0).astype(np.uint8) * 255, cv2.COLOR_GRAY2BGR)
for x, y in bifurcation_coords:
    cv2.circle(out, (int(y), int(x)), 3, (0, 255, 0), -1)
282 64
[[ 96 205]
 [ 96 207]
 [117 218]
 [120 194]
 [161 88]
 [173 133]
 [234 90]
 [261 69]
 [282 64]]
[]

Minutiae detection

import cv2
from skimage import morphology
import numpy as np
import os
def detect_minutiae(image_path):
    if not os.path.isfile(image_path):
        raise FileNotFoundError(f"Image not found at path: {image_path}")
    img = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
    if img is None:
        raise Exception(f"Failed to load image at path: {image_path}")
    # Body reconstructed (the original was lost in the export): binarize, skeletonize,
    # then keep skeleton pixels whose 8-neighbour count marks an ending (1) or a bifurcation (>= 3)
    binary = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY)[1]
    skeleton = morphology.skeletonize(binary // 255).astype(np.uint8)
    neighbours = cv2.filter2D(skeleton, -1, np.ones((3, 3), np.uint8)) - skeleton
    minutiae = np.column_stack(np.where((skeleton == 1) & ((neighbours == 1) | (neighbours >= 3))))
    return minutiae

minutiae = detect_minutiae(image_path)
from google.colab.patches import cv2_imshow
# Load the original image and mark the detected minutiae (drawing loop reconstructed)
original_img = cv2.imread(image_path)
for y, x in minutiae:
    cv2.circle(original_img, (int(x), int(y)), 2, (0, 0, 255), -1)
cv2_imshow(original_img)
Feature extraction
def display_histogram(image_path):
    img = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
    hist = cv2.calcHist([img], [0], None, [256], [0, 256])
    plt.plot(hist)
    plt.xlabel('Pixel Value')
    plt.ylabel('Frequency')
    plt.title('Histogram')
    plt.show()
input_Bonne_path = "/content/drive/MyDrive/tpcv/BONNE"
input_Mauv_path = "/content/drive/MyDrive/tpcv/MAUVAISE"
output_Bonne_path_fourier2 = "/content/drive/MyDrive/tpcv/seg_Bonne"
output_Mauv_path_fourier2 = "/content/drive/MyDrive/tpcv/seg_Mauv"
import cv2
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import skew, kurtosis
import os
def calculate_orientation(image):
    # Calculate gradient using Sobel operators
    gradient_x = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=3)
    gradient_y = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=3)
    # Local orientation of each pixel
    orientation_map = np.arctan2(gradient_y, gradient_x)
    return orientation_map

def calculate_histogram(orientation_map):
    # Flatten the orientation map
    flat_orientation = orientation_map.flatten()
    # Calculate histogram
    hist, bins = np.histogram(flat_orientation, bins=180, range=[-np.pi, np.pi])
    return hist, bins

def calculate_first_order_statistics(hist):
    # Calculate first-order statistics
    mean = np.mean(hist)
    variance = np.var(hist)
    skewness = skew(hist)
    kurt = kurtosis(hist)
    # Central moments of order 1 to 4 (consistent with the printed output below)
    moments = [np.mean((hist - mean) ** k) for k in range(1, 5)]
    return mean, variance, skewness, kurt, moments

def plot_orientation_histogram(orientation_map):
    # Calculate histogram
    hist, bins = calculate_histogram(orientation_map)
    mean, variance, skewness, kurt, moments = calculate_first_order_statistics(hist)
    print("F1 : ", [mean, variance, skewness, kurt, moments])
    print("**********")
    print(f"Mean: {mean}")
    print(f"Variance: {variance}")
    print(f"Skewness: {skewness}")
    print(f"Kurtosis: {kurt}")
    print(f"Central Moments: {moments}")
Mean: 517.6888888888889
Variance: 60996.480987654315
Skewness: 2.8082130855596765
Kurtosis: 11.683704042814195
Central Moments: [2.021099337273174e-14, 60996.480987654315, 42304564.56792319, 54631758924.57806]
def calculate_texture_features(image):
    # Convert the image to grayscale
    gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
Contrast: 4127.718636271256
Homogeneity: 0.003354058374801654
Correlation: 0.02166119875189265
Energy: 83951.09746308379
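The body of calculate_texture_features is cut off above; a hedged sketch of how contrast, homogeneity, correlation and energy can be obtained from a grey-level co-occurrence matrix with skimage (the notebook's own implementation and its value scaling were lost, so the numbers above need not match this version):

from skimage.feature import graycomatrix, graycoprops

def calculate_texture_features_glcm(image):
    gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # Co-occurrence matrix for a 1-pixel horizontal offset
    glcm = graycomatrix(gray_image, distances=[1], angles=[0], levels=256,
                        symmetric=True, normed=True)
    contrast = graycoprops(glcm, 'contrast')[0, 0]
    homogeneity = graycoprops(glcm, 'homogeneity')[0, 0]
    correlation = graycoprops(glcm, 'correlation')[0, 0]
    energy = graycoprops(glcm, 'energy')[0, 0]
    return contrast, homogeneity, correlation, energy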
# Apply skeletonization
skeleton = morphology.skeletonize(binary_img // 255)
import os
import cv2
import matplotlib.pyplot as plt
def load_images_from_folder(folder_path):
    images = []
    for filename in os.listdir(folder_path):
        img_path = os.path.join(folder_path, filename)
        img = cv2.imread(img_path)
        if img is not None:
            images.append(img)
    return images
plt.show()
F3 vector: computing geometric and texture primitives for the skeletonized images
def calculate_geometric_primitives(contour):
    # Calculate area and perimeter
    area = cv2.contourArea(contour)
    perimeter = cv2.arcLength(contour, True)
    # Calculate solidity
    hull = cv2.convexHull(contour)
    hull_area = cv2.contourArea(hull)
    solidity = float(area) / hull_area
    # Calculate extent (the bounding box gives the width and height)
    x, y, w, h = cv2.boundingRect(contour)
    extent = float(area) / (w * h)
    return area, perimeter, solidity, extent

def calculate_texture_primitives(image):
    # Convert the image to grayscale
    gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # Calculate uniformity
    uniformity = np.std(gray_image)
    # Find contours
    contours, _ = cv2.findContours(gray_image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
Extent: 0.13018384401114205
Smoothness: 129.09812843406593
Uniformity: 72.52514896233994
def calculate_first_order_primitives(image):
    # Calculate Mean, Variance, Skewness, Kurtosis, and Moments
    gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    mean_value = np.mean(gray_image)
    variance_value = np.var(gray_image)
    skewness_value = np.mean((gray_image - mean_value) ** 3) / np.power(variance_value, 1.5)
    kurtosis_value = np.mean((gray_image - mean_value) ** 4) / np.power(variance_value, 2) - 3
    moments = cv2.moments(gray_image)
    return mean_value, variance_value, skewness_value, kurtosis_value, moments

def calculate_geometric_texture_primitives(image):
    # Calculate geometric primitives: Area, Perimeter, Aspect Ratio, Solidity, Extent
    # Calculate texture primitives: Smoothness, Uniformity
    gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # Find contours and keep the largest one
    contours, _ = cv2.findContours(gray_image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    largest_contour = max(contours, key=cv2.contourArea)
    area = cv2.contourArea(largest_contour)
    perimeter = cv2.arcLength(largest_contour, True)
    x, y, w, h = cv2.boundingRect(largest_contour)
    aspect_ratio = w / float(h)
# Load images and labels (replace this with your actual data loading code)
image_files = [os.path.join(image_folder_path, file) for file in os.listdir(image_folder_path)]
X = np.array([io.imread(file) for file in image_files])
y = np.random.randint(2, size=len(X)) # Example labels (replace with your actual labels)
from sklearn.decomposition import PCA
from skimage import io
import numpy as np
import os

# Load images and labels (replace this with your actual data loading code)
image_files = [os.path.join(image_folder_path, file) for file in os.listdir(image_folder_path)]
X = np.array([io.imread(file) for file in image_files])
X_flatten = X.reshape(len(X), -1)  # flatten each image into a 1-D feature vector

# Apply PCA
pca = PCA(n_components=10)  # Set the number of components you want to keep (adjust as needed)
X_pca = pca.fit_transform(X_flatten)
# Load images and labels (replace this with your actual data loading code)
image_files = [os.path.join(image_folder_path, file) for file in os.listdir(image_folder_path)]
X = np.array([io.imread(file) for file in image_files])
y = np.random.randint(2, size=len(X)) # Example labels (replace with your actual labels)
Accuracy: 68.75 %
/usr/local/lib/python3.10/dist-packages/sklearn/feature_selection/_univariate_selection.py:112: UserWarning: Features [ 255 511 ... 92927 93183] are constant.
  warnings.warn("Features %s are constant." % constant_features_idx, UserWarning)
/usr/local/lib/python3.10/dist-packages/sklearn/feature_selection/_univariate_selection.py:113: RuntimeWarning: invalid value encou
  f = msb / msw
# Load images and labels (replace this with your actual data loading code)
image_files = [os.path.join(image_folder_path, file) for file in os.listdir(image_folder_path)]
X = np.array([io.imread(file) for file in image_files])
y = np.random.randint(2, size=len(X))  # Example labels (replace with your actual labels)
X_flatten = X.reshape(len(X), -1)  # flatten each image into a 1-D feature vector

# Apply PCA
pca = PCA(n_components=10)  # Set the number of components you want to keep (adjust as needed)
X_pca = pca.fit_transform(X_flatten)
Accuracy: 37.5 %
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
from sklearn.feature_selection import SelectKBest, f_classif
from skimage import io
import numpy as np
import os
# Load images and labels (replace this with your actual data loading code)
image_files = [os.path.join(image_folder_path, file) for file in os.listdir(image_folder_path)]
X = np.array([io.imread(file) for file in image_files])
y = np.random.randint(2, size=len(X)) # Example labels (replace with your actual labels)
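Only the imports and the data loading survived here; a sketch of the rest of the pipeline (flatten, univariate feature selection, train/test split, SVM), where k and the split ratio are assumptions. The RandomForest cell below follows the same pattern with the classifier swapped:

X_flatten = X.reshape(len(X), -1)  # one flat feature vector per image

# Keep the k most discriminative features according to an ANOVA F-test
selector = SelectKBest(f_classif, k=100)
X_selected = selector.fit_transform(X_flatten, y)

X_train, X_test, y_train, y_test = train_test_split(
    X_selected, y, test_size=0.2, random_state=42)

clf = SVC(kernel='linear')
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)
print("Accuracy:", accuracy_score(y_test, y_pred) * 100, "%")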
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from sklearn.feature_selection import SelectKBest, f_classif
from skimage import io
import numpy as np
import os
# Load images and labels (replace this with your actual data loading code)
image_files = [os.path.join(image_folder_path, file) for file in os.listdir(image_folder_path)]
X = np.array([io.imread(file) for file in image_files])
y = np.random.randint(2, size=len(X)) # Example labels (replace with your actual labels)