Labs/Lab_3_LiDAR_ML_Segmentation/libLab3_Lidar.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 18 15:09:57 2021

@authors: Grégory Sainton & Antoine Lucas
@purpose: Lib linked to the Lab3 notebook EDS_3D_LiDAR_Claiff_ML_*.ipynb
          It contains useful functions to run the lab and to plot several
          sets of data.
"""

import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
def readData(filename):
    """
    Function to read the landscape data

    INPUT:
    ------
    @filename: string - path to the data file

    OUTPUT:
    -------
    x, y, z: numpy arrays
    """
    import os

    if os.path.isfile(filename):
        floor = np.genfromtxt(filename)
        x = floor[:, 0]
        y = floor[:, 1]
        z = floor[:, 2]
        return x, y, z
    else:
        print(f'File {filename} is not accessible')
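
# Example use (sketch): the file name below is only an illustration, not part
# of the lab material.
#   x, y, z = readData("landscape.xyz")
#   print(x.shape, y.shape, z.shape)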
def plot_figs(fig_num, elev, azim, dx, dy, dz, density=2):
    """
    Draw (x, y, z) coordinates into a 3 dimensional scatter plot.

    INPUT:
    ------
    @fig_num: int - Figure number
    @elev   : float - Elevation
    @azim   : float - Azimuth
    @dx, @dy, @dz : numpy arrays - data to plot
    @density: int - optional - factor to decimate the data on plot

    OUTPUT:
    -------
    None
    """
    # Subsample input data with a factor "density"
    X = dx[::density]
    Y = dy[::density]
    Z = dz[::density]

    # Plot the data in 3D
    fig = plt.figure(figsize=(10, 8))
    plt.clf()
    ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=elev, azim=azim)
    ax.scatter(dx[::density], dy[::density], dz[::density],
               c=dz[::density], marker='+', alpha=.4)

    # OPTIONAL - BUT NICE TO IMPROVE YOUR POINT OF VIEW
    # Create a blank cubic bounding box to simulate equal aspect ratio in 3D plots
    max_range = np.array([X.max() - X.min(),
                          Y.max() - Y.min(),
                          Z.max() - Z.min()]).max()
    Xb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][0].flatten() + 0.5 * (X.max() + X.min())
    Yb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][1].flatten() + 0.5 * (Y.max() + Y.min())
    Zb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][2].flatten() + 0.5 * (Z.max() + Z.min())
    for xb, yb, zb in zip(Xb, Yb, Zb):
        ax.plot([xb], [yb], [zb], 'w')

    plt.show()
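
# Example use (sketch, assuming x, y, z were loaded with readData above);
# the elevation/azimuth values are arbitrary illustrations.
#   plot_figs(1, elev=30, azim=-60, dx=x, dy=y, dz=z, density=5)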
def getEigenvaluePCA(a, b, c, dim, decim=1, Verbose=False):
    """
    Function to estimate the eigenvalues of the PCA

    INPUT:
    ------
    @a, @b, @c: numpy arrays with the coordinates
    @dim  : float - diameter of interest of the neighborhood ball
    @decim: integer - decimation factor to lower the processing time
    @Verbose: boolean - optional - verbosity flag

    OUTPUT:
    -------
    comp, Y - to be computed in the lab; left as None in this template
              (see the illustrative sketch below)
    """
    from sklearn.decomposition import PCA
    from sklearn import preprocessing
    from scipy import spatial

    comp = None
    Y = None

    return comp, Y
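
# ---------------------------------------------------------------------------
# Illustrative sketch (not the official lab solution): one possible way to
# fill getEigenvaluePCA, assuming the "neighborhood ball of diameter dim"
# means a radius of dim / 2 around each decimated point. The helper name
# _pca_eigenvalues_sketch is hypothetical and not part of the lab library.
# ---------------------------------------------------------------------------
def _pca_eigenvalues_sketch(a, b, c, dim, decim=1):
    """Return one sorted, normalized eigenvalue triplet per decimated point."""
    from scipy import spatial
    from sklearn.decomposition import PCA

    pts = np.column_stack((a, b, c))      # full point cloud, shape (N, 3)
    query = pts[::decim]                  # decimated query points
    tree = spatial.cKDTree(pts)           # fast neighborhood search
    comp = np.zeros((len(query), 3))

    for i, p in enumerate(query):
        idx = tree.query_ball_point(p, dim / 2.0)   # neighbors inside the ball
        if len(idx) < 3:                            # PCA needs at least 3 points
            continue
        pca = PCA(n_components=3)
        pca.fit(pts[idx])
        # Normalized eigenvalues of the local covariance (sorted, sum to 1)
        comp[i] = pca.explained_variance_ratio_

    return comp, query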
def estimateTernaryCoord(comp):
    """
    Function which estimates the ternary coordinates
    from the list of eigenvalues.
    ----
    INPUT:
    @comp: np array - list of eigenvalues for every
           point with a given dimension
    OUTPUT:
    -------
    @X, @Y, @pdf
    """
    # Conversion towards the ternary graph:
    # Consider a point of coordinates (p1, p2) -> Red point on the graphic above.
    # What is the projection of this point on the line (p1p2)?
    # We know that p1 + p2 + p3 = 1 -> p3 = 1 - p1 - p2
    # To find the projection x_p1p2: x_p1p2 = p1 + dx
    #   with dx = p3 * cos(theta)
    #   and theta = angle of the slope (-pi/4)
    # y_p1p2 is derived from the fact that the slope is -1.
    #
    # Steps to implement:
    # - Estimate the distances along the p1-p2 axis
    # - Estimate the a, b and c coordinates from the results just above
    # - Norm factor: the sum of the eigenvalues (p1, p2 and p3) is equal to 1
    # - Conversion towards the ternary graph
    X = None
    Y = None

    # Finally, let's compute the probability density function for clarity
    # https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.gaussian_kde.html
    from scipy.stats import gaussian_kde
    xy = np.vstack([X, Y])
    pdf = gaussian_kde(xy)(xy)

    return X, Y, pdf
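
# ---------------------------------------------------------------------------
# Illustrative sketch (not the official lab solution): a common barycentric
# to Cartesian conversion that matches the triangle corners used in
# plotTernary ((0, 0), (1, 0), (0.5, sqrt(3)/2)). The helper name
# _ternary_coord_sketch is hypothetical; the lab derivation via the p1-p2
# projection described above may take a different route to the same plot.
# ---------------------------------------------------------------------------
def _ternary_coord_sketch(comp):
    """Map rows of eigenvalues (p1, p2, p3) to ternary X, Y and a density."""
    from scipy.stats import gaussian_kde

    # Assumes every row of comp has a positive sum (degenerate rows removed).
    p = comp / comp.sum(axis=1, keepdims=True)   # enforce p1 + p2 + p3 = 1
    p1, p2, p3 = p[:, 0], p[:, 1], p[:, 2]
    X = p2 + 0.5 * p3                            # abscissa inside the triangle
    Y = np.sqrt(3) / 2.0 * p3                    # ordinate inside the triangle
    pdf = gaussian_kde(np.vstack([X, Y]))(np.vstack([X, Y]))
    return X, Y, pdf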
def density_scatter(x, y, ax=None, sort=True, bins=20, **kwargs):
    """
    Function to plot a density plot from x, y data
    ----
    """
    from matplotlib.colors import Normalize
    from scipy.interpolate import interpn
    import matplotlib.tri as tri

    if ax is None:
        fig, ax = plt.subplots()

    data, x_e, y_e = np.histogram2d(x, y, bins=bins, density=True)
    z = interpn((0.5 * (x_e[1:] + x_e[:-1]), 0.5 * (y_e[1:] + y_e[:-1])),
                data, np.vstack([x, y]).T,
                method="splinef2d", bounds_error=False)

    # To be sure to plot all data
    z[np.where(np.isnan(z))] = 0.0

    # Sort the points by density, so that the densest points are plotted last
    if sort:
        idx = z.argsort()
        x, y, z = x[idx], y[idx], z[idx]

    ax.scatter(x, y, c=z, s=0.5)
    norm = Normalize(vmin=np.min(z), vmax=np.max(z))

    # creating the TRI grid
    corners = np.array([[0, 0], [1, 0], [0.5, np.sqrt(3) * 0.5]])
    triangle = tri.Triangulation(corners[:, 0], corners[:, 1])
    refiner = tri.UniformTriRefiner(triangle)
    trimesh = refiner.refine_triangulation(subdiv=4)

    # plotting the mesh
    plt.triplot(trimesh, 'k-', linewidth=0.1)
    plt.axis('equal')
    plt.axis('off')
    plt.show()
def plotTernary(x, y, z, title):
    """
    Function to plot a ternary graph from a list of ternary coordinates.
    ----
    INPUT:
    @x, @y, @z: numpy arrays - ternary coordinates
    @title    : string - title of the graph
    ----
    OUTPUT:
    None
    """
    import matplotlib.tri as tri

    # Plot part
    plt.figure(figsize=(7, 5))
    plt.clf()
    plt.scatter(x, y, c=z, s=0.5)

    # creating the TRI grid
    corners = np.array([[0, 0], [1, 0], [0.5, np.sqrt(3) * 0.5]])
    triangle = tri.Triangulation(corners[:, 0], corners[:, 1])
    refiner = tri.UniformTriRefiner(triangle)
    trimesh = refiner.refine_triangulation(subdiv=4)

    # plotting the mesh
    plt.triplot(trimesh, 'k-', linewidth=0.1)
    plt.title(title)
    plt.axis('equal')
    plt.axis('off')
    plt.show()
def plot_3dcladd(dx, dy, dz, y, density, fileout):
    """
    Plot the point cloud in 3D, colored by the labels y, and save the
    figure to fileout.
    """
    X = dx[::density]
    Y = dy[::density]
    Z = dz[::density]

    elev = 36
    azim = -144

    # Plot the data in 3D
    fig = plt.figure(figsize=(10, 8))
    plt.clf()
    ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=elev, azim=azim)
    ax.scatter(dx[::density], dy[::density], dz[::density],
               c=y, marker='.', alpha=.5, cmap='YlGn')

    # OPTIONAL - BUT NICE TO IMPROVE YOUR POINT OF VIEW
    # Create a blank cubic bounding box to simulate equal aspect ratio in 3D plots
    max_range = np.array([X.max() - X.min(),
                          Y.max() - Y.min(),
                          Z.max() - Z.min()]).max()
    Xb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][0].flatten() + 0.5 * (X.max() + X.min())
    Yb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][1].flatten() + 0.5 * (Y.max() + Y.min())
    Zb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][2].flatten() + 0.5 * (Z.max() + Z.min())
    for xb, yb, zb in zip(Xb, Yb, Zb):
        ax.plot([xb], [yb], [zb], 'w')

    plt.savefig(fileout)
def plot_contours_compare(X, y, X_train, y_train, X_test, y_test,
                          classifiers, plot_input=False):
    """
    Function to plot, in the same figure, the decision contours of several
    classifiers in order to compare them.
    The libraries of the classifiers are supposed to be loaded outside the
    function since we don't know a priori the type of classifiers in input.
    ----
    INPUT:
    @X: DataFrame of data
    @y: DataFrame of labels
    @X_train: Training set of data
    @y_train: Training set of labels
    @X_test: Test set of data
    @y_test: Test set of labels
    @classifiers: list - list of classifier instances
          ie: classifiers = [LinearDiscriminantAnalysis(store_covariance=True),
                             DecisionTreeClassifier()]
    @plot_input: boolean - optional - Option to plot the input
          data alone in a separate plot
    ----
    OUTPUT
    None

    EXAMPLE:
    -------
    classifiers = [
        LinearDiscriminantAnalysis(store_covariance=True),
        SVC(kernel="linear", C=0.025),
        QuadraticDiscriminantAnalysis(),
        DecisionTreeClassifier(),
        RandomForestClassifier()]

    plot_contours_compare(X, y, X_train, y_train, X_test, y_test,
                          classifiers, plot_input=False)
    """
    # Code adapted from
    # https://scikit-learn.org/stable/auto_examples/classification/plot_classifier_comparison.html
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.colors import ListedColormap
    from matplotlib import colors

    cmap = colors.LinearSegmentedColormap(
        'red_blue_classes',
        {'red':   [(0, 1, 1), (1, 0.7, 0.7)],
         'green': [(0, 0.7, 0.7), (1, 0.7, 0.7)],
         'blue':  [(0, 0.7, 0.7), (1, 1, 1)]})
    plt.cm.register_cmap(cmap=cmap)

    nb_plot = len(classifiers) + 1 if plot_input else len(classifiers)

    # Here is defined the list of classifier names
    names = ["LDA", "Linear SVM", "QDA", "Decision Tree", "Random Forest"]

    figure = plt.figure(figsize=(27, 9))
    i = 1
    h = .02  # step size in the mesh

    x_min, x_max = X.iloc[:, 0].min() - .5, X.iloc[:, 0].max() + .5
    y_min, y_max = X.iloc[:, 1].min() - .5, X.iloc[:, 1].max() + .5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    ds_cnt = 0
    # just plot the dataset first
    cm = plt.cm.RdBu
    cm_bright = ListedColormap(['#FF0000', '#0000FF'])

    ax = plt.subplot(1, len(classifiers) + 1, i)
    if ds_cnt == 0:
        ax.set_title("Input data")

    # Plot the training points
    ax.scatter(X_train.iloc[:, 0], X_train.iloc[:, 1], c=y_train,
               cmap=cm_bright, edgecolors='k')
    # Plot the testing points
    ax.scatter(X_test.iloc[:, 0], X_test.iloc[:, 1], c=y_test,
               cmap=cm_bright, alpha=0.6, edgecolors='k')

    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    ax.set_xticks(())
    ax.set_yticks(())
    i += 1
    ds_cnt += 1

    # iterate over classifiers
    for name, clf in zip(names, classifiers):
        ax = plt.subplot(1, len(classifiers) + 1, i)
        clf.fit(X_train, y_train)
        score = clf.score(X_test, y_test)

        # Plot the decision boundary. For that, we will assign a color to each
        # point in the mesh [x_min, x_max]x[y_min, y_max].
        if hasattr(clf, "decision_function"):
            Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
        else:
            Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]

        # Put the result into a color plot
        Z = Z.reshape(xx.shape)
        ax.pcolormesh(xx, yy, Z, cmap='red_blue_classes',
                      norm=colors.Normalize(0., 1.), zorder=0,
                      shading='auto')
        ax.contour(xx, yy, Z, [0.5], linewidths=2., colors='white')

        # Plot the training points
        ax.scatter(X_train.iloc[:, 0], X_train.iloc[:, 1], c=y_train,
                   cmap=cm_bright, edgecolors='k')
        # Plot the testing points
        ax.scatter(X_test.iloc[:, 0], X_test.iloc[:, 1], c=y_test,
                   cmap=cm_bright, edgecolors='k', alpha=0.6)

        ax.set_xlim(xx.min(), xx.max())
        ax.set_ylim(yy.min(), yy.max())
        ax.set_xticks(())
        ax.set_yticks(())
        ax.set_title(name)
        ax.text(xx.max() - .3, yy.min() + .3, ('%.2f' % score).lstrip('0'),
                size=15, horizontalalignment='right')
        i += 1

    plt.tight_layout()
    plt.show()
def plot_confMat2(cnf_matrix, ClaName, ClassId):
    """
    Useful function to plot a confusion matrix
    ----
    INPUT:
    cnf_matrix: confusion matrix calculated with scikit-learn
    ClaName: string - name of the classifier
    ClassId: list - class labels, used as tick names
    """
    import pandas as pd
    import seaborn as sns
    import numpy as np
    import matplotlib.pyplot as plt

    class_names = ClassId  # [0,1,2,3] # name of classes
    fig, ax = plt.subplots()

    # create heatmap
    sns.heatmap(pd.DataFrame(cnf_matrix), annot=True, cmap="YlGnBu", fmt='g')
    plt.tight_layout()
    plt.title("Confusion matrix using " + ClaName, y=1.1)
    plt.ylabel('Actual label')
    plt.xlabel('Predicted label')

    tick_marks = np.arange(len(class_names)) + .5
    plt.xticks(tick_marks, class_names)
    plt.yticks(tick_marks, class_names)
    plt.margins(0.1)
    plt.show()
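
# Example use (sketch): assumes y_test and y_pred come from a classifier
# fitted earlier in the notebook.
#   from sklearn.metrics import confusion_matrix
#   cnf = confusion_matrix(y_test, y_pred)
#   plot_confMat2(cnf, "Random Forest", ClassId=[0, 1, 2, 3])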
def lazToXYZ(lasFile):
    """
    Useful function to read LAS/LAZ files and extract the X, Y, Z positions
    of the points
    ----
    INPUT:
    lasFile: LAS file name
    ----
    OUTPUT:
    X, Y, Z vectors
    """
    import laspy
    import os

    if os.path.isfile(lasFile):
        las = laspy.read(lasFile)
        x = las.x
        y = las.y
        z = las.z
        return x, y, z
    else:
        print(f'File {lasFile} is not accessible')
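
# Example use (sketch): the file name below is only an illustration. Reading
# compressed .laz files may additionally require a LAZ backend for laspy
# (e.g. the lazrs or laszip extra).
#   x, y, z = lazToXYZ("point_cloud.laz")
#   plot_figs(1, elev=30, azim=-60, dx=x, dy=y, dz=z, density=10)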