PointNet DL model improvement, window segmentation

I’m working on a project where I feed a PointNet architecture a set of point clouds of rooms, and the points have labels. They are simple .xyz files, but I add a fourth column with a 1 or 0: 1 means the point is part of a window, 0 means it is not.
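For reference, a labeled training file looks roughly like this (tab-separated; the coordinates below are made up):

0.123	4.567	2.100	0
0.130	4.570	2.110	1

The fourth column is 1 for window points and 0 for everything else.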
Training itself runs well: the loss is low, the accuracy is high, and the accuracy increases as it should.
But when I use the model for segmentation, it thinks every point of that point cloud is part of a window, and it is very confident about it: not even a 0.99 threshold helps, it still exports the whole cloud. I just can’t figure out why it isn’t working.
Here is the code which builds and trains the model:

import tensorflow as tf
from tensorflow import keras
from keras import layers, models
import numpy as np
import os

def read_xyz_file(file_path):
    """Read a .xyz file and convert it to tensors of points and labels."""
    try:
        file_content = tf.io.read_file(file_path)
        lines = tf.strings.split(file_content, '\n')
        split_lines = tf.strings.split(lines, sep='\t')
        points = tf.strings.to_number(split_lines[:, :3], out_type=tf.float32)
        labels_str = tf.strings.reduce_join(split_lines[:, 3:])
        labels = tf.strings.to_number(tf.strings.split(labels_str), out_type=tf.int32)
        points = tf.reshape(points, [-1, 3])
        labels = tf.reshape(labels, [-1])
        if tf.shape(points)[0] == 1000000:
            return points, labels
        else:
            print(f"File {file_path} does not have exactly 1,000,000 points.")
            return None, None
    except tf.errors.InvalidArgumentError as e:
        print(f"Error reading {file_path}: {e}")
        return None, None

def augment_point_cloud(points, labels):
    """Apply random transformations to point clouds for data augmentation."""
    angle = np.random.uniform() * 2 * np.pi
    rotation_matrix = np.array([
        [np.cos(angle), -np.sin(angle), 0],
        [np.sin(angle), np.cos(angle), 0],
        [0, 0, 1]
    ])
    points = np.dot(points, rotation_matrix)
    points += np.random.normal(scale=0.02, size=points.shape)
    return points, labels

def point_cloud_generator(file_list):
    """Generator that yields point clouds and labels for training."""
    for filename in file_list:
        points, labels = read_xyz_file(filename)
        if points is not None and labels is not None:
            yield points, labels  # Yield original data
            augmented_points, augmented_labels = augment_point_cloud(points, labels)
            yield augmented_points, augmented_labels  # Yield augmented data

def prepare_dataset(file_list, batch_size=2):
    """Prepare a TensorFlow dataset from a list of .xyz files."""
    dataset = tf.data.Dataset.from_generator(
        point_cloud_generator,
        output_signature=(
            tf.TensorSpec(shape=(1000000, 3), dtype=tf.float32),
            tf.TensorSpec(shape=(1000000,), dtype=tf.int32)
        ),
        args=(file_list,)
    )
    return dataset.shuffle(buffer_size=1024).batch(batch_size).prefetch(tf.data.experimental.AUTOTUNE)

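# Shared MLP building block: pointwise (kernel_size=1) Conv1D + batch norm + ReLU, applied to every point independently.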
def conv_bn(x, filters):
    x = layers.Conv1D(filters, kernel_size=1, padding="valid")(x)
    x = layers.BatchNormalization(momentum=0.0)(x)
    return layers.Activation("relu")(x)

def dense_bn(x, filters):
    x = layers.Dense(filters)(x)
    x = layers.BatchNormalization(momentum=0.0)(x)
    return layers.Activation("relu")(x)

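# Orthogonality regularizer from PointNet: penalizes the predicted feature transform A for
# deviating from an orthogonal matrix by adding l2reg * sum((A·Aᵀ - I)²) to the loss.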
class OrthogonalRegularizer(keras.regularizers.Regularizer):
    def __init__(self, num_features, l2reg=0.001):
        self.num_features = num_features
        self.l2reg = l2reg
        self.eye = tf.eye(num_features)

    def __call__(self, x):
        x = tf.reshape(x, (-1, self.num_features, self.num_features))
        xxt = tf.tensordot(x, x, axes=(2, 2))
        xxt = tf.reshape(xxt, (-1, self.num_features, self.num_features))
        return tf.reduce_sum(self.l2reg * tf.square(xxt - self.eye))

    def get_config(self):
        return {'num_features': self.num_features, 'l2reg': self.l2reg}

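# T-Net: predicts a num_features x num_features transform (biased towards the identity via the
# bias initializer) and applies it to the incoming points/features with a matrix multiply.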
def tnet(inputs, num_features):
    bias = keras.initializers.Constant(np.eye(num_features).flatten())
    reg = OrthogonalRegularizer(num_features)
    x = conv_bn(inputs, 32)
    x = conv_bn(x, 64)
    x = conv_bn(x, 512)
    x = layers.GlobalMaxPooling1D()(x)
    x = dense_bn(x, 256)
    x = dense_bn(x, 128)
    x = layers.Dense(
        num_features * num_features,
        kernel_initializer="zeros",
        bias_initializer=bias,
        activity_regularizer=reg,
    )(x)
    feat_T = layers.Reshape((num_features, num_features))(x)
    return layers.Dot(axes=(2, 1))([inputs, feat_T])

def create_pointnet_model(num_classes=2):
    """Creates a PointNet model for point cloud segmentation."""
    inputs = keras.Input(shape=(1000000, 3))
    x = tnet(inputs, 3)
    x = conv_bn(x, 32)
    x = conv_bn(x, 32)
    x = tnet(x, 32)
    x = conv_bn(x, 32)
    x = conv_bn(x, 64)
    x = conv_bn(x, 512)
    global_feature = layers.GlobalMaxPooling1D()(x)
    x = dense_bn(x, 256)
    x = layers.Dropout(0.3)(x)
    x = dense_bn(x, 128)
    x = layers.Dropout(0.3)(x)
    outputs = layers.Conv1D(num_classes, kernel_size=1, activation='softmax')(x)
    model = models.Model(inputs=inputs, outputs=outputs, name='pointnet_segmentation')
    return model

def main(use_gpu=False):
    """Main function to run training."""
    if use_gpu and tf.config.list_physical_devices('GPU'):
        print("GPU available. Using GPU.")
    else:
        print("No GPU available or GPU use not requested. Using CPU.")
        os.environ['CUDA_VISIBLE_DEVICES'] = '-1'

    folder_path = 'Label_database'
    file_list = [os.path.join(folder_path, f) for f in os.listdir(folder_path) if f.endswith('.xyz')]

    print("Files to be loaded:")
    for file in file_list:
        print(file)

    dataset = prepare_dataset(file_list)
    model = create_pointnet_model(num_classes=2)
    model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    
    # Callbacks for learning rate scheduling and early stopping
    lr_scheduler = tf.keras.callbacks.ReduceLROnPlateau(monitor='loss', factor=0.5, patience=3, min_lr=1e-6)
    early_stopping = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
    
    model.fit(dataset, epochs=20, callbacks=[lr_scheduler, early_stopping])
    model.save('window_segmentation_model_v1.h5')

if __name__ == "__main__":
    main()

And here is the code where I try the segmentation:

import tensorflow as tf
import numpy as np
import os
from keras import layers, models
from tensorflow import keras

class OrthogonalRegularizer(keras.regularizers.Regularizer):
    def __init__(self, num_features, l2reg=0.001):
        self.num_features = num_features
        self.l2reg = l2reg
        self.eye = tf.eye(num_features)

    def __call__(self, x):
        x = tf.reshape(x, (-1, self.num_features, self.num_features))
        xxt = tf.tensordot(x, x, axes=(2, 2))
        xxt = tf.reshape(xxt, (-1, self.num_features, self.num_features))
        return tf.reduce_sum(self.l2reg * tf.square(xxt - self.eye))

    def get_config(self):
        return {'num_features': self.num_features, 'l2reg': self.l2reg}

def read_xyz_file_no_labels(file_path):
    """Read a .xyz file and convert it to a tensor of points."""
    try:
        # Read the file as binary data
        file_content = tf.io.read_file(file_path)
        
        # Decode the file content using ISO-8859-1 (Latin-1) encoding
        file_content = file_content.numpy().decode('ISO-8859-1')
        
        # Split the file content into lines
        lines = file_content.split('\n')
        
        # Split each line into components: x, y, z
        points = []
        for line in lines:
            if line.strip():  # Skip empty lines
                x, y, z = map(float, line.split()[:3])
                points.append([x, y, z])
        
        points = np.array(points, dtype=np.float32)
        
        # Ensure the points are valid and exactly 1,000,000 points
        if points.shape[0] == 1000000:
            return tf.convert_to_tensor(points)
        else:
            print(f"File {file_path} does not have exactly 1,000,000 points.")
            return None
    except Exception as e:
        print(f"Error reading {file_path}: {e}")
        return None

# Load the trained PointNet model
model_path = 'window_segmentation_model_v1.h5'

# Use custom_objects to load the model with custom regularizer
custom_objects = {'OrthogonalRegularizer': OrthogonalRegularizer}
model = tf.keras.models.load_model(model_path, custom_objects=custom_objects, compile=False)

# Read the new point cloud data
new_file_path = 'Test_cloud.xyz'
points = read_xyz_file_no_labels(new_file_path)

if points is not None:
    print(f"Number of points read: {points.shape[0]}")
    
    # Normalize points if normalization was used during training
    points_min = tf.reduce_min(points, axis=0)
    points_max = tf.reduce_max(points, axis=0)
    points = (points - points_min) / (points_max - points_min)
    print(f"Normalized points range: min={tf.reduce_min(points)}, max={tf.reduce_max(points)}")

    # Add a batch dimension to the points tensor
    points = tf.expand_dims(points, axis=0)

    # Predict the labels for the new point cloud
    predictions = model.predict(points)

    # Get the probability for the target class (e.g., class 1)
    target_class_probs = predictions[..., 1]

    # Print some of the predicted probabilities for debugging
    print(f"Predicted probabilities (first 10 points): {target_class_probs[0, :10]}")
    print(f"Probability range: min={np.min(target_class_probs)}, max={np.max(target_class_probs)}")
    
    # Define a threshold for segmentation
    threshold = 0.6  # Adjust this threshold based on your needs

    # Get the predicted class labels based on the threshold
    predicted_labels = (target_class_probs > threshold).astype(int).squeeze()

    # Print some of the predicted labels for debugging
    print(f"Predicted labels (first 10 points): {predicted_labels[:10]}")
    print(f"Number of points classified as window: {np.sum(predicted_labels)}")

    # Save only the window points to a new .xyz file
    output_file_path = 'Segmented_Test_cloud.xyz'
    with open(output_file_path, 'w') as f:
        for point, label in zip(points.numpy().squeeze(), predicted_labels):
            if label == 1:  # Only save points classified as windows
                x, y, z = point
                f.write(f"{x}\t{y}\t{z}\n")
    
    print(f"Window points saved to {output_file_path}")

    # Step 1: Verify Model Prediction on a Single Point
    # Test with a single point that is known to be a window
    test_point = np.array([[0.5, 0.5, 0.5]], dtype=np.float32)  # Example point
    test_point = (test_point - points_min) / (points_max - points_min)
    test_point = tf.expand_dims(test_point, axis=0)
    test_prediction = model.predict(test_point)
    print(f"Test point prediction: {test_prediction}")

else:
    print("Failed to read the new point cloud data.")

Any suggestions as to what the problem could be?

I tried different layer sizes, different datasets, and smaller datasets.
No, I can’t use a bigger batch size, I only have 32 GB of RAM. And if I resample my point clouds any further, the windows will disappear.
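(Rough back-of-the-envelope math on the batch size: one cloud is 1,000,000 × 3 float32 values, only about 12 MB, but the 512-channel intermediate activations alone are about 1,000,000 × 512 × 4 bytes ≈ 2 GB per sample, so with a batch of 2 plus gradients I’m already close to the limit of 32 GB.)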

