In [1]:
# PINN tutorial
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import Model
from tensorflow.keras.layers import Dense, Input
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import gdown
In [2]:
# Extrapolation on a toy problem: y = -sin(x) on [0, 2*pi]
x = np.linspace(0, 2*np.pi, 100)
y = -np.sin(x)

# Sequential 80/20 split (no shuffling): the validation set sits at the
# right end of the domain, beyond most of the training range
train_split = 0.8
x_train = x[:int(train_split*x.shape[0])]
x_val = x[int(train_split*x.shape[0]):]
y_train = y[:int(train_split*y.shape[0])]
y_val = y[int(train_split*y.shape[0]):]
In [ ]:
input_layer = Input(shape=(1,))
hidden_layer = Dense(units=20, activation='relu')(input_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
output_layer = Dense(units=1, activation='linear')(hidden_layer)

nn = Model(inputs=input_layer, outputs=output_layer)
opt = keras.optimizers.Adam(learning_rate=0.01)
nn.compile(optimizer=opt, loss='mse')
training_history = nn.fit(x_train, y_train, validation_data=(x_val, y_val), epochs=500, verbose=1)
In [5]:
plt.figure(dpi=100)
plt.plot(training_history.history['loss'], label='Training')
plt.plot(training_history.history['val_loss'], label='Validation')
plt.legend()
Out[5]:
<matplotlib.legend.Legend at 0x7f3da4773220>
[Figure: training and validation loss vs. epoch]
In [7]:
# Extrapolation
x_extended = np.linspace(-3*np.pi, 3*np.pi, 100)
y_extended = -np.sin(x_extended)
ypred_extended = nn.predict(x_extended)

plt.figure(dpi=100)
plt.plot(x_extended, y_extended, label='-sin(x)')
plt.plot(x_extended, ypred_extended, label='NN')
plt.scatter(x_train, y_train, marker='o', label='Training data')
plt.scatter(x_val, y_val, marker='^', label='Validation data')
plt.xlabel('x'), plt.ylabel('y')
plt.legend()
4/4 [==============================] - 0s 2ms/step
Out[7]:
<matplotlib.legend.Legend at 0x7f3d975b03d0>
[Figure: -sin(x), the NN prediction, and the train/validation data over the extended domain]
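The failure mode is characteristic: with ReLU activations the network is piecewise linear, so far outside the training interval its output settles onto a straight line. A quick check of this (not part of the original notebook):

In [ ]:
# Probe the network far outside the training range; the outputs should be
# roughly collinear, since a ReLU network extrapolates linearly
probe = np.array([[10.0], [20.0], [40.0]])
print(nn.predict(probe, verbose=0).ravel())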
In [ ]:
# PINN: same regression task, but with a physics (derivative) penalty
input_layer = Input(shape=(1,))
hidden_layer = Dense(units=20, activation='tanh')(input_layer)
hidden_layer = Dense(units=20, activation='tanh')(hidden_layer)
output_layer = Dense(units=1, activation='linear')(hidden_layer)
pinn = Model(inputs=input_layer, outputs=output_layer)

# Collocation points extend well beyond the training range
x_collocation = np.linspace(-np.pi, 3*np.pi, 50)
collocation_points = tf.convert_to_tensor(x_collocation.reshape((-1, 1)), dtype=tf.float32)

def pinn_loss():
  # Differentiate the network output with respect to its input
  with tf.GradientTape() as tape:
    tape.watch(collocation_points)
    y_pred_coll = pinn(collocation_points)
  dydx = tape.gradient(y_pred_coll, collocation_points)
  # The target y = -sin(x) satisfies dy/dx = -cos(x)
  true_dydx = -tf.math.cos(collocation_points)
  # Scalar residual of the ODE at the collocation points
  return tf.reduce_mean(tf.square(dydx - true_dydx))

alpha, beta = 1.0, 1.0  # weights of the physics and data terms

def total_loss(y_true, y_pred):
  mse_loss = tf.reduce_mean(tf.square(y_true - y_pred), axis=-1)
  derivative_loss = pinn_loss()
  return alpha*derivative_loss + beta*mse_loss

opt = keras.optimizers.Adam(learning_rate=0.005)
pinn.compile(optimizer=opt, loss=total_loss)
pinn.summary()
pinn_training = pinn.fit(x_train, y_train, validation_data=(x_val, y_val), epochs=2000, verbose=2)
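In symbols, the loss minimized above combines a data term and a physics term. Since $y = -\sin x$ satisfies $dy/dx = -\cos x$, the collocation penalty is the residual of that ODE:

$$\mathcal{L} = \beta\,\frac{1}{N}\sum_{i}\left(y_i - \hat{y}(x_i)\right)^2 + \alpha\,\frac{1}{N_c}\sum_{j}\left(\frac{d\hat{y}}{dx}(x_{c,j}) + \cos x_{c,j}\right)^2$$

with $\alpha = \beta = 1$ here, and collocation points $x_{c,j}$ spanning $[-\pi, 3\pi]$, well beyond the training data.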
In [9]:
plt.figure(dpi=100)
plt.plot(pinn_training.history['loss'], label='Training')
plt.plot(pinn_training.history['val_loss'], label='Validation')
plt.legend()
[Figure: PINN training and validation loss vs. epoch]
In [11]:
# Extrapolation with PINN
x_extended = np.linspace(-2*np.pi, 3*np.pi, 100)
y_extended = -np.sin(x_extended)
ypred_extended = nn.predict(x_extended)
ypred_pinn = pinn.predict(x_extended)

plt.figure(dpi=100)
plt.plot(x_extended, y_extended, label='-sin(x)')
plt.plot(x_extended, ypred_extended, label='NN')
plt.plot(x_extended, ypred_pinn, label='PINN')
plt.scatter(x_train, y_train, marker='o', label='Training data')
plt.scatter(x_val, y_val, marker='^', label='Validation data')
plt.xlabel('x'), plt.ylabel('y')
plt.legend()
4/4 [==============================] - 0s 4ms/step
4/4 [==============================] - 0s 4ms/step
Out[11]:
<matplotlib.legend.Legend at 0x7f3d957ef100>
[Figure: -sin(x), the plain NN, and the PINN over the extended domain, with train/validation data]
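To put a number on the plot, the two models can be compared on mean squared error over the extended interval, reusing the arrays already computed (a minimal check, not in the original notebook):

In [ ]:
# Extrapolation error of the plain NN vs. the PINN on [-2*pi, 3*pi]
mse_nn = np.mean((ypred_extended.ravel() - y_extended)**2)
mse_pinn = np.mean((ypred_pinn.ravel() - y_extended)**2)
print(f"extended-domain MSE  NN: {mse_nn:.4f}   PINN: {mse_pinn:.4f}")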
In [ ]:
# Google Drive file ID
file_id = '1H8JDNgNncYEWjwR34F71CjQmAGEoqBf3'

# Construct the download URL
download_url = f'https://drive.google.com/uc?id={file_id}'

# Download the file
output_file = 'wigged_db.pkl'  # Change the output file name and extension as needed
gdown.download(download_url, output_file, quiet=False)
database = pd.read_pickle(output_file)
database['Nf'] = database['Nf'].apply(np.log10)  # work in log10(cycles to failure)
plt.scatter(database.Nf, database.sig_eq)
plt.xlabel("log(N_f)"), plt.ylabel("$S_{eq}$")
print(database.head())
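Before modeling, it is worth checking the size and ranges of the database, since the collocation interval the PINN uses later (log10 Nf from 2 to 9) should cover the data (a quick look, not in the original notebook):

In [ ]:
# Sanity check: number of samples and ranges of life and equivalent stress
print(database.shape)
print(database[['Nf', 'sig_eq']].describe())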
In [ ]:
Nf = database.Nf
seq = database.sig_eq
# Sequential 80/20 split again; if the rows are ordered by test condition,
# shuffling first (e.g. database.sample(frac=1)) may give a more
# representative validation set
train_split = 0.8
Nf_train = Nf[:int(train_split*Nf.shape[0])]
Nf_val = Nf[int(train_split*Nf.shape[0]):]
seq_train = seq[:int(train_split*Nf.shape[0])]
seq_val = seq[int(train_split*Nf.shape[0]):]

input_layer = Input(shape=(1,))
hidden_layer = Dense(units=20, activation='relu')(input_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
output_layer = Dense(units=1, activation='linear')(hidden_layer)

nn = Model(inputs=input_layer, outputs=output_layer)
opt = keras.optimizers.Adam(learning_rate=0.01)
nn.compile(optimizer=opt, loss='mse')
nn_training = nn.fit(Nf_train, seq_train, validation_data=(Nf_val, seq_val), epochs=500, verbose=1)
In [ ]:
plt.figure(dpi=100)
plt.plot(nn_training.history['loss'], label='Training')
plt.plot(nn_training.history['val_loss'], label='Validation')
plt.legend()
plt.figure(dpi=100)
plt.scatter(database.Nf, database.sig_eq)
Nf_range = np.linspace(1, 12, 100)
seq_pred = nn.predict(Nf_range)
plt.plot(Nf_range, seq_pred, color='r')
plt.xlabel("log(N_f)"), plt.ylabel("$S_{eq}$")
4/4 [==============================] - 0s 5ms/step
Out[ ]:
(Text(0.5, 0, 'log(N_f)'), Text(0, 0.5, '$S_{eq}$'))
[Figure: NN training and validation loss vs. epoch]
[Figure: S-N data with the single-input NN prediction overlaid]
In [ ]:
# Predict S-N behavior from manufacturing variables plus fatigue life
input_vars = ['Orientation', 'Beam diameter', 'hatch', 'layer thickness', 'Nf']
x = database.loc[:, input_vars]
y = seq

x_train = x.iloc[:int(train_split*Nf.shape[0]), :]
x_val = x.iloc[int(train_split*Nf.shape[0]):, :]
y_train = seq_train; y_val = seq_val;

input_layer = Input(shape=(5,))
hidden_layer = Dense(units=20, activation='relu')(input_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
output_layer = Dense(units=1, activation='linear')(hidden_layer)

nn = Model(inputs=input_layer, outputs=output_layer)
opt = keras.optimizers.Adam(learning_rate=0.01)
nn.compile(optimizer=opt, loss='mse')
nn_training = nn.fit(x_train, y_train, validation_data=(x_val, y_val), epochs=500, verbose=1)
In [ ]:
plt.figure(dpi=100)
plt.plot(nn_training.history['loss'], label='Training')
plt.plot(nn_training.history['val_loss'], label='Validation')
plt.legend()
plt.figure(dpi=100)
plt.scatter(database.Nf, database.sig_eq)
Nf_range = np.linspace(1, 9, 100)
manufacturing_vars = ['Orientation', 'Beam diameter', 'hatch', 'layer thickness']
for i in range(10):
  # Sample a random combination of manufacturing variables and sweep Nf
  random_extraction = database.loc[np.random.randint(0, database.shape[0]), manufacturing_vars]
  random_extraction['hatch'] = 200.0  # fix the hatch spacing across all curves
  x_test = np.hstack((np.tile(random_extraction, (100, 1)), Nf_range.reshape(-1, 1)))
  seq_pred = nn.predict(x_test.astype(float), verbose=0)
  plt.plot(Nf_range, seq_pred, color='r')
plt.xlabel("log(N_f)"), plt.ylabel("$S_{eq}$")
Out[ ]:
(Text(0.5, 0, 'log(N_f)'), Text(0, 0.5, '$S_{eq}$'))
[Figure: NN training and validation loss vs. epoch]
[Figure: S-N data with NN-predicted curves for random manufacturing settings]
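Because the model takes the manufacturing variables as inputs, it can also be probed for sensitivity to a single variable. A sketch (not in the original notebook) that sweeps the hatch spacing at a fixed fatigue life, assuming the column order of input_vars:

In [ ]:
# Sweep 'hatch' (third column of input_vars) at a fixed life of 10^5 cycles
base = database.iloc[0][manufacturing_vars].astype(float)  # reference row
hatch_values = np.linspace(database.hatch.min(), database.hatch.max(), 50)
x_sweep = np.tile(np.append(base.to_numpy(), 5.0), (50, 1))  # Nf fixed at 5.0
x_sweep[:, 2] = hatch_values
plt.plot(hatch_values, nn.predict(x_sweep, verbose=0))
plt.xlabel('hatch'), plt.ylabel('$S_{eq}$')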
In [ ]:
# PINN: enforce a physically consistent (non-increasing) S-N curve
Nf = database.Nf
seq = database.sig_eq
train_split = 0.8
Nf_train = Nf[:int(train_split*Nf.shape[0])]
Nf_val = Nf[int(train_split*Nf.shape[0]):]
seq_train = seq[:int(train_split*Nf.shape[0])]
seq_val = seq[int(train_split*Nf.shape[0]):]

input_vars = ['Orientation', 'Beam diameter', 'hatch', 'layer thickness', 'Nf']
manufacturing_vars = ['Orientation', 'Beam diameter', 'hatch', 'layer thickness']
x = database.loc[:, input_vars]
y = seq

x_train = x.iloc[:int(train_split*Nf.shape[0]), :]
x_val = x.iloc[int(train_split*Nf.shape[0]):, :]
y_train = seq_train; y_val = seq_val;

input_layer = Input(shape=(5,))
hidden_layer = Dense(units=20, activation='relu')(input_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
hidden_layer = Dense(units=20, activation='relu')(hidden_layer)
output_layer = Dense(units=1, activation='linear')(hidden_layer)
pinn = Model(inputs=input_layer, outputs=output_layer)
Nf_collocation = np.linspace(2, 9, 50)

def pinn_loss():
  # Collocation set: one random combination of manufacturing variables,
  # swept over the fatigue-life range
  random_extraction = database.loc[np.random.randint(0, database.shape[0]), manufacturing_vars]
  x_test = np.hstack((np.tile(random_extraction, (50, 1)), Nf_collocation.reshape(-1, 1)))
  collocation_points = tf.convert_to_tensor(x_test.reshape((-1, 5)), dtype=tf.float32)
  with tf.GradientTape() as tape:
    tape.watch(collocation_points)
    y_pred_coll = pinn(collocation_points)
  grady = tape.gradient(y_pred_coll, collocation_points)
  dSdN = tf.reshape(grady[:, -1], [collocation_points.shape[0], 1])  # dS/d(log Nf)
  # An S-N curve must be non-increasing in life (dS/dN <= 0), so penalize
  # only the positive part of the slope: ReLU(dS/dN) = (dS/dN + |dS/dN|)/2
  f = (dSdN + tf.abs(dSdN)) / 2
  return tf.reduce_mean(tf.square(f))

alpha, beta = 0.2, 0.8  # weights of the physics and data terms

def total_loss(y_true, y_pred):
  mse_loss = tf.reduce_mean(tf.square(y_true - y_pred), axis=-1)
  derivative_loss = pinn_loss()
  return alpha*derivative_loss + beta*mse_loss

opt = keras.optimizers.Adam(learning_rate=0.005)
# run_eagerly so a fresh random collocation set is drawn at every step;
# inside a compiled tf.function the NumPy sampling would run only once
pinn.compile(optimizer=opt, loss=total_loss, run_eagerly=True)
pinn.summary()
pinn_training = pinn.fit(x_train, y_train, validation_data=(x_val, y_val), epochs=250, verbose=2)
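The physics term above encodes the constraint that equivalent stress cannot increase with fatigue life, i.e. $\partial S_{eq} / \partial \log N_f \le 0$, by penalizing only the positive part of the slope at the collocation points:

$$\mathcal{L}_{\text{phys}} = \frac{1}{N_c}\sum_{j}\max\!\left(\frac{\partial \hat{S}_{eq}}{\partial \log N_f}\bigg|_{x_{c,j}},\,0\right)^{\!2}, \qquad \mathcal{L} = \alpha\,\mathcal{L}_{\text{phys}} + \beta\,\mathcal{L}_{\text{data}}$$

Unlike the toy problem, no exact governing equation is imposed here, only an inequality (monotonicity) constraint.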
In [ ]:
plt.figure(dpi=100)
plt.plot(pinn_training.history['loss'], label='Training')
plt.plot(pinn_training.history['val_loss'], label='Validation')
plt.legend()
plt.figure(dpi=100)
plt.scatter(database.Nf, database.sig_eq)
Nf_range = np.linspace(1, 9, 100)
for i in range(10):
  # Sample a random combination of manufacturing variables and sweep Nf
  random_extraction = database.loc[np.random.randint(0, database.shape[0]), manufacturing_vars]
  random_extraction['hatch'] = 200.0  # fix the hatch spacing across all curves
  x_test = np.hstack((np.tile(random_extraction, (100, 1)), Nf_range.reshape(-1, 1)))
  seq_pred = pinn.predict(x_test.astype(float), verbose=0)
  plt.plot(Nf_range, seq_pred, color='r')
plt.xlabel("log(N_f)"), plt.ylabel("$S_{eq}$")
Out[ ]:
(Text(0.5, 0, 'log(N_f)'), Text(0, 0.5, '$S_{eq}$'))
[Figure: PINN training and validation loss vs. epoch]
[Figure: S-N data with PINN-predicted curves for random manufacturing settings]
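As a final check (a sketch, not part of the original notebook), the plain network and the PINN can be compared on data-only validation error:

In [ ]:
# Plain validation MSE of the two five-input models
pred_nn = nn.predict(x_val, verbose=0).ravel()
pred_pinn = pinn.predict(x_val, verbose=0).ravel()
print("validation MSE  NN :", np.mean((pred_nn - y_val.to_numpy())**2))
print("validation MSE PINN:", np.mean((pred_pinn - y_val.to_numpy())**2))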