# Data for training the ANN model.

# Option 1: a handful of exact Celsius/Fahrenheit pairs.
celsius = np.array([-40, -10, 0, 8, 15, 22, 38], dtype=float)
fahrenheit = np.array([-40, 14, 32, 46, 59, 72, 100], dtype=float)

# Option 2: 1000 noisy samples of the exact law (X degC * 9/5) + 32.
points = 1000
np.random.seed(99)  # reproducible noise
dataIn = np.linspace(-40, 60, points)
target = dataIn * 9 / 5 + 32 + 4 * np.random.randn(points)

# Plot both data sets together.
plt.plot(celsius, fahrenheit, 'or', label='data-set 1')
plt.plot(dataIn, target, '.b', alpha=0.3, label='data-set 2')
plt.legend()
plt.grid()
plt.show()
from tensorflow.keras.models import Sequential  # sequential ANN container
from tensorflow.keras.layers import Dense, Input  # fully-connected layer

# NN definition: 1 input -> `hn` linear hidden units -> 1 linear output.
hn = 2
model = Sequential()
model.add(Input(shape=(1,), name='input'))
model.add(Dense(hn, activation='linear', name='hidden'))
model.add(Dense(1, activation='linear', name='output'))
model.summary()
### Very important note: implement Python code to show the ANN model connections using ASCII art.
from tensorflow.keras.optimizers import Adam

# Hyper-parameters.
epoch = 500
lr = 0.01
tf.random.set_seed(99)  # reproducible TensorFlow weight initialization

# Explicit keyword makes the Adam argument unambiguous.
model.compile(optimizer=Adam(learning_rate=lr), loss='mean_squared_error')

print("Starting training ...")
historial = model.fit(dataIn, target, epochs=epoch, verbose=False)
print("Model trained!")  # fixed typo: was "Model trainned!"
# Do the maths by hand: one forward pass with the trained weights.
inTest = np.array(inTest)

# Hidden layer: kernel (1 x 2) and bias (2,).
whi = np.array([[-0.27738443, 0.7908125]])
bh = np.array([-8.219968, 6.714554])
Oh = np.dot(inTest, whi) + bh

# Output layer: kernel (2 x 1) and bias (1,).
who = np.array([[-1.9934888], [1.5958738]])
bo = np.array([5.1361823])
Oo = np.dot(Oh, who) + bo
Oo
array([213.73814765])
sklearn
from sklearn.neural_network import MLPRegressor
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
import numpy as np

# Scale inputs and targets to zero mean / unit variance.
scaler_X = StandardScaler()
scaler_y = StandardScaler()
X_scaled = scaler_X.fit_transform(dataIn.reshape(-1, 1))
y_scaled = scaler_y.fit_transform(target.reshape(-1, 1)).ravel()

# Model equivalent to the Keras one: one hidden layer, 2 linear neurons.
mlp = MLPRegressor(
    hidden_layer_sizes=(2,),   # 1 hidden layer with 2 neurons
    activation='identity',     # linear activation
    learning_rate_init=0.001,  # learning rate
    solver='adam',
    max_iter=1000,
    tol=1e-6,
    random_state=4,
)

# Train the model.
mlp.fit(X_scaled, y_scaled)

# Predict, then undo the target scaling.
y_pred_scaled = mlp.predict(X_scaled)
y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1))

# Visualize results (optional).
import matplotlib.pyplot as plt
plt.scatter(dataIn, target, label="Original data")
plt.plot(dataIn, y_pred, color='red', label="MLPRegressor output")
plt.legend()
plt.show()
# Plot how the training loss evolved (axis labels kept in Spanish).
plt.plot(mlp.loss_curve_, '.k')
plt.xlabel("Épocas")
plt.ylabel("Error (loss)")
plt.title("Evolución del error en entrenamiento")
plt.grid(True)
plt.show()
# Report the fitted MLP's parameters (labels kept in Spanish).
_param_report = [
    ("Pesos entre capa de entrada y oculta:", mlp.coefs_[0]),
    ("Pesos entre capa oculta y salida:", mlp.coefs_[1]),
    ("Bias de capa oculta:", mlp.intercepts_[0]),
    ("Bias de salida:", mlp.intercepts_[1]),
]
for _label, _value in _param_report:
    print(_label, _value)
Pesos entre capa de entrada y oculta: [[ 1.70549238 -0.37235861]]
Pesos entre capa oculta y salida: [[ 0.30934654]
[-1.25842791]]
Bias de capa oculta: [1.02819949 1.02732046]
Bias de salida: [0.97683886]
Model scheme
def generate_ascii_ann(model):
    """Return a rough ASCII sketch of the Dense layers in a Keras model.

    For each Dense layer it lists the layer name/type, input and neuron
    counts, weight/bias shapes, and a small 'o'/'|' node drawing.
    Non-Dense layers are skipped.
    """
    parts = ["\nArtificial Neural Network Architecture:\n"]
    for i, layer in enumerate(model.layers):
        weights = layer.get_weights()
        if not isinstance(layer, Dense):
            continue  # only Dense layers carry a 2-D kernel we can describe
        input_dim, output_dim = weights[0].shape  # (inputs, neurons)
        parts.append(f"\nLayer {i+1}: {layer.name} ({layer.__class__.__name__})\n")
        parts.append(f" Inputs: {input_dim}, Neurons: {output_dim}\n")
        parts.append(f" Weights Shape: {weights[0].shape}\n")
        if len(weights) > 1:  # bias vector present
            parts.append(f" Biases Shape: {weights[1].shape}\n")
        # Crude node drawing: one 'o' per neuron, '|' per connection/input.
        parts.append(" " + " o " * output_dim + " <- Output Neurons\n")
        parts.append(" | " * output_dim + "\n")
        parts.append(" " + " | " * input_dim + " <- Inputs\n")
    return "".join(parts)


# Generate and print the ASCII diagram for the trained model.
ascii_ann = generate_ascii_ann(model)
print(ascii_ann)