

Verilog-NN : Recurrent Neural Network : RNN


 

module RecurrentNeuralNetwork(input clk,
                              input [2:0] inputs,
                              output [15:0] out);

  // One Q8.8 fixed-point accumulator per hidden neuron and for the output.
  // Verilog cannot synthesize real-valued weights, so the original 0.3, 0.4, ...
  // are scaled by 256 into integer constants.
  reg [15:0] hidden_neurons [0:1];
  reg [15:0] output_neuron;

  // weights for inputs to hidden layer (Q8.8: value * 256)
  parameter W0 = 16'd77;   // ~0.3
  parameter W1 = 16'd102;  // ~0.4
  parameter W2 = 16'd51;   // ~0.2

  // weights for hidden layer to output
  parameter W3 = 16'd128;  // ~0.5
  parameter W4 = 16'd179;  // ~0.7

  // weights for hidden layer to itself (recurrent connections)
  parameter W5 = 16'd205;  // ~0.8
  parameter W6 = 16'd154;  // ~0.6

  // start from a zero state so simulation does not propagate X values
  initial begin
    hidden_neurons[0] = 0;
    hidden_neurons[1] = 0;
    output_neuron     = 0;
  end

  always @(posedge clk) begin
    // weighted sum of the current inputs plus each neuron's own previous value
    hidden_neurons[0] <= inputs[0]*W0 + inputs[1]*W1 + inputs[2]*W2
                         + ((hidden_neurons[0]*W5) >> 8);
    hidden_neurons[1] <= inputs[0]*W2 + inputs[1]*W1 + inputs[2]*W0
                         + ((hidden_neurons[1]*W6) >> 8);
    // hidden layer to output
    output_neuron <= (hidden_neurons[0]*W3 + hidden_neurons[1]*W4) >> 8;
  end

  // "output" is a reserved word in Verilog, so the port is renamed to "out"
  assign out = output_neuron;
endmodule
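
A minimal testbench sketch for the module above (the testbench name, clock period, and stimulus pattern are assumptions, not part of the original post); it holds a fixed input pattern and prints how the recurrent state builds up over a few clock cycles:

`timescale 1ns/1ps
module RecurrentNeuralNetwork_tb;
  reg clk = 0;
  reg [2:0] inputs = 3'b000;
  wire [15:0] out;

  RecurrentNeuralNetwork dut (.clk(clk), .inputs(inputs), .out(out));

  always #5 clk = ~clk;  // 100 MHz clock (assumed)

  initial begin
    inputs = 3'b101;     // keep a fixed pattern on the inputs
    repeat (8) begin
      @(posedge clk);
      #1 $display("out = %0d (Q8.8 ~ %0f)", out, out / 256.0);
    end
    $finish;
  end
endmodule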

 

https://towardsdatascience.com/the-mostly-complete-chart-of-neural-networks-explained-3fb6f2367464

 

The mostly complete chart of Neural Networks, explained

The zoo of neural network types grows exponentially. One needs a map to navigate between many emerging architectures and approaches.


 

module RNN(
  input clk,
  input reset,
  input signed [15:0] x,       // input sample, Q8.8 fixed point
  input signed [15:0] h_prev,  // previous hidden state, Q8.8
  output [15:0] y,
  output [15:0] h
);

// Weights and biases as Q8.8 constants (values are illustrative)
parameter signed [15:0] W_xh = 16'sd128;  // ~0.5, input-to-hidden
parameter signed [15:0] W_hh = 16'sd77;   // ~0.3, hidden-to-hidden
parameter signed [15:0] b_h  = 16'sd26;   // ~0.1, hidden bias
parameter signed [15:0] W_hy = 16'sd102;  // ~0.4, hidden-to-output
parameter signed [15:0] b_y  = 16'sd0;    //  0.0, output bias

// State of the RNN
reg signed [15:0] h_t;

// Output of the RNN
reg signed [15:0] y_t;

// Activation function. A real sigmoid/tanh would need a lookup table or a
// piecewise approximation; a ReLU stands in here as a placeholder.
function signed [15:0] activation(input signed [15:0] v);
  activation = (v < 0) ? 16'sd0 : v;
endfunction

// Wide pre-activation sums so the Q8.8 products do not overflow before rescaling
wire signed [31:0] h_pre = ((W_xh * x + W_hh * h_prev) >>> 8) + b_h;
wire signed [31:0] y_pre = ((W_hy * h_t) >>> 8) + b_y;

// Update the state of the RNN: h_t = f(W_xh*x + W_hh*h_prev + b_h)
always @(posedge clk) begin
  if (reset) begin
    h_t <= 0;
  end else begin
    h_t <= activation(h_pre[15:0]);
  end
end

// Compute the output of the RNN: y_t = f(W_hy*h_t + b_y)
always @(posedge clk) begin
  if (reset) begin
    y_t <= 0;
  end else begin
    y_t <= activation(y_pre[15:0]);
  end
end

// Connect the outputs
assign h = h_t;
assign y = y_t;

endmodule
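
As a usage illustration, here is a small testbench sketch (the clock period and stimulus values are assumed, not from the original post) that resets the RNN, then streams a constant input while feeding the hidden state back in through h_prev:

`timescale 1ns/1ps
module RNN_tb;
  reg clk = 0, reset = 1;
  reg signed [15:0] x = 0, h_prev = 0;
  wire [15:0] y, h;

  RNN dut (.clk(clk), .reset(reset), .x(x), .h_prev(h_prev), .y(y), .h(h));

  always #5 clk = ~clk;

  initial begin
    repeat (2) @(posedge clk);   // hold reset for two cycles
    reset = 0;
    x = 16'sd256;                // 1.0 in Q8.8
    repeat (5) begin
      @(posedge clk);
      #1 h_prev = h;             // feed the hidden state back externally
      $display("t=%0t  h=%0d  y=%0d", $time, $signed(h), $signed(y));
    end
    $finish;
  end
endmodule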

 

 
