diff --git a/main.py b/main.py
index 927ef8ad67f0f50226279df30d17094e418e42bf..a44fe90676fc4d1354d091aa0ebfc0f65d296d09 100644
--- a/main.py
+++ b/main.py
@@ -18,11 +18,17 @@ logger = logging.get_logger(__name__)
 housing_data = pandas.read_csv('./Documents/other_housing.csv')
 
 # Initially only work with first 10, for testing purposes.
-housing_data = housing_data[0:10]
+housing_data = housing_data[0:3]
 
 # Normalize data.
 normalizer = neural_net.Normalizer()
 normalized_data = normalizer.normalize_data(housing_data)
+features = normalized_data.loc[:, normalized_data.columns != 'SalePrice']
+targets = normalized_data['SalePrice']
+
+logger.info('')
+logger.info('Normalized Features: \n{0}'.format(features))
+logger.info('Normalized Targets: \n{0}'.format(targets))
 
 # Start neural net.
 backprop = neural_net.BackPropNet(normalized_data)
diff --git a/neural_net.py b/neural_net.py
index 3ad7ce179405ec6278f25bbe5f74b3d8207cab59..d60a2895521a7c1a4729f140735e7652da6fe366 100644
--- a/neural_net.py
+++ b/neural_net.py
@@ -3,7 +3,7 @@ Neural Net logic.
 """
 
 # System Imports.
-import numpy, pandas
+import math, numpy, pandas
 
 # User Class Imports.
 from resources import logging
@@ -28,42 +28,167 @@ class Normalizer():
         # logger.info(data.columns.values)
 
         # Address individual columns.
+        normalized_data = pandas.DataFrame()
+
+        column = 'Lot Area'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        column = 'Year Built'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        column = 'Year Remod/Add'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        column = 'Fireplaces'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        column = 'Garage Area'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        column = 'Pool Area'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        column = 'Yr Sold'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        column = 'SalePrice'
+        if column in data.columns:
+            frame = pandas.DataFrame(data[column])
+            normalized_data = normalized_data.join(frame, how='outer')
+            data = data.loc[:, data.columns != column]
+
+        return normalized_data
+
+    def squish_values(self):
+        """
+        Squishes vector values to be between 0 and 1.
+        :return:
+        """
+        pass
 
-        return data
+    def separate_categories(self):
+        pass
 
 class BackPropNet():
     """
     Neural Net implementing back propagation.
     """
     def __init__(self, data):
-        self.weights = self._initialize_weights(data)
+        self.hidden_layer_size = 3
+        self.network = []
+        self._create_architecture(data)
 
-    def _initialize_weights(self, data):
+    def _create_architecture(self, data):
         """
-        Initialize weights based of number of passed columns in data.
-        Values are initialized to random decimals near 0, using a normal distribution.
-        :param data: Data to create weights for.
-        :return: Vector of column weights.
+        Creates neural net architecture.
+        Each layer has sets of weights equal to the number of nodes in the layer.
+        Each set of weights has x values where x is the number of nodes in the previous layer, plus a bias.
+        Weight values are randomized values near 0, using a normal distribution.
+        :param data:
+        :return:
         """
-        weights = []
-        for column in data.columns:
-            weights.append(numpy.random.randn() * 0.001)
-
-        # logger.info(weights)
-        return weights
-
-    def predict(self):
-        pass
+        # Create first hidden layer.
+        hidden_layer_1 = []
+        for index in range(self.hidden_layer_size):
+            hidden_layer_1.append([
+                (numpy.random.randn() * 0.001) for index in range(len(data.columns) + 1)
+            ])
+
+        # Create second hidden layer.
+        hidden_layer_2 = []
+        for index in range(self.hidden_layer_size):
+            hidden_layer_2.append([
+                (numpy.random.randn() * 0.001) for index in range(self.hidden_layer_size + 1)
+            ])
+
+        # Create output layer
+        output_layer = [[
+            (numpy.random.randn() * 0.001) for index in range(self.hidden_layer_size + 1)
+        ]]
+
+        # Add layers to network.
+        self.network.append(hidden_layer_1)
+        self.network.append(hidden_layer_2)
+        self.network.append(output_layer)
+
+        logger.info('Network:')
+        index = 0
+        for layer in self.network:
+            logger.info('Layer {0}: {1}'.format(index, layer))
+            index += 1
+
+    def activation(self, weights, inputs):
+        """
+        Calculate if neuron fires or not, based on inputs and weights being calculated and passed into sigmoid.
+        :param weights: Weights of given layer.
+        :param inputs: Inputs to calculate with.
+        :return: Calculated value, passed through sigmoid.
+        """
+        # Calculate single value based on inputs and weights.
+        value = weights[-1]
+        for index in range(len(weights) - 1):
+            value += weights[index] * inputs[index]
 
-    def train(self):
-        pass
+        # Pass into sigmoid, then return result.
+        return self.sigmoid(value)
 
-    def _train_step(self):
-        pass
+    def sigmoid(self, value):
+        """
+        Calculate the sigmoid of the provided value.
+        :param value: Single value to calculate.
+        :return: Sigmoid of value.
+        """
+        return ( 1 / (1 + math.exp(-value)) )
 
-    def _calculate_error(self):
-        pass
+    def reverse_sigmoid(self, value):
+        """
+        Calculate the derivative of sigmoid.
+        :param value: Single value to calculate.
+        :return: Reverse sigmoid of value.
+        """
+        return ( self.sigmoid(value) * ( 1 - self.sigmoid(value) ) )
 
+    def forward_propagate(self, inputs):
+        """
+        Walk forward through the neural network.
+        :param inputs: Initial inputs for network.
+        :return: Output results of network.
+        """
+        outputs = None
+        for layer in self.network:
+            outputs = []
+            for neuron in layer:
+                outputs.append(self.activation(neuron, inputs))
+            inputs = outputs
+        return outputs
+
+    def backward_propagate(self, inputs):
+        """
+        Walk backward through the neural network, using derivatives.
+        :param inputs: Original output of network.
+        :return: ???
+        """
 
 class ResultTracker():
     """