diff --git a/backend/code/file_tokenizer.py b/backend/code/file_tokenizer.py
index 3633f1f..c73931d 100755
--- a/backend/code/file_tokenizer.py
+++ b/backend/code/file_tokenizer.py
@@ -8,6 +8,7 @@
 
 from pathlib import Path
 
+from code.utils import custom_hash
 from config import SOURCES_FOLDER, HASH_EXTENSION
 
 
@@ -57,7 +58,7 @@ def hash_file_blocks(file_path: str, block_size: int = 512):
     with open(os.path.join(SOURCES_FOLDER, hackthehill_file), 'w', encoding="utf-8") as f:
         f.write(hash_block)
 
-    return hashlib.sha256(json.dumps(header).encode('utf-8')).hexdigest()
+    return custom_hash(json.dumps(header))
 
 
 def get_block_content(file_path, block_index: int, block_size: int = 512) -> bytes:
diff --git a/backend/test/test_file_tokenizer.py b/backend/test/test_file_tokenizer.py
index a606605..dab56a9 100644
--- a/backend/test/test_file_tokenizer.py
+++ b/backend/test/test_file_tokenizer.py
@@ -1,18 +1,58 @@
 """
-TODO
+Testing the File Tokenizer functions.
 """
-
+import json
+import os
 import unittest
 
+from code.file_tokenizer import hash_file_blocks
+from code.utils import custom_hash
+from config import UPLOADS_FOLDER, SOURCES_FOLDER, HASH_EXTENSION
+
 
 class TestFileTokenizer(unittest.TestCase):
     """
-    TODO
+    The file tokenizer functions are the core of the project: how they hash files into blocks
+    and reassemble them determines how the application handles files. The better the hashing
+    and reassembly, the better the quality of file sharing we provide.
     """
 
-    def test_dummy(self):
+    def test_hash_file_blocks_with_empty_file_returns_correct_value(self):
         """
-        TODO
+        Hashing an empty file should produce only the header, with no block entries.
         """
 
-        self.assertEqual(1 + 1, 2)
+        testing_file = os.path.join(
+            UPLOADS_FOLDER,
+            "test_hash_file_blocks_with_empty_file_returns_correct_value.txt"
+        )
+        hackthehill_file = os.path.join(
+            SOURCES_FOLDER,
+            "test_hash_file_blocks_with_empty_file_returns_correct_value" + HASH_EXTENSION
+        )
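+        # Expected .hackthehill contents for an empty file: header only, no blocks.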
+        test_header = {
+            "header": {
+                "file_name": os.path.basename(testing_file),
+                "file_size": 0,
+                "number_of_blocks": 0,
+                "block_size": 512,
+            },
+            "blocks": {}
+        }
+
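+        # "x" mode creates the empty source file and fails if it already exists.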
+        with open(testing_file, "x", encoding="utf-8") as _:
+            hackthehill_file_hashed_content = hash_file_blocks(testing_file)
+
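+            # hash_file_blocks should have written the JSON header and (empty) block map to disk.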
+            with open(hackthehill_file, "r", encoding="utf-8") as g:
+                hackthehill_file_content = json.loads(g.read())
+
+                self.assertEqual(hackthehill_file_content, test_header)
+                self.assertEqual(hackthehill_file_hashed_content,
+                                 custom_hash(json.dumps(hackthehill_file_content)))
+
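+        # Remove the generated files so the test can be re-run ("x" mode fails if they exist).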
+        os.remove(testing_file)
+        os.remove(hackthehill_file)