Skip to content

Commit

Permalink
add tests for server/client
Browse files Browse the repository at this point in the history
  • Loading branch information
atharvakale343 committed Sep 20, 2024
1 parent 77b0a2c commit 0205a23
Show file tree
Hide file tree
Showing 7 changed files with 163 additions and 14 deletions.
2 changes: 2 additions & 0 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,14 @@ name = "pypi"
[packages]
pydantic = "~=2.9"
flask-ml = "0.0.8"
colorama = "*"

[dev-packages]
black = "~=24.8"
mypy = "~=1.11"
flake8 = "~=7.1"
isort = "~=5.13"
hapless = "*"

[requires]
python_version = "3.11"
82 changes: 81 additions & 1 deletion Pipfile.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions client_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,27 +12,27 @@
{
"input": {
"input_type": "TARGET_FOLDER",
"file_path": f"{root.joinpath("examples", "target_folder")}",
"file_path": f'{root.joinpath("examples", "target_folder")}',
},
},
{
"input": {
"input_type": "KNOWN_DATASET",
"file_path": f"{root.joinpath("examples", "known_dataset")}",
"file_path": f'{root.joinpath("examples", "known_dataset")}',
},
},
{
"input": {
"input_type": "OUTPUT_SQL_PATH",
"file_path": f"{root.joinpath("examples", "out", "known_content_hashes.sqlite")}",
"file_path": f'{root.joinpath("examples", "out", "known_content_hashes.sqlite")}',
},
},
]

data_type = DataTypes.CUSTOM # The type of the input data

# Parameters of the model
parameters = {"block_size": 4096, "target_probability": 0.99}
parameters = {"block_size": 4, "target_probability": 0.99}

response = client.request(inputs, data_type, parameters) # Send a request to the server

Expand Down
3 changes: 3 additions & 0 deletions small_blk_forensics/ml/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ def run_with_known_content_directory(

# Hash the target directory and check for matches using random blocks
response = self._hash_directory_random_blocks(target_directory, db_conn)
db_conn.close()

return response

Expand All @@ -60,6 +61,7 @@ def run_with_known_content_sqlite(

# Hash the target directory and check for matches
response = self._hash_directory_random_blocks(target_directory, db_conn)
db_conn.close()

return response

Expand Down Expand Up @@ -275,6 +277,7 @@ def hash_directory(self, directory: Path, out_sql_path: Path) -> None:

# Fully hash the known content directory and store hashes in the output directory's database
self._hash_directory(directory, db_conn, out_sql_path)
db_conn.close()

def _hash_directory(self, directory: Path, db_conn: sqlite3.Connection, out_path: Path) -> None:
"""
Expand Down
28 changes: 20 additions & 8 deletions test/rere.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,14 +51,26 @@ def write_blob_field(f: BinaryIO, name: bytes, blob: bytes):
f.write(b'\n')

def capture(shell: str) -> dict:
    """Run *shell* via ``sh -c`` and record its outcome as a snapshot dict.

    Commands prefixed with ``(NOCAPTURE)`` are executed with stdout/stderr
    passed straight through to the terminal (used for long-running helpers
    such as ``hap run``); their output fields are recorded as empty bytes.

    Returns a dict with keys ``shell``, ``returncode``, ``stdout``, ``stderr``.
    """
    if shell.startswith("(NOCAPTURE)"):
        # Drop the "(NOCAPTURE)" marker token; the recorded 'shell' keeps the
        # full original line so snapshot comparison stays stable.
        new_shell = " ".join(shell.split(" ")[1:])
        print(f"RUNNING: {new_shell}")
        process = subprocess.run(['sh', '-c', new_shell])
        return {
            'shell': shell,
            # BUG FIX: this was hard-coded to 0, which silently masked
            # failures of NOCAPTURE commands. Record the real exit status.
            'returncode': process.returncode,
            'stdout': b'',
            'stderr': b'',
        }

    else:
        print(f"CAPTURING: {shell}")
        process = subprocess.run(['sh', '-c', shell], capture_output=True)
        return {
            'shell': shell,
            'returncode': process.returncode,
            'stdout': process.stdout,
            'stderr': process.stderr,
        }

def load_list(file_path: str) -> list[str]:
with open(file_path) as f:
Expand Down
6 changes: 6 additions & 0 deletions test/test.list
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,9 @@ python cmd_interface.py generate_hashes --output_sql ./examples/out/known_conten

# Run SBF on a pre-generated known content directory SQLite DB and target directory
python cmd_interface.py hash_random_blocks --input_sql ./examples/out/known_content_hashes.sqlite --target_directory ./examples/target_folder --block_size 4 | head -n -2

# Server-client test
(NOCAPTURE) hap run --check python -m small_blk_forensics.backend.server
sleep 3 && python client_example.py
hap kill --all
hap cleanall
48 changes: 47 additions & 1 deletion test/test.list.bi
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
:i count 8
:i count 14
:b shell 59
# Run SBF on a known content directory and target directory
:i returncode 0
Expand Down Expand Up @@ -77,3 +77,49 @@ Results:

:b stderr 0

:b shell 0

:i returncode 0
:b stdout 0

:b stderr 0

:b shell 20
# Server-client test
:i returncode 0
:b stdout 0

:b stderr 0

:b shell 72
(NOCAPTURE) hap run --check python -m small_blk_forensics.backend.server
:i returncode 0
:b stdout 0

:b stderr 0

:b shell 35
sleep 3 && python client_example.py
:i returncode 0
:b stdout 591
INFO: Received a response
[{'result': {'found': True, 'target_file': '/Users/atharvakale/workspace/umass/596e-cs/individual-project/small-block-forensics/examples/target_folder/sample.txt', 'known_dataset_file': '/Users/atharvakale/workspace/umass/596e-cs/individual-project/small-block-forensics/examples/known_dataset/sample.txt', 'block_num_in_known_dataset': 0, 'block_num_in_target': 1}, 'text': 'RESULTS'}, {'result': '/Users/atharvakale/workspace/umass/596e-cs/individual-project/small-block-forensics/examples/out/known_content_hashes.sqlite', 'text': 'Successfully stored hashes'}]

:b stderr 0

:b shell 14
hap kill --all
:i returncode 0
:b stdout 26
💀 Killed 1 active haps

:b stderr 0

:b shell 12
hap cleanall
:i returncode 0
:b stdout 29
🧲 Deleted 1 finished haps

:b stderr 0

0 comments on commit 0205a23

Please sign in to comment.