Merged
5 changes: 5 additions & 0 deletions docs/book/src/usage/monitor.rst
@@ -211,3 +211,8 @@ One example procedure is as follow:
 
 .. image:: ../_images/screenshots/debugger2disassembler.png
    :align: center
+
+
+The Art of Detonation Debugging: A Strategic Guide for CAPE Sandbox
+===================================================================
+Coming soon
15 changes: 8 additions & 7 deletions lib/cuckoo/common/cape_utils.py
@@ -122,13 +122,13 @@ def hash_file(method, path: str) -> str:
     @param path: file path
     @return: computed hash string
     """
-    f = open(path, "rb")
     h = method()
-    while True:
-        buf = f.read(BUFSIZE)
-        if not buf:
-            break
-        h.update(buf)
+    with open(path, "rb") as f:
+        while True:
+            buf = f.read(BUFSIZE)
+            if not buf:
+                break
+            h.update(buf)
     return h.hexdigest()
 
 
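Note: the hunk above swaps a bare open() for a context manager, so the descriptor is released even if a read raises mid-hash. A minimal standalone sketch of the same pattern (names here are illustrative, not taken from cape_utils.py):

    import hashlib

    def sha256_of(path: str) -> str:
        digest = hashlib.sha256()
        with open(path, "rb") as f:  # closed on success, error, or early return
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)
        return digest.hexdigest()
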
@@ -335,7 +335,8 @@ def static_config_lookup(file_path: str, sha256: str = False) -> dict:
         dict or None: A dictionary containing the configuration information if found, otherwise None.
     """
     if not sha256:
-        sha256 = hashlib.sha256(open(file_path, "rb").read()).hexdigest()
+        with open(file_path, "rb") as f:
+            sha256 = hashlib.sha256(f.read()).hexdigest()
 
     if repconf.mongodb.enabled:
         document_dict = mongo_find_one(
11 changes: 9 additions & 2 deletions lib/cuckoo/common/integrations/file_extra_info.py
@@ -187,7 +187,8 @@ def static_file_info(
 
     # ToDo we need type checking as it wont work for most of static jobs
     if HAVE_PEFILE and ("PE32" in data_dictionary["type"] or "MS-DOS executable" in data_dictionary["type"]):
-        data_dictionary["pe"] = PortableExecutable(file_path).run(task_id)
+        with PortableExecutable(file_path) as pe:
+            data_dictionary["pe"] = pe.run(task_id)
 
     if HAVE_FLARE_CAPA:
         # https://github.com/mandiant/capa/issues/2620
@@ -965,7 +966,13 @@ def RarSFX_extract(file, *, data_dictionary, options: dict, **_) -> ExtractorRet
 
 @time_tracker
 def office_one(file, **_) -> ExtractorReturnType:
-    if not HAVE_ONE or open(file, "rb").read(16) not in (
+    if not HAVE_ONE:
+        return
+
+    with open(file, "rb") as f:
+        header = f.read(16)
+
+    if header not in (
         b"\xE4\x52\x5C\x7B\x8C\xD8\xA7\x4D\xAE\xB1\x53\x78\xD0\x29\x96\xD3",
         b"\xA1\x2F\xFF\x43\xD9\xEF\x76\x4C\x9E\xE2\x10\xEA\x57\x22\x76\x5F",
     ):
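Note: the two 16-byte magics checked in office_one above are the little-endian GUID headers of OneNote revision-store files (.one / .onetoc2). A quick reference sketch for decoding the first one:

    import uuid

    one = uuid.UUID(bytes_le=b"\xE4\x52\x5C\x7B\x8C\xD8\xA7\x4D\xAE\xB1\x53\x78\xD0\x29\x96\xD3")
    print(one)  # 7b5c52e4-d88c-4da7-aeb1-5378d02996d3, the .one file header GUID
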
10 changes: 10 additions & 0 deletions lib/cuckoo/common/integrations/parse_pe.py
@@ -164,6 +164,16 @@ def __init__(self, file_path: str = False, data: bytes = False):
             log.debug("PE type not recognised: %s", e)
         # self.results = results
 
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    def close(self):
+        if self.pe:
+            self.pe.close()
+
     @property
     def file_data(self):
         if not self._file_data and path_exists(self.file_path):
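Note: with __enter__/__exit__ in place, callers can scope the pefile handle instead of leaking it. A usage sketch mirroring how the other hunks in this PR call it (the sample path is hypothetical):

    from lib.cuckoo.common.integrations.parse_pe import PortableExecutable

    with PortableExecutable("/tmp/sample.exe") as pe:
        exports = pe.get_dll_exports()
    # __exit__ has invoked close(), which releases pefile's mapped file
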
3 changes: 2 additions & 1 deletion lib/cuckoo/common/mapTTPs.py
@@ -8,7 +8,8 @@
 ttps_map_file = os.path.join(CUCKOO_ROOT, "data", "mitre", "TTPs.json")
 if os.path.exists(ttps_map_file):
     try:
-        ttpDict = json.loads(open(ttps_map_file, "r").read())
+        with open(ttps_map_file, "r") as f:
+            ttpDict = json.load(f)
     except Exception as e:
         print("Can't load TTPs.json file", e)
 
6 changes: 4 additions & 2 deletions lib/cuckoo/common/quarantine.py
@@ -721,8 +721,10 @@ def unquarantine(f):
 
     tmp_path = unquarantine(sys.argv[1])
     if tmp_path:
-        original = hashlib.sha256(open(sys.argv[1], "rb").read()).hexdigest()
-        unq = hashlib.sha256(open(tmp_path, "rb").read()).hexdigest()
+        with open(sys.argv[1], "rb") as f:
+            original = hashlib.sha256(f.read()).hexdigest()
+        with open(tmp_path, "rb") as f:
+            unq = hashlib.sha256(f.read()).hexdigest()
         if original == unq:
             print("Unsuported quarantine file format")
         else:
12 changes: 8 additions & 4 deletions lib/cuckoo/common/saztopcap.py
@@ -163,13 +163,15 @@ def saz_to_pcap(sazpath):
                 src = m.group("clientip")
             elif m and m.group("hostip"):
                 dst = m.group("hostip")
-            req = open(f"{fiddler_raw_dir}{fid}_c.txt").read()
+            with open(f"{fiddler_raw_dir}{fid}_c.txt") as f:
+                req = f.read()
             m = re.match(r"^(?P<verb>[^\r\n\s]+)\s+(?P<host_and_port>https?\:\/\/[^\/\r\n\:]+(\:(?P<dport>\d{1,5}))?)\/", req)
             if m and m.group("verb") != "CONNECT":
                 req = req.replace(m.group("host_and_port"), "", 1)
                 if m.group("dport") and int(m.group("dport")) <= 65535:
                     dport = int(m.group("dport"))
-            resp = open(f"{fiddler_raw_dir}{fid}_s.txt").read()
+            with open(f"{fiddler_raw_dir}{fid}_s.txt") as f:
+                resp = f.read()
             (seq, ack) = build_handshake(src, dst, sport, dport, pktdump, smac, dmac)
             (seq, ack) = make_pkts(src, dst, sport, dport, seq, ack, req, pktdump, smac, dmac)
             (seq, ack) = make_pkts(dst, src, dport, sport, seq, ack, resp, pktdump, dmac, smac)
@@ -192,13 +194,15 @@ def saz_to_pcap(sazpath):
                 log.error("Failed to find fiddler ID tag")
                 return None
 
-            req = open(f"{fiddler_raw_dir}{fid}_c.txt").read()
+            with open(f"{fiddler_raw_dir}{fid}_c.txt") as f:
+                req = f.read()
             m = re.match(r"^(?P<verb>[^\r\n\s]+)\s+(?P<host_and_port>https?\:\/\/[^\/\r\n\:]+(\:(?P<dport>\d{1,5}))?)\/", req)
             if m and m.group("verb") != "CONNECT":
                 req = req.replace(m.group("host_and_port"), "", 1)
                 if m.group("dport") and int(m.group("dport")) <= 65535:
                     dport = int(m.group("dport"))
-            resp = open(f"{fiddler_raw_dir}{fid}_s.txt").read()
+            with open(f"{fiddler_raw_dir}{fid}_s.txt") as f:
+                resp = f.read()
             (seq, ack) = build_handshake(src, dst, sport, dport, pktdump, smac, dmac)
             (seq, ack) = make_pkts(src, dst, sport, dport, seq, ack, req, pktdump, smac, dmac)
             (seq, ack) = make_pkts(dst, src, dport, sport, seq, ack, resp, pktdump, dmac, smac)
7 changes: 4 additions & 3 deletions lib/cuckoo/common/utils.py
@@ -602,10 +602,11 @@ def store_temp_file(filedata: bytes, filename: str, path=None) -> bytes:
     with open(tmp_file_path, "wb") as tmp_file:
         # If filedata is file object, do chunked copy.
         if hasattr(filedata, "read"):
-            chunk = filedata.read(1024)
-            while chunk:
-                tmp_file.write(chunk)
+            with filedata:
                 chunk = filedata.read(1024)
+                while chunk:
+                    tmp_file.write(chunk)
+                    chunk = filedata.read(1024)
         else:
             tmp_file.write(filedata)
 
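Note: `with filedata:` ties the source object's lifetime to the copy loop, so the uploaded file object is closed even if a write raises. An equivalent standard-library sketch (not what the patch uses, just the same idea):

    import shutil

    def copy_stream(filedata, dst_path: str) -> None:
        # Chunked copy; both the source object and the destination handle
        # are closed on exit, success or failure.
        with open(dst_path, "wb") as tmp_file, filedata:
            shutil.copyfileobj(filedata, tmp_file, length=1024)
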
155 changes: 42 additions & 113 deletions lib/cuckoo/common/web_utils.py
@@ -1605,53 +1605,54 @@ def process_new_task_files(request, samples: list, details: dict, opt_filename:
     """
     list_of_files = []
     for sample in samples:
-        # Error if there was only one submitted sample, and it's empty.
-        # But if there are multiple and one was empty, just ignore it.
-        if not sample.size:
-            details["errors"].append({sample.name: "You uploaded an empty file."})
-            continue
+        with sample:
+            # Error if there was only one submitted sample, and it's empty.
+            # But if there are multiple and one was empty, just ignore it.
+            if not sample.size:
+                details["errors"].append({sample.name: "You uploaded an empty file."})
+                continue
 
-        size = sample.size
-        if size > web_cfg.general.max_sample_size and not (
-            web_cfg.general.allow_ignore_size and "ignore_size_check" in details["options"]
-        ):
-            if not web_cfg.general.enable_trim:
+            size = sample.size
+            if size > web_cfg.general.max_sample_size and not (
+                web_cfg.general.allow_ignore_size and "ignore_size_check" in details["options"]
+            ):
+                if not web_cfg.general.enable_trim:
+                    details["errors"].append(
+                        {
+                            sample.name: f"Uploaded file exceeds the maximum allowed size in conf/web.conf. Sample size is: {size / float(1 << 20):,.0f} Allowed size is: {web_cfg.general.max_sample_size / float(1 << 20):,.0f}"
+                        }
+                    )
+                    continue
+
+            data = sample.read()
+
+            if opt_filename:
+                filename = opt_filename
+            else:
+                filename = sanitize_filename(sample.name)
+
+            # Moving sample from django temporary file to CAPE temporary storage for persistence, if configured by user.
+            try:
+                path = store_temp_file(data, filename)
+                target_file = File(path)
+                sha256 = target_file.get_sha256()
+            except OSError:
                 details["errors"].append(
-                    {
-                        sample.name: f"Uploaded file exceeds the maximum allowed size in conf/web.conf. Sample size is: {size / float(1 << 20):,.0f} Allowed size is: {web_cfg.general.max_sample_size / float(1 << 20):,.0f}"
-                    }
+                    {filename: "Temp folder from cuckoo.conf, disk is out of space. Clean some space before continue."}
                 )
                 continue
 
-        data = sample.read()
-
-        if opt_filename:
-            filename = opt_filename
-        else:
-            filename = sanitize_filename(sample.name)
-
-        # Moving sample from django temporary file to CAPE temporary storage for persistence, if configured by user.
-        try:
-            path = store_temp_file(data, filename)
-            target_file = File(path)
-            sha256 = target_file.get_sha256()
-        except OSError:
-            details["errors"].append(
-                {filename: "Temp folder from cuckoo.conf, disk is out of space. Clean some space before continue."}
-            )
-            continue
-
-        if (
-            not request.user.is_staff
-            and (web_cfg.uniq_submission.enabled or unique)
-            and db.check_file_uniq(sha256, hours=web_cfg.uniq_submission.hours)
-        ):
-            details["errors"].append(
-                {filename: "Duplicated file, disable unique option on submit or in conf/web.conf to force submission"}
-            )
-            continue
+            if (
+                not request.user.is_staff
+                and (web_cfg.uniq_submission.enabled or unique)
+                and db.check_file_uniq(sha256, hours=web_cfg.uniq_submission.hours)
+            ):
+                details["errors"].append(
+                    {filename: "Duplicated file, disable unique option on submit or in conf/web.conf to force submission"}
+                )
+                continue
 
-        list_of_files.append((data, path, sha256))
+            list_of_files.append((data, path, sha256))
 
     return list_of_files, details
 
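Note: the continue statements now sit inside the with block, and __exit__ still runs before the loop advances, so every uploaded sample is closed on each early-out path. A tiny self-contained check of that semantics:

    class Probe:
        def __enter__(self):
            return self

        def __exit__(self, *exc):
            print("closed")

    for _ in range(2):
        with Probe():
            continue  # prints "closed" twice: __exit__ fires despite continue
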
@@ -1690,78 +1691,6 @@ def process_new_dlnexec_task(url: str, route: str, options: str, custom: str):
     return path, response, ""
 
 
-def submit_task(
-    target: str,
-    package: str = "",
-    timeout: int = 0,
-    task_options: str = "",
-    priority: int = 1,
-    machine: str = "",
-    platform: str = "",
-    memory: bool = False,
-    enforce_timeout: bool = False,
-    clock: str = None,
-    tags: str = None,
-    parent_id: int = None,
-    tlp: bool = None,
-    distributed: bool = False,
-    filename: str = "",
-    server_url: str = "",
-):
-    """
-    ToDo add url support in future
-    """
-    if not path_exists(target):
-        log.info("File doesn't exist")
-        return
-
-    task_id = False
-    if distributed:
-        options = {
-            "package": package,
-            "timeout": timeout,
-            "options": task_options,
-            "priority": priority,
-            # "machine": machine,
-            "platform": platform,
-            "memory": memory,
-            "enforce_timeout": enforce_timeout,
-            "clock": clock,
-            "tags": tags,
-            "parent_id": parent_id,
-            "filename": filename,
-        }
-
-        multipart_file = [("file", (os.path.basename(target), open(target, "rb")))]
-        try:
-            res = requests.post(server_url, files=multipart_file, data=options)
-            if res and res.ok:
-                task_id = res.json()["data"]["task_ids"][0]
-        except Exception as e:
-            log.error(e)
-    else:
-        task_id = db.add_path(
-            file_path=target,
-            package=package,
-            timeout=timeout,
-            options=task_options,
-            priority=priority,
-            machine=machine,
-            platform=platform,
-            memory=memory,
-            enforce_timeout=enforce_timeout,
-            parent_id=parent_id,
-            tlp=tlp,
-            filename=filename,
-        )
-        if not task_id:
-            log.warning("Error adding CAPE task to database: %s", package)
-            return task_id
-
-    log.info('CAPE detection on file "%s": %s - added as CAPE task with ID %s', target, package, task_id)
-    return task_id
-
-
 # https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script/68215738#68215738
 def get_running_commit() -> str:
     """
3 changes: 2 additions & 1 deletion lib/cuckoo/core/analysis_manager.py
@@ -257,7 +257,8 @@ def build_options(self):
             options["file_name"] = file_obj.get_name()
             options["file_type"] = file_obj.get_type()
             # if it's a PE file, collect export information to use in more smartly determining the right package to use
-            options["exports"] = PortableExecutable(self.task.target).get_dll_exports()
+            with PortableExecutable(self.task.target) as pe:
+                options["exports"] = pe.get_dll_exports()
             del file_obj
 
         # options from auxiliary.conf
3 changes: 2 additions & 1 deletion lib/cuckoo/core/database.py
@@ -1654,7 +1654,8 @@ def demux_sample_and_add_to_db(
            log.info("Do sandbox packages need an update? Sflock identifies as: %s - %s", tmp_package, file)
 
        if package == "dll" and "function" not in options:
-           dll_export = PortableExecutable(file.decode()).choose_dll_export()
+           with PortableExecutable(file.decode()) as pe:
+               dll_export = pe.choose_dll_export()
            if dll_export == "DllRegisterServer":
                package = "regsvr"
            elif dll_export == "xlAutoOpen":
27 changes: 27 additions & 0 deletions lib/cuckoo/core/log.py
@@ -147,6 +147,33 @@ def task_log_stop(task_id):
         _tasks_lock.release()
 
 
+def task_log_stop_force(task_id):
+    """Force disassociate all threads from a task and close the log file."""
+    _tasks_lock.acquire()
+    try:
+        if task_id not in _task_threads:
+            return
+
+        # Close the file handle (shared by all threads for this task)
+        # We can take it from any associated thread
+        if _task_threads[task_id]:
+            first_key = _task_threads[task_id][0]
+            if first_key in _tasks:
+                _, fp = _tasks[first_key]
+                try:
+                    fp.close()
+                except Exception as e:
+                    logging.warning("Failed to force-close log for task %d: %s", task_id, e)
+
+        # Cleanup all references
+        for thread_key in _task_threads[task_id]:
+            _tasks.pop(thread_key, None)
+
+        _task_threads.pop(task_id, None)
+    finally:
+        _tasks_lock.release()
+
+
 def init_logger(name, level=None):
     formatter = logging.Formatter("%(asctime)s [%(name)s] %(levelname)s: %(message)s")
 
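Note: a hypothetical caller for the new helper. The data shapes are inferred from the function body (_task_threads maps a task_id to its thread keys; _tasks maps each thread key to a tuple holding the shared log handle), not quoted from log.py:

    from lib.cuckoo.core.log import task_log_stop_force

    def abort_analysis(task_id: int) -> None:
        # Unlike task_log_stop, this drops every thread's association in one
        # call and closes the shared log handle, even for threads that died
        # without unregistering themselves.
        task_log_stop_force(task_id)
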
3 changes: 3 additions & 0 deletions lib/cuckoo/core/plugins.py
@@ -813,6 +813,9 @@ def __init__(self, task, results, reprocess=False):
         self.task = task
 
         if results.get("pefiles"):
+            for pe in results["pefiles"].values():
+                with suppress(Exception):
+                    pe.close()
             del results["pefiles"]
 
         # remove unwanted/duplicate information from reporting