Merged
151 changes: 79 additions & 72 deletions lib/cuckoo/common/abstracts.py
@@ -887,82 +887,89 @@ def set_path(self, analysis_path):
             CuckooReportError(e)
 
     def yara_detected(self, name):
-        target = self.results.get("target", {})
-        if target.get("category") in ("file", "static") and target.get("file"):
+        name_pattern = re.compile(name, re.I)
+
+        def _check_matches(data_block, path, label_override=None):
+            if not isinstance(data_block, dict):
+                return
+
             for keyword in ("cape_yara", "yara"):
-                for yara_block in self.results["target"]["file"].get(keyword, []):
-                    if re.findall(name, yara_block["name"], re.I):
-                        yield "sample", self.results["target"]["file"]["path"], yara_block, self.results["target"]["file"]
-
-            if target["file"].get("selfextract"):
-                for _, toolsblock in target["file"]["selfextract"].items():
-                    for block in toolsblock.get("extracted_files", []):
-                        for keyword in ("cape_yara", "yara"):
-                            for yara_block in block[keyword]:
-                                if re.findall(name, yara_block["name"], re.I):
-                                    # we can't use here values from set_path
-                                    yield "sample", block["path"], yara_block, block
-
-        for block in self.results.get("CAPE", {}).get("payloads", []) or []:
-            for sub_keyword in ("cape_yara", "yara"):
-                for yara_block in block.get(sub_keyword, []):
-                    if re.findall(name, yara_block["name"], re.I):
-                        yield sub_keyword, block["path"], yara_block, block
-
-            if block.get("selfextract", {}):
-                for _, toolsblock in block["selfextract"].items():
-                    for subblock in toolsblock.get("extracted_files", []):
-                        for keyword in ("cape_yara", "yara"):
-                            for yara_block in subblock[keyword]:
-                                if re.findall(name, yara_block["name"], re.I):
-                                    yield "sample", subblock["path"], yara_block, block
-
-        for keyword in ("procdump", "procmemory", "extracted", "dropped"):
-            if self.results.get(keyword) is not None:
-                for block in self.results.get(keyword, []):
-                    if not isinstance(block, dict):
-                        continue
-                    for sub_keyword in ("cape_yara", "yara"):
-                        for yara_block in block.get(sub_keyword, []):
-                            if re.findall(name, yara_block["name"], re.I):
-                                path = block["path"] if block.get("path", False) else ""
-                                yield keyword, path, yara_block, block
-
-                    if keyword == "procmemory":
-                        for pe in block.get("extracted_pe", []) or []:
-                            for sub_keyword in ("cape_yara", "yara"):
-                                for yara_block in pe.get(sub_keyword, []) or []:
-                                    if re.findall(name, yara_block["name"], re.I):
-                                        yield "extracted_pe", pe["path"], yara_block, block
-
-                    if block.get("selfextract", {}):
-                        for _, toolsblock in block["selfextract"].items():
-                            for subblock in toolsblock.get("extracted_files", []):
-                                for keyword in ("cape_yara", "yara"):
-                                    for yara_block in subblock[keyword]:
-                                        if re.findall(name, yara_block["name"], re.I):
-                                            yield "sample", subblock["path"], yara_block, block
-
-        macro_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.results["info"]["id"]), "macros")
-        for macroname in self.results.get("static", {}).get("office", {}).get("Macro", {}).get("info", []) or []:
-            for yara_block in self.results["static"]["office"]["Macro"]["info"].get("macroname", []) or []:
-                for sub_block in self.results["static"]["office"]["Macro"]["info"]["macroname"].get(yara_block, []) or []:
-                    if re.findall(name, sub_block["name"], re.I):
-                        yield (
-                            "macro",
-                            os.path.join(macro_path, macroname),
-                            sub_block,
-                            self.results["static"]["office"]["Macro"]["info"],
-                        )
-
-        if self.results.get("static", {}).get("office", {}).get("XLMMacroDeobfuscator", False):
-            for yara_block in self.results["static"]["office"]["XLMMacroDeobfuscator"].get("info", []).get("yara_macro", []) or []:
-                if re.findall(name, yara_block["name"], re.I):
+                for yara_block in data_block.get(keyword, []):
+                    if name_pattern.search(yara_block.get("name", "")):
+                        label = label_override if label_override else keyword
+                        yield label, path, yara_block, data_block
+
+        def _process_selfextract(parent_block):
+            selfextract = parent_block.get("selfextract")
+            if not selfextract:
+                return
+
+            tools_iter = selfextract.values() if isinstance(selfextract, dict) else []
+
+            for toolsblock in tools_iter:
+                for extracted_file in toolsblock.get("extracted_files", []) or []:
+                    yield from _check_matches(
+                        extracted_file,
+                        path=extracted_file.get("path"),
+                        label_override="sample"
+                    )
+
+        results = self.results
+        target = results.get("target", {})
+
+        # 1. Process the target
+        if target.get("category") in ("file", "static") and target.get("file"):
+            file_info = target["file"]
+            yield from _check_matches(file_info, file_info.get("path"), label_override="sample")
+            yield from _process_selfextract(file_info)
+
+        cape_payloads = results.get("CAPE", {}).get("payloads", []) or []
+        for block in cape_payloads:
+            yield from _check_matches(block, block.get("path"))
+            yield from _process_selfextract(block)
+
+        search_keys = ("procdump", "procmemory", "extracted", "dropped")
+        for keyword in search_keys:
+            blocks = results.get(keyword, []) or []
+            if not blocks:
+                continue
+
+            for block in blocks:
+                if not isinstance(block, dict):
+                    continue
+
+                path = block.get("path", "")
+                yield from _check_matches(block, path, label_override=keyword)
+
+                if keyword == "procmemory":
+                    for pe in block.get("extracted_pe", []) or []:
+                        yield from _check_matches(pe, pe.get("path"), label_override="extracted_pe")
+
+                yield from _process_selfextract(block)
+
+        # TODO: not sure if the static section still exists
+        office_info = results.get("static", {}).get("office", {})
+        macro_info = office_info.get("Macro", {}).get("info", [])
+        analysis_id = str(results.get("info", {}).get("id", "unknown"))
+        macro_base_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", analysis_id, "macros")
+
+        if macro_info:
+            if isinstance(macro_info, list):
+                for item in macro_info:
+                    yield from _check_matches(item, os.path.join(macro_base_path, item.get("name", "macro")), label_override="macro")
+            elif isinstance(macro_info, dict):
+                for macroname, macro_data in macro_info.items():
+                    yield from _check_matches(macro_data, os.path.join(macro_base_path, macroname), label_override="macro")
+
+        xlm_info = office_info.get("XLMMacroDeobfuscator", {}).get("info", {})
+        if xlm_info:
+            for yara_block in xlm_info.get("yara_macro", []) or []:
+                if name_pattern.search(yara_block.get("name", "")):
                     yield (
                         "macro",
-                        os.path.join(macro_path, "xlm_macro"),
+                        os.path.join(macro_base_path, "xlm_macro"),
                         yara_block,
-                        self.results["static"]["office"]["XLMMacroDeobfuscator"]["info"],
+                        xlm_info
                     )
 
     def signature_matched(self, signame: str) -> bool:
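For context on the abstracts.py change: the refactor collapses the repeated `re.findall` loops into two inner generators (`_check_matches` and `_process_selfextract`) and compiles the rule-name pattern once. Below is a minimal standalone sketch of that pattern, using a simplified result layout and hypothetical field names rather than the real CAPE report schema:

```python
import re
from typing import Iterator, Optional


def find_yara_matches(results: dict, name: str) -> Iterator[tuple]:
    """Yield (label, path, yara_block, container) for every rule whose name matches."""
    pattern = re.compile(name, re.I)  # compiled once instead of re.findall() per block

    def check(block: dict, path: Optional[str], label: Optional[str] = None) -> Iterator[tuple]:
        if not isinstance(block, dict):
            return
        for keyword in ("cape_yara", "yara"):
            for yara_block in block.get(keyword, []):
                if pattern.search(yara_block.get("name", "")):
                    yield label or keyword, path, yara_block, block

    # Walk two hypothetical result sections the same way yara_detected walks many.
    for payload in results.get("payloads", []) or []:
        yield from check(payload, payload.get("path"))
    for dropped in results.get("dropped", []) or []:
        yield from check(dropped, dropped.get("path"), label="dropped")


# Usage sketch with fabricated demo data.
demo = {"payloads": [{"path": "/tmp/a.bin", "cape_yara": [{"name": "RedLine"}]}]}
print(list(find_yara_matches(demo, "redline")))  # one ("cape_yara", "/tmp/a.bin", ...) tuple
```

One behavioural nuance: the old code tested `re.findall(name, ..., re.I)` for truthiness, while the helper uses a pre-compiled `search()`. Both amount to a case-insensitive presence check, but the compiled pattern is not rebuilt for every block.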
2 changes: 1 addition & 1 deletion lib/cuckoo/core/database.py
@@ -2223,7 +2223,7 @@ def clean_timed_out_tasks(self, timeout: int):
             return
 
         # Calculate the cutoff time before which tasks are considered timed out.
-        timeout_threshold = datetime.now() - timedelta(seconds=timeout)
+        timeout_threshold = datetime.utcnow() - timedelta(seconds=timeout)
 
         # Build a single, efficient DELETE statement that filters in the database.
         delete_stmt = delete(Task).where(Task.status == TASK_PENDING).where(Task.added_on < timeout_threshold)
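For context on the database.py change: the cutoff is now computed with `datetime.utcnow()` and pending tasks are removed with a single DELETE that filters in the database. The following self-contained sketch shows the same shape; `Task` and `TASK_PENDING` here are minimal stand-ins for the real model and constant in `lib/cuckoo/core/database.py`, and the sketch assumes `added_on` holds naive UTC timestamps:

```python
from datetime import datetime, timedelta

from sqlalchemy import Column, DateTime, Integer, String, create_engine, delete
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()
TASK_PENDING = "pending"  # stand-in for the real status constant


class Task(Base):  # minimal stand-in for the real Task model
    __tablename__ = "tasks"
    id = Column(Integer, primary_key=True)
    status = Column(String(16), default=TASK_PENDING)
    added_on = Column(DateTime, default=datetime.utcnow)  # assumed naive UTC


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

timeout = 3600  # seconds, i.e. the task_pending_timeout value
# Compute the cutoff on the same (UTC) clock as added_on; mixing local time
# and UTC would shift the cutoff by the host's UTC offset.
timeout_threshold = datetime.utcnow() - timedelta(seconds=timeout)

stmt = delete(Task).where(Task.status == TASK_PENDING).where(Task.added_on < timeout_threshold)
with Session(engine) as session, session.begin():
    session.execute(stmt)  # one DELETE, filtered entirely in the database
```

The comparison is only meaningful if `timeout_threshold` and `added_on` come from the same clock, which is the point of switching the cutoff to UTC here (assuming the stored timestamps are UTC).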
2 changes: 1 addition & 1 deletion lib/cuckoo/core/scheduler.py
@@ -105,7 +105,7 @@ def do_main_loop_work(self, error_queue: queue.Queue) -> SchedulerCycleDelay:
         if self.next_timeout_time < time.time():
             self.next_timeout_time = time.time() + self.cfg.cuckoo.get("task_timeout_scan_interval", 30)
             with self.db.session.begin():
-                self.db.check_tasks_timeout(self.cfg.cuckoo.get("task_pending_timeout", 0))
+                self.db.clean_timed_out_tasks(self.cfg.cuckoo.get("task_pending_timeout", 0))
 
         analysis_manager: Optional[AnalysisManager] = None
         with self.db.session.begin():
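For context on the scheduler.py change: only the method name changes at this call site; the surrounding logic still runs the timeout scan at most once every `task_timeout_scan_interval` seconds. A hypothetical, self-contained sketch of that throttling pattern (not scheduler code):

```python
import time


class PeriodicAction:
    """Run a callback at most once every `interval` seconds inside a busy loop."""

    def __init__(self, interval: float, callback):
        self.interval = interval
        self.callback = callback
        self.next_run = 0.0  # plays the role of next_timeout_time

    def maybe_run(self) -> bool:
        # Fire only when the deadline has passed, then push the deadline forward.
        if self.next_run < time.time():
            self.next_run = time.time() + self.interval
            self.callback()
            return True
        return False


# Usage sketch: inside a main loop this fires roughly every 30 seconds.
scan = PeriodicAction(30, lambda: print("scanning for timed-out pending tasks"))
scan.maybe_run()
```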
10 changes: 5 additions & 5 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -74,7 +74,7 @@ paramiko = "3.5.0"
 psutil = "6.1.1"
 peepdf-3 = "5.0.0"
 pyre2-updated = ">=0.3.8"
-Werkzeug = "3.1.3"
+Werkzeug = "3.1.4"
 packaging = "24.2"
 setuptools = "78.1.1"
 # command line config manipulation
6 changes: 3 additions & 3 deletions requirements.txt
@@ -2179,9 +2179,9 @@ websockets==14.2 ; python_version >= "3.10" and python_version < "4.0" \
     --hash=sha256:f390024a47d904613577df83ba700bd189eedc09c57af0a904e5c39624621270 \
     --hash=sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03 \
     --hash=sha256:fd475a974d5352390baf865309fe37dec6831aafc3014ffac1eea99e84e83fc2
-werkzeug==3.1.3 ; python_version >= "3.10" and python_version < "4.0" \
-    --hash=sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e \
-    --hash=sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746
+werkzeug==3.1.4 ; python_version >= "3.10" and python_version < "4.0" \
+    --hash=sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905 \
+    --hash=sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e
 win-unicode-console==0.5 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" and platform_python_implementation != "PyPy" \
     --hash=sha256:d4142d4d56d46f449d6f00536a73625a871cba040f0bc1a2e305a04578f07d1e
 xmltodict==0.14.2 ; python_version >= "3.10" and python_version < "4.0" \