Reduce false positives in safe PDF analysis (Adobe Reader behaviour) #430

Merged · 2 commits · Sep 27, 2024
126 changes: 126 additions & 0 deletions data/malicioustlds.txt
@@ -0,0 +1,126 @@
.link
.cam
.bar
.surf
.xyz
.click
.buzz
.gq
.ga
.rest
.ml
.cc
.cfd
.cyou
.accountant
.ar
.bg
.bid
.biz
.biz.ua
.br
.camera
.cf
.club
.co
.co.ua
.co.in
.co.mz
.co.nz
.com.au
.com.tw
.computer
.cricket
.date
.diet
.download
.email
.es
.faith
.gdn
.global
.guru
.help
.in
.info
.kz
.lol
.loan
.media
.men
.news
.ninja
.nyc
.party
.photography
.pt
.pw
.racing
.reise
.review
.rocks
.ru
.science
.site
.solutions
.space
.stream
.tech
.today
.top
.tr
.trade
.uno
.us
.vn
.webcam
.website
.win
.work
.africa
.autos
.best
.bet
.bio
.boats
.bond
.boston
.boutique
.center
.charity
.christmas
.coupons
.dance
.finance
.fishing
.giving
.hair
.haus
.homes
.icu
.kim
.lat
.llp
.loans
.love
.ltd
.mom
.motorcycles
.name
.okinawa
.promo
.rehab
.rugby
.run
.sale
.sew
.skin
.store
.sz
.tattoo
.tokyo
.voto
.wang
.wf
.yachts
.you
20 changes: 20 additions & 0 deletions data/yara/CAPE/Sliver.yar
@@ -0,0 +1,20 @@
rule Sliver {
meta:
author = "ditekSHen"
description = "Detects Sliver implant cross-platform adversary emulation/red team"
cape_type = "Sliver Payload"
strings:
$x1 = "github.com/bishopfox/sliver/protobuf/sliverpbb." ascii
$s1 = ".commonpb.ResponseR" ascii
$s2 = ".PortfwdProtocol" ascii
$s3 = ".WGTCPForwarder" ascii
$s4 = ".WGSocksServerR" ascii
$s5 = ".PivotEntryR" ascii
$s6 = ".BackdoorReq" ascii
$s7 = ".ProcessDumpReq" ascii
$s8 = ".InvokeSpawnDllReq" ascii
$s9 = ".SpawnDll" ascii
$s10 = ".TCPPivotReq" ascii
condition:
(uint16(0) == 0x5a4d or uint16(0) == 0x457f or uint16(0) == 0xfacf) and (1 of ($x*) or 5 of ($s*))
}
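For local testing outside CAPE, a minimal sketch of loading and running this rule with yara-python; the rule path and sample path below are illustrative assumptions, not part of the PR:

import yara  # pip install yara-python

# Compile the new CAPE rule file (path assumed relative to the repo root).
rules = yara.compile(filepath="data/yara/CAPE/Sliver.yar")

# Scan a candidate binary; the rule's condition already gates on MZ/ELF/Mach-O magic bytes.
matches = rules.match(filepath="/tmp/suspect.bin")
for match in matches:
    print(match.rule, match.strings)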
20 changes: 0 additions & 20 deletions data/yara/binaries/indicator_tools.yar
@@ -848,26 +848,6 @@ rule INDICATOR_TOOL_ChromeCookiesView {
uint16(0) == 0x5a4d and (5 of ($s*) or (($pdb) and 2 of ($s*)))
}

rule INDICATOR_TOOL_Sliver {
meta:
author = "ditekSHen"
description = "Detects Sliver implant cross-platform adversary emulation/red team"
strings:
$x1 = "github.com/bishopfox/sliver/protobuf/sliverpbb." ascii
$s1 = ".commonpb.ResponseR" ascii
$s2 = ".PortfwdProtocol" ascii
$s3 = ".WGTCPForwarder" ascii
$s4 = ".WGSocksServerR" ascii
$s5 = ".PivotEntryR" ascii
$s6 = ".BackdoorReq" ascii
$s7 = ".ProcessDumpReq" ascii
$s8 = ".InvokeSpawnDllReq" ascii
$s9 = ".SpawnDll" ascii
$s10 = ".TCPPivotReq" ascii
condition:
(uint16(0) == 0x5a4d or uint16(0) == 0x457f or uint16(0) == 0xfacf) and (1 of ($x*) or 5 of ($s*))
}

rule INDICATOR_TOOL_OwlProxy {
meta:
author = "ditekSHen"
91 changes: 62 additions & 29 deletions modules/signatures/all/pdf_annot_urls.py
@@ -16,36 +16,69 @@
from lib.cuckoo.common.abstracts import Signature


class PDF_Annot_URLs(Signature):
name = "pdf_annot_urls"
description = "The PDF contains a Link Annotation to a compressed archive or executable file"
severity = 3
class PDF_Annot_URLs_Checker(Signature):
name = "pdf_annot_urls_checker"
description = "The PDF contains a Link Annotation"
severity = 2 # Default severity
categories = ["static"]
authors = ["Optiv"]
minimum = "1.3"
authors = ["Wassime BATTA"]
minimum = "0.5"

filter_analysistypes = set(["file"])
filter_analysistypes = set(["file","static"])

malicious_tlds_file = "/opt/CAPEv2/data/malicioustlds.txt"

def __init__(self, *args, **kwargs):
super(PDF_Annot_URLs_Checker, self).__init__(*args, **kwargs)
self.malicious_tlds = self.load_malicious_tlds()

def load_malicious_tlds(self):
malicious_tlds = set()
with open(self.malicious_tlds_file, "r") as f:
for line in f:
line = line.strip()
if line.startswith("."):
malicious_tlds.add(line)
return malicious_tlds

def run(self):
found_URLs = False
if "static" in self.results and "pdf" in self.results["static"]:
if "PDF" in self.results["target"]["file"].get("type", ""):
if "Annot_URLs" in self.results["static"]["pdf"]:
for entry in self.results["static"]["pdf"]["Annot_URLs"]:
entrylower = entry.lower()
if entrylower.endswith(
(".zip", ".exe", ".msi", ".bat", ".scr", ".rar", ".com")
) and not entrylower.startswith(
"mailto:"
): # skip mailto: as it can't add attachments
skip = False
# skip triggering on http:// and https:// links that don't have anything after the domain name
# so http://foo.com will be skipped, but http://foo.com/malware.com will not be
if entrylower.startswith("http://") and not entrylower.find("/", 8):
skip = True
elif entrylower.startswith("https://") and not entrylower.find("/", 9):
skip = True
if skip:
self.data.append({"url": entry})
found_URLs = True
return found_URLs
found_malicious_extension = False
found_malicious_domain = False
found_domain_only = False
suspect = False

if "PDF" in self.results["target"]["file"].get("type", ""):
if "Annot_URLs" in self.results["target"]["file"]["pdf"]:
for entry in self.results["target"]["file"]["pdf"]["Annot_URLs"]:
entry_lower = entry.lower()
self.data.append({"url": entry})
if entry_lower.endswith((".exe", ".php", ".bat", ".cmd", ".js", ".jse", ".vbs", ".vbe", ".ps1", ".psm1", ".sh")) \
and not entry_lower.startswith("mailto:"):
found_malicious_extension = True

if entry_lower.startswith("http://") or entry_lower.startswith("https://"):
domain_start = entry_lower.find("//") + 2
domain_end = entry_lower.find("/", domain_start)
if domain_end == -1:
domain = entry_lower[domain_start:]
else:
domain = entry_lower[domain_start:domain_end]

for malicious_tld in self.malicious_tlds:
if domain.endswith(malicious_tld):
found_malicious_domain = True
break
else:
# If no malicious TLDs detected, set found_domain_only to True
found_domain_only = True

if found_malicious_domain or found_malicious_extension:
self.severity = 6
self.description = "The PDF contains a Malicious Link Annotation"
suspect = True
elif found_domain_only:
self.severity = 2
self.description = "The PDF contains a Link Annotation"
suspect = True

return suspect
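To make the new check easier to follow, a standalone sketch of the extension and TLD logic the signature applies to each annotation URL; the function name and example values are hypothetical, not CAPE API:

SUSPICIOUS_EXTS = (".exe", ".php", ".bat", ".cmd", ".js", ".jse", ".vbs",
                   ".vbe", ".ps1", ".psm1", ".sh")

def classify_annot_url(url, malicious_tlds):
    """Return 'malicious', 'domain_only', or 'other' for one Annot_URLs entry."""
    u = url.lower()
    # Direct link to a scripting/executable payload (mailto: cannot carry attachments).
    if u.endswith(SUSPICIOUS_EXTS) and not u.startswith("mailto:"):
        return "malicious"
    if u.startswith(("http://", "https://")):
        domain_start = u.find("//") + 2
        domain_end = u.find("/", domain_start)
        domain = u[domain_start:] if domain_end == -1 else u[domain_start:domain_end]
        # Compare against the TLD entries loaded from data/malicioustlds.txt at signature init.
        if any(domain.endswith(tld) for tld in malicious_tlds):
            return "malicious"
        return "domain_only"
    return "other"

# Example: a .xyz link trips the TLD list (severity 6 in the signature),
# while a bare .com link is only reported as a link annotation (severity 2).
tlds = {".xyz", ".top", ".click"}
print(classify_annot_url("http://example.xyz/login", tlds))  # malicious
print(classify_annot_url("https://example.com", tlds))       # domain_only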
4 changes: 3 additions & 1 deletion modules/signatures/windows/credential_dumping.py
@@ -128,7 +128,9 @@ def run(self):
if match:
self.data.append({"regkey": match})
ret = True

# Tweak
if "PDF" in self.results["target"]["file"].get("type", ""):
self.severity = 1
return ret

