File size: 12,567 Bytes
9320ddd
 
 
 
 
 
b4dbfc9
9320ddd
 
 
 
 
 
8b2adc4
9320ddd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b4dbfc9
 
9320ddd
b4dbfc9
 
 
 
9320ddd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0e077b9
b4dbfc9
9320ddd
 
 
 
f809d9b
b4dbfc9
 
 
 
f809d9b
9320ddd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b4dbfc9
9320ddd
 
 
 
8b2adc4
9320ddd
 
b4dbfc9
 
 
9320ddd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0e077b9
9320ddd
 
 
8b2adc4
9320ddd
 
f809d9b
9320ddd
 
 
 
 
 
 
 
 
 
 
 
f809d9b
 
 
 
 
9320ddd
 
 
f809d9b
b4dbfc9
 
 
9320ddd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0e077b9
 
 
b4dbfc9
9320ddd
 
 
8b2adc4
b4dbfc9
 
 
9320ddd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b4dbfc9
9320ddd
 
 
0e077b9
9320ddd
 
 
 
 
 
 
 
 
 
 
3e2bf63
 
9320ddd
 
 
3e2bf63
9320ddd
 
 
 
3e2bf63
 
9320ddd
 
 
3e2bf63
9320ddd
 
 
 
3e2bf63
 
9320ddd
 
 
3e2bf63
9320ddd
 
 
 
3e2bf63
 
9320ddd
 
0e077b9
9320ddd
 
 
3e2bf63
 
9320ddd
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
import json
import os
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Any

import cachetools
import gradio as gr
import requests
import urllib3
from dns import message


# DNS-over-HTTPS endpoint used for every record lookup below.
_DNS_SERVER = "https://dns.google/dns-query"  # can use others
# Record types queried by enumerate_dns(); this list fixes the query order.
_DNS_RECORD_TYPES = [
    "A",
    "AAAA",
    "CNAME",
    "MX",
    "NS",
    "SOA",
    "TXT",
    "RP",
    "LOC",
    "CAA",
    "SPF",
    "SRV",
    "NSEC",
    "RRSIG",
]

# Wordlist of common subdomain prefixes (one per line) used by
# scrap_subdomains_for_domain(); relative to the process working directory.
_COMMON_SUBDOMAINS_TXT_PATH = Path("./subdomains/subdomains.txt")

# Shared sizing for the TTL caches wrapping the lookup functions below.
_CACHE_MAX_SIZE = 4096
_CACHE_TTL_SECONDS = 3600


@cachetools.cached(
    cache=cachetools.TTLCache(maxsize=_CACHE_MAX_SIZE, ttl=_CACHE_TTL_SECONDS),
)
def get_geolocation(ip: str, timeout: float = 1) -> dict[str, Any] | str:
    """Get location information from an ip address.

    Returns the following information on an ip address:
        1. IPv4
        2. city
        3. country_code
        4. country_name
        5. latitude
        6. longitude
        7. postal
        8. state

    Example:
    >>> from pprint import pprint
    >>> pprint(get_geolocation("103.100.104.0"))
    ... {'IPv4': '103.100.104.0',
        'city': None,
        'country_code': 'NZ',
        'country_name': 'New Zealand',
        'latitude': -41,
        'longitude': 174,
        'postal': None,
        'state': None}

    Args:
        ip: ip address
        timeout: seconds to wait for the geolocation service
            before giving up.

    Returns:
        Location information on the ip address, or the error
            message as a string if the lookup failed.
    """
    try:
        # Best-effort lookup: any failure (connection, timeout, bad JSON)
        # is returned to the caller as a plain string rather than raised.
        return requests.get(
            f"https://geolocation-db.com/json/{ip.strip()}",
            timeout=timeout,
        ).json()
    except Exception as e:  # noqa: BLE001
        return str(e)


def _request_dns_record(
    domain: str,
    record_type: str,
    timeout: float = 0.5,
) -> list[str]:
    """Utility to build dns resolve requests that do not use port 53.

    Sends the query in DNS-over-HTTPS wire format (RFC 8484) to
    ``_DNS_SERVER``, so lookups work even where plain port-53 DNS
    is blocked.

    Args:
        domain: domain to investigate
        record_type: record type
        timeout: seconds to wait for the DoH server before giving up.

    Returns:
        Information about the dns record type for the domain.

    Raises:
        requests.HTTPError: if the DoH server answers with an
            HTTP error status.
    """
    q = message.make_query(domain, record_type)
    response = requests.post(
        _DNS_SERVER,
        headers={
            "Content-Type": "application/dns-message",
            "Accept": "application/dns-message",
        },
        data=q.to_wire(),
        verify=True,
        timeout=timeout,
    )
    # Without this, an HTTP-level error body would be handed to
    # message.from_wire() below as if it were a DNS message.
    response.raise_for_status()
    dns_message = message.from_wire(response.content)
    # Only the first answer rrset is reported; additional rrsets are ignored.
    return [str(rdata) for rdata in dns_message.answer[0]] if dns_message.answer else []


# see: https://thepythoncode.com/article/dns-enumeration-with-python
# https://dnspython.readthedocs.io
@cachetools.cached(
    cache=cachetools.TTLCache(maxsize=_CACHE_MAX_SIZE, ttl=_CACHE_TTL_SECONDS),
)
def enumerate_dns(domain_name: str) -> dict[str, Any] | None:
    r"""Enumerates information about a specific domain's DNS configuration.

    Information collected about the domain name:
        1. A records: the IPv4 associated with the domain
        2. AAAA records: the IPv6 associated with the domain
        3. CAA records: used by owners to specify which Certificate Authorities
            are authorized to issue SSL/TLS certificates for their domains.
        4. CNAME records: alias of one name to another - the DNS lookup will
            continue by retrying the lookup with the new name.
        5. LOC records: geographic location associated with a domain name.
        6. MX records: associated email servers to the domain.
        7. NS records: DNS servers that are authoritative for a particular domain.
            These may be use to inquire information about the domain.
        8. SOA records: defines authoritative information about a DNS zone,
            including zone transfers and cache expiration.
        9. TXT records: used for domain verification and email security.
        10. RP records: the responsible person for a domain.
        11. SPF records: defines authorized email servers.
        12. SRV records: specifies location of specific services
            (port and host) for the domain.
        13. NSEC records: proves non-existence of DNS records
            and prevents zone enumeration.
        14. RRSIG records: contains cryptographic signatures for DNSSEC-signed
            records, providing authentication and integrity.

    Record types that returned no data are omitted from the result, and a
    record type whose lookup raised is mapped to a one-element list holding
    the error message.

    Example:
    >>> from pprint import pprint
    >>> pprint(enumerate_dns("youtube.com"))
    ... {'A': ['142.250.200.142'],
        'AAAA': ['2a00:1450:4003:80f::200e'],
        'CAA': ['0 issue "pki.goog"'],
        'MX': ['0 smtp.google.com.'],
        'NS': ['ns4.google.com.',
            'ns1.google.com.',
            'ns2.google.com.',
            'ns3.google.com.'],
        'SOA': ['ns1.google.com. dns-admin.google.com. 766113658 900 900 1800 60'],
        'TXT': ['"v=spf1 include:google.com mx -all"',
            '"facebook-domain-verification=64jdes7le4h7e7lfpi22rijygx58j1"',
            '"google-site-verification=QtQWEwHWM8tHiJ4s-jJWzEQrD_fF3luPnpzNDH-Nw-w"']}

    Args:
        domain_name: domain name for which to
            enumerate the DNS configuration.

    Returns:
        Mapping of record type to its list of record strings,
            or ``None`` if nothing at all was collected.
    """
    enumeration = {}
    # Sequential queries: a failure for one record type must not stop
    # collection of the remaining types.
    for record_type in _DNS_RECORD_TYPES:
        try:
            record = _request_dns_record(domain_name.strip(), record_type, timeout=1)
            if record:
                enumeration[record_type] = record
        except Exception as e:  # noqa: BLE001, PERF203
            enumeration[record_type] = [str(e)]
    return enumeration if enumeration else None


def resolve_subdomain_ipv4(domain: str) -> str | None:
    """Resolve the IPv4 address of a domain.

    Args:
        domain: domain name

    Returns:
        The domain itself, provided it resolved to at least
            one A record. Otherwise ``None``.
    """
    try:
        # Any lookup failure (timeout, HTTP error, malformed answer) is
        # treated the same as "no A record": the name is considered
        # non-existing. No need to raise-and-catch our own exception.
        return domain if _request_dns_record(domain, "A", timeout=0.6) else None
    except Exception:  # noqa: BLE001
        return None


@cachetools.cached(
    cache=cachetools.TTLCache(maxsize=_CACHE_MAX_SIZE, ttl=_CACHE_TTL_SECONDS),
)
def scrap_subdomains_for_domain(domain_name: str) -> list[str]:
    """Retrieves subdomains associated to a domain if any.

    The information retrieved from a domain is its subdomains
    provided they are the top 1000 subdomain prefixes as
    indicated by https://github.com/rbsec/dnscan/tree/master

    Importantly, it finds subdomains only if their prefixes
    are along the top 1000 most common. Hence, it may not
    yield all the subdomains associated to the domain.

    Example:
    >>> scrap_subdomains_for_domain("github.com")
    ... ['www.github.com', 'smtp.github.com', 'ns1.github.com',
        'ns2.github.com','autodiscover.github.com', 'test.github.com',
        'blog.github.com', 'admin.github.com', 'support.github.com',
        'docs.github.com', 'shop.github.com', 'wiki.github.com',
        'api.github.com', 'live.github.com', 'help.github.com',
        'jobs.github.com', 'services.github.com', 'de.github.com',
        'cs.github.com', 'fr.github.com', 'ssh.github.com',
        'partner.github.com', 'community.github.com',
        'mailer.github.com', 'training.github.com', ...]

    Args:
        domain_name: domain name for which to retrieve a
            list of subdomains

    Returns:
        List of subdomains if any.
    """
    try:
        # Path.open keeps path handling consistent with the pathlib
        # constant; blank lines in the wordlist are skipped.
        with _COMMON_SUBDOMAINS_TXT_PATH.open() as file:
            subdomains = [line.strip() for line in file if line.strip()]
    except FileNotFoundError:
        # Missing wordlist means we simply cannot brute-force anything.
        return []

    potential_subdomains = [
        f"{subdomain}.{domain_name.strip()}" for subdomain in subdomains
    ]
    # Each probe is network-bound, so a thread pool overlaps the waits;
    # resolve_subdomain_ipv4 returns the name on success, None otherwise.
    with ThreadPoolExecutor(max_workers=None) as executor:
        results = executor.map(resolve_subdomain_ipv4, potential_subdomains)
        return [domain for domain in results if domain]


@cachetools.cached(
    cache=cachetools.TTLCache(maxsize=_CACHE_MAX_SIZE, ttl=_CACHE_TTL_SECONDS),
)
def retrieve_ioc_from_threatfox(potentially_ioc: str) -> str:
    r"""Retrieves information about a potential IoC from ThreatFox.

    It may be used to retrieve information of indicators of compromise
    (IOCs) associated with malware, with the infosec community, AV
    vendors and cyber threat intelligence providers.

    Examples:
    >>> retrieve_ioc_from_threatfox("139.180.203.104")
    ... {
    "query_status": "ok",
    "data": [
        {
            "id": "12",
            "ioc": "139.180.203.104:443",
            "threat_type": "botnet_cc",
            "threat_type_desc": "Indicator that identifies a botnet command&control...",
            "ioc_type": "ip:port",
            "ioc_type_desc": "ip:port combination that is used for botnet Command&...,
            "malware": "win.cobalt_strike",
            "malware_printable": "Cobalt Strike",
            "malware_alias": "Agentemis,BEACON,CobaltStrike",
            "malware_malpedia": "https:\/\/malpedia.caad.fkie.fraunhofer.de\/...",
            "confidence_level": 75,
            "first_seen": "2020-12-06 09:10:23 UTC",
            "last_seen": null,
            "reference": null,
            "reporter": "abuse_ch",
            "tags": null,
            "malware_samples": [
                {
                    "time_stamp": "2021-03-23 08:18:06 UTC",
                    "md5_hash": "5b7e82e051ade4b14d163eea2a17bf8b",
                    "sha256_hash": "b325c92fa540edeb89b95dbfd4400c1cb33599c66859....",
                    "malware_bazaar": "https:\/\/bazaar.abuse.ch\/sample\/b325c...\/"
                },
            ]

        }
    ]
    }

    Args:
        potentially_ioc: this can be a url, a domain, a hash,
            or any other type of IoC.

    Returns:
        Information of the input as an IoC: threat type, malware type and samples,
            confidence level, first/last seen dates, and more IoC information.

    Raises:
        KeyError: if the ``THREATFOX_APIKEY`` environment variable is unset
            (deliberately fails fast before any network activity).
    """
    headers = {"Auth-Key": os.environ["THREATFOX_APIKEY"]}
    json_data = json.dumps(
        {
            "query": "search_ioc",
            "search_term": potentially_ioc.strip(),
        },
    )
    try:
        # The pool is used as a context manager so its sockets are closed
        # when the request completes, instead of leaking one pool per
        # distinct cached argument.
        with urllib3.HTTPSConnectionPool(
            "threatfox-api.abuse.ch",
            port=443,
            maxsize=50,
            headers=headers,
            timeout=5,
        ) as pool:
            response = pool.request("POST", "/api/v1/", body=json_data)
            return response.data.decode("utf-8", "ignore")
    except Exception as e:  # noqa: BLE001
        return str(e)


# Gradio UI wrapper: IP address in -> geolocation JSON out.
geo_location_tool = gr.Interface(
    fn=get_geolocation,
    inputs=gr.Textbox(label="ip"),
    outputs=gr.JSON(label="Geolocation of IP"),
    title="Domain Associated Geolocation Finder",
    description="Retrieves the geolocation associated to an input ip address",
    theme="default",
    examples=["1.0.3.255", "59.34.7.3"],
)

# Gradio UI wrapper: domain in -> enumerated DNS records out.
dns_enumeration_tool = gr.Interface(
    fn=enumerate_dns,
    inputs=gr.Textbox(label="domain"),
    outputs=gr.JSON(label="DNS records"),
    title="DNS record enumerator of domains",
    description="Retrieves several dns record types for the input domain names",
    theme="default",
    examples=["owasp.org", "nist.gov"],
)

# Gradio UI wrapper: domain in -> list of resolvable common subdomains out.
scrap_subdomains_tool = gr.Interface(
    fn=scrap_subdomains_for_domain,
    inputs=gr.Textbox(label="domain"),
    outputs=gr.JSON(label="Subdomains managed by domain"),
    title="Subdomains Extractor of domains",
    description="Retrieves the subdomains for the input domain if they are common",
    theme="default",
    examples=["github.com", "netacea.com"],
)

# Gradio UI wrapper: url/domain/hash in -> raw ThreatFox API response out.
extractor_of_ioc_from_threatfox_tool = gr.Interface(
    fn=retrieve_ioc_from_threatfox,
    inputs=gr.Textbox(label="IoC - url, domains or hash"),
    outputs=gr.Text(label="Entity information as an IoC"),
    title="IoC information extractor associated to particular entities",
    description=(
        "If information as an Indicator of Compromise (IoC) exists "
        "for the input url, domain or hash, it retrieves it"
    ),
    theme="default",
    examples=["advertipros.com", "dev.couplesparks.com"],
    example_labels=["👾 IoC 1", "👾 IoC 2"],
)