httpz - Hyper-fast HTTP Scraping Tool
git clone git://git.acid.vegas/httpz.git

commit d773bb6b0c65ce1909371c56e67c2a37ed005b04
parent 91d016299d36d384516cac7cfc232ffe6e19da3a
Author: acidvegas <acid.vegas@acid.vegas>
Date: Tue, 11 Feb 2025 02:46:01 -0500

fuck

Diffstat:
D httpz-scanner/__init__.py   |  10 ----------
D httpz-scanner/__main__.py   |  14 --------------
D httpz-scanner/cli.py        | 175 -------------------------------------------------------------------------------
D httpz-scanner/colors.py     |  21 ---------------------
D httpz-scanner/dns.py        |  99 -------------------------------------------------------------------------------
D httpz-scanner/formatters.py | 108 -------------------------------------------------------------------------------
D httpz-scanner/parsers.py    | 141 -------------------------------------------------------------------------------
D httpz-scanner/scanner.py    | 240 -------------------------------------------------------------------------------
D httpz-scanner/utils.py      | 116 -------------------------------------------------------------------------------
A httpz_scanner/__init__.py   |  10 ++++++++++
A httpz_scanner/__main__.py   |  14 ++++++++++++++
A httpz_scanner/cli.py        | 175 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A httpz_scanner/colors.py     |  21 +++++++++++++++++++++
A httpz_scanner/dns.py        |  99 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A httpz_scanner/formatters.py | 108 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A httpz_scanner/parsers.py    | 141 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A httpz_scanner/scanner.py    | 240 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A httpz_scanner/utils.py      | 116 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
M setup.py                    |   6 +++---

19 files changed, 927 insertions(+), 927 deletions(-)
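
Note: the diff below is a straight rename of the package directory from
httpz-scanner to httpz_scanner, plus the matching setup.py update and a
version bump from 1.0.6 to 1.0.7. The underscore matters: hyphens are not
legal in Python module names, so the old directory could never be imported.
A hypothetical interpreter session showing the difference:

    >>> import httpz_scanner    # works: underscores are valid in identifiers
    >>> import httpz-scanner    # SyntaxError: parsed as "import httpz - scanner"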

diff --git a/httpz-scanner/__init__.py b/httpz-scanner/__init__.py
@@ -1,9 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/__init__.py
-
-from .scanner import HTTPZScanner
-from .colors  import Colors
-
-
-__version__ = '1.0.6' 
-\ No newline at end of file
diff --git a/httpz-scanner/__main__.py b/httpz-scanner/__main__.py
@@ -1,13 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/__main__.py
-
-import asyncio
-import sys
-from .cli import main
-
-if __name__ == '__main__':
-    try:
-        asyncio.run(main())
-    except KeyboardInterrupt:
-        sys.exit(1) 
-\ No newline at end of file
diff --git a/httpz-scanner/cli.py b/httpz-scanner/cli.py
@@ -1,174 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/cli.py
-
-import argparse
-import asyncio
-import logging
-import os
-import sys
-
-from .colors  import Colors
-from .scanner import HTTPZScanner
-from .utils   import SILENT_MODE, info
-
-def setup_logging(level='INFO', log_to_disk=False):
-    '''
-    Setup logging configuration
-    
-    :param level: Logging level (INFO or DEBUG)
-    :param log_to_disk: Whether to also log to file
-    '''
-    class ColoredFormatter(logging.Formatter):
-        def formatTime(self, record, datefmt=None):
-            # Format: MM-DD HH:MM
-            from datetime import datetime
-            dt = datetime.fromtimestamp(record.created)
-            return f"{Colors.GRAY}{dt.strftime('%m-%d %H:%M')}{Colors.RESET}"
-        
-        def format(self, record):
-            return f'{self.formatTime(record)} {record.getMessage()}'
-    
-    handlers = []
-    
-    # Console handler
-    console = logging.StreamHandler()
-    console.setFormatter(ColoredFormatter())
-    handlers.append(console)
-    
-    # File handler
-    if log_to_disk:
-        os.makedirs('logs', exist_ok=True)
-        file_handler = logging.FileHandler(f'logs/httpz.log')
-        file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
-        handlers.append(file_handler)
-    
-    # Setup logger
-    logging.basicConfig(
-        level=getattr(logging, level.upper()),
-        handlers=handlers
-    )
-
-def parse_status_codes(codes_str: str) -> set:
-    '''
-    Parse comma-separated status codes and ranges into a set of integers
-    
-    :param codes_str: Comma-separated status codes (e.g., "200,301-399,404,500-503")
-    '''
-    
-    codes = set()
-    try:
-        for part in codes_str.split(','):
-            if '-' in part:
-                start, end = map(int, part.split('-'))
-                codes.update(range(start, end + 1))
-            else:
-                codes.add(int(part))
-        return codes
-    except ValueError:
-        raise argparse.ArgumentTypeError('Invalid status code format. Use comma-separated numbers or ranges (e.g., 200,301-399,404,500-503)')
-
-async def main():
-    parser = argparse.ArgumentParser(
-        description=f'{Colors.GREEN}Hyper-fast HTTP Scraping Tool{Colors.RESET}',
-        formatter_class=argparse.RawDescriptionHelpFormatter
-    )
-
-    # Add arguments
-    parser.add_argument('file', nargs='?', default='-', help='File containing domains to check (one per line), use - for stdin')
-    parser.add_argument('-all', '--all-flags', action='store_true', help='Enable all output flags')
-    parser.add_argument('-d', '--debug', action='store_true', help='Show error states and debug information')
-    parser.add_argument('-c', '--concurrent', type=int, default=100, help='Number of concurrent checks')
-    parser.add_argument('-j', '--jsonl', action='store_true', help='Output JSON Lines format to console')
-    parser.add_argument('-o', '--output', help='Output file path (JSONL format)')
-    
-    # Output field flags
-    parser.add_argument('-b', '--body', action='store_true', help='Show body preview')
-    parser.add_argument('-cn', '--cname', action='store_true', help='Show CNAME records')
-    parser.add_argument('-cl', '--content-length', action='store_true', help='Show content length')
-    parser.add_argument('-ct', '--content-type', action='store_true', help='Show content type')
-    parser.add_argument('-f', '--favicon', action='store_true', help='Show favicon hash')
-    parser.add_argument('-fr', '--follow-redirects', action='store_true', help='Follow redirects (max 10)')
-    parser.add_argument('-hr', '--headers', action='store_true', help='Show response headers')
-    parser.add_argument('-i', '--ip', action='store_true', help='Show IP addresses')
-    parser.add_argument('-sc', '--status-code', action='store_true', help='Show status code')
-    parser.add_argument('-ti', '--title', action='store_true', help='Show page title')
-    parser.add_argument('-tls', '--tls-info', action='store_true', help='Show TLS certificate information')
-    
-    # Other arguments
-    parser.add_argument('-ax', '--axfr', action='store_true', help='Try AXFR transfer against nameservers')
-    parser.add_argument('-ec', '--exclude-codes', type=parse_status_codes, help='Exclude these status codes (comma-separated, e.g., 404,500)')
-    parser.add_argument('-mc', '--match-codes', type=parse_status_codes, help='Only show these status codes (comma-separated, e.g., 200,301,404)')
-    parser.add_argument('-p', '--progress', action='store_true', help='Show progress counter')
-    parser.add_argument('-r', '--resolvers', help='File containing DNS resolvers (one per line)')
-    parser.add_argument('-to', '--timeout', type=int, default=5, help='Request timeout in seconds')
-    
-    args = parser.parse_args()
-
-    # Setup logging based on arguments
-    global SILENT_MODE
-    SILENT_MODE = args.jsonl
-
-    if not SILENT_MODE:
-        if args.debug:
-            setup_logging(level='DEBUG', log_to_disk=True)
-        else:
-            setup_logging(level='INFO')
-
-        if args.file == '-':
-            info('Reading domains from stdin')
-        else:
-            info(f'Processing file: {args.file}')
-
-    # Setup show_fields
-    show_fields = {
-        'status_code'      : args.all_flags or args.status_code,
-        'content_type'     : args.all_flags or args.content_type,
-        'content_length'   : args.all_flags or args.content_length,
-        'title'            : args.all_flags or args.title,
-        'body'             : args.all_flags or args.body,
-        'ip'               : args.all_flags or args.ip,
-        'favicon'          : args.all_flags or args.favicon,
-        'headers'          : args.all_flags or args.headers,
-        'follow_redirects' : args.all_flags or args.follow_redirects,
-        'cname'            : args.all_flags or args.cname,
-        'tls'              : args.all_flags or args.tls_info
-    }
-
-    # If no fields specified show all
-    if not any(show_fields.values()):
-        show_fields = {k: True for k in show_fields}
-
-    try:
-        # Create scanner instance
-        scanner = HTTPZScanner(
-            concurrent_limit=args.concurrent,
-            timeout=args.timeout,
-            follow_redirects=args.all_flags or args.follow_redirects,
-            check_axfr=args.axfr,
-            resolver_file=args.resolvers,
-            output_file=args.output,
-            show_progress=args.progress,
-            debug_mode=args.debug,
-            jsonl_output=args.jsonl,
-            show_fields=show_fields,
-            match_codes=args.match_codes,
-            exclude_codes=args.exclude_codes
-        )
-
-        # Run the scanner with file/stdin input
-        await scanner.scan(args.file)
-
-    except KeyboardInterrupt:
-        logging.warning('Process interrupted by user')
-        sys.exit(1)
-    except Exception as e:
-        logging.error(f'Unexpected error: {str(e)}')
-        sys.exit(1)
-
-def run():
-    '''Entry point for the CLI'''
-    asyncio.run(main())
-
-if __name__ == '__main__':
-    run() 
-\ No newline at end of file
diff --git a/httpz-scanner/colors.py b/httpz-scanner/colors.py
@@ -1,20 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/colors.py
-
-class Colors:
-    '''ANSI color codes for terminal output'''
-    HEADER     = '\033[95m' # Light purple
-    BLUE       = '\033[94m'
-    GREEN      = '\033[92m'
-    YELLOW     = '\033[93m'
-    RED        = '\033[91m'
-    BOLD       = '\033[1m'
-    UNDERLINE  = '\033[4m'
-    RESET      = '\033[0m'
-    PURPLE     = '\033[35m'       # Dark purple
-    LIGHT_RED  = '\033[38;5;203m' # Light red
-    DARK_GREEN = '\033[38;5;22m'  # Dark green
-    PINK       = '\033[38;5;198m' # Bright pink
-    GRAY       = '\033[90m'       # Gray color
-    CYAN       = '\033[96m'       # Cyan color 
-\ No newline at end of file
diff --git a/httpz-scanner/dns.py b/httpz-scanner/dns.py
@@ -1,98 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/dns.py
-
-import asyncio
-import os
-import aiohttp
-import dns.asyncresolver
-import dns.query
-import dns.resolver
-import dns.zone
-
-from .utils import debug, info, SILENT_MODE
-
-async def resolve_all_dns(domain: str, timeout: int = 5, nameserver: str = None, check_axfr: bool = False) -> tuple:
-    '''
-    Resolve all DNS records for a domain
-    
-    :param domain: Domain to resolve
-    :param timeout: Timeout in seconds
-    :param nameserver: Specific nameserver to use
-    :param check_axfr: Whether to attempt zone transfer
-    '''
-    resolver = dns.asyncresolver.Resolver()
-    resolver.lifetime = timeout
-    if nameserver:
-        resolver.nameservers = [nameserver]
-    
-    results = await asyncio.gather(*[resolver.resolve(domain, rtype) 
-                                   for rtype in ('NS', 'A', 'AAAA', 'CNAME')], 
-                                 return_exceptions=True)
-    
-    nameservers = [str(ns).rstrip('.') for ns in results[0]] if isinstance(results[0], dns.resolver.Answer) else []
-    ips = ([str(ip) for ip in results[1]] if isinstance(results[1], dns.resolver.Answer) else []) + \
-          ([str(ip) for ip in results[2]] if isinstance(results[2], dns.resolver.Answer) else [])
-    cname = str(results[3][0].target).rstrip('.') if isinstance(results[3], dns.resolver.Answer) else None
-    
-    ns_ips = {}
-    if nameservers:
-        ns_results = await asyncio.gather(*[resolver.resolve(ns, rtype) 
-                                          for ns in nameservers 
-                                          for rtype in ('A', 'AAAA')], 
-                                        return_exceptions=True)
-        for i, ns in enumerate(nameservers):
-            ns_ips[ns] = [str(ip) for records in ns_results[i*2:i*2+2] 
-                         if isinstance(records, dns.resolver.Answer) 
-                         for ip in records]
-
-    if check_axfr:
-        await attempt_axfr(domain, ns_ips, timeout)
-
-    return sorted(set(ips)), cname, nameservers, ns_ips
-
-async def attempt_axfr(domain: str, ns_ips: dict, timeout: int = 5) -> None:
-    '''
-    Attempt zone transfer for a domain
-    
-    :param domain: Domain to attempt AXFR transfer
-    :param ns_ips: Dictionary of nameserver hostnames to their IPs
-    :param timeout: Timeout in seconds
-    '''
-    try:
-        os.makedirs('axfrout', exist_ok=True)
-        
-        for ns_host, ips in ns_ips.items():
-            for ns_ip in ips:
-                try:
-                    zone = dns.zone.from_xfr(dns.query.xfr(ns_ip, domain, lifetime=timeout))
-                    with open(f'axfrout/{domain}_{ns_ip}.zone', 'w') as f:
-                        zone.to_text(f)
-                    info(f'[AXFR SUCCESS] {domain} from {ns_host} ({ns_ip})')
-                except Exception as e:
-                    debug(f'AXFR failed for {domain} from {ns_ip}: {str(e)}')
-    except Exception as e:
-        debug(f'Failed AXFR for {domain}: {str(e)}')
-
-async def load_resolvers(resolver_file: str = None) -> list:
-    '''
-    Load DNS resolvers from file or default source
-    
-    :param resolver_file: Path to file containing resolver IPs
-    :return: List of resolver IPs
-    '''
-    if resolver_file:
-        try:
-            with open(resolver_file) as f:
-                resolvers = [line.strip() for line in f if line.strip()]
-            if resolvers:
-                return resolvers
-        except Exception as e:
-            debug(f'Error loading resolvers from {resolver_file}: {str(e)}')
-
-    async with aiohttp.ClientSession() as session:
-        async with session.get('https://raw.githubusercontent.com/trickest/resolvers/refs/heads/main/resolvers.txt') as response:
-            resolvers = await response.text()
-            if not SILENT_MODE:
-                info(f'Loaded {len(resolvers.splitlines()):,} resolvers.')
-            return [resolver.strip() for resolver in resolvers.splitlines()] 
-\ No newline at end of file
diff --git a/httpz-scanner/formatters.py b/httpz-scanner/formatters.py
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/formatters.py
-
-from .colors import Colors
-from .utils  import human_size
-
-def format_console_output(result: dict, debug: bool = False, show_fields: dict = None, match_codes: set = None, exclude_codes: set = None) -> str:
-    '''
-    Format the output with colored sections
-    
-    :param result: Dictionary containing domain check results
-    :param debug: Whether to show error states
-    :param show_fields: Dictionary of fields to show
-    :param match_codes: Set of status codes to match
-    :param exclude_codes: Set of status codes to exclude
-    '''
-    if result['status'] < 0 and not debug:
-        return ''
-        
-    if match_codes and result['status'] not in match_codes:
-        return ''
-    if exclude_codes and result['status'] in exclude_codes:
-        return ''
-
-    parts = []
-    
-    # Status code
-    if show_fields.get('status_code'):
-        if result['status'] < 0:
-            status = f"{Colors.RED}[{result['status']}]{Colors.RESET}"
-        elif 200 <= result['status'] < 300:
-            status = f"{Colors.GREEN}[{result['status']}]{Colors.RESET}"
-        elif 300 <= result['status'] < 400:
-            status = f"{Colors.YELLOW}[{result['status']}]{Colors.RESET}"
-        else:
-            status = f"{Colors.RED}[{result['status']}]{Colors.RESET}"
-        parts.append(status)
-    
-    # Domain (always shown)
-    parts.append(f"[{result['url']}]")
-    
-    # Title
-    if show_fields.get('title') and result.get('title'):
-        parts.append(f"{Colors.DARK_GREEN}[{result['title']}]{Colors.RESET}")
-    
-    # Body preview
-    if show_fields.get('body') and result.get('body'):
-        body = result['body'][:100] + ('...' if len(result['body']) > 100 else '')
-        parts.append(f"{Colors.BLUE}[{body}]{Colors.RESET}")
-    
-    # IPs
-    if show_fields.get('ip') and result.get('ips'):
-        ips_text = ', '.join(result['ips'])
-        parts.append(f"{Colors.YELLOW}[{ips_text}]{Colors.RESET}")
-
-    # Favicon hash
-    if show_fields.get('favicon') and result.get('favicon_hash'):
-        parts.append(f"{Colors.PURPLE}[{result['favicon_hash']}]{Colors.RESET}")
-
-    # Headers
-    if show_fields.get('headers') and result.get('headers'):
-        headers_text = [f"{k}: {v}" for k, v in result['headers'].items()]
-        parts.append(f"{Colors.CYAN}[{', '.join(headers_text)}]{Colors.RESET}")
-    else:
-        if show_fields.get('content_type') and result.get('content_type'):
-            parts.append(f"{Colors.HEADER}[{result['content_type']}]{Colors.RESET}")
-        
-        if show_fields.get('content_length') and result.get('content_length'):
-            try:
-                size = human_size(int(result['content_length']))
-                parts.append(f"{Colors.PINK}[{size}]{Colors.RESET}")
-            except (ValueError, TypeError):
-                parts.append(f"{Colors.PINK}[{result['content_length']}]{Colors.RESET}")
-    
-    # CNAME
-    if show_fields.get('cname') and result.get('cname'):
-        parts.append(f"{Colors.PURPLE}[CNAME: {result['cname']}]{Colors.RESET}")
-    
-    # Redirect Chain
-    if show_fields.get('follow_redirects') and result.get('redirect_chain'):
-        chain = ' -> '.join(result['redirect_chain'])
-        parts.append(f"{Colors.YELLOW}[Redirects: {chain}]{Colors.RESET}")
-
-    # TLS Certificate Info
-    if result.get('tls'):
-        cert = result['tls']
-        tls_parts = []
-        if cert.get('common_name'):
-            tls_parts.append(f"Subject: {cert['common_name']}")
-        if cert.get('issuer'):
-            tls_parts.append(f"Issuer: {cert['issuer']}")
-        if cert.get('fingerprint'):
-            tls_parts.append(f"Fingerprint: {cert['fingerprint'][:16]}...")
-        if cert.get('alt_names'):
-            tls_parts.append(f"SANs: {', '.join(cert['alt_names'][:3])}")
-        if cert.get('not_before') and cert.get('not_after'):
-            tls_parts.append(f"Valid: {cert['not_before'].split('T')[0]} to {cert['not_after'].split('T')[0]}")
-        if cert.get('version'):
-            tls_parts.append(f"Version: {cert['version']}")
-        if cert.get('serial_number'):
-            tls_parts.append(f"Serial: {cert['serial_number'][:16]}...")
-        
-        if tls_parts:  # Only add TLS info if we have any parts
-            parts.append(f"{Colors.GREEN}[{' | '.join(tls_parts)}]{Colors.RESET}")
-
-    return ' '.join(parts) 
-\ No newline at end of file
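
For reference, a minimal sketch of driving format_console_output from the file
above (re-added unchanged under httpz_scanner/ below); the result dict here is
hypothetical:

    from httpz_scanner.formatters import format_console_output

    result = {'status': 200, 'url': 'https://example.com', 'title': 'Example Domain'}
    fields = {'status_code': True, 'title': True}
    print(format_console_output(result, show_fields=fields))
    # [200] [https://example.com] [Example Domain]   (bracketed, ANSI-colored)
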
diff --git a/httpz-scanner/parsers.py b/httpz-scanner/parsers.py
@@ -1,140 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/parsers.py
-
-try:
-    import bs4
-except ImportError:
-    raise ImportError('missing bs4 module (pip install beautifulsoup4)')
-
-try:
-    from cryptography import x509
-    from cryptography.hazmat.primitives import hashes
-    from cryptography.x509.oid import NameOID
-except ImportError:
-    raise ImportError('missing cryptography module (pip install cryptography)')
-
-try:
-    import mmh3
-except ImportError:
-    raise ImportError('missing mmh3 module (pip install mmh3)')
-
-from .utils import debug, error
-
-
-def parse_domain_url(domain: str) -> tuple:
-    '''
-    Parse domain string into base domain, port, and protocol list
-    
-    :param domain: Raw domain string to parse
-    :return: Tuple of (base_domain, port, protocols)
-    '''
-    port = None
-    base_domain = domain.rstrip('/')
-    
-    if base_domain.startswith(('http://', 'https://')):
-        protocol = 'https://' if base_domain.startswith('https://') else 'http://'
-        base_domain = base_domain.split('://', 1)[1]
-        if ':' in base_domain.split('/')[0]:
-            base_domain, port_str = base_domain.split(':', 1)
-            try:
-                port = int(port_str.split('/')[0])
-            except ValueError:
-                port = 443 if protocol == 'https://' else 80
-        else:
-            port = 443 if protocol == 'https://' else 80
-        protocols = [f'{protocol}{base_domain}{":" + str(port) if port else ""}']
-    else:
-        if ':' in base_domain.split('/')[0]:
-            base_domain, port_str = base_domain.split(':', 1)
-            port = int(port_str.split('/')[0]) if port_str.split('/')[0].isdigit() else 443
-        else:
-            port = 443
-        protocols = [
-            f'https://{base_domain}{":" + str(port) if port else ""}',
-            f'http://{base_domain}{":"  + str(port) if port else ""}'
-        ]
-    
-    return base_domain, port, protocols
-
-async def get_cert_info(ssl_object, url: str) -> dict:
-    '''
-    Get SSL certificate information for a domain
-    
-    :param ssl_object: SSL object to get certificate info from
-    :param url: URL to get certificate info from
-    '''
-    try:            
-        if not ssl_object or not (cert_der := ssl_object.getpeercert(binary_form=True)):
-            return None
-
-        cert = x509.load_der_x509_certificate(cert_der)
-
-        try:
-            san_extension = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
-            alt_names     = [name.value for name in san_extension.value] if san_extension else []
-        except x509.extensions.ExtensionNotFound:
-            alt_names = []
-
-        try:
-            common_name = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value
-        except IndexError:
-            common_name = None
-
-        try:
-            issuer = cert.issuer.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value
-        except IndexError:
-            issuer = None
-
-        return {
-            'fingerprint'   : cert.fingerprint(hashes.SHA256()).hex(),
-            'common_name'   : common_name,
-            'issuer'        : issuer,
-            'alt_names'     : alt_names,
-            'not_before'    : cert.not_valid_before_utc.isoformat(),
-            'not_after'     : cert.not_valid_after_utc.isoformat(),
-            'version'       : cert.version.value,
-            'serial_number' : format(cert.serial_number, 'x'),
-        }
-    except Exception as e:
-        error(f'Error getting cert info for {url}: {str(e)}')
-        return None
-
-async def get_favicon_hash(session, base_url: str, html: str) -> str:
-    '''
-    Get favicon hash from a webpage
-    
-    :param session: aiohttp client session
-    :param base_url: base URL of the website
-    :param html: HTML content of the page
-    '''
-    try:
-        soup = bs4.BeautifulSoup(html, 'html.parser')
-        
-        favicon_url = None
-        for link in soup.find_all('link'):
-            if link.get('rel') and any(x.lower() == 'icon' for x in link.get('rel')):
-                favicon_url = link.get('href')
-                break
-        
-        if not favicon_url:
-            favicon_url = '/favicon.ico'
-        
-        if favicon_url.startswith('//'):
-            favicon_url = 'https:' + favicon_url
-        elif favicon_url.startswith('/'):
-            favicon_url = base_url + favicon_url
-        elif not favicon_url.startswith(('http://', 'https://')):
-            favicon_url = base_url + '/' + favicon_url
-
-        async with session.get(favicon_url, timeout=10) as response:
-            if response.status == 200:
-                content    = (await response.read())[:1024*1024]
-                hash_value = mmh3.hash64(content)[0]
-                if hash_value != 0:
-                    return str(hash_value)
-
-    except Exception as e:
-        debug(f'Error getting favicon for {base_url}: {str(e)}')
-    
-    return None 
-\ No newline at end of file
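
A worked example of parse_domain_url, with its behaviour read straight from
the branches above (the file is re-added unchanged under httpz_scanner/;
example.com is a placeholder domain):

    from httpz_scanner.parsers import parse_domain_url

    parse_domain_url('example.com')
    # ('example.com', 443, ['https://example.com:443', 'http://example.com:443'])

    parse_domain_url('http://example.com')
    # ('example.com', 80, ['http://example.com:80'])
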
diff --git a/httpz-scanner/scanner.py b/httpz-scanner/scanner.py
@@ -1,239 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/scanner.py
-
-import asyncio
-import json
-import random
-import sys
-
-try:
-    import aiohttp
-except ImportError:
-    raise ImportError('missing aiohttp module (pip install aiohttp)')
-
-try:
-    import bs4
-except ImportError:
-    raise ImportError('missing bs4 module (pip install beautifulsoup4)')
-
-from .dns        import resolve_all_dns, load_resolvers
-from .formatters import format_console_output
-from .colors     import Colors
-from .parsers    import parse_domain_url, get_cert_info, get_favicon_hash
-from .utils      import debug, info, USER_AGENTS, input_generator
-
-
-class HTTPZScanner:
-    '''Core scanner class for HTTP domain checking'''
-    
-    def __init__(self, concurrent_limit = 100, timeout = 5, follow_redirects = False, check_axfr = False, resolver_file = None, output_file = None, show_progress = False, debug_mode = False, jsonl_output = False, show_fields = None, match_codes = None, exclude_codes = None):
-        '''
-        Initialize the HTTPZScanner class
-        
-        :param concurrent_limit: Maximum number of concurrent requests
-        :param timeout: Request timeout in seconds
-        :param follow_redirects: Follow redirects
-        :param check_axfr: Check for AXFR
-        :param resolver_file: Path to resolver file
-        :param output_file: Path to output file
-        :param show_progress: Show progress bar
-        :param debug_mode: Enable debug mode
-        :param jsonl_output: Output in JSONL format
-        :param show_fields: Fields to show
-        :param match_codes: Status codes to match
-        :param exclude_codes: Status codes to exclude
-        '''
-
-        self.concurrent_limit = concurrent_limit
-        self.timeout          = timeout
-        self.follow_redirects = follow_redirects
-        self.check_axfr       = check_axfr
-        self.resolver_file    = resolver_file
-        self.output_file      = output_file
-        self.show_progress    = show_progress
-        self.debug_mode       = debug_mode
-        self.jsonl_output     = jsonl_output
-
-        self.show_fields = show_fields or {
-            'status_code'      : True,
-            'content_type'     : True,
-            'content_length'   : True,
-            'title'            : True,
-            'body'             : True,
-            'ip'               : True,
-            'favicon'          : True,
-            'headers'          : True,
-            'follow_redirects' : True,
-            'cname'            : True,
-            'tls'              : True
-        }
-
-        self.match_codes       = match_codes
-        self.exclude_codes     = exclude_codes
-        self.resolvers         = None
-        self.processed_domains = 0
-
-
-    async def init(self):
-        '''Initialize resolvers - must be called before scanning'''
-        self.resolvers = await load_resolvers(self.resolver_file)
-
-
-    async def check_domain(self, session: aiohttp.ClientSession, domain: str):
-        '''Check a single domain and return results'''
-        nameserver = random.choice(self.resolvers) if self.resolvers else None
-        base_domain, port, protocols = parse_domain_url(domain)
-        
-        result = {
-            'domain'  : base_domain,
-            'status'  : 0,
-            'url'     : protocols[0],
-            'port'    : port,
-        }
-
-        # Try each protocol
-        for url in protocols:
-            try:
-                # Set random user agent for each request
-                headers = {'User-Agent': random.choice(USER_AGENTS)}
-                
-                async with session.get(url, timeout=self.timeout, 
-                                     allow_redirects=self.follow_redirects,
-                                     max_redirects=10 if self.follow_redirects else 0,
-                                     headers=headers) as response:
-                    
-                    result['status'] = response.status
-                    
-                    # Early exit if status code doesn't match criteria
-                    if self.match_codes and result['status'] not in self.match_codes:
-                        return result
-                    if self.exclude_codes and result['status'] in self.exclude_codes:
-                        return result
-
-                    # Continue with full processing only if status code matches criteria
-                    result['url'] = str(response.url)
-                    
-                    # Add headers if requested
-                    headers = dict(response.headers)
-                    if headers and (self.show_fields.get('headers') or self.show_fields.get('all_flags')):
-                        result['headers'] = headers
-                    else:
-                        # Only add content type/length if headers aren't included
-                        if content_type := response.headers.get('content-type', '').split(';')[0]:
-                            result['content_type'] = content_type
-                        if content_length := response.headers.get('content-length'):
-                            result['content_length'] = content_length
-                    
-                    # Only add redirect chain if it exists
-                    if self.follow_redirects and response.history:
-                        result['redirect_chain'] = [str(h.url) for h in response.history] + [str(response.url)]
-
-                    # Do DNS lookups only if we're going to use the result
-                    ips, cname, nameservers, _ = await resolve_all_dns(
-                        base_domain, self.timeout, nameserver, self.check_axfr
-                    )
-                    
-                    # Only add DNS fields if they have values
-                    if ips:
-                        result['ips'] = ips
-                    if cname:
-                        result['cname'] = cname
-                    if nameservers:
-                        result['nameservers'] = nameservers
-
-                    # Only add TLS info if available
-                    if response.url.scheme == 'https':
-                        try:
-                            if ssl_object := response._protocol.transport.get_extra_info('ssl_object'):
-                                if tls_info := await get_cert_info(ssl_object, str(response.url)):
-                                    # Only add TLS fields that have values
-                                    result['tls'] = {k: v for k, v in tls_info.items() if v}
-                        except AttributeError:
-                            debug(f'Failed to get SSL info for {url}')
-
-                    html = (await response.text())[:1024*1024]
-                    soup = bs4.BeautifulSoup(html, 'html.parser')
-                    
-                    # Only add title if it exists
-                    if soup.title and soup.title.string:
-                        result['title'] = ' '.join(soup.title.string.strip().split()).rstrip('.')[:300]
-                    
-                    # Only add body if it exists
-                    if body_text := soup.get_text():
-                        result['body'] = ' '.join(body_text.split()).rstrip('.')[:500]
-                    
-                    # Only add favicon hash if it exists
-                    if favicon_hash := await get_favicon_hash(session, url, html):
-                        result['favicon_hash'] = favicon_hash
-                    
-                    break
-            except Exception as e:
-                debug(f'Error checking {url}: {str(e)}')
-                result['status'] = -1
-                continue
-
-        return result
-
-
-    async def process_result(self, result):
-        '''
-        Process and output a single result
-        
-        :param result: result to process
-        '''
-
-        formatted = format_console_output(result, self.debug_mode, self.show_fields, self.match_codes, self.exclude_codes)
-        
-        if formatted:
-            # Write to file if specified
-            if self.output_file:
-                if (not self.match_codes or result['status'] in self.match_codes) and \
-                   (not self.exclude_codes or result['status'] not in self.exclude_codes):
-                    async with aiohttp.ClientSession() as session:
-                        with open(self.output_file, 'a') as f:
-                            json.dump(result, f, ensure_ascii=False)
-                            f.write('\n')
-            
-            # Console output
-            if self.jsonl_output:
-                print(json.dumps(result))
-            else:
-                self.processed_domains += 1
-                if self.show_progress:
-                    info(f"{Colors.GRAY}[{self.processed_domains:,}]{Colors.RESET} {formatted}")
-                else:
-                    info(formatted)
-
-
-    async def scan(self, input_source):
-        '''
-        Scan domains from a file or stdin
-        
-        :param input_source: Path to file or '-' for stdin
-        '''
-        if not self.resolvers:
-            await self.init()
-
-        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
-            tasks = set()
-            
-            # Process domains with concurrent limit
-            for domain in input_generator(input_source):
-                if len(tasks) >= self.concurrent_limit:
-                    done, tasks = await asyncio.wait(
-                        tasks, return_when=asyncio.FIRST_COMPLETED
-                    )
-                    for task in done:
-                        result = await task
-                        await self.process_result(result)
-
-                task = asyncio.create_task(self.check_domain(session, domain))
-                tasks.add(task)
-
-            # Process remaining tasks
-            if tasks:
-                done, _ = await asyncio.wait(tasks)
-                for task in done:
-                    result = await task
-                    await self.process_result(result) 
-\ No newline at end of file
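
A minimal sketch of using HTTPZScanner as a library rather than via the CLI,
assuming a hypothetical domains.txt file (one domain per line); the class
moves unchanged to httpz_scanner/scanner.py:

    import asyncio
    import logging
    from httpz_scanner import HTTPZScanner

    logging.basicConfig(level=logging.INFO)  # the CLI normally does this in setup_logging()

    async def demo():
        scanner = HTTPZScanner(concurrent_limit=10, timeout=5, show_progress=True)
        await scanner.scan('domains.txt')    # or '-' to read domains from stdin

    asyncio.run(demo())
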
diff --git a/httpz-scanner/utils.py b/httpz-scanner/utils.py
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
-# httpz/utils.py
-
-import logging
-import sys
-
-
-# Global for silent mode
-SILENT_MODE = False
-
-# List of user agents to randomize requests
-USER_AGENTS = [
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36 Edg/132.0.0.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36 Edg/132.0.0.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0",
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.6.5 Chrome/124.0.6367.243 Electron/30.1.2 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:135.0) Gecko/20100101 Firefox/135.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 OPR/116.0.0.0",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:134.0) Gecko/20100101 Firefox/134.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.8.3 Chrome/130.0.6723.191 Electron/33.3.2 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.2 Safari/605.1.15",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.3 Safari/605.1.15",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.2 Safari/605.1.15",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.6613.137 Safari/537.36",
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.6 Safari/605.1.15",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1.1 Safari/605.1.15",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/135.0",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.5.12 Chrome/120.0.6099.283 Electron/28.2.3 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0",
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0",
-    "Mozilla/5.0 (X11; CrOS x86_64 14541.0.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 OPR/114.0.0.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1 Safari/605.1.15",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.5 Safari/605.1.15",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.7.7 Chrome/128.0.6613.186 Electron/32.2.5 Safari/537.36"
-]
-
-
-def debug(msg: str): 
-    if not SILENT_MODE: logging.debug(msg)
-def error(msg: str):
-    if not SILENT_MODE: logging.error(msg)
-def info(msg: str):
-    if not SILENT_MODE: logging.info(msg)
-def warning(msg: str):
-    if not SILENT_MODE: logging.warning(msg)
-
-
-def human_size(size_bytes: int) -> str:
-    '''
-    Convert bytes to human readable string
-    
-    :param size_bytes: size in bytes
-    '''
-
-    if not size_bytes:
-        return '0B'
-    
-    units      = ('B', 'KB', 'MB', 'GB')
-    size       = float(size_bytes)
-    unit_index = 0
-    
-    while size >= 1024 and unit_index < len(units) - 1:
-        size /= 1024
-        unit_index += 1
-    
-    return f'{size:.1f}{units[unit_index]}'
-
-
-def input_generator(input_source: str):
-    '''
-    Generator function to yield domains from file or stdin
-    
-    :param input_source: file or stdin
-    '''
-    
-    if input_source == '-' or input_source is None:
-        for line in sys.stdin:
-            if line.strip():
-                yield line.strip()
-    else:
-        with open(input_source, 'r') as f:
-            for line in f:
-                if line.strip():
-                    yield line.strip() 
-\ No newline at end of file
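
Worked examples for human_size, following the 1024-based loop above (the file
is re-added unchanged under httpz_scanner/):

    from httpz_scanner.utils import human_size

    human_size(0)          # '0B'
    human_size(1536)       # '1.5KB'
    human_size(123456789)  # '117.7MB'
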
diff --git a/httpz_scanner/__init__.py b/httpz_scanner/__init__.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/__init__.py
+
+from .scanner import HTTPZScanner
+from .colors  import Colors
+
+
+__version__ = '1.0.7' 
+\ No newline at end of file
diff --git a/httpz_scanner/__main__.py b/httpz_scanner/__main__.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/__main__.py
+
+import asyncio
+import sys
+from .cli import main
+
+if __name__ == '__main__':
+    try:
+        asyncio.run(main())
+    except KeyboardInterrupt:
+        sys.exit(1) 
+\ No newline at end of file
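
With the underscore name in place, the entry point can be invoked as a module;
a hypothetical shell session, assuming the package is importable:

    $ echo 'example.com' | python -m httpz_scanner -all
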
diff --git a/httpz_scanner/cli.py b/httpz_scanner/cli.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/cli.py
+
+import argparse
+import asyncio
+import logging
+import os
+import sys
+
+from .colors  import Colors
+from .scanner import HTTPZScanner
+from .utils   import SILENT_MODE, info
+
+def setup_logging(level='INFO', log_to_disk=False):
+    '''
+    Setup logging configuration
+    
+    :param level: Logging level (INFO or DEBUG)
+    :param log_to_disk: Whether to also log to file
+    '''
+    class ColoredFormatter(logging.Formatter):
+        def formatTime(self, record, datefmt=None):
+            # Format: MM-DD HH:MM
+            from datetime import datetime
+            dt = datetime.fromtimestamp(record.created)
+            return f"{Colors.GRAY}{dt.strftime('%m-%d %H:%M')}{Colors.RESET}"
+        
+        def format(self, record):
+            return f'{self.formatTime(record)} {record.getMessage()}'
+    
+    handlers = []
+    
+    # Console handler
+    console = logging.StreamHandler()
+    console.setFormatter(ColoredFormatter())
+    handlers.append(console)
+    
+    # File handler
+    if log_to_disk:
+        os.makedirs('logs', exist_ok=True)
+        file_handler = logging.FileHandler(f'logs/httpz.log')
+        file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
+        handlers.append(file_handler)
+    
+    # Setup logger
+    logging.basicConfig(
+        level=getattr(logging, level.upper()),
+        handlers=handlers
+    )
+
+def parse_status_codes(codes_str: str) -> set:
+    '''
+    Parse comma-separated status codes and ranges into a set of integers
+    
+    :param codes_str: Comma-separated status codes (e.g., "200,301-399,404,500-503")
+    '''
+    
+    codes = set()
+    try:
+        for part in codes_str.split(','):
+            if '-' in part:
+                start, end = map(int, part.split('-'))
+                codes.update(range(start, end + 1))
+            else:
+                codes.add(int(part))
+        return codes
+    except ValueError:
+        raise argparse.ArgumentTypeError('Invalid status code format. Use comma-separated numbers or ranges (e.g., 200,301-399,404,500-503)')
+
+async def main():
+    parser = argparse.ArgumentParser(
+        description=f'{Colors.GREEN}Hyper-fast HTTP Scraping Tool{Colors.RESET}',
+        formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+
+    # Add arguments
+    parser.add_argument('file', nargs='?', default='-', help='File containing domains to check (one per line), use - for stdin')
+    parser.add_argument('-all', '--all-flags', action='store_true', help='Enable all output flags')
+    parser.add_argument('-d', '--debug', action='store_true', help='Show error states and debug information')
+    parser.add_argument('-c', '--concurrent', type=int, default=100, help='Number of concurrent checks')
+    parser.add_argument('-j', '--jsonl', action='store_true', help='Output JSON Lines format to console')
+    parser.add_argument('-o', '--output', help='Output file path (JSONL format)')
+    
+    # Output field flags
+    parser.add_argument('-b', '--body', action='store_true', help='Show body preview')
+    parser.add_argument('-cn', '--cname', action='store_true', help='Show CNAME records')
+    parser.add_argument('-cl', '--content-length', action='store_true', help='Show content length')
+    parser.add_argument('-ct', '--content-type', action='store_true', help='Show content type')
+    parser.add_argument('-f', '--favicon', action='store_true', help='Show favicon hash')
+    parser.add_argument('-fr', '--follow-redirects', action='store_true', help='Follow redirects (max 10)')
+    parser.add_argument('-hr', '--headers', action='store_true', help='Show response headers')
+    parser.add_argument('-i', '--ip', action='store_true', help='Show IP addresses')
+    parser.add_argument('-sc', '--status-code', action='store_true', help='Show status code')
+    parser.add_argument('-ti', '--title', action='store_true', help='Show page title')
+    parser.add_argument('-tls', '--tls-info', action='store_true', help='Show TLS certificate information')
+    
+    # Other arguments
+    parser.add_argument('-ax', '--axfr', action='store_true', help='Try AXFR transfer against nameservers')
+    parser.add_argument('-ec', '--exclude-codes', type=parse_status_codes, help='Exclude these status codes (comma-separated, e.g., 404,500)')
+    parser.add_argument('-mc', '--match-codes', type=parse_status_codes, help='Only show these status codes (comma-separated, e.g., 200,301,404)')
+    parser.add_argument('-p', '--progress', action='store_true', help='Show progress counter')
+    parser.add_argument('-r', '--resolvers', help='File containing DNS resolvers (one per line)')
+    parser.add_argument('-to', '--timeout', type=int, default=5, help='Request timeout in seconds')
+    
+    args = parser.parse_args()
+
+    # Setup logging based on arguments
+    global SILENT_MODE
+    SILENT_MODE = args.jsonl
+
+    if not SILENT_MODE:
+        if args.debug:
+            setup_logging(level='DEBUG', log_to_disk=True)
+        else:
+            setup_logging(level='INFO')
+
+        if args.file == '-':
+            info('Reading domains from stdin')
+        else:
+            info(f'Processing file: {args.file}')
+
+    # Setup show_fields
+    show_fields = {
+        'status_code'      : args.all_flags or args.status_code,
+        'content_type'     : args.all_flags or args.content_type,
+        'content_length'   : args.all_flags or args.content_length,
+        'title'            : args.all_flags or args.title,
+        'body'             : args.all_flags or args.body,
+        'ip'               : args.all_flags or args.ip,
+        'favicon'          : args.all_flags or args.favicon,
+        'headers'          : args.all_flags or args.headers,
+        'follow_redirects' : args.all_flags or args.follow_redirects,
+        'cname'            : args.all_flags or args.cname,
+        'tls'              : args.all_flags or args.tls_info
+    }
+
+    # If no fields specified show all
+    if not any(show_fields.values()):
+        show_fields = {k: True for k in show_fields}
+
+    try:
+        # Create scanner instance
+        scanner = HTTPZScanner(
+            concurrent_limit=args.concurrent,
+            timeout=args.timeout,
+            follow_redirects=args.all_flags or args.follow_redirects,
+            check_axfr=args.axfr,
+            resolver_file=args.resolvers,
+            output_file=args.output,
+            show_progress=args.progress,
+            debug_mode=args.debug,
+            jsonl_output=args.jsonl,
+            show_fields=show_fields,
+            match_codes=args.match_codes,
+            exclude_codes=args.exclude_codes
+        )
+
+        # Run the scanner with file/stdin input
+        await scanner.scan(args.file)
+
+    except KeyboardInterrupt:
+        logging.warning('Process interrupted by user')
+        sys.exit(1)
+    except Exception as e:
+        logging.error(f'Unexpected error: {str(e)}')
+        sys.exit(1)
+
+def run():
+    '''Entry point for the CLI'''
+    asyncio.run(main())
+
+if __name__ == '__main__':
+    run() 
+\ No newline at end of file
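
A few hypothetical invocations exercising the flags above (module form is used
here because the console-script name comes from setup.py, which is not shown):

    $ python -m httpz_scanner domains.txt -sc -ti -i
    $ cat domains.txt | python -m httpz_scanner - -j -mc 200,301-399 -o results.jsonl

Note that -j also flips SILENT_MODE, so the console logging setup is skipped
and only JSON Lines are written to stdout.
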
diff --git a/httpz_scanner/colors.py b/httpz_scanner/colors.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/colors.py
+
+class Colors:
+    '''ANSI color codes for terminal output'''
+    HEADER     = '\033[95m' # Light purple
+    BLUE       = '\033[94m'
+    GREEN      = '\033[92m'
+    YELLOW     = '\033[93m'
+    RED        = '\033[91m'
+    BOLD       = '\033[1m'
+    UNDERLINE  = '\033[4m'
+    RESET      = '\033[0m'
+    PURPLE     = '\033[35m'       # Dark purple
+    LIGHT_RED  = '\033[38;5;203m' # Light red
+    DARK_GREEN = '\033[38;5;22m'  # Dark green
+    PINK       = '\033[38;5;198m' # Bright pink
+    GRAY       = '\033[90m'       # Gray color
+    CYAN       = '\033[96m'       # Cyan color 
+\ No newline at end of file
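
The class is nothing more than raw ANSI escape sequences; a one-line sketch of
typical use:

    from httpz_scanner.colors import Colors
    print(f'{Colors.GREEN}[200]{Colors.RESET} {Colors.GRAY}example.com{Colors.RESET}')
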
diff --git a/httpz_scanner/dns.py b/httpz_scanner/dns.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/dns.py
+
+import asyncio
+import os
+import aiohttp
+import dns.asyncresolver
+import dns.query
+import dns.resolver
+import dns.zone
+
+from .utils import debug, info, SILENT_MODE
+
+async def resolve_all_dns(domain: str, timeout: int = 5, nameserver: str = None, check_axfr: bool = False) -> tuple:
+    '''
+    Resolve all DNS records for a domain
+    
+    :param domain: Domain to resolve
+    :param timeout: Timeout in seconds
+    :param nameserver: Specific nameserver to use
+    :param check_axfr: Whether to attempt zone transfer
+    '''
+    resolver = dns.asyncresolver.Resolver()
+    resolver.lifetime = timeout
+    if nameserver:
+        resolver.nameservers = [nameserver]
+    
+    results = await asyncio.gather(*[resolver.resolve(domain, rtype) 
+                                   for rtype in ('NS', 'A', 'AAAA', 'CNAME')], 
+                                 return_exceptions=True)
+    
+    nameservers = [str(ns).rstrip('.') for ns in results[0]] if isinstance(results[0], dns.resolver.Answer) else []
+    ips = ([str(ip) for ip in results[1]] if isinstance(results[1], dns.resolver.Answer) else []) + \
+          ([str(ip) for ip in results[2]] if isinstance(results[2], dns.resolver.Answer) else [])
+    cname = str(results[3][0].target).rstrip('.') if isinstance(results[3], dns.resolver.Answer) else None
+    
+    ns_ips = {}
+    if nameservers:
+        ns_results = await asyncio.gather(*[resolver.resolve(ns, rtype) 
+                                          for ns in nameservers 
+                                          for rtype in ('A', 'AAAA')], 
+                                        return_exceptions=True)
+        for i, ns in enumerate(nameservers):
+            ns_ips[ns] = [str(ip) for records in ns_results[i*2:i*2+2] 
+                         if isinstance(records, dns.resolver.Answer) 
+                         for ip in records]
+
+    if check_axfr:
+        await attempt_axfr(domain, ns_ips, timeout)
+
+    return sorted(set(ips)), cname, nameservers, ns_ips
+
+async def attempt_axfr(domain: str, ns_ips: dict, timeout: int = 5) -> None:
+    '''
+    Attempt zone transfer for a domain
+    
+    :param domain: Domain to attempt AXFR transfer
+    :param ns_ips: Dictionary of nameserver hostnames to their IPs
+    :param timeout: Timeout in seconds
+    '''
+    try:
+        os.makedirs('axfrout', exist_ok=True)
+        
+        for ns_host, ips in ns_ips.items():
+            for ns_ip in ips:
+                try:
+                    zone = dns.zone.from_xfr(dns.query.xfr(ns_ip, domain, lifetime=timeout))
+                    with open(f'axfrout/{domain}_{ns_ip}.zone', 'w') as f:
+                        zone.to_text(f)
+                    info(f'[AXFR SUCCESS] {domain} from {ns_host} ({ns_ip})')
+                except Exception as e:
+                    debug(f'AXFR failed for {domain} from {ns_ip}: {str(e)}')
+    except Exception as e:
+        debug(f'Failed AXFR for {domain}: {str(e)}')
+
+async def load_resolvers(resolver_file: str = None) -> list:
+    '''
+    Load DNS resolvers from file or default source
+    
+    :param resolver_file: Path to file containing resolver IPs
+    :return: List of resolver IPs
+    '''
+    if resolver_file:
+        try:
+            with open(resolver_file) as f:
+                resolvers = [line.strip() for line in f if line.strip()]
+            if resolvers:
+                return resolvers
+        except Exception as e:
+            debug(f'Error loading resolvers from {resolver_file}: {str(e)}')
+
+    async with aiohttp.ClientSession() as session:
+        async with session.get('https://raw.githubusercontent.com/trickest/resolvers/refs/heads/main/resolvers.txt') as response:
+            resolvers = await response.text()
+            if not SILENT_MODE:
+                info(f'Loaded {len(resolvers.splitlines()):,} resolvers.')
+            return [resolver.strip() for resolver in resolvers.splitlines()] 
+\ No newline at end of file
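
A minimal sketch of calling the resolver helpers directly (requires dnspython
and aiohttp installed; the domain is a placeholder):

    import asyncio
    from httpz_scanner.dns import resolve_all_dns

    async def demo():
        ips, cname, nameservers, ns_ips = await resolve_all_dns('example.com', timeout=5)
        print(ips, cname, nameservers, ns_ips)

    asyncio.run(demo())
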
diff --git a/httpz_scanner/formatters.py b/httpz_scanner/formatters.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/formatters.py
+
+from .colors import Colors
+from .utils  import human_size
+
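+# Illustrative call (hypothetical `result` dict, not from the original source):
+#   line = format_console_output(result, show_fields={'status_code': True, 'title': True})
+#   if line:
+#       print(line)
+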
+def format_console_output(result: dict, debug: bool = False, show_fields: dict = None, match_codes: set = None, exclude_codes: set = None) -> str:
+    '''
+    Format the output with colored sections
+    
+    :param result: Dictionary containing domain check results
+    :param debug: Whether to show error states
+    :param show_fields: Dictionary of fields to show
+    :param match_codes: Set of status codes to match
+    :param exclude_codes: Set of status codes to exclude
+    '''
+    show_fields = show_fields or {}  # Guard against the None default before .get() lookups
+
+    if result['status'] < 0 and not debug:
+        return ''
+        
+    if match_codes and result['status'] not in match_codes:
+        return ''
+    if exclude_codes and result['status'] in exclude_codes:
+        return ''
+
+    parts = []
+    
+    # Status code
+    if show_fields.get('status_code'):
+        if result['status'] < 0:
+            status = f"{Colors.RED}[{result['status']}]{Colors.RESET}"
+        elif 200 <= result['status'] < 300:
+            status = f"{Colors.GREEN}[{result['status']}]{Colors.RESET}"
+        elif 300 <= result['status'] < 400:
+            status = f"{Colors.YELLOW}[{result['status']}]{Colors.RESET}"
+        else:
+            status = f"{Colors.RED}[{result['status']}]{Colors.RESET}"
+        parts.append(status)
+    
+    # Domain (always shown)
+    parts.append(f"[{result['url']}]")
+    
+    # Title
+    if show_fields.get('title') and result.get('title'):
+        parts.append(f"{Colors.DARK_GREEN}[{result['title']}]{Colors.RESET}")
+    
+    # Body preview
+    if show_fields.get('body') and result.get('body'):
+        body = result['body'][:100] + ('...' if len(result['body']) > 100 else '')
+        parts.append(f"{Colors.BLUE}[{body}]{Colors.RESET}")
+    
+    # IPs
+    if show_fields.get('ip') and result.get('ips'):
+        ips_text = ', '.join(result['ips'])
+        parts.append(f"{Colors.YELLOW}[{ips_text}]{Colors.RESET}")
+
+    # Favicon hash
+    if show_fields.get('favicon') and result.get('favicon_hash'):
+        parts.append(f"{Colors.PURPLE}[{result['favicon_hash']}]{Colors.RESET}")
+
+    # Headers
+    if show_fields.get('headers') and result.get('headers'):
+        headers_text = [f"{k}: {v}" for k, v in result['headers'].items()]
+        parts.append(f"{Colors.CYAN}[{', '.join(headers_text)}]{Colors.RESET}")
+    else:
+        if show_fields.get('content_type') and result.get('content_type'):
+            parts.append(f"{Colors.HEADER}[{result['content_type']}]{Colors.RESET}")
+        
+        if show_fields.get('content_length') and result.get('content_length'):
+            try:
+                size = human_size(int(result['content_length']))
+                parts.append(f"{Colors.PINK}[{size}]{Colors.RESET}")
+            except (ValueError, TypeError):
+                parts.append(f"{Colors.PINK}[{result['content_length']}]{Colors.RESET}")
+    
+    # CNAME
+    if show_fields.get('cname') and result.get('cname'):
+        parts.append(f"{Colors.PURPLE}[CNAME: {result['cname']}]{Colors.RESET}")
+    
+    # Redirect Chain
+    if show_fields.get('follow_redirects') and result.get('redirect_chain'):
+        chain = ' -> '.join(result['redirect_chain'])
+        parts.append(f"{Colors.YELLOW}[Redirects: {chain}]{Colors.RESET}")
+
+    # TLS Certificate Info
+    if result.get('tls'):
+        cert = result['tls']
+        tls_parts = []
+        if cert.get('common_name'):
+            tls_parts.append(f"Subject: {cert['common_name']}")
+        if cert.get('issuer'):
+            tls_parts.append(f"Issuer: {cert['issuer']}")
+        if cert.get('fingerprint'):
+            tls_parts.append(f"Fingerprint: {cert['fingerprint'][:16]}...")
+        if cert.get('alt_names'):
+            tls_parts.append(f"SANs: {', '.join(cert['alt_names'][:3])}")
+        if cert.get('not_before') and cert.get('not_after'):
+            tls_parts.append(f"Valid: {cert['not_before'].split('T')[0]} to {cert['not_after'].split('T')[0]}")
+        if cert.get('version'):
+            tls_parts.append(f"Version: {cert['version']}")
+        if cert.get('serial_number'):
+            tls_parts.append(f"Serial: {cert['serial_number'][:16]}...")
+        
+        if tls_parts:  # Only add TLS info if we have any parts
+            parts.append(f"{Colors.GREEN}[{' | '.join(tls_parts)}]{Colors.RESET}")
+
+    return ' '.join(parts) 
+\ No newline at end of file
diff --git a/httpz_scanner/parsers.py b/httpz_scanner/parsers.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/parsers.py
+
+try:
+    import bs4
+except ImportError:
+    raise ImportError('missing bs4 module (pip install beautifulsoup4)')
+
+try:
+    from cryptography import x509
+    from cryptography.hazmat.primitives import hashes
+    from cryptography.x509.oid import NameOID
+except ImportError:
+    raise ImportError('missing cryptography module (pip install cryptography)')
+
+try:
+    import mmh3
+except ImportError:
+    raise ImportError('missing mmh3 module (pip install mmh3)')
+
+from .utils import debug, error
+
+
+def parse_domain_url(domain: str) -> tuple:
+    '''
+    Parse domain string into base domain, port, and protocol list
+    
+    :param domain: Raw domain string to parse
+    :return: Tuple of (base_domain, port, protocols)
+    '''
+    port = None
+    base_domain = domain.rstrip('/')
+    
+    if base_domain.startswith(('http://', 'https://')):
+        protocol = 'https://' if base_domain.startswith('https://') else 'http://'
+        base_domain = base_domain.split('://', 1)[1]
+        if ':' in base_domain.split('/')[0]:
+            base_domain, port_str = base_domain.split(':', 1)
+            try:
+                port = int(port_str.split('/')[0])
+            except ValueError:
+                port = 443 if protocol == 'https://' else 80
+        else:
+            port = 443 if protocol == 'https://' else 80
+        protocols = [f'{protocol}{base_domain}{":" + str(port) if port else ""}']
+    else:
+        if ':' in base_domain.split('/')[0]:
+            base_domain, port_str = base_domain.split(':', 1)
+            port = int(port_str.split('/')[0]) if port_str.split('/')[0].isdigit() else None
+        # No explicit port: leave port as None so each scheme is probed on its
+        # default (443 for https, 80 for http) instead of forcing http://host:443
+        protocols = [
+            f'https://{base_domain}{":" + str(port) if port else ""}',
+            f'http://{base_domain}{":" + str(port) if port else ""}'
+        ]
+    
+    return base_domain, port, protocols
+
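+# Example behaviour (illustrative): parse_domain_url('example.com') returns
+# ('example.com', None, ['https://example.com', 'http://example.com']), so both
+# protocols are probed with their scheme-default ports.
+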
+async def get_cert_info(ssl_object, url: str) -> dict:
+    '''
+    Get SSL certificate information for a domain
+    
+    :param ssl_object: SSL object to get certificate info from
+    :param url: URL to get certificate info from
+    '''
+    try:            
+        if not ssl_object or not (cert_der := ssl_object.getpeercert(binary_form=True)):
+            return None
+
+        cert = x509.load_der_x509_certificate(cert_der)
+
+        try:
+            san_extension = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
+            alt_names     = [name.value for name in san_extension.value] if san_extension else []
+        except x509.extensions.ExtensionNotFound:
+            alt_names = []
+
+        try:
+            common_name = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value
+        except IndexError:
+            common_name = None
+
+        try:
+            issuer = cert.issuer.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value
+        except IndexError:
+            issuer = None
+
+        return {
+            'fingerprint'   : cert.fingerprint(hashes.SHA256()).hex(),
+            'common_name'   : common_name,
+            'issuer'        : issuer,
+            'alt_names'     : alt_names,
+            'not_before'    : cert.not_valid_before_utc.isoformat(),
+            'not_after'     : cert.not_valid_after_utc.isoformat(),
+            'version'       : cert.version.value,
+            'serial_number' : format(cert.serial_number, 'x'),
+        }
+    except Exception as e:
+        error(f'Error getting cert info for {url}: {str(e)}')
+        return None
+
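+# Sketch of how the scanner obtains the ssl_object it passes in (see scanner.check_domain):
+#   ssl_object = response._protocol.transport.get_extra_info('ssl_object')
+#   cert_info  = await get_cert_info(ssl_object, str(response.url))
+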
+async def get_favicon_hash(session, base_url: str, html: str) -> str:
+    '''
+    Get favicon hash from a webpage
+    
+    :param session: aiohttp client session
+    :param base_url: base URL of the website
+    :param html: HTML content of the page
+    '''
+    try:
+        soup = bs4.BeautifulSoup(html, 'html.parser')
+        
+        favicon_url = None
+        for link in soup.find_all('link'):
+            if link.get('rel') and any(x.lower() == 'icon' for x in link.get('rel')):
+                favicon_url = link.get('href')
+                break
+        
+        if not favicon_url:
+            favicon_url = '/favicon.ico'
+        
+        if favicon_url.startswith('//'):
+            favicon_url = 'https:' + favicon_url
+        elif favicon_url.startswith('/'):
+            favicon_url = base_url + favicon_url
+        elif not favicon_url.startswith(('http://', 'https://')):
+            favicon_url = base_url + '/' + favicon_url
+
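+        # Fetch and hash at most the first 1 MB with 64-bit MurmurHash3; a hash
+        # value of 0 is treated as "no favicon"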
+        async with session.get(favicon_url, timeout=10) as response:
+            if response.status == 200:
+                content    = (await response.read())[:1024*1024]
+                hash_value = mmh3.hash64(content)[0]
+                if hash_value != 0:
+                    return str(hash_value)
+
+    except Exception as e:
+        debug(f'Error getting favicon for {base_url}: {str(e)}')
+    
+    return None 
+\ No newline at end of file
diff --git a/httpz_scanner/scanner.py b/httpz_scanner/scanner.py
@@ -0,0 +1,239 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/scanner.py
+
+import asyncio
+import json
+import random
+
+try:
+    import aiohttp
+except ImportError:
+    raise ImportError('missing aiohttp module (pip install aiohttp)')
+
+try:
+    import bs4
+except ImportError:
+    raise ImportError('missing bs4 module (pip install beautifulsoup4)')
+
+from .dns        import resolve_all_dns, load_resolvers
+from .formatters import format_console_output
+from .colors     import Colors
+from .parsers    import parse_domain_url, get_cert_info, get_favicon_hash
+from .utils      import debug, info, USER_AGENTS, input_generator
+
+
+class HTTPZScanner:
+    '''Core scanner class for HTTP domain checking'''
+    
+    def __init__(self, concurrent_limit = 100, timeout = 5, follow_redirects = False, check_axfr = False, resolver_file = None, output_file = None, show_progress = False, debug_mode = False, jsonl_output = False, show_fields = None, match_codes = None, exclude_codes = None):
+        '''
+        Initialize the HTTPZScanner class
+        
+        :param concurrent_limit: Maximum number of concurrent requests
+        :param timeout: Request timeout in seconds
+        :param follow_redirects: Follow redirects
+        :param check_axfr: Check for AXFR
+        :param resolver_file: Path to resolver file
+        :param output_file: Path to output file
+        :param show_progress: Show progress bar
+        :param debug_mode: Enable debug mode
+        :param jsonl_output: Output in JSONL format
+        :param show_fields: Fields to show
+        :param match_codes: Status codes to match
+        :param exclude_codes: Status codes to exclude
+        '''
+
+        self.concurrent_limit = concurrent_limit
+        self.timeout          = timeout
+        self.follow_redirects = follow_redirects
+        self.check_axfr       = check_axfr
+        self.resolver_file    = resolver_file
+        self.output_file      = output_file
+        self.show_progress    = show_progress
+        self.debug_mode       = debug_mode
+        self.jsonl_output     = jsonl_output
+
+        self.show_fields = show_fields or {
+            'status_code'      : True,
+            'content_type'     : True,
+            'content_length'   : True,
+            'title'            : True,
+            'body'             : True,
+            'ip'               : True,
+            'favicon'          : True,
+            'headers'          : True,
+            'follow_redirects' : True,
+            'cname'            : True,
+            'tls'              : True
+        }
+
+        self.match_codes       = match_codes
+        self.exclude_codes     = exclude_codes
+        self.resolvers         = None
+        self.processed_domains = 0
+
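+    # Minimal usage sketch ('domains.txt' is a hypothetical input file):
+    #   scanner = HTTPZScanner(concurrent_limit=50, timeout=10, show_progress=True)
+    #   asyncio.run(scanner.scan('domains.txt'))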
+
+    async def init(self):
+        '''Initialize resolvers - must be called before scanning'''
+        self.resolvers = await load_resolvers(self.resolver_file)
+
+
+    async def check_domain(self, session: aiohttp.ClientSession, domain: str):
+        '''Check a single domain and return results'''
+        nameserver = random.choice(self.resolvers) if self.resolvers else None
+        base_domain, port, protocols = parse_domain_url(domain)
+        
+        result = {
+            'domain'  : base_domain,
+            'status'  : 0,
+            'url'     : protocols[0],
+            'port'    : port,
+        }
+
+        # Try each protocol
+        for url in protocols:
+            try:
+                # Set random user agent for each request
+                headers = {'User-Agent': random.choice(USER_AGENTS)}
+                
+                async with session.get(url, timeout=self.timeout, 
+                                     allow_redirects=self.follow_redirects,
+                                     max_redirects=10 if self.follow_redirects else 0,
+                                     headers=headers) as response:
+                    
+                    result['status'] = response.status
+                    
+                    # Early exit if status code doesn't match criteria
+                    if self.match_codes and result['status'] not in self.match_codes:
+                        return result
+                    if self.exclude_codes and result['status'] in self.exclude_codes:
+                        return result
+
+                    # Continue with full processing only if status code matches criteria
+                    result['url'] = str(response.url)
+                    
+                    # Add headers if requested
+                    response_headers = dict(response.headers)
+                    if response_headers and (self.show_fields.get('headers') or self.show_fields.get('all_flags')):
+                        result['headers'] = response_headers
+                    else:
+                        # Only add content type/length if headers aren't included
+                        if content_type := response.headers.get('content-type', '').split(';')[0]:
+                            result['content_type'] = content_type
+                        if content_length := response.headers.get('content-length'):
+                            result['content_length'] = content_length
+                    
+                    # Only add redirect chain if it exists
+                    if self.follow_redirects and response.history:
+                        result['redirect_chain'] = [str(h.url) for h in response.history] + [str(response.url)]
+
+                    # Do DNS lookups only if we're going to use the result
+                    ips, cname, nameservers, _ = await resolve_all_dns(
+                        base_domain, self.timeout, nameserver, self.check_axfr
+                    )
+                    
+                    # Only add DNS fields if they have values
+                    if ips:
+                        result['ips'] = ips
+                    if cname:
+                        result['cname'] = cname
+                    if nameservers:
+                        result['nameservers'] = nameservers
+
+                    # Only add TLS info if available
+                    if response.url.scheme == 'https':
+                        try:
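+                            # response._protocol is private aiohttp API; it can change
+                            # between versions, hence the AttributeError guard below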
+                            if ssl_object := response._protocol.transport.get_extra_info('ssl_object'):
+                                if tls_info := await get_cert_info(ssl_object, str(response.url)):
+                                    # Only add TLS fields that have values
+                                    result['tls'] = {k: v for k, v in tls_info.items() if v}
+                        except AttributeError:
+                            debug(f'Failed to get SSL info for {url}')
+
+                    html = (await response.text())[:1024*1024]
+                    soup = bs4.BeautifulSoup(html, 'html.parser')
+                    
+                    # Only add title if it exists
+                    if soup.title and soup.title.string:
+                        result['title'] = ' '.join(soup.title.string.strip().split()).rstrip('.')[:300]
+                    
+                    # Only add body if it exists
+                    if body_text := soup.get_text():
+                        result['body'] = ' '.join(body_text.split()).rstrip('.')[:500]
+                    
+                    # Only add favicon hash if it exists
+                    if favicon_hash := await get_favicon_hash(session, url, html):
+                        result['favicon_hash'] = favicon_hash
+                    
+                    break
+            except Exception as e:
+                debug(f'Error checking {url}: {str(e)}')
+                result['status'] = -1
+                continue
+
+        return result
+
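+    # Shape of the returned result (illustrative; optional keys appear only when
+    # populated): {'domain': 'example.com', 'status': 200, 'url': 'https://example.com/',
+    #              'port': 443, 'title': '...', 'ips': [...], 'tls': {...}}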
+
+    async def process_result(self, result):
+        '''
+        Process and output a single result
+        
+        :param result: result to process
+        '''
+
+        formatted = format_console_output(result, self.debug_mode, self.show_fields, self.match_codes, self.exclude_codes)
+        
+        if formatted:
+            # Write to file if specified
+            if self.output_file:
+                if (not self.match_codes or result['status'] in self.match_codes) and \
+                   (not self.exclude_codes or result['status'] not in self.exclude_codes):
+                    with open(self.output_file, 'a') as f:
+                        json.dump(result, f, ensure_ascii=False)
+                        f.write('\n')
+            
+            # Console output
+            if self.jsonl_output:
+                print(json.dumps(result))
+            else:
+                self.processed_domains += 1
+                if self.show_progress:
+                    info(f"{Colors.GRAY}[{self.processed_domains:,}]{Colors.RESET} {formatted}")
+                else:
+                    info(formatted)
+
+
+    async def scan(self, input_source):
+        '''
+        Scan domains from a file or stdin
+        
+        :param input_source: Path to file or '-' for stdin
+        '''
+        if not self.resolvers:
+            await self.init()
+
+        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
+            tasks = set()
+            
+            # Process domains with concurrent limit
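+            # (sliding window: once the limit is reached, wait for at least one
+            # task to finish before scheduling the next domain)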
+            for domain in input_generator(input_source):
+                if len(tasks) >= self.concurrent_limit:
+                    done, tasks = await asyncio.wait(
+                        tasks, return_when=asyncio.FIRST_COMPLETED
+                    )
+                    for task in done:
+                        result = await task
+                        await self.process_result(result)
+
+                task = asyncio.create_task(self.check_domain(session, domain))
+                tasks.add(task)
+
+            # Process remaining tasks
+            if tasks:
+                done, _ = await asyncio.wait(tasks)
+                for task in done:
+                    result = await task
+                    await self.process_result(result) 
+\ No newline at end of file
diff --git a/httpz_scanner/utils.py b/httpz_scanner/utils.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python3
+# HTTPZ Web Scanner - Developed by acidvegas in Python (https://github.com/acidvegas/httpz)
+# httpz_scanner/utils.py
+
+import logging
+import sys
+
+
+# Global for silent mode
+SILENT_MODE = False
+
+# List of user agents to randomize requests
+USER_AGENTS = [
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36 Edg/132.0.0.0",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36 Edg/132.0.0.0",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0",
+    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.6.5 Chrome/124.0.6367.243 Electron/30.1.2 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:135.0) Gecko/20100101 Firefox/135.0",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 OPR/116.0.0.0",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:134.0) Gecko/20100101 Firefox/134.0",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.8.3 Chrome/130.0.6723.191 Electron/33.3.2 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.2 Safari/605.1.15",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.3 Safari/605.1.15",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.2 Safari/605.1.15",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.6613.137 Safari/537.36",
+    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.6 Safari/605.1.15",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1.1 Safari/605.1.15",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/135.0",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.5.12 Chrome/120.0.6099.283 Electron/28.2.3 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0",
+    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0",
+    "Mozilla/5.0 (X11; CrOS x86_64 14541.0.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 OPR/114.0.0.0",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1 Safari/605.1.15",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.5 Safari/605.1.15",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) obsidian/1.7.7 Chrome/128.0.6613.186 Electron/32.2.5 Safari/537.36"
+]
+
+
+def debug(msg: str): 
+    if not SILENT_MODE: logging.debug(msg)
+def error(msg: str):
+    if not SILENT_MODE: logging.error(msg)
+def info(msg: str):
+    if not SILENT_MODE: logging.info(msg)
+def warning(msg: str):
+    if not SILENT_MODE: logging.warning(msg)
+
+
+def human_size(size_bytes: int) -> str:
+    '''
+    Convert bytes to human readable string
+    
+    :param size_bytes: size in bytes
+    '''
+
+    if not size_bytes:
+        return '0B'
+    
+    units      = ('B', 'KB', 'MB', 'GB')
+    size       = float(size_bytes)
+    unit_index = 0
+    
+    while size >= 1024 and unit_index < len(units) - 1:
+        size /= 1024
+        unit_index += 1
+    
+    return f'{size:.1f}{units[unit_index]}'
+
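+# e.g. human_size(1536) -> '1.5KB', human_size(0) -> '0B'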
+
+def input_generator(input_source: str):
+    '''
+    Generator function to yield domains from file or stdin
+    
+    :param input_source: file or stdin
+    '''
+    
+    if input_source == '-' or input_source is None:
+        for line in sys.stdin:
+            if line.strip():
+                yield line.strip()
+    else:
+        with open(input_source, 'r') as f:
+            for line in f:
+                if line.strip():
+                    yield line.strip() 
+\ No newline at end of file
diff --git a/setup.py b/setup.py
@@ -8,8 +8,8 @@ with open('README.md', 'r', encoding='utf-8') as f:
     long_description = f.read()
 
 setup(
-    name='httpz-scanner',
-    version='1.0.6',
+    name='httpz_scanner',
+    version='1.0.7',
     author='acidvegas',
     author_email='acid.vegas@acid.vegas',
     description='Hyper-fast HTTP Scraping Tool',
@@ -41,7 +41,7 @@ setup(
     ],
     entry_points={
         'console_scripts': [
-            'httpz=httpz-scanner.__main__:cli',  # Updated import path
+            'httpz=httpz_scanner.__main__:cli',
         ],
     },
 ) 
 \ No newline at end of file