Diffstat (limited to 'Reaktor')
-rwxr-xr-x  Reaktor/IRC/asybot.py                    |  50
-rwxr-xr-x  Reaktor/IRC/index                        |   3
-rw-r--r--  Reaktor/Makefile                         |  23
l---------  Reaktor/commands/revip                   |   1
l---------  Reaktor/commands/subdomains              |   1
-rwxr-xr-x  Reaktor/commands/whatweb                 |   7
-rw-r--r--  Reaktor/repos/bxfr/bxfr.py               | 238
-rw-r--r--  Reaktor/repos/bxfr/bxfr_api.py           | 238
-rwxr-xr-x  Reaktor/repos/consolidate_dns/index      |  85
m---------  Reaktor/repos/dnsrecon                   |   0
m---------  Reaktor/repos/gxfr                       |   0
-rwxr-xr-x  Reaktor/repos/revip/revip                |  48
m---------  Reaktor/repos/whatweb                    |   0
-rw-r--r--  Reaktor/startup/conf.d/reaktor           |   2
-rwxr-xr-x  Reaktor/startup/init.d/reaktor-debian    | 102
-rw-r--r--  Reaktor/startup/supervisor/Reaktor.conf  |   6
16 files changed, 781 insertions(+), 23 deletions(-)
diff --git a/Reaktor/IRC/asybot.py b/Reaktor/IRC/asybot.py
index 657cee40..df758ed6 100755
--- a/Reaktor/IRC/asybot.py
+++ b/Reaktor/IRC/asybot.py
@@ -9,12 +9,14 @@ def is_executable(x):
from asynchat import async_chat as asychat
from asyncore import loop
-from socket import AF_INET, SOCK_STREAM
+from socket import AF_INET, SOCK_STREAM,gethostname
from signal import SIGALRM, signal, alarm
from datetime import datetime as date, timedelta
+import shlex
+from time import sleep
from sys import exit
from re import split, search
-
+from textwrap import TextWrapper
import logging,logging.handlers
log = logging.getLogger('asybot')
hdlr = logging.handlers.SysLogHandler(facility=logging.handlers.SysLogHandler.LOG_DAEMON)
@@ -37,6 +39,7 @@ class asybot(asychat):
self.set_terminator('\r\n')
self.create_socket(AF_INET, SOCK_STREAM)
self.connect((self.server, self.port))
+ self.wrapper = TextWrapper(subsequent_indent=" ",width=400)
# When we don't receive data for alarm_timeout seconds then issue a
# PING every hammer_interval seconds until kill_timeout seconds have
@@ -105,8 +108,10 @@ class asybot(asychat):
def on_privmsg(self, prefix, command, params, rest):
def PRIVMSG(text):
- msg = 'PRIVMSG %s :%s' % (','.join(params), text)
- self.push(msg)
+ for line in self.wrapper.wrap(text):
+ msg = 'PRIVMSG %s :%s' % (','.join(params), line)
+ self.push(msg)
+ sleep(1)
def ME(text):
      PRIVMSG('\001ACTION ' + text + '\001')
@@ -125,7 +130,7 @@ class asybot(asychat):
from os.path import realpath, dirname, join
from subprocess import Popen as popen, PIPE
-
+ from time import time
Reaktor_dir = dirname(realpath(dirname(__file__)))
public_commands = join(Reaktor_dir, 'public_commands')
command = join(public_commands, _command)
@@ -133,29 +138,28 @@ class asybot(asychat):
if is_executable(command):
env = {}
+ args = []
+ start = time()
if _argument != None:
env['argument'] = _argument
-
+ args = shlex.split(_argument)
try:
- p = popen([command], stdin=PIPE, stdout=PIPE, stderr=PIPE, env=env)
+ p = popen([command] + args,bufsize=1, stdout=PIPE, stderr=PIPE, env=env)
except OSError, error:
ME('brain damaged')
log.error('OSError@%s: %s' % (command, error))
return
-
- stdout, stderr = [ x[:len(x)-1] for x in
- [ x.split('\n') for x in p.communicate()]]
- code = p.returncode
pid = p.pid
+ for line in iter(p.stdout.readline,""):
+ PRIVMSG(line)
+ log.debug('%s stdout: %s' % (pid, line))
+ p.wait()
+ elapsed = time() - start
+ code = p.returncode
+ log.info('command: %s -> %s in %d seconds' % (command, code,elapsed))
+ [log.debug('%s stderr: %s' % (pid, x)) for x in p.stderr.readlines()]
- log.info('command: %s -> %s' % (command, code))
- [log.debug('%s stdout: %s' % (pid, x)) for x in stdout]
- [log.debug('%s stderr: %s' % (pid, x)) for x in stderr]
-
- if code == 0:
- [PRIVMSG(x) for x in stdout]
- [PRIVMSG(x) for x in stderr]
- else:
+ if code != 0:
ME('mimimi')
else:
@@ -181,8 +185,12 @@ if __name__ == "__main__":
lol = logging.DEBUG if env.get('debug',False) else logging.INFO
logging.basicConfig(level=lol)
- name = getconf1('Name', '/etc/tinc/retiolum/tinc.conf')
- hostname = '%s.retiolum' % name
+ try:
+ name = getconf1('Name', '/etc/tinc/retiolum/tinc.conf')
+ hostname = '%s.retiolum' % name
+ except:
+ name = gethostname()
+ hostname = name
nick = str(env.get('nick', name))
host = str(env.get('host', 'supernode'))
port = int(env.get('port', 6667))
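
For illustration, a minimal standalone sketch of the wrapped-output logic added above (push() here is a stand-in for asychat's send queue; the one-space hanging indent, 400-column width and 1-second pause mirror the patch):

    from textwrap import TextWrapper
    from time import sleep

    wrapper = TextWrapper(subsequent_indent=" ", width=400)

    def privmsg(push, params, text):
        # Long command output is split into protocol-sized lines so the
        # server does not cut them off; sleeping between lines is a crude
        # guard against flood kicks.
        for line in wrapper.wrap(text):
            push('PRIVMSG %s :%s' % (','.join(params), line))
            sleep(1)
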
diff --git a/Reaktor/IRC/index b/Reaktor/IRC/index
index 24982c76..50022ec9 100755
--- a/Reaktor/IRC/index
+++ b/Reaktor/IRC/index
@@ -3,5 +3,4 @@ set -xeuf
# cd //Reaktor
cd $(dirname $(readlink -f $0))/..
-
-exec python IRC/asybot.py "$@"
+host=irc.freenode.net target='#krebsco' python IRC/asybot.py "$@"
diff --git a/Reaktor/Makefile b/Reaktor/Makefile
new file mode 100644
index 00000000..2241dba6
--- /dev/null
+++ b/Reaktor/Makefile
@@ -0,0 +1,23 @@
+submodules = gxfr dnsrecon bxfr whatweb
+security_modules = subdomains revip whatweb
+
+all: init all-mods
+
+init: init-submodules $(submodules)
+init-submodules:
+ cd ..;git submodule init; git submodule update
+$(submodules):
+ cd repos/$@ ; git checkout master;git pull
+
+all-mods: $(addprefix public_commands/,$(security_modules))
+public_commands/%:commands/%
+ ln -s ../$< $@
+
+debian-autostart:
+ useradd reaktor ||:
+ cp startup/init.d/reaktor-debian /etc/init.d/reaktor
+ cp startup/conf.d/reaktor /etc/default/
+ update-rc.d reaktor defaults
+supervisor-autostart:
+ useradd reaktor ||:
+ cp startup/supervisor/Reaktor.conf /etc/supervisor/conf.d/
diff --git a/Reaktor/commands/revip b/Reaktor/commands/revip
new file mode 120000
index 00000000..e2c3b7ab
--- /dev/null
+++ b/Reaktor/commands/revip
@@ -0,0 +1 @@
+../repos/revip/revip \ No newline at end of file
diff --git a/Reaktor/commands/subdomains b/Reaktor/commands/subdomains
new file mode 120000
index 00000000..0489555f
--- /dev/null
+++ b/Reaktor/commands/subdomains
@@ -0,0 +1 @@
+../repos/consolidate_dns/index \ No newline at end of file
diff --git a/Reaktor/commands/whatweb b/Reaktor/commands/whatweb
new file mode 100755
index 00000000..84130d5c
--- /dev/null
+++ b/Reaktor/commands/whatweb
@@ -0,0 +1,7 @@
+#!/bin/sh
+#wrapper for WhatWeb
+here=$(dirname "$(readlink -f "$0")")
+whatweb_bin="$here/../repos/whatweb/whatweb"
+[ -e "$whatweb_bin" ] || { echo "!! Whatweb app does not exist"; exit 1; }
+[ -n "$1" ] || { echo "!! no host given"; exit 1; }
+exec "$whatweb_bin" -a 3 "$1"
diff --git a/Reaktor/repos/bxfr/bxfr.py b/Reaktor/repos/bxfr/bxfr.py
new file mode 100644
index 00000000..8e6bd101
--- /dev/null
+++ b/Reaktor/repos/bxfr/bxfr.py
@@ -0,0 +1,238 @@
+#!/usr/bin/python -tt
+
+# bxfr replicates dns zone transfers by enumerating subdomains using advanced search engine queries and conducting dns lookups.
+# Original code by Tim Tomes (LaNMaSteR53); rewrite for bing.com and csv output by makefu
+# Available for download at http://LaNMaSteR53.com or http://code.google.com/p/gxfr/
+
+import sys, os.path, urllib, urllib2, re, time, socket, random
+
+
+def help():
+ print """ Syntax: ./gxfr.py domain [options]
+
+ -h, --help this screen
+ -v enable verbose mode
+ -t [num of seconds] set number of seconds to wait between queries (default=15)
+ -q [max num of queries] restrict to maximum number of queries (default=10)
+ --dns-lookup enable dns lookups of all subdomains
+ --proxy [file|ip:port|-] use a proxy or list of open proxies to send queries (@random w/list)
+ - [file] must consist of 1 or more ip:port pairs
+ - replace filename with '-' (dash) to accept stdin
+ --user-agent ['string'] set custom user-agent string
+ --timeout [seconds] set socket timeout (default=system default)
+ --csv [file]
+
+ Examples:
+ $ ./bxfr.py foxnews.com --dns-lookup -v
+ $ ./bxfr.py foxnews.com --dns-lookup --proxy open_proxies.txt --timeout 10
+ $ ./bxfr.py foxnews.com --dns-lookup -t 5 -q 5 -v --proxy 127.0.0.1:8080
+ $ curl http://rmccurdy.com/scripts/proxy/good.txt | ./bxfr.py website.com -v -t 3 --proxy -
+ """
+ sys.exit(2)
+
+if len(sys.argv) < 2:
+ help()
+
+if '-h' in sys.argv or '--help' in sys.argv:
+ help()
+
+# declare vars and process arguments
+#http://www.bing.com/search?q=site%3agoogle.de&qs=n&filt=all&pq=site%3agoogle.d&sc=8-5&sp=-1&sk=&first=1&FORM=PORE
+query_cnt = 0
+csvname = False
+domain = sys.argv[1]
+sys.argv = sys.argv[2:]
+lookup = False
+encrypt = True
+base_url = 'http://www.bing.com'
+base_uri = '/search?qs=n&form=QBRE&sc=0-0&sp=-1&sk='
+base_query = 'site:' + domain
+pattern = '//([\.\w-]*)\.%s.+?' % (domain)
+proxy = False
+user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
+verbose = False
+secs = 15
+max_queries = 10 # default = 10 queries
+# process command line arguments
+if len(sys.argv) > 0:
+ if '--dns-lookup' in sys.argv:
+ lookup = True
+ if '--csv' in sys.argv:
+ csvname = sys.argv[sys.argv.index('--csv') + 1]
+ if '--proxy' in sys.argv:
+ proxy = True
+ filename = sys.argv[sys.argv.index('--proxy') + 1]
+ if filename == '-':
+ proxies = sys.stdin.read().split()
+ elif os.path.exists(filename):
+ content = open(filename).read()
+ proxies = re.findall('\d+\.\d+\.\d+\.\d+:\d+', content)
+ elif re.match(r'^\d+\.\d+\.\d+\.\d+:\d+$', filename):
+ proxies = [filename]
+ else:
+ help()
+ if '--timeout' in sys.argv:
+ timeout = int(sys.argv[sys.argv.index('--timeout') + 1])
+ socket.setdefaulttimeout(timeout)
+ if '--user-agent' in sys.argv:
+ user_agent = sys.argv[sys.argv.index('--user-agent') + 1]
+ if '-v' in sys.argv:
+ verbose = True
+ if '-t' in sys.argv:
+ secs = int(sys.argv[sys.argv.index('-t') + 1])
+ if '-q' in sys.argv:
+ max_queries = int(sys.argv[sys.argv.index('-q') + 1])
+subs = []
+new = True
+page = 0
+
+# --begin--
+print '[-] domain:', domain
+print '[-] user-agent:', user_agent
+# execute search engine queries and scrape results storing subdomains in a list
+print '[-] querying search engine, please wait...'
+# loop until no new subdomains are found
+while new == True:
+ try:
+ query = ''
+ # build query based on results of previous results
+ for sub in subs:
+ query += ' -site:%s.%s' % (sub, domain)
+ full_query = base_query + query
+ start_param = '&first=%s' % (str(page*10))
+ query_param = '&q=%s&pq=%s' % (urllib.quote_plus(full_query),urllib.quote_plus(full_query))
+ if len(base_uri) + len(query_param) + len(start_param) < 2048:
+ last_query_param = query_param
+ params = query_param + start_param
+ else:
+ params = last_query_param[:2047-len(start_param)-len(base_uri)] + start_param
+ full_url = base_url + base_uri + params
+ # note: query character limit is passive in mobile, but seems to be ~794
+ # note: query character limit seems to be 852 for desktop queries
+ # note: typical URI max length is 2048 (starts after top level domain)
+ if verbose: print '[+] using query: %s...' % (full_url)
+ # build web request and submit query
+ request = urllib2.Request(full_url)
+ # spoof user-agent string
+ request.add_header('User-Agent', user_agent)
+ # if proxy is enabled, use the correct handler
+ if proxy == True:
+ # validate proxies at runtime
+ while True:
+ try:
+ # select a proxy from list at random
+ num = random.randint(0,len(proxies)-1)
+ host = proxies[num]
+ opener = urllib2.build_opener(urllib2.ProxyHandler({'http': host}))
+ if verbose: print '[+] sending query to', host
+ # send query to proxy server
+ result = opener.open(request).read()
+ # exit while loop if successful
+ break
+ except Exception as inst:
+ print '[!] %s failed: %s' % (host, inst)
+ if len(proxies) == 1:
+ # exit if no proxy server from the list is valid
+ print '[-] valid proxy server not found'
+ sys.exit(2)
+ else:
+ # remove host from list of proxies and try again
+ del proxies[num]
+ else:
+ opener = urllib2.build_opener(urllib2.HTTPHandler(), urllib2.HTTPSHandler())
+ # send query to search engine
+ try:
+ result = opener.open(request).read()
+ except Exception as inst:
+ print '[!] {0}'.format(inst)
+ if str(inst).find('503') != -1: print '[!] possible shun: use --proxy or find something else to do for 24 hours :)'
+ sys.exit(2)
+ if not verbose: sys.stdout.write('.'); sys.stdout.flush()
+ #if not verbose: sys.stdout.write('\n'); sys.stdout.flush()
+ # iterate query count
+ query_cnt += 1
+ sites = re.findall(pattern, result)
+ # create a uniq list
+ sites = list(set(sites))
+ new = False
+ # add subdomain to list if not already exists
+ for site in sites:
+ if site not in subs:
+ if verbose: print '[!] subdomain found:', site
+ subs.append(site)
+ new = True
+ # exit if maximum number of queries has been made
+ if query_cnt == max_queries:
+ print '[-] maximum number of queries made...'
+ break
+ # start going through all pages if query size is maxed out
+ if new == False:
+ # exit if all subdomains have been found
+ if not 'Next page' in result:
+ #import pdb; pdb.set_trace() # curl to stdin breaks pdb
+ print '[-] all available subdomains found...'
+ break
+ else:
+ page += 1
+ new = True
+ if verbose: print '[+] no new subdomains found on page. jumping to result %d.' % (page*10)
+ # sleep script to avoid lock-out
+ if verbose: print '[+] sleeping to avoid lock-out...'
+ time.sleep(secs)
+ except KeyboardInterrupt:
+ # catch keyboard interrupt and gracefully complete the script
+ break
+
+# print list of subdomains
+print '[-] successful queries made:', str(query_cnt)
+if verbose:
+ # rebuild and display final query if in verbose mode
+ #final_query = ''
+ #for sub in subs:
+ # final_query += '+-site:%s.%s' % (sub, domain)
+ #print '[+] final query string: %sstart=%s&%s%s' % (base_url, str(page*10), base_query, query)
+ print '[+] final query string: %s' % (full_url)
+print ' '
+print '[subdomains] -', str(len(subs))
+csvwriter = False
+try:
+ if csvname:
+ import csv
+ csvwriter = csv.writer(open(csvname,'wb'))
+except:
+ print "[!] Cannot open CSV"
+for sub in subs:
+ dom = '%s.%s' % (sub, domain )
+
+ #host resolution makes this computer more visible
+ hostname,aliases,ips = socket.gethostbyname_ex(dom)
+ #print hostname,aliases,ip
+ print dom,",".join(ips)
+ try:
+ line = [dom] + ips
+ csvwriter.writerow([dom] + ips)
+ except: pass
+
+
+# conduct dns lookup if argument is present
+if lookup == True:
+ print ' '
+ print '[-] querying dns, please wait...'
+ dict = {}
+ # create a dictionary where the subdomain is the key and a list of all associated ips is the value
+ for sub in subs:
+ sub = '%s.%s' % (sub, domain)
+ if verbose: print '[+] querying dns for %s...' % (sub)
+ # dns query and dictionary assignment
+ try:
+ dict[sub] = list(set([item[4][0] for item in socket.getaddrinfo(sub, 80)]))
+ except socket.gaierror:
+ # dns lookup failure
+ dict[sub] = list(set(['no entry']))
+ # print table of subdomains and ips
+ print ' '
+ print '[ip]'.ljust(16, ' ') + '[subdomain]'
+ for key in dict.keys():
+ for ip in dict[key]:
+ print ip.ljust(16, ' ') + key
+# --end--
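
The enumeration loop above hinges on one trick: every subdomain already found is excluded from the next search with a -site: operator, so the engine keeps surfacing new results. A minimal sketch of just that step (Python 2, like the script; the fetch-and-parse plumbing is omitted):

    import re, urllib

    def next_query(domain, subs):
        # exclude every known subdomain so only new results come back
        q = 'site:' + domain
        for sub in subs:
            q += ' -site:%s.%s' % (sub, domain)
        return urllib.quote_plus(q)

    def extract_subs(domain, html):
        # same pattern bxfr.py scrapes result pages with
        return set(re.findall('//([\.\w-]*)\.%s.+?' % domain, html))
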
diff --git a/Reaktor/repos/bxfr/bxfr_api.py b/Reaktor/repos/bxfr/bxfr_api.py
new file mode 100644
index 00000000..245ea92a
--- /dev/null
+++ b/Reaktor/repos/bxfr/bxfr_api.py
@@ -0,0 +1,238 @@
+#!/usr/bin/python -tt
+
+# gxfr replicates dns zone transfers by enumerating subdomains using advanced search engine queries and conducting dns lookups.
+# Original code by Tim Tomes (LaNMaSteR53)
+# rewrite for bing.com, csv output by makefu
+# Available for download at http://LaNMaSteR53.com or
+# http://code.google.com/p/gxfr/ and https://github.com/krebscode/painload in Reaktor/repos
+
+import sys, os.path, urllib, urllib2, re, time, socket, random
+
+
+def help():
+ print """ Syntax: %s domain [options]
+
+ -h, --help this screen
+ -v enable verbose mode
+ -t [num of seconds] set number of seconds to wait between queries (default=10)
+ -q [max num of queries] restrict to maximum number of queries (default=10)
+ --dns-lookup enable dns lookups of all subdomains
+ --proxy [file|ip:port|-] use a proxy or list of open proxies to send queries (@random w/list)
+ - [file] must consist of 1 or more ip:port pairs
+ - replace filename with '-' (dash) to accept stdin
+ --user-agent ['string'] set custom user-agent string
+ --timeout [seconds] set socket timeout (default=system default)
+ --csv [file]
+
+ Examples:
+ $ ./bxfr_api.py foxnews.com --dns-lookup -v
+ $ ./bxfr_api.py foxnews.com --dns-lookup --proxy open_proxies.txt --timeout 10
+ $ ./bxfr_api.py foxnews.com --dns-lookup -t 5 -q 5 -v --proxy 127.0.0.1:8080
+ $ curl http://rmccurdy.com/scripts/proxy/good.txt | ./bxfr_api.py website.com -v -t 3 --proxy -
+ """ % sys.argv[0]
+ sys.exit(2)
+
+if len(sys.argv) < 2:
+ help()
+
+if '-h' in sys.argv or '--help' in sys.argv:
+ help()
+
+# declare vars and process arguments
+query_cnt = 0
+csvname = False
+domain = sys.argv[1]
+sys.argv = sys.argv[2:]
+lookup = False
+encrypt = True
+base_url = 'http://api.bing.net'
+bing_appid = "01CDBCA91C590493EE4E91FAF83E5239FEF6ADFD" #from darkb0t, thanks
+base_uri = '/xml.aspx?AppID=%s&Sources=Web&Version=2.0&Web.Count=50&Web.Options=DisableHostCollapsing+DisableQueryAlterations' %bing_appid
+base_query = 'site:' + domain
+pattern = '>([\.\w-]*)\.%s.+?<' % (domain)
+proxy = False
+user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
+verbose = False
+secs = 10
+max_queries = 10
+# process command line arguments
+if len(sys.argv) > 0:
+ if '--dns-lookup' in sys.argv:
+ lookup = True
+ if '--csv' in sys.argv:
+ csvname = sys.argv[sys.argv.index('--csv') + 1]
+ if '--proxy' in sys.argv:
+ proxy = True
+ filename = sys.argv[sys.argv.index('--proxy') + 1]
+ if filename == '-':
+ proxies = sys.stdin.read().split()
+ elif os.path.exists(filename):
+ content = open(filename).read()
+ proxies = re.findall('\d+\.\d+\.\d+\.\d+:\d+', content)
+ elif re.match(r'^\d+\.\d+\.\d+\.\d+:\d+$', filename):
+ proxies = [filename]
+ else:
+ help()
+ if '--timeout' in sys.argv:
+ timeout = int(sys.argv[sys.argv.index('--timeout') + 1])
+ socket.setdefaulttimeout(timeout)
+ if '--user-agent' in sys.argv:
+ user_agent = sys.argv[sys.argv.index('--user-agent') + 1]
+ if '-v' in sys.argv:
+ verbose = True
+ if '-t' in sys.argv:
+ secs = int(sys.argv[sys.argv.index('-t') + 1])
+ if '-q' in sys.argv:
+ max_queries = int(sys.argv[sys.argv.index('-q') + 1])
+subs = []
+new = True
+page = 0
+
+# --begin--
+print '[-] domain:', domain
+print '[-] user-agent:', user_agent
+# execute search engine queries and scrape results storing subdomains in a list
+print '[-] querying search engine, please wait...'
+# loop until no new subdomains are found
+while new == True:
+ try:
+ query = ''
+ # build query based on results of previous results
+ for sub in subs:
+ query += ' -site:%s.%s' % (sub, domain)
+ full_query = base_query + query
+ start_param = '&Web.Offset=%s' % (str(page*10))
+ query_param = '&Query=%s' % (urllib.quote_plus(full_query))
+ if len(base_uri) + len(query_param) + len(start_param) < 2048:
+ last_query_param = query_param
+ params = query_param + start_param
+ else:
+ params = last_query_param[:2047-len(start_param)-len(base_uri)] + start_param
+ full_url = base_url + base_uri + params
+ # note: query character limit is passive in mobile, but seems to be ~794
+ # note: query character limit seems to be 852 for desktop queries
+ # note: typical URI max length is 2048 (starts after top level domain)
+ if verbose: print '[+] using query: %s...' % (full_url)
+ # build web request and submit query
+ request = urllib2.Request(full_url)
+ # spoof user-agent string
+ request.add_header('User-Agent', user_agent)
+ # if proxy is enabled, use the correct handler
+ if proxy == True:
+ # validate proxies at runtime
+ while True:
+ try:
+ # select a proxy from list at random
+ num = random.randint(0,len(proxies)-1)
+ host = proxies[num]
+ opener = urllib2.build_opener(urllib2.ProxyHandler({'http': host}))
+ if verbose: print '[+] sending query to', host
+ # send query to proxy server
+ result = opener.open(request).read()
+ # exit while loop if successful
+ break
+ except Exception as inst:
+ print '[!] %s failed: %s' % (host, inst)
+ if len(proxies) == 1:
+ # exit if no proxy server from the list is valid
+ print '[-] valid proxy server not found'
+ sys.exit(2)
+ else:
+ # remove host from list of proxies and try again
+ del proxies[num]
+ else:
+ opener = urllib2.build_opener(urllib2.HTTPHandler(), urllib2.HTTPSHandler())
+ # send query to search engine
+ try:
+ result = opener.open(request).read()
+ except Exception as inst:
+ print '[!] {0}'.format(inst)
+ if str(inst).find('503') != -1: print '[!] possible shun: use --proxy or find something else to do for 24 hours :)'
+ sys.exit(2)
+ if not verbose: sys.stdout.write('.'); sys.stdout.flush()
+ #if not verbose: sys.stdout.write('\n'); sys.stdout.flush()
+ # iterate query count
+ query_cnt += 1
+ sites = re.findall(pattern, result)
+ # create a uniq list
+ sites = list(set(sites))
+ new = False
+ # add subdomain to list if not already exists
+ for site in sites:
+ if site not in subs:
+ if verbose: print '[!] subdomain found:', site
+ subs.append(site)
+ new = True
+ # exit if maximum number of queries has been made
+ if query_cnt == max_queries:
+ print '[-] maximum number of queries made...'
+ break
+ # start going through all pages if query size is maxed out
+ if new == False:
+ # exit if all subdomains have been found
+ if not 'Next page' in result:
+ #import pdb; pdb.set_trace() # curl to stdin breaks pdb
+ print '[-] all available subdomains found...'
+ break
+ else:
+ page += 1
+ new = True
+ if verbose: print '[+] no new subdomains found on page. jumping to result %d.' % (page*10)
+ # sleep script to avoid lock-out
+ if verbose: print '[+] sleeping to avoid lock-out...'
+ time.sleep(secs)
+ except KeyboardInterrupt:
+ # catch keyboard interrupt and gracefully complete the script
+ break
+
+# print list of subdomains
+print '[-] successful queries made:', str(query_cnt)
+if verbose:
+ # rebuild and display final query if in verbose mode
+ #final_query = ''
+ #for sub in subs:
+ # final_query += '+-site:%s.%s' % (sub, domain)
+ #print '[+] final query string: %sstart=%s&%s%s' % (base_url, str(page*10), base_query, query)
+ print '[+] final query string: %s' % (full_url)
+print ' '
+print '[subdomains] -', str(len(subs))
+csvwriter = False
+try:
+ if csvname:
+ import csv
+ csvwriter = csv.writer(open(csvname,'wb'))
+except:
+ print "[!] Cannot open CSV"
+for sub in subs:
+ dom = '%s.%s' % (sub, domain )
+ hostname,aliases,ips = socket.gethostbyname_ex(dom)
+ #print hostname,aliases,ip
+ print dom,",".join(ips)
+ try:
+ line = [dom] + ips
+ csvwriter.writerow([dom] + ips)
+ except: pass
+
+
+# conduct dns lookup if argument is present
+if lookup == True:
+ print ' '
+ print '[-] querying dns, please wait...'
+ dict = {}
+ # create a dictionary where the subdomain is the key and a list of all associated ips is the value
+ for sub in subs:
+ sub = '%s.%s' % (sub, domain)
+ if verbose: print '[+] querying dns for %s...' % (sub)
+ # dns query and dictionary assignment
+ try:
+ dict[sub] = list(set([item[4][0] for item in socket.getaddrinfo(sub, 80)]))
+ except socket.gaierror:
+ # dns lookup failure
+ dict[sub] = list(set(['no entry']))
+ # print table of subdomains and ips
+ print ' '
+ print '[ip]'.ljust(16, ' ') + '[subdomain]'
+ for key in dict.keys():
+ for ip in dict[key]:
+ print ip.ljust(16, ' ') + key
+# --end--
diff --git a/Reaktor/repos/consolidate_dns/index b/Reaktor/repos/consolidate_dns/index
new file mode 100755
index 00000000..1a0dd81e
--- /dev/null
+++ b/Reaktor/repos/consolidate_dns/index
@@ -0,0 +1,85 @@
+#!/usr/bin/python -u
+import os
+from subprocess import Popen
+import csv
+import sys
+import tempfile
+
+
+os.chdir (os.path.dirname (os.path.realpath (sys.argv[0])))
+dnsrecon_enabled = False
+DNSRECON = "../dnsrecon/dnsrecon.py"
+dnsrecon_wordlist="namelist.txt"
+silent=open("/dev/null","w")
+gxfr_enabled = False
+GXFR = "../gxfr/gxfr.py"
+bxfr_enabled = False
+BXFR = "../bxfr/bxfr.py"
+domains = {}
+try:
+ DOMAIN=sys.argv[1]
+except:
+ print ("Usage: %s [hostname]")
+ sys.exit(1)
+print("checking for back end")
+if os.path.isfile(DNSRECON) :
+ dnsrecon_enabled=True
+ print (" dnsrecon enabled")
+else:
+ print (" dnsrecon not available or not supported")
+if os.path.isfile(GXFR):
+ gxfr_enabled=True
+ print (" gxfr.py enabled")
+else:
+ print (" gxfr.py not available or not supported")
+if os.path.isfile(BXFR):
+ bxfr_enabled=True
+ print (" bxfr.py enabled")
+else:
+ print (" bxfr.py not available or not supported")
+
+
+if dnsrecon_enabled:
+ dnsrecon_tmp = tempfile.NamedTemporaryFile(delete=False).name
+ print ("Starting dnsrecon, this may take some time")
+ p = Popen([DNSRECON,"-d",DOMAIN,"--csv",dnsrecon_tmp,'-D',dnsrecon_wordlist,"-t","brt,srv,axfr","--skip"],stdout=silent,stderr=silent)
+ p.wait()
+ reader = csv.reader(open(dnsrecon_tmp))
+ for row in reader:
+ if not row[1] in domains:
+ domains[row[1]] = []
+ domains[row[1]] += row[2:]
+ print ("...finished with [%d] domains" %reader.line_num)
+ os.unlink(dnsrecon_tmp)
+
+if gxfr_enabled:
+ gxfr_tmp = tempfile.NamedTemporaryFile(delete=False).name
+ print ("Starting gxfr, this may take some time")
+ p = Popen(["/usr/bin/python",GXFR,DOMAIN,"-q","3","--csv",gxfr_tmp],stdout=silent,stderr=silent)
+ p.wait()
+ reader = csv.reader(open(gxfr_tmp))
+ for row in reader:
+ if not row[0] in domains:
+ domains[row[0]] = []
+ domains[row[0]] += row[1:]
+ print ("...finished with [%d] domains" %reader.line_num)
+ os.unlink(gxfr_tmp)
+if bxfr_enabled:
+ bxfr_tmp = tempfile.NamedTemporaryFile(delete=False).name
+ print ("Starting bxfr, this may take some time")
+ p = Popen(["/usr/bin/python",BXFR,DOMAIN,"-q","3","--csv",bxfr_tmp],stdout=silent,stderr=silent)
+ p.wait()
+ reader = csv.reader(open(bxfr_tmp))
+ for row in reader:
+ if not row[0] in domains:
+ domains[row[0]] = []
+ domains[row[0]] += row[1:]
+ print ("...finished with [%d] domains" %reader.line_num)
+ os.unlink(bxfr_tmp)
+
+print "found %d subdomain(s)" % len(domains)
+num = 1
+for dom in domains:
+ domains[dom] = set(domains[dom])
+ print "[%d/%d]" % (num,len(domains)),dom,":",", ".join(domains[dom])
+ num = num + 1
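
The merge step above is identical for every backend, differing only in which CSV column carries the host name (dnsrecon: column 1, gxfr/bxfr: column 0). A condensed sketch of that consolidation, using sets instead of lists but otherwise equivalent:

    import csv

    def merge_csv(path, domains, name_col):
        # fold one backend's CSV into the shared {domain: ips} mapping
        for row in csv.reader(open(path)):
            domains.setdefault(row[name_col], set()).update(row[name_col + 1:])
        return domains
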
diff --git a/Reaktor/repos/dnsrecon b/Reaktor/repos/dnsrecon
new file mode 160000
+Subproject commit 31de30e4f6674585676c841c5612a330c22de94
diff --git a/Reaktor/repos/gxfr b/Reaktor/repos/gxfr
new file mode 160000
+Subproject commit 4606858e7814189c527ba912e1d8575248f719d
diff --git a/Reaktor/repos/revip/revip b/Reaktor/repos/revip/revip
new file mode 100755
index 00000000..d6acd669
--- /dev/null
+++ b/Reaktor/repos/revip/revip
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+# fork from darkb0t v0.4
+# modularized and extended
+import sys
+import os
+import json
+import socket
+import httplib
+from urlparse import urlparse
+try:
+ target = sys.argv[1]
+except:
+ print "Usage: %s [target]" % sys.argv[0]
+ exit(0)
+
+print "Reverse IP Search"
+print "Target: ",target
+try:
+ hostname,aliases,ips = socket.gethostbyname_ex(target)
+ ip = socket.gethostbyname(target)
+except:
+ print "Cannot resolve `%s`!" % target
+ exit (1)
+print "IP: ",ip
+sites = {target : "", hostname : ""} # make entries unique
+for a in aliases:
+ sites[a] = ""
+offset = 0
+appid = os.environ.get("BING_APPID",'7A0B8DA3E913BE5ECB4AF11C7BC398B43000DC1C')
+while offset < 300:
+ url ="/json.aspx?AppId=%s&Query=ip:%s&Sources=Web+RelatedSearch+News+Image+Video&Version=2.2&Market=en-us&Web.Count=50&Web.Offset=%s&Web.Options=DisableQueryAlterations" % (appid, ip, offset)
+ conn = httplib.HTTPConnection("api.bing.net")
+ conn.request("GET", url)
+ res = conn.getresponse()
+ doc = json.load(res)
+ try:
+ results = doc["SearchResponse"]["Web"]["Results"]
+ conn.close()
+ for res in results:
+ sites[urlparse(res['Url'])[1]] = ""
+ offset += 50
+ except:
+ break
+print "Total: ", len(sites), " dns name(s)\n"
+num = 1
+for s in sites:
+ print "["+str(num)+"/"+str(len(sites))+"] : "+s
+ num += 1
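
revip's core is Bing's ip: operator, paged through in 50-result windows up to offset 300. A sketch of just that loop (query_bing is a hypothetical stand-in for the httplib/json plumbing above and is assumed to return the parsed response document):

    from urlparse import urlparse  # Python 2, matching the script

    def reverse_ip(ip, query_bing):
        sites, offset = set(), 0
        while offset < 300:
            doc = query_bing(ip, offset)
            try:
                results = doc["SearchResponse"]["Web"]["Results"]
            except KeyError:
                break  # no Web results key means no more pages
            for res in results:
                sites.add(urlparse(res['Url'])[1])  # keep the host part only
            offset += 50
        return sites
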
diff --git a/Reaktor/repos/whatweb b/Reaktor/repos/whatweb
new file mode 160000
+Subproject commit daab5f21f13024ee8ec47e88f668c5308d6b59d
diff --git a/Reaktor/startup/conf.d/reaktor b/Reaktor/startup/conf.d/reaktor
new file mode 100644
index 00000000..a4f3f8e1
--- /dev/null
+++ b/Reaktor/startup/conf.d/reaktor
@@ -0,0 +1,2 @@
+export target="#krebsco"
+export host="irc.freenode.com"
diff --git a/Reaktor/startup/init.d/reaktor-debian b/Reaktor/startup/init.d/reaktor-debian
new file mode 100755
index 00000000..a94384f4
--- /dev/null
+++ b/Reaktor/startup/init.d/reaktor-debian
@@ -0,0 +1,102 @@
+#!/bin/sh
+# uses template from /etc/init.d/skeleton
+### BEGIN INIT INFO
+# Provides: reaktor
+# Required-Start:
+# Required-Stop:
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# Short-Description: reaktor
+# Description: starts reaktor daemon
+#
+### END INIT INFO
+
+PATH=/sbin:/usr/sbin:/bin:/usr/bin
+NAME=reaktor
+USER=reaktor
+DESC="$NAME daemon"
+DAEMON=/usr/bin/python
+DAEMON_DIR="/krebs/Reaktor/IRC/"
+DAEMON_ARGS="${DAEMON_DIR}/asybot.py"
+PIDFILE=/var/run/$NAME.pid
+SCRIPTNAME=/etc/init.d/$NAME
+
+[ -x "$DAEMON" ] || exit 0
+[ -r /etc/default/$NAME ] && . /etc/default/$NAME
+. /lib/init/vars.sh
+. /lib/lsb/init-functions
+
+do_start()
+{
+ # 0 if daemon has been started
+ # 1 if daemon was already running
+ # 2 if daemon could not be started
+ start-stop-daemon -b -d $DAEMON_DIR/.. -c $USER --start --quiet --make-pidfile --pidfile $PIDFILE --exec $DAEMON --test > /dev/null \
+ || return 1
+ start-stop-daemon -b -d $DAEMON_DIR/.. -c $USER --start --quiet --make-pidfile --pidfile $PIDFILE --exec $DAEMON -- \
+ $DAEMON_ARGS \
+ || return 2
+}
+
+do_stop()
+{
+ # 0 if daemon has been stopped
+ # 1 if daemon was already stopped
+ # 2 if daemon could not be stopped
+ start-stop-daemon --stop --retry=TERM/30/KILL/5 --pidfile $PIDFILE
+ RETVAL="$?"
+ [ "$RETVAL" = 2 ] && return 2
+ rm -f $PIDFILE
+ return "$RETVAL"
+}
+
+do_reload() {
+ start-stop-daemon --stop --signal 1 --quiet --pidfile $PIDFILE
+ return 0
+}
+
+case "$1" in
+ start)
+ [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME"
+ do_start
+ case "$?" in
+ 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
+ 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
+ esac
+ ;;
+ stop)
+ [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME"
+ do_stop
+ case "$?" in
+ 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
+ 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
+ esac
+ ;;
+ status)
+ status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
+ ;;
+ restart|force-reload)
+ log_daemon_msg "Restarting $DESC" "$NAME"
+ do_stop
+ case "$?" in
+ 0|1)
+ do_start
+ case "$?" in
+ 0) log_end_msg 0 ;;
+ 1) log_end_msg 1 ;;
+ *) log_end_msg 1 ;;
+ esac
+ ;;
+ *)
+ # Failed to stop
+ log_end_msg 1
+ ;;
+ esac
+ ;;
+ *)
+ echo "Usage: $SCRIPTNAME {start|stop|status|restart|force-reload}" >&2
+ exit 3
+ ;;
+esac
+
+:
diff --git a/Reaktor/startup/supervisor/Reaktor.conf b/Reaktor/startup/supervisor/Reaktor.conf
new file mode 100644
index 00000000..497066e9
--- /dev/null
+++ b/Reaktor/startup/supervisor/Reaktor.conf
@@ -0,0 +1,6 @@
+[program:Reaktor]
+command=/usr/bin/python2.6 IRC/asybot.py
+environment=host='irc.freenode.net',target='#krebsco'
+redirect_stderr=true
+user=reaktor
+directory=/krebs/Reaktor