Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

update docker file #13

Open
wants to merge 16 commits into
base: master
Choose a base branch
from
9 changes: 5 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,15 +48,16 @@ Screenshot
Install
---------
```
$ git clone https://github.com/rhicks/bgp-dashboard.git
$ git clone https://github.com/justinthoms/bgp-dashboard.git
$ cd bgp-dashboard
$ # modify ./gobgp/gobgpd.conf to peer with your network
$ # modify ./flask/app/constants.py globals to use your ASN and BGP communities
$ docker-compose build
$ docker-compose up (watch the log to verify BGP peering is established)
$ docker compose build
$ docker compose up (watch the log to verify BGP peering is established)
```


Todo
---------
- ???
- Update gobgp
- Update Python Dependencies
1 change: 1 addition & 0 deletions bgp_attributes.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
MP_REACH_NLRI = 14
MP_UNREACH_NLRI = 15
EXTENDED_COMMUNITIES = 16
LARGE_COMMUNITIES = 32
#
WITHDRAWAL = 11
AGE = 12
Expand Down
1 change: 0 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
version: '2'
services:
gobgp:
build: ./gobgp
Expand Down
38 changes: 20 additions & 18 deletions flask/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,27 +1,29 @@
FROM ubuntu:17.10
FROM ubuntu:22.04

ENV DEBIAN_FRONTEND noninteractive

RUN apt-get update
RUN apt-get remove -y binutils
RUN apt-get install -y python3.6
RUN apt-get install -y python3-pip
RUN apt-get install -y python-dev
RUN apt-get install -y uwsgi-plugin-python
RUN apt-get install -y nginx
RUN apt-get install -y supervisor
RUN echo 'Etc/UTC' >/etc/timezone
RUN apt-get install -y --reinstall tzdata
RUN apt-get update && \
apt-get install -y \
python3 \
python3-pip \
python3-dev \
uwsgi-plugin-python3 \
nginx \
supervisor && \
echo 'Etc/UTC' >/etc/timezone && \
apt-get install -y --reinstall tzdata && \
rm -rf /var/lib/apt/lists/*

COPY nginx/flask.conf /etc/nginx/sites-available/
COPY supervisor/supervisord.conf /etc/supervisor/conf.d/supervisord.conf
COPY app/requirements.txt /tmp/requirements.txt

RUN mkdir -p /var/log/nginx/app /var/log/uwsgi/app /var/log/supervisor /var/www/app \
&& rm /etc/nginx/sites-enabled/default \
&& ln -s /etc/nginx/sites-available/flask.conf /etc/nginx/sites-enabled/flask.conf \
&& echo "daemon off;" >> /etc/nginx/nginx.conf \
&& pip3 install -r /tmp/requirements.txt \
&& chown -R www-data:www-data /var/www/app \
&& chown -R www-data:www-data /var/log
RUN mkdir -p /var/log/nginx/app /var/log/uwsgi/app /var/log/supervisor /var/www/app && \
rm /etc/nginx/sites-enabled/default && \
ln -s /etc/nginx/sites-available/flask.conf /etc/nginx/sites-enabled/flask.conf && \
echo "daemon off;" >> /etc/nginx/nginx.conf && \
pip3 install -r /tmp/requirements.txt && \
chown -R www-data:www-data /var/www/app && \
chown -R www-data:www-data /var/log

CMD ["/usr/bin/supervisord"]
66 changes: 36 additions & 30 deletions flask/app/Stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def __init__(self):

# @peer_counter.setter
# def peer_counter(self):
# self._peer_counter = len(self.db.bgp.distinct('nexthop_asn', {'active': True}))
# self._peer_counter = len(self.db['bgp'].distinct('nexthop_asn', {'active': True}))

def db_connect(self):
"""Return a connection to the Mongo Database."""
Expand All @@ -45,15 +45,15 @@ def take(self, n, iterable):

def peer_count(self):
"""Return the number of directly connected ASNs."""
return len(self.db.bgp.distinct('nexthop_asn', {'active': True}))
return len(self.db['bgp'].distinct('nexthop_asn', {'active': True}))

def prefix_count(self, version):
"""Given the IP version, return the number of prefixes in the database."""
return self.db.bgp.find({'ip_version': version, 'active': True}).count()
return self.db['bgp'].count_documents({'ip_version': version, 'active': True})

def nexthop_ip_count(self):
"""Return the number of unique next hop IPv4 and IPv6 addresses."""
return len(self.db.bgp.distinct('nexthop', {'active': True}))
return len(self.db['bgp'].distinct('nexthop', {'active': True}))

def epoch_to_date(self, epoch):
"""Given an *epoch* time stamp, return a human readable equivalent."""
Expand All @@ -63,60 +63,67 @@ def get_list_of(self, customers=False, peers=False, community=C.CUSTOMER_BGP_COM
"""Return a list of prefix dictionaries. Specify which type of prefix to
return by setting *customers* or *peers* to True."""
if peers:
query_results = {prefix['nexthop_asn'] for prefix in self.db.bgp.find({'active': True})}
if customers:
query_results = {prefix['nexthop_asn'] for prefix in self.db.bgp.find({'communities': community, 'active': True})}
query_results = {prefix['nexthop_asn'] for prefix in self.db['bgp'].find({'active': True})}
else: # customers
query_results = {prefix['nexthop_asn'] for prefix in
self.db['bgp'].find({'communities': community, 'active': True})}

return [{'asn': asn if asn is not None else C.DEFAULT_ASN, # Set "None" ASNs to default
'name': asn_name_query(asn),
'ipv4_origin_count': self.db.bgp.find({'origin_asn': asn, 'ip_version': 4, 'active': True}).count(),
'ipv6_origin_count': self.db.bgp.find({'origin_asn': asn, 'ip_version': 6, 'active': True}).count(),
'ipv4_nexthop_count': self.db.bgp.find({'nexthop_asn': asn, 'ip_version': 4, 'active': True}).count(),
'ipv6_nexthop_count': self.db.bgp.find({'nexthop_asn': asn, 'ip_version': 6, 'active': True}).count(),
'asn_count': len(self.db.bgp.distinct('as_path.1', {'nexthop_asn': asn, 'active': True}))}
'ipv4_origin_count': self.db['bgp'].count_documents(
{'origin_asn': asn, 'ip_version': 4, 'active': True}),
'ipv6_origin_count': self.db['bgp'].count_documents(
{'origin_asn': asn, 'ip_version': 6, 'active': True}),
'ipv4_nexthop_count': self.db['bgp'].count_documents(
{'nexthop_asn': asn, 'ip_version': 4, 'active': True}),
'ipv6_nexthop_count': self.db['bgp'].count_documents(
{'nexthop_asn': asn, 'ip_version': 6, 'active': True}),
'asn_count': len(self.db['bgp'].distinct('as_path.1', {'nexthop_asn': asn, 'active': True}))}
for asn in query_results]

def avg_as_path_len(self, decimal_point_accuracy=2):
"""Return the computed average *as_path* length of all prefixes in the
database. Using a python *set* to remove any AS prepending."""
as_path_counter = 0
all_prefixes = self.db.bgp.find({'active': True})
all_prefixes = list(self.db['bgp'].find({'active': True}))
for prefix in all_prefixes:
try:
as_path_counter += len(set(prefix['as_path'])) # sets remove duplicate ASN prepending
except Exception:
pass
return round(as_path_counter/(all_prefixes.count() * 1.0), decimal_point_accuracy)
return round(as_path_counter / (len(all_prefixes) * 1.0), decimal_point_accuracy)

def communities_count(self):
"""Return a list of BGP communities and their count"""
return [{'community': community,
'count': self.db.bgp.find({'communities': {'$regex': str(community)}, 'active': True}).count(),
# 'count': self.db['bgp'].count_documents({'communities': {'$regex': str(community)}, 'active': True}),
'count': self.db['bgp'].count_documents({'communities': str(community), 'active': True}),
'name': None if C.BGP_COMMUNITY_MAP.get(community) is None else C.BGP_COMMUNITY_MAP.get(community)}
for community in self.db.bgp.distinct('communities') if community is not None]
for community in self.db['bgp'].distinct('communities') if community is not None]

def cidrs(self):
""" Return a list of IPv4 and IPv6 network mask counters."""
ipv4_masks = [int(prefix['_id'].split('/', 1)[1])
for prefix in self.db.bgp.find({'ip_version': 4, 'active': True})]
for prefix in self.db['bgp'].find({'ip_version': 4, 'active': True})]
ipv6_masks = [int(prefix['_id'].split('/', 1)[1])
for prefix in self.db.bgp.find({'ip_version': 6, 'active': True})]
for prefix in self.db['bgp'].find({'ip_version': 6, 'active': True})]
# Use a *Counter* to count masks in the lists, then combine, sort on mask, and return results
return sorted(
[{'mask': mask,
'count': count,
'ip_version': 4}
for mask, count in list(Counter(ipv4_masks).items())]
+
[{'mask': mask,
'count': count,
'ip_version': 6}
for mask, count in list(Counter(ipv6_masks).items())], key=lambda x: x['mask'])
[{'mask': mask,
'count': count,
'ip_version': 4}
for mask, count in list(Counter(ipv4_masks).items())]
+
[{'mask': mask,
'count': count,
'ip_version': 6}
for mask, count in list(Counter(ipv6_masks).items())], key=lambda x: x['mask'])

def top_peers(self, count):
"""Return a sorted list of top peer dictionaries ordered by prefix count.
Limit to *count*."""
peers = {peer: self.db.bgp.find({'nexthop_asn': peer, 'active': True}).count()
for peer in self.db.bgp.distinct('nexthop_asn')}
peers = {peer: self.db['bgp'].count_documents({'nexthop_asn': peer, 'active': True})
for peer in self.db['bgp'].distinct('nexthop_asn')}
return [{'asn': asn[0],
'count': asn[1],
'name': asn_name_query(asn[0])}
Expand Down Expand Up @@ -150,7 +157,6 @@ def update_stats(self):
self.nexthop_ip_counter = self.nexthop_ip_count()
self.timestamp = self.epoch_to_date(time.time())


def update_advanced_stats(self):
self.avg_as_path_length = self.avg_as_path_len()
self.top_n_peers = self.top_peers(5)
Expand Down
26 changes: 13 additions & 13 deletions flask/app/bgp.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,9 @@ def get_asn_prefixes(asn):
prefixes = []

if asn == C.DEFAULT_ASN:
routes = db.bgp.find({'origin_asn': None, 'active': True})
routes = list(db['bgp'].find({'origin_asn': None, 'active': True}))
else:
routes = db.bgp.find({'origin_asn': asn, 'active': True})
routes = list(db['bgp'].find({'origin_asn': asn, 'active': True}))

for prefix in routes:
prefixes.append({'prefix': prefix['_id'],
Expand All @@ -77,7 +77,7 @@ def get_asn_prefixes(asn):

return jsonify({'asn': asn,
'name': asn_name_query(asn),
'origin_prefix_count': routes.count(),
'origin_prefix_count': len(routes),
'is_peer': is_peer(asn),
'origin_prefix_list': prefixes})

Expand All @@ -92,7 +92,7 @@ def get_downstream_asns(asn):
db = myStats.db
asn_list = []
large_query = 200
downstream_asns = db.bgp.distinct('as_path.1', {'nexthop_asn': asn, 'active': True})
downstream_asns = db['bgp'].distinct('as_path.1', {'nexthop_asn': asn, 'active': True})
for downstream in downstream_asns:
if len(downstream_asns) > large_query:
dns_name = "(LARGE QUERY - DNS LOOKUP DISABLED)"
Expand All @@ -112,7 +112,7 @@ def get_downstream_asns(asn):
def get_originated_prefixes(asn):
db = myStats.db
originated = []
prefixes = db.bgp.find({'origin_asn': asn, 'active': True})
prefixes = db['bgp'].find({'origin_asn': asn, 'active': True})
for prefix in prefixes:
originated.append(prefix['_id'])

Expand All @@ -129,7 +129,7 @@ def get_originated_prefixes_version(asn, version):
v = 4
if version.lower() == 'ipv6':
v = 6
prefixes = db.bgp.find({'origin_asn': asn, 'ip_version': v, 'active': True})
prefixes = db['bgp'].find({'origin_asn': asn, 'ip_version': v, 'active': True})
for prefix in prefixes:
originated.append(prefix['_id'])

Expand All @@ -143,7 +143,7 @@ def get_originated_prefixes_version(asn, version):
def get_nexthop_prefixes(asn):
db = myStats.db
nexthop = []
prefixes = db.bgp.find({'nexthop_asn': asn, 'active': True})
prefixes = db['bgp'].find({'nexthop_asn': asn, 'active': True})
for prefix in prefixes:
nexthop.append(prefix['_id'])

Expand All @@ -160,7 +160,7 @@ def get_nexthop_prefixes_version(asn, version):
v = 4
if version.lower() == 'ipv6':
v = 6
prefixes = db.bgp.find({'nexthop_asn': asn, 'ip_version': v, 'active': True})
prefixes = db['bgp'].find({'nexthop_asn': asn, 'ip_version': v, 'active': True})
for prefix in prefixes:
nexthop.append(prefix['_id'])

Expand All @@ -173,7 +173,7 @@ def get_nexthop_prefixes_version(asn, version):
@app.route('/bgp/api/v1.0/asn/<int:asn>/transit', methods=['GET'])
def get_transit_prefixes(asn):
db = myStats.db
all_asns = db.bgp.find({'active': True})
all_asns = db['bgp'].find({'active': True})
prefixes = []

for prefix in all_asns:
Expand Down Expand Up @@ -203,15 +203,15 @@ def get_domain(domain):
for ns in name_servers:
if org in ns.lower():
local_ns = ns.lower()
if local_ns is '':
if local_ns == '':
return jsonify({})
else:
domain_ip = str(dns_query(local_ns))
ip_data = get_ip_json(domain_ip)
asn = ip_data.get('origin_asn')
db = myStats.db
originated = []
prefixes = db.bgp.find({'origin_asn': asn, 'active': True})
prefixes = db['bgp'].find({'origin_asn': asn, 'active': True})
for prefix in prefixes:
originated.append(prefix['_id'])

Expand All @@ -229,8 +229,8 @@ def get_domain(domain):
myStats = Stats()
threading.Thread(target=myStats.update_stats).start()
threading.Thread(target=myStats.update_advanced_stats).start()
sched.add_job(myStats.update_stats, 'interval', seconds=5)
sched.add_job(myStats.update_advanced_stats, 'interval', seconds=60)
sched.add_job(myStats.update_stats, 'interval', seconds=60)
sched.add_job(myStats.update_advanced_stats, 'interval', seconds=300)
sched.start()

if __name__ == '__main__':
Expand Down
Loading