Commit 8a51a891 authored by Fabian Raab's avatar Fabian Raab 🇪🇺
Browse files

update +++ add inserting framework

parent 81a064a4
......@@ -23,6 +23,10 @@
{
"follow_symlinks": true,
"path": "."
},
{
"follow_symlinks": true,
"path": "/media/osshare-crypt/git/bgp-comm-db"
}
]
}
......@@ -13,11 +13,30 @@ import logging
import configparser
import os
# from . import csv_insert
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.orm import scoped_session, sessionmaker, Session
import sqlalchemy.engine.url
from .db.std_comm import StdCommRangeLocalPref
from .db import *
from .bgpdump import *
from .listinserts import *
class SessionManager(object):
    """Context manager owning a SQLAlchemy engine and session factory.

    ``dbconfig`` holds the keyword arguments accepted by
    :class:`sqlalchemy.engine.url.URL` (drivername, username, host, ...).
    Entering the manager yields a ``sessionmaker`` factory bound to this
    instance's engine; exiting closes all sessions that factory produced.
    """

    def __init__(self, **dbconfig):
        # was ``super(SessionManager).__init__()`` — an unbound super(),
        # which silently does nothing.
        super(SessionManager, self).__init__()
        self.engine = create_engine(sqlalchemy.engine.url.URL(**dbconfig))

    def __enter__(self):
        # Bind to this instance's engine; the original referenced the
        # module-level ``engine`` global and broke without it.
        self.Session = sessionmaker(bind=self.engine)
        return self.Session

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close sessions made by *this* factory, not via the imported
        # ``Session`` class (the original closed every session globally).
        self.Session.close_all()
if __name__ == '__main__':
......@@ -38,19 +57,14 @@ if __name__ == '__main__':
level=logging.DEBUG)
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
# CSV
# csv_insert.insert_csv_as(
# os.path.realpath(proj_dir +
# "/../bgp-comm-db/data-ases/autnums-cut-new-4.txt"),
# mysql_db_configs)
# DB TEST
dbconfig = dict(config.items('client'))
engine = create_engine(sqlalchemy.engine.url.URL(**dbconfig))
Session = sessionmaker(bind=engine)
session = Session()
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
# session = Session()
# as1 = Ases(asn=56, name="Sensei")
......@@ -86,9 +100,32 @@ if __name__ == '__main__':
# print("Anno:" + str(anno1))
## Geographic
# Geographic
# geo1 = Geographic(continent=0b1,country='DE')
# session.add(geo1)
# print(geo1)
geo1 = Geographic(continent=0b1,country='DE')
# parser TEST
# parse_csv_open("/media/osshare-crypt/Datenbanken/bgpdump-test/rib.20150101.0200-mini.txt")
# CSV
session.add(geo1)
print(geo1)
inserts = {'ases':
dict(csv_file=os.path.realpath(
proj_dir + "/../bgp-comm-db/data/ases/autnums-cut-new-4.txt"),
**CsvInsert.config_as),
'continents':
dict(csv_file=os.path.realpath(
proj_dir + "/../bgp-comm-db/data/continents.csv"),
**CsvInsert.config_continets),
'countries':
dict(csv_file=os.path.realpath(
proj_dir + "/../bgp-comm-db/data/countries.csv"),
**CsvInsert.config_countries)
}
with CsvInsert(**inserts['continents']) as csvinsert:
csvinsert.insert()
from sqlalchemy.ext.declarative import declarative_base

# Single shared declarative base; the ORM table modules import this so
# that all mapped classes register on one common metadata object.
Base = declarative_base()
\ No newline at end of file
"""
"""
from .parser import *
\ No newline at end of file
"""
"""
# 0: TABLE_DUMP or TABLE_DUMP2
# -H mode=0 multi-line, human-readable (the default)\n\
# -m mode=1 one-line per entry with unix timestamps\n\
# -M mode=2 one-line per entry with human readable timestamps\n\
# TABLE_DUMP:
# 1: time
# 2: 'A':Human Readable 'B': Unix timestamps
# 3: Peer IP
# 4: Peer AS (Without 'AS' prefix)
# 5: [Prefix]/[Prefix-Length]
# 6: AS Path (Space separated List (Without 'AS' prefix))
# 7: Origin: 'IGP', 'EGP', 'INCOMPLETE'
# 8: nexthop
# 9: LocalPref (npref)
# 10: MED (nmed)
# 11: Communities (space separated AS list (Without 'AS' prefix))
# 12: (Atomic) aggregate - 'AG' or 'NAG'
# 13: Aggregator AS
# 14: Aggregator Address (IP)
# 15: Empty
# TABLE_DUMP2:
# 1: time
# 2: 'A':Human Readable 'B': Unix timestamps
# 3: Peer IP
# 4: Peer AS
# 5: [Prefix]/[Prefix-Length]
# 6: AS Path (Space separated List (Without 'AS' prefix))
# 7: Origin: 'IGP', 'EGP', 'INCOMPLETE'
# 8: nexthop
# 9: LocalPref (npref)
# 10: MED (nmed)
# 11: Communities (space separated AS list (Without 'AS' prefix))
# 12: (Atomic) aggregate - 'AG' or 'NAG'
# 13: Aggregator AS
# 14: Aggregator Address (IP)
import csv
from ..annotation.typed import (typechecked)
def parse_csv_open(filepath):
    """Open the dump file at *filepath* in text mode and feed it to
    :func:`parse_csv`; the file is closed when parsing finishes or fails."""
    with open(filepath, mode='r') as dump_file:
        parse_csv(dump_file)
def parse_csv(file):
    """Parse a pipe-separated bgpdump one-line table dump from an open
    *file*.

    Each row becomes a dict keyed by the bgpdump columns; the
    space-separated communities column is replaced by a list and printed.
    """
    columns = ('version', 'time', 'timeformat', 'peer_ip', 'peer_as',
               'prefix', 'aspath', 'origin', 'nexthop', 'localpref',
               'med', 'communities', 'aggregate', 'aggregator_as',
               'aggregator_ip')
    reader = csv.DictReader(file, fieldnames=columns, delimiter='|',
                            skipinitialspace=True)
    for row in reader:
        community_list = row['communities'].split()
        row['communities'] = community_list
        print(community_list)
......@@ -19,9 +19,24 @@ from sqlalchemy.dialects.mysql import \
NUMERIC, NVARCHAR, REAL, SET, SMALLINT, TEXT, TIME, TIMESTAMP, \
TINYBLOB, TINYINT, TINYTEXT, VARBINARY, VARCHAR, YEAR
from ..base import Base
# import os, configparser
# from sqlalchemy import create_engine
# from sqlalchemy.orm import sessionmaker, Session
# import sqlalchemy.engine.url
Base = declarative_base()
# proj_dir = os.path.abspath(
# os.path.dirname(os.path.realpath(__file__)) + "/../..")
# print(proj_dir)
# config = configparser.ConfigParser()
# config.read(proj_dir + '/config/config.ini')
# config.set('PATHS', 'proj_dir', proj_dir)
# config.read(config.get('PATHS', 'mysql_db_config'))
# dbconfig = dict(config.items('client'))
# engine = create_engine(sqlalchemy.engine.url.URL(**dbconfig))
# Session = sessionmaker(bind=engine)
class Scope(Base):
......@@ -117,6 +132,7 @@ class Geographic(Scope):
return self._continent
@continent.setter
@typechecked
def continent(self, value: union(int, str)) -> void:
rfc4384 = [(0b00001, 'AF'),
......
......@@ -8,7 +8,8 @@ __all__ = ['StdComm', 'StdCommRange', 'StdCommField']
from ..annotation.typed import (typechecked, void, optional)
from .scope import Base, Scope, Ases
from ..base import Base
from .scope import Scope, Ases
from abc import ABCMeta
from sqlalchemy.ext.declarative import AbstractConcreteBase
from sqlalchemy.ext.associationproxy import association_proxy
......@@ -57,8 +58,6 @@ class StdCommRange(StdComm):
asn = Column(INTEGER, ForeignKey('as.asn'))
from_val = Column('from', SMALLINT, nullable=False)
to_val = Column('to', SMALLINT)
_discriminator = Column('discriminator', ENUM('std_comm', 'localpref'),
nullable=False)
reference = Column(TEXT)
date = Column(DATE)
observed_only = Column(BOOLEAN, nullable=False, server_default='1')
......@@ -106,8 +105,6 @@ class StdCommField(StdComm):
'std_id', BIGINT, ForeignKey('std_comm.id'), nullable=False)
offset = Column(SMALLINT, nullable=False)
position = Column(TINYINT, nullable=False)
_discriminator = Column('discriminator', ENUM('std_field', 'localpref'),
nullable=False)
std_comm = relationship(StdCommRange,
backref=backref('fields', lazy='subquery'))
......
"""
"""
from ._csv import *
\ No newline at end of file
"""
Inserts into the db from various csv files.
"""
import csv
from ..annotation.typed import (typechecked, union)
from ..db import Ases, Geographic
from ..progressbar import (ProgressBar, Bar, FormatLabel, Percentage)
from sqlalchemy.orm import *
# from sqlalchemy.orm import Session
# from ..__main__ import SSession
class CsvInsert(object):
    """Insert the rows of a csv file into the database as ORM objects.

    Used as a context manager: entering opens a session, leaving commits
    (or rolls back on error) and closes it.  The ``config_*`` class
    attributes are ready-made keyword sets for the known input files.

    :param csv_file: path of the csv file to read.
    :param classname: ORM class instantiated once per row.
    :param comment_field: row index whose value is shown in the progress
        bar (the configs pass an int index; the original annotated ``str``).
    :param maxval: expected number of rows (progress bar maximum).
    :param fields: one callable per csv column; each maps the raw cell
        string to a dict of constructor keyword arguments.
    :param reader_args: extra keyword arguments for :func:`csv.reader`.
    """

    def __init__(self, csv_file: str, classname: type, comment_field: int,
                 maxval: int, fields: tuple, reader_args: dict):
        super(CsvInsert, self).__init__()
        self.csv_file = csv_file
        self.classname = classname
        self.comment_field = comment_field
        self.maxval = maxval
        self.fields = fields
        self.reader_args = reader_args
        # (dropped the stray ``self.map = map`` — it stored the builtin
        # ``map`` and was never read)
        # Session is created in __enter__ and torn down in __exit__.
        self.session = None

    def __enter__(self):
        self.session = Session()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Commit on success, roll back on error, and always release the
        # session (the original never closed it).  Returning None lets a
        # pending exception propagate.
        try:
            if exc_type:
                self.session.rollback()
            else:
                self.session.commit()
        finally:
            self.session.close()
        return

    config_as = {
        'classname': Ases,
        'fields': (lambda s: {'asn': s[2:].strip()},  # `2:` remove "AS" prefix
                   lambda s: {'name': s},
                   lambda s: {'nicename': s}),
        'reader_args': {'delimiter': ';', 'skipinitialspace': True},
        'comment_field': 0,
        'maxval': 72324,
    }

    config_countries = {
        'classname': Geographic,
        'fields': (lambda s: {'country': s},
                   lambda s: {'continent': s}),
        'reader_args': {'delimiter': ',', 'skipinitialspace': True},
        'comment_field': 0,
        'maxval': 250,
    }

    # NOTE(review): keeps the historical "continets" spelling because
    # callers reference ``CsvInsert.config_continets``.
    config_continets = {
        'classname': Geographic,
        'fields': (lambda s: {'continent': s},),
        'reader_args': {'delimiter': ';', 'skipinitialspace': True},
        'comment_field': 0,
        'maxval': 7,
    }

    @typechecked
    def insert(self) -> type(None):
        """
        Insert one ``classname`` object per csv row into the session.
        """
        widgets = [FormatLabel('Inserting %(comment)s: '), Percentage(), Bar()]
        pbar = ProgressBar(widgets=widgets, maxval=self.maxval).start()
        count = 0  # stays 0 when the file is empty
        with open(self.csv_file, mode='r', closefd=True) as file:
            reader = csv.reader(file, **self.reader_args)
            for count, row in enumerate(reader, start=1):
                pbar.update(count, comment=row[self.comment_field])
                # Merge the per-column keyword dicts into one constructor
                # call.
                args = dict()
                for col, func in enumerate(self.fields):
                    args.update(func(row[col]))
                self.session.add(self.classname(**args))
        # The original printed the last enumerate index (rows - 1); count
        # is now the true number of inserted objects.
        pbar.clear(str(count) + " Objects inserted")
"""
Inserts into the db from various csv files.
"""
import csv
from .annotation.typed import (typechecked)
from .db.tables import Scope, Ases
from .progressbar import (ProgressBar, Bar, FormatLabel, Percentage)
COUNTRY_CONTINENT = {
'A1': '--', 'A2': '--', 'AD': 'EU', 'AE': 'AS', 'AF': 'AS', 'AG': 'NA',
'AI': 'NA', 'AL': 'EU', 'AM': 'AS', 'AN': 'NA', 'AO': 'AF', 'AP': 'AS', 'AQ': 'AN',
......@@ -44,33 +35,4 @@ COUNTRY_CONTINENT = {
'TW': 'AS', 'TZ': 'AF', 'UA': 'EU', 'UG': 'AF', 'UM': 'OC', 'US': 'NA', 'UY': 'SA',
'UZ': 'AS', 'VA': 'EU', 'VC': 'NA', 'VE': 'SA', 'VG': 'NA', 'VI': 'NA', 'VN': 'AS',
'VU': 'OC', 'WF': 'OC', 'WS': 'OC', 'YE': 'AS', 'YT': 'AF', 'ZA': 'AF', 'ZM': 'AF',
'ZW': 'AF', }
@typechecked
def insert_csv_as(csv_file: str, mysql_db_configs: list) -> type(None):
    """
    Insert AS from a csv list.

    :param csv_file: path of a semicolon-separated csv file with columns
        asn;name;nicename, where the asn column carries an "AS" prefix.
    :param mysql_db_configs: db connection configs.
        NOTE(review): unused in this body — confirm whether a session or
        engine should be built from it; as written nothing is persisted.
    """
    widgets = [FormatLabel('Inserting %(comment)s: '), Percentage(), Bar()]
    # maxval is the hard-coded expected number of AS rows in the input.
    pbar = ProgressBar(widgets=widgets, maxval=72324).start()
    with open(csv_file, mode='r', buffering=-1, encoding=None, errors=None,
              newline=None, closefd=True) as file:
        reader = csv.DictReader(file, fieldnames=('asn', 'name', 'nicename'),
                                delimiter=';', skipinitialspace=True)
        i = 0  # keeps a value for the summary when the file is empty
        for i, row in enumerate(reader):
            ases = Ases(None,
                        # `2:` remove "AS" prefix
                        asn=int(row['asn'][2:].strip()),
                        name=row['name'],
                        nicename=row['nicename'])
            # The object is discarded immediately — presumably a parsing
            # dry run; no session.add() happens here. TODO confirm.
            del ases
            pbar.update(i + 1, comment=row['asn'])
    # NOTE(review): reports the last enumerate index, i.e. rows - 1.
    pbar.clear(str(i)+" ASes inserted")
'ZW': 'AF', }
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment