Compare commits
No commits in common. 'feature/dualtest' and 'master' have entirely different histories.
feature/du
...
master
@ -1,65 +0,0 @@
|
|||||||
import core # Nexus core
|
|
||||||
import argparse, sys
|
|
||||||
import logging
|
|
||||||
|
|
||||||
def run(args, name="nexus"):
    """Start the Nexus daemon.

    Parses command-line arguments, configures logging, reads the
    configuration file, connects to the node database, and merges any
    bootstrap/override node definitions from the configuration into it.

    args -- argument list to parse (typically sys.argv[1:])
    name -- application name, included in every log record
    """
    parser = argparse.ArgumentParser(description="Nexus daemon")
    parser.add_argument("-c", "--config", dest="config_file", metavar="PATH", help="specifies the configuration file to use", default="config.yaml")
    parser.add_argument("-d", "--debug", dest="debug_mode", action="store_true", help="enables debugging mode", default=False)
    arguments = parser.parse_args(args)

    # Truthiness check instead of the un-idiomatic '== True'.
    if arguments.debug_mode:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    logformat = "[%(appname)s] %(asctime)s - %(levelname)s - %(message)s"
    logging.basicConfig(level=loglevel, format=logformat)

    logging.info("Application started", extra={"appname": name})

    try:
        config = core.config.ConfigurationReader(arguments.config_file)
    except IOError as e:  # 'except X as e' works on Python 2.6+ and 3.x
        sys.stderr.write("Failed to read configuration file (%s).\nCreate a configuration file that Nexus can access, or specify a different location using the -c switch.\n" % e.strerror)
        exit(1)

    logging.info("Read configuration file at %s" % arguments.config_file, extra={"appname": name})

    # Connect to node database
    database = core.db.Database(config.database)
    database.setup()
    node_table = database.get_memory_table("nodes")

    logging.info("Connected to database at %s" % config.database, extra={"appname": name})

    # Read bootstrap/override node data
    for uuid, node in config.nodes.items():
        existing_rows = [dbnode for rowid, dbnode in node_table.data.items() if dbnode['uuid'] == uuid]

        if existing_rows:
            if node['override']:
                # Node already known: overwrite its stored data in place.
                row = existing_rows[0]
                row['uuid'] = uuid
                row['host'] = node['host']
                row['port'] = node['port']
                row['pubkey'] = node['pubkey']
                row['presupplied'] = 1
                row['attributes'] = 0
                row.commit()
                logging.info("Updated data in database for node using configuration file (%s:%s, %s)" % (node['host'], node['port'], uuid), extra={"appname": name})
            else:
                pass # Already exists and no override flag set, ignore
        else:
            # Brand-new node: build a fresh row and insert it.
            row = core.db.Row()
            row['uuid'] = uuid
            row['host'] = node['host']
            row['port'] = node['port']
            row['pubkey'] = node['pubkey']
            row['presupplied'] = 1
            row['attributes'] = 0
            # Insert through the memory table we already hold so its
            # in-memory copy stays in sync with the database.
            # (database['nodes'] resolved to this same cached object
            # anyway; this just makes the intent explicit.)
            node_table.append(row)
            logging.info("Learned about new node from configuration file, inserted into database (%s:%s, %s)" % (node['host'], node['port'], uuid), extra={"appname": name})
|
|
||||||
|
|
||||||
# Allow running this module directly as the daemon entry point.
if __name__ == "__main__":
    run(sys.argv[1:])
|
|
@ -1 +0,0 @@
|
|||||||
import config, db, util
|
|
@ -1,117 +0,0 @@
|
|||||||
import yaml, glob, os, logging
|
|
||||||
|
|
||||||
from util import dict_combine_recursive
|
|
||||||
|
|
||||||
class ConfigurationError(Exception):
    """Raised when the configuration is missing a required setting or
    contains an incomplete node definition."""
    pass
|
|
||||||
|
|
||||||
class ConfigurationReader(object):
    """Reads the YAML configuration for a node, resolves 'include'
    directives recursively, validates the result and exposes it through
    attributes (uuid, pubkey, privkey, database, port, bound_ip, nodes,
    package_settings).

    Raises ConfigurationError when a required setting is missing or a
    node definition is incomplete.
    """

    def __init__(self, file_):
        # Every file that contributed configuration data; ":local:"
        # marks a file-like object rather than a filesystem path.
        self.sources = []
        self.configdata = self.read_config(file_)
        # Merge included files first, then validate the combined data.
        self.config_includes(self.configdata)

        self.process_config(self.configdata)

    def read_config(self, file_):
        """Read YAML from a filename or a file-like object and return
        the parsed structure."""
        try:
            # File-like object
            data = file_.read()
            self.sources.append(":local:")
        except AttributeError:
            # Filename; 'with' guarantees the handle is closed even if
            # reading raises (the original leaked it in that case).
            with open(file_, "r") as f:
                data = f.read()
            self.sources.append(file_)

        # safe_load never instantiates arbitrary Python objects.
        return yaml.safe_load(data)

    def process_config(self, configdata):
        """Validate and expose all configuration sections."""
        self.config_identity(configdata)
        self.config_nodes(configdata)
        self.config_package_settings(configdata)

    def config_identity(self, configdata):
        """Read this node's own settings from the 'self' section.

        uuid, pubkey and privkey are mandatory; database, port and ip
        fall back to defaults.
        """
        try:
            self.uuid = configdata['self']['uuid']
            logging.debug("Own UUID is %s" % self.uuid)
        except KeyError:
            raise ConfigurationError("A UUID for the node ('self') must be specified.")

        try:
            self.pubkey = configdata['self']['pubkey']
            logging.debug("Own pubkey is %s" % self.pubkey)
        except KeyError:
            raise ConfigurationError("You must specify a public key for this node.")

        try:
            self.privkey = configdata['self']['privkey']
            logging.debug("Own privkey is %s" % self.privkey)
        except KeyError:
            raise ConfigurationError("You must specify a private key for this node.")

        try:
            self.database = configdata['self']['database']
        except KeyError:
            self.database = "node.db"  # Optional, with default

        try:
            self.port = configdata['self']['port']
        except KeyError:
            self.port = 3009  # Optional, with default

        try:
            self.bound_ip = configdata['self']['ip']
        except KeyError:
            self.bound_ip = "*"  # Optional; presumably "all interfaces" -- TODO confirm

        logging.debug("Database location is %s" % self.database)

    def config_nodes(self, configdata):
        """Read and validate the bootstrap node list ('nodes' section).

        host, port and pubkey are mandatory per node; permissions and
        override get defaults.
        """
        try:
            self.nodes = configdata['nodes']
        except KeyError:
            self.nodes = {} # Optional

        for uuid, node in self.nodes.items():
            if "host" not in node:
                raise ConfigurationError("Hostname is missing for node %s." % uuid)
            if "port" not in node:
                raise ConfigurationError("Port is missing for node %s." % uuid)
            if "pubkey" not in node:
                raise ConfigurationError("Public key is missing for node %s." % uuid)
            if "permissions" not in node:
                node['permissions'] = [] # Optional
            if "override" not in node:
                node['override'] = False # Optional

            logging.debug("Node %s : Hostname %s, port %s, pubkey %s, permissions %s" % (uuid, node["host"], node["port"], node["pubkey"], "|".join(node["permissions"])))

    def config_package_settings(self, configdata):
        """Read per-package settings ('package-settings' section)."""
        try:
            self.package_settings = configdata['package-settings']

            for key in self.package_settings:
                logging.debug("Package settings found for package %s" % key)
        except KeyError:
            self.package_settings = {} # Optional

    def config_includes(self, configdata):
        """Recursively merge configuration files listed under 'include'.

        Glob patterns and '~' are expanded. Files already recorded in
        self.sources are skipped, so circular includes cannot recurse
        forever.
        """
        try:
            include_list = configdata['include']
        except KeyError:
            return # Optional

        # A single include may be given as a plain string; detect the
        # absence of list methods explicitly instead of a bare except
        # (which also swallowed KeyboardInterrupt and friends).
        try:
            include_list.append
        except AttributeError:
            include_list = [include_list]

        for include in include_list:
            for file_ in glob.iglob(os.path.expanduser(include)):
                if file_ not in self.sources:
                    self.sources.append(file_)
                    includedata = self.read_config(file_)
                    self.configdata = dict_combine_recursive(self.configdata, includedata)
                    self.config_includes(includedata)
|
|
@ -1,298 +0,0 @@
|
|||||||
import sqlite3
|
|
||||||
|
|
||||||
class Database(object):
    """Thin wrapper around an SQLite connection that hands out (and
    caches) table abstraction objects and returns Row objects from
    every query."""

    def __init__(self, dbpath="node.db"):
        self.conn = sqlite3.connect(dbpath)
        # Make every cursor produce Row objects instead of tuples.
        self.conn.row_factory = Row
        # Cache of table abstraction objects, keyed by table name.
        self.tables = {}

    def _get_cursor(self):
        return self.conn.cursor()

    def _table_create_nodes(self):
        # Create the nodes table used by Nexus if it does not exist yet.
        self.query("CREATE TABLE IF NOT EXISTS nodes (`id` INTEGER PRIMARY KEY, `uuid` TEXT, `host` TEXT, `port` NUMERIC, `pubkey` TEXT, `presupplied` NUMERIC, `attributes` NUMERIC);")

    def _get_table(self, name, in_memory=False, forced=False):
        """Return a cached table object, creating one when needed.

        in_memory -- request a MemoryTable instead of a DatabaseTable
        forced    -- always build a fresh object, bypassing the cache

        NOTE(review): when in_memory is False, any cached object for
        `name` is returned even if it is a MemoryTable -- callers asking
        for a DatabaseTable may receive a previously created
        MemoryTable. Preserved as-is for backward compatibility.
        """
        if in_memory:
            try:
                # Only create a new MemoryTable if one doesn't already exist
                create_new = forced or not isinstance(self.tables[name], MemoryTable)
            except KeyError:
                create_new = True
        else:
            # Any cached object satisfies a non-memory request.
            create_new = forced or name not in self.tables

        if not create_new:
            return self.tables[name]

        if in_memory:
            new_table = MemoryTable(self, name)
        else:
            new_table = DatabaseTable(self, name)

        self.tables[name] = new_table
        return new_table

    def __getitem__(self, key):
        return self.get_database_table(key)

    def get_database_table(self, name):
        """Return a DatabaseTable for `name` (see _get_table caveat)."""
        return self._get_table(name, in_memory=False)

    def get_memory_table(self, name):
        """Return a MemoryTable for `name`, loading its data eagerly."""
        return self._get_table(name, in_memory=True)

    def query(self, query, params=(), commit=False):
        """Execute a query and return the cursor.

        params -- parameters bound to the query's placeholders; the
                  default is an immutable tuple (the original used a
                  shared mutable list default)
        commit -- commit the transaction after executing
        """
        # TODO: Query log
        cur = self._get_cursor()
        cur.execute(query, params)

        if commit:
            self.conn.commit()

        return cur

    def setup(self):
        """Create the tables Nexus needs; a no-op if they exist."""
        self._table_create_nodes()
|
|
||||||
|
|
||||||
class Row(object):
    """A single table row with dict-style column access.

    Usable directly as an sqlite3 row_factory (cursor, row arguments)
    or constructed bare for a not-yet-inserted row. Writes are staged
    in a commit buffer: reads do NOT see uncommitted writes; commit()
    pushes them to the database (via the _nexus_db/_nexus_table
    attributes the owning table attaches) and into the local copy.
    """

    def __init__(self, cursor=None, row=None):
        # Pending column changes, applied on commit().
        self._commit_buffer = {}
        # Committed column values.
        self._data = {}

        if cursor is None and row is None:
            # A fresh row that has not been inserted yet.
            self._new = True
        else:
            self._new = False

            # Populate from the cursor description + raw value tuple.
            for index, column in enumerate(cursor.description):
                self._data[column[0]] = row[index]

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        # Staged until commit(); never written to _data directly.
        self._commit_buffer[key] = value

    def _clear_buffer(self):
        self._commit_buffer = {}

    def commit(self):
        """Write buffered changes to the database, apply them locally,
        and clear the buffer. A no-op when nothing is buffered."""
        # Commit to database
        if self._commit_buffer:
            # Freeze the column order once so names and bound values
            # line up; on Python 3, .values() alone is a view, not the
            # sequence sqlite3 expects.
            columns = list(self._commit_buffer.keys())
            statement_list = ", ".join("`%s` = ?" % key for key in columns)
            query = "UPDATE %s SET %s WHERE `id` = %s" % (self._nexus_table, statement_list, self['id']) # Not SQLi-safe!
            self._nexus_db.query(query, params=[self._commit_buffer[key] for key in columns], commit=True)

        # Update locally
        for key, value in self._commit_buffer.items():
            self._data[key] = value

        # Clear out commit buffer
        self._clear_buffer()

    def rollback(self):
        """Discard all uncommitted changes."""
        self._clear_buffer()
|
|
||||||
|
|
||||||
class Table(object):
    """Shared behaviour for table accessors.

    Never construct this directly; use DatabaseTable or MemoryTable.
    """

    def __init__(self, database, table_name):
        # You should never construct this directly!
        self.db = database
        self.table = table_name

    def _process_insert(self, value, key=None):
        """Insert Row `value` into the table; a non-None `key` is used
        as the explicit `id`. Returns the id assigned by SQLite."""
        if key is not None:
            value['id'] = key

        # Every column must receive a value; default missing ones to NULL.
        for column in self.columns:
            if column != "id" and column not in value._commit_buffer:
                value._commit_buffer[column] = None

        # Freeze the column order once so the column list and the bound
        # values always correspond (on Python 3, .values() is a view,
        # not the sequence sqlite3 expects).
        columns = list(value._commit_buffer.keys())
        column_list = ", ".join("`%s`" % name for name in columns)
        sub_list = ", ".join("?" for name in columns)
        query = "INSERT INTO %s (%s) VALUES (%s)" % (self.table, column_list, sub_list) # Not SQLi-safe!

        result = self.db.query(query, params=[value._commit_buffer[name] for name in columns], commit=True)

        # The row now exists in the database.
        value._new = False

        return result.lastrowid

    def _try_set(self, key, value, cache):
        """Insert `value` under explicit id `key`, refusing to overwrite
        an existing row (either cached or present in the database)."""
        if key in cache:
            raise TypeError("A row with the given ID already exists. Either edit the existing one, or append a new row using append().")
        else:
            try:
                self._process_insert(value, key)
            except sqlite3.IntegrityError:
                raise TypeError("A row with the given ID already exists. Either edit the existing one, or append a new row using append().")

    def _set_column_names(self, names):
        self._column_names = names

    def _retrieve_column_names(self):
        # WHERE 0 returns no rows but still populates the cursor
        # description with the table's column names.
        cur = self.db.query("SELECT * FROM %s WHERE 0" % self.table) # Not SQLi-safe!
        self._set_column_names([x[0] for x in cur.description])

    def __getattr__(self, name):
        # Lazily computed `columns` attribute; anything else is an error.
        if name == "columns":
            try:
                return self._column_names
            except AttributeError:
                self._retrieve_column_names()
                return self._column_names
        else:
            raise AttributeError("No such attribute exists")

    def append(self, value):
        """Insert a new Row, letting SQLite pick its id; returns it."""
        return self._process_insert(value)
|
|
||||||
|
|
||||||
class DatabaseTable(Table):
    """Table accessor that fetches rows from SQLite on demand and keeps
    an internal cache of Row objects (purge() to clear it)."""

    def __init__(self, database, table_name):
        Table.__init__(self, database, table_name)
        # Previously retrieved Row objects, keyed by row id.
        self._cache = {}

    def _process_insert(self, value, key=None):
        rowid = Table._process_insert(self, value, key)
        self._cache[rowid] = value
        return rowid

    def __getitem__(self, key):
        """Return the Row with the given id, raising KeyError when no
        such row exists."""
        try:
            return self._cache[key]
        except KeyError:
            result = self.db.query("SELECT * FROM %s WHERE `id` = ?" % self.table, params=(key,))
            self._set_column_names([x[0] for x in result.description])

            # BUG FIX: the original tested `result is None`, but query()
            # always returns a cursor -- a missing id fell through and
            # crashed on `None.fetchone()`'s result. Check the fetched
            # row itself so the documented KeyError is raised.
            row = result.fetchone()
            if row is None:
                raise KeyError("No row with that ID was found in the table.")

            # Attach backlinks so the Row can commit() itself.
            row._nexus_db = self.db
            row._nexus_table = self.table
            row._nexus_type = "database"
            self._cache[key] = row
            return row

    def __setitem__(self, key, value):
        self._try_set(key, value, self._cache)

    def purge(self):
        """Drop the internal row cache."""
        self._cache = {}
|
|
||||||
|
|
||||||
class MemoryTable(Table):
    """Table accessor that loads the entire backing table into memory
    up front; refresh() reloads it from the database."""

    def __init__(self, database, table_name):
        Table.__init__(self, database, table_name)
        # All rows of the table, keyed by their `id` column.
        self.data = {}
        self._retrieve_data()

    def _retrieve_data(self):
        # Pull every row of the backing table into memory.
        cursor = self.db.query("SELECT * FROM %s" % self.table) # Not SQLi-safe!
        self._set_column_names([column[0] for column in cursor.description])

        for entry in cursor:
            # Attach backlinks so each Row can commit() itself.
            entry._nexus_db = self.db
            entry._nexus_table = self.table
            entry._nexus_type = "memory"
            self.data[entry['id']] = entry

    def _process_insert(self, value, key=None):
        rowid = Table._process_insert(self, value, key)
        value._nexus_db = self.db
        value._nexus_table = self.table
        value._nexus_type = "memory"
        self.data[rowid] = value
        return rowid

    def __getitem__(self, key):
        return self.data[key]

    def __setitem__(self, key, value):
        self._try_set(key, value, self.data)

    def refresh(self):
        """Discard the in-memory copy and reload it from the database."""
        self.data = {}
        self._retrieve_data()
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Testing code
    # NOTE(review): exercises the abstraction layer against the default
    # "node.db" file -- running this script creates/modifies that file.
    db = Database()
    db.setup()
    table = db.get_database_table("nodes")
    #print table.columns
    # Build a fresh row and let SQLite assign it an id on append().
    new_row = Row()
    new_row['uuid'] = "abc"
    new_row['host'] = "def"
    new_row['port'] = 123

    table.append(new_row)
    #table[10] = new_row


    #table[1]['uuid'] = "bleep"
    #table[1]['host'] = "bloop"
    #table[1].commit()
    #table[1]['port'] = 1234
    #table[1].commit()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
# This is a complete mess. Looks like subclassing C extension stuff is a bad idea.
|
|
||||||
|
|
||||||
class Row(sqlite3.Row):
|
|
||||||
def __init__(self, cursor, row):
|
|
||||||
sqlite3.Row.__init__(self, cursor, row)
|
|
||||||
#super(sqlite3.Row, self).__init__(cursor, row) # This segfaults!
|
|
||||||
self._commit_buffer = {}
|
|
||||||
self._commit_data = {}
|
|
||||||
|
|
||||||
# Yes, this will make lookup slower. No real solution for this for now.
|
|
||||||
for key in self.keys():
|
|
||||||
self._commit_data[key] = self[key]
|
|
||||||
|
|
||||||
self.__getitem__ = self._nexus_getitem
|
|
||||||
|
|
||||||
def _nexus_getitem(self, key):
|
|
||||||
# FIXME: Currently, only when using dict access, the data modified through a commit will be accessible.
|
|
||||||
#try:
|
|
||||||
print "GET %s" % key
|
|
||||||
return self._commit_data[key]
|
|
||||||
#except KeyError, e:
|
|
||||||
# #return sqlite3.Row.__get__(self, key)
|
|
||||||
# print super(Row, self).__get__(key)
|
|
||||||
# return super(Row, self).__get__(key)
|
|
||||||
|
|
||||||
def __setitem__(self, key, value):
|
|
||||||
self._commit_buffer[key] = value
|
|
||||||
|
|
||||||
def _clear_buffer(self):
|
|
||||||
self._commit_buffer = {}
|
|
||||||
|
|
||||||
def commit(self):
|
|
||||||
# Commit to database
|
|
||||||
statement_list = ", ".join("`%s` = ?" % key for key in self._commit_buffer.keys())
|
|
||||||
query = "UPDATE %s SET %s WHERE `id` = %s" % (self._nexus_table, statement_list, self['id']) # Not SQLi-safe!
|
|
||||||
print query
|
|
||||||
print self._commit_buffer.values()
|
|
||||||
#self._nexus_db.query(query, params=self._commit_buffer.values())
|
|
||||||
|
|
||||||
# Update locally
|
|
||||||
for key, value in self._commit_buffer.iteritems():
|
|
||||||
self._commit_data[key] = value
|
|
||||||
|
|
||||||
# Clear out commit buffer
|
|
||||||
self._clear_buffer()
|
|
||||||
|
|
||||||
def rollback(self):
|
|
||||||
self._clear_buffer()
|
|
||||||
"""
|
|
@ -1,11 +0,0 @@
|
|||||||
def dict_combine_recursive(a, b):
    """Recursively merge two values, with *b* taking precedence.

    Lists are combined as de-duplicated unions (ordering is not
    preserved), dicts are merged key by key (recursing into keys
    present in both), and for any other combination *b* wins. None is
    treated as "absent", so the other value is returned unchanged.

    Based on http://stackoverflow.com/a/8725321
    """
    if a is None:
        return b
    if b is None:
        return a
    if isinstance(a, list) and isinstance(b, list):
        # Union of both lists; de-duplicates but loses ordering.
        return list(set(a + b))
    elif isinstance(a, dict) and isinstance(b, dict):
        # iterkeys() was Python-2-only; iterating the dicts directly is
        # equivalent and works on Python 3 as well.
        keys = set(a) | set(b)
        return dict((key, dict_combine_recursive(a.get(key), b.get(key))) for key in keys)
    else:
        return b
|
|
File diff suppressed because one or more lines are too long
@ -1,217 +0,0 @@
|
|||||||
# Database abstraction layer
|
|
||||||
|
|
||||||
Nexus uses a custom abstraction layer for SQLite database operations. It
|
|
||||||
has a NoSQL-like API, meaning that objects work like dicts or lists
|
|
||||||
where possible and appropriate.
|
|
||||||
|
|
||||||
The abstraction layer can be used by importing `nexus.core.db`.
|
|
||||||
|
|
||||||
! All tables that this abstraction layer is used for, **must** have a
|
|
||||||
`ROWID` alias named `id`.
|
|
||||||
|
|
||||||
{TOC}
|
|
||||||
|
|
||||||
^ Database([**filename**])
|
|
||||||
|
|
||||||
Creates a new Database connection representing an SQLite
|
|
||||||
database with the given filename. If it does not exist, it is
|
|
||||||
created.
|
|
||||||
|
|
||||||
filename::
|
|
||||||
**Optional.** Filename of the SQLite database.
|
|
||||||
|
|
||||||
^ Database.setup()
|
|
||||||
|
|
||||||
Attempts to set up the tables needed for Nexus in the database.
|
|
||||||
If the tables already exist, nothing happens.
|
|
||||||
|
|
||||||
^ Database.query(**query**[, params=**params**])
|
|
||||||
|
|
||||||
Runs a custom query against the database, and returns an
|
|
||||||
sqlite3.cursor object, that you can use to retrieve the results
|
|
||||||
from (using .fetchone(), .fetchmany(**size**), or .fetchall()).
|
|
||||||
The cursor will return Row objects, like other functions in the
|
|
||||||
abstraction layer do.
|
|
||||||
|
|
||||||
You'll only need this if you need to run queries that aren't
|
|
||||||
covered by any of the other functions; the functions specified
|
|
||||||
in the abstraction layer are preferred for performance reasons.
|
|
||||||
|
|
||||||
^ Database[**table_name**]
|
|
||||||
Database.get_database_table(**table_name**)
|
|
||||||
|
|
||||||
Retrieves a DatabaseTable object representing the table in the
|
|
||||||
database with the specified table name. Table objects are reused
|
|
||||||
where appropriate, to minimize resource usage and state issues.
|
|
||||||
|
|
||||||
While a DatabaseTable does not immediately load all data from
|
|
||||||
the table like a MemoryTable does, it **does** retain an
|
|
||||||
internal cache of retrieved rows. You will want to .purge() this
|
|
||||||
cache regularly if you use the table a lot.
|
|
||||||
|
|
||||||
! The existence of a table is not checked. You need to make sure
|
|
||||||
that the table exists by yourself, if you cannot rely on this!
|
|
||||||
|
|
||||||
table_name::
|
|
||||||
The name of the table you wish to work with.
|
|
||||||
|
|
||||||
@ Accessing a database table
|
|
||||||
|
|
||||||
$ db = Database("test.db")
|
|
||||||
table = db["sample_table"]
|
|
||||||
|
|
||||||
@ Accessing a database table through the explicit function
|
|
||||||
|
|
||||||
$ db = Database("test.db")
|
|
||||||
table = db.get_database_table("sample_table")
|
|
||||||
|
|
||||||
^ Database.get_memory_table(**table_name**)
|
|
||||||
|
|
||||||
Retrieves a MemoryTable object representing the specified table.
|
|
||||||
A MemoryTable, upon creation, will immediately load all data
|
|
||||||
from the database table it represents, and keep it in memory. It
|
|
||||||
is reused where possible, just like a DatabaseTable.
|
|
||||||
|
|
||||||
A MemoryTable is intended to be used for cases where frequent
|
|
||||||
lookup of data in small tables is necessary, eliminating the
|
|
||||||
SQLite lookup overhead.
|
|
||||||
|
|
||||||
! The existence of a table is not checked. You need to make sure
|
|
||||||
that the table exists by yourself, if you cannot rely on this!
|
|
||||||
|
|
||||||
! If your database table is modified by another application or
|
|
||||||
instance, the data in your MemoryTable will be out of sync.
|
|
||||||
In this case, use a regular DatabaseTable or .refresh() the data
|
|
||||||
frequently.
|
|
||||||
|
|
||||||
table_name::
|
|
||||||
The name of the table you wish to work with.
|
|
||||||
|
|
||||||
@ Accessing a database table and keeping it in memory
|
|
||||||
|
|
||||||
$ db = Database("test.db")
|
|
||||||
table = db.get_memory_table("sample_table")
|
|
||||||
|
|
||||||
^ DatabaseTable[**row_id**]
|
|
||||||
MemoryTable[**row_id**]
|
|
||||||
|
|
||||||
Retrieves a Row object representing the row in the table with
|
|
||||||
the specified identifier (in the `id` field). Data is retrieved
|
|
||||||
immediately.
|
|
||||||
|
|
||||||
row_id::
|
|
||||||
The identifier of the row to retrieve.
|
|
||||||
|
|
||||||
### Exceptions
|
|
||||||
|
|
||||||
KeyError::
|
|
||||||
Raised when a row with the given identifier does not
|
|
||||||
exist in the table.
|
|
||||||
|
|
||||||
^ DatabaseTable[**row_id**] = **row**
|
|
||||||
MemoryTable[**row_id**] = **row**
|
|
||||||
|
|
||||||
Inserts a new row into the database. This can **not** be used to
|
|
||||||
edit an existing row; to do so, edit the Row object for that row
|
|
||||||
directly.
|
|
||||||
|
|
||||||
If you do not want to explicitly specify a row identifier, use
|
|
||||||
the .append() method instead.
|
|
||||||
|
|
||||||
row_id::
|
|
||||||
The identifier to give the row in the database.
|
|
||||||
|
|
||||||
row::
|
|
||||||
A Row object representing the new row to insert.
|
|
||||||
|
|
||||||
### Exceptions
|
|
||||||
|
|
||||||
TypeError::
|
|
||||||
Raised when a row with the given identifier already
|
|
||||||
exists.
|
|
||||||
|
|
||||||
^ DatabaseTable.append(**row**)
|
|
||||||
MemoryTable.append(**row**)
|
|
||||||
|
|
||||||
Inserts a new row into the table, and lets SQLite assign it an
|
|
||||||
identifier. This is the method you'll usually want to use.
|
|
||||||
|
|
||||||
row::
|
|
||||||
A Row object representing the new row to insert.
|
|
||||||
|
|
||||||
^ DatabaseTable.purge()
|
|
||||||
|
|
||||||
Purges the internal row cache of a DatabaseTable.
|
|
||||||
|
|
||||||
! You cannot use this method for a MemoryTable! Use .refresh()
|
|
||||||
instead.
|
|
||||||
|
|
||||||
^ MemoryTable.refresh()
|
|
||||||
|
|
||||||
Replaces the current copy of the table in memory, with a newly
|
|
||||||
retrieved copy. You'll need to call this regularly when you
|
|
||||||
have multiple applications modifying the same table, to prevent
|
|
||||||
going out of sync. If synchronized data is absolutely essential
|
|
||||||
at all times, use a DatabaseTable instead.
|
|
||||||
|
|
||||||
! You cannot use this method for a DatabaseTable! Use .purge()
|
|
||||||
instead.
|
|
||||||
|
|
||||||
^ Row()
|
|
||||||
|
|
||||||
Creates a new Row object. You'll only need to use this if you
|
|
||||||
want to insert a new row into a table.
|
|
||||||
|
|
||||||
You do not need to immediately specify a table name or row data.
|
|
||||||
Instead, you can just set column values on the Row object after
|
|
||||||
creating it, and tell it what table to be inserted to by using
|
|
||||||
any of the insertion methods on a DatabaseTable or MemoryTable.
|
|
||||||
|
|
||||||
^ Row[**column_name**]
|
|
||||||
|
|
||||||
Returns the value of the specified column in the row.
|
|
||||||
|
|
||||||
column_name::
|
|
||||||
The column whose value you wish to retrieve.
|
|
||||||
|
|
||||||
### Exceptions
|
|
||||||
|
|
||||||
KeyError::
|
|
||||||
Raised when there is no such column in the table that
|
|
||||||
the row belongs to.
|
|
||||||
|
|
||||||
^ Row[**column_name**] = **value**
|
|
||||||
|
|
||||||
Sets (or changes) the data for the given column in the row.
|
|
||||||
|
|
||||||
! The change is not immediately reflected in the database (or
|
|
||||||
memory table). To apply your changes, you need to .commit().
|
|
||||||
|
|
||||||
! This does not check whether such a column exists! If you
|
|
||||||
specify an invalid column name, the data will simply never be
|
|
||||||
inserted.
|
|
||||||
|
|
||||||
column_name::
|
|
||||||
The column whose value you wish to set or change.
|
|
||||||
|
|
||||||
value::
|
|
||||||
The value to set the column to.
|
|
||||||
|
|
||||||
^ Row.commit()
|
|
||||||
|
|
||||||
Process all changes you have made to the column data for the
|
|
||||||
row. This will run one or more SQL queries.
|
|
||||||
|
|
||||||
You don't need to do this when inserting a new row; the
|
|
||||||
insertion methods for the table will do this for you
|
|
||||||
automatically.
|
|
||||||
|
|
||||||
^ Row.rollback()
|
|
||||||
|
|
||||||
Cancels all the changes you have made to the column data for the
|
|
||||||
row, and returns it to the original state. The "original state"
|
|
||||||
will be the state the row was in when you last retrieved or
|
|
||||||
committed it.
|
|
||||||
|
|
||||||
Note that this will only work with uncommitted changes; after
|
|
||||||
you commit a change, it is final and not reversible.
|
|
@ -1,26 +0,0 @@
|
|||||||
#!/usr/bin/env python2
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
def usage():
    """Print the list of valid actions for the control script."""
    # A single parenthesized argument prints identically under
    # Python 2 (parenthesized expression) and Python 3 (function call).
    print("Specify a valid action.\nPossible actions: start, stop, add-node, reload-config, reload-packages")
|
|
||||||
|
|
||||||
# Require at least one action argument.
if len(sys.argv) < 2:
    usage()
    exit(1)

action = sys.argv[1]
if action == "start":
    # Imported lazily so the daemon machinery is only loaded when
    # actually starting.
    import application
    application.run(sys.argv[2:], "blah")
elif action == "stop":
    pass
else:
    usage()
    exit(1)


"""
parser = argparse.ArgumentParser(description="Nexus control application")

config = core.config.ConfigReader("")
"""
|
|
@ -1,45 +0,0 @@
|
|||||||
import sys
|
|
||||||
import parser.rulebook
|
|
||||||
|
|
||||||
# TODO: Keep trail of message travelling through the rules

# Load the rulebook source from the file named on the command line;
# 'with' closes the handle even if reading fails.
with open(sys.argv[1]) as rulebook_file:
    rulebook = rulebook_file.read()

bins = parser.rulebook.parse(rulebook)
|
|
||||||
|
|
||||||
class Message(object):
    """A message travelling through the rule system, carrying routing
    metadata (id, type, tags, source, chain) and an arbitrary payload
    in `data`."""

    def __init__(self):
        # Trailing underscores avoid shadowing the builtins id/type.
        self.id_ = ""
        self.type_ = "none"
        self.tags = []
        self.source = ""
        self.chain = []
        self.data = {}

    def set_data(self, data):
        """Populate the message from a dict with the keys id, type,
        tags, source, chain and payload."""
        self.id_, self.type_ = data['id'], data['type']
        self.tags, self.source = data['tags'], data['source']
        self.chain, self.data = data['chain'], data['payload']
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Example driver: build a sample task message and feed it into the
# 'remote' bin produced by the rulebook parser above.
m = Message()
m.set_data({
    "id": "qwert-yuiop-61238-10842",
    "type": "task",
    "tags": ["convert", "mpeg"],
    "source": "abcde-fghij-00000-00008",
    "chain": ["abcde-fghij-00000-00005", "abcde-fghij-00000-00006"],
    "payload": {
        "command": "convert",
        "category": "video",
        "original_filetype": "mpg"
    }
})

bins['remote'].process(m)
|
|
@ -1 +0,0 @@
|
|||||||
|
|
@ -1,12 +0,0 @@
|
|||||||
# Operator codes for rulebook expressions. The numeric values are just
# distinct tags; nothing visible here relies on their ordering.

# Boolean combinators ('and' binds tighter than 'or' in the parser).
NONE = 0
AND = 1
OR = 2

# Comparison operators.
EQUALS = 3
NOT_EQUALS = 4
LESS_THAN = 5
MORE_THAN = 6
LESS_THAN_OR_EQUALS = 7
MORE_THAN_OR_EQUALS = 8
HAS = 9  # presumably a containment test -- TODO confirm against the evaluator
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
|||||||
class ParserException(Exception):
    """Base class for all rulebook parser errors."""
    pass
|
|
||||||
|
|
||||||
class MissingRootElementError(ParserException):
    """Presumably raised when a rulebook lacks a root element --
    raise sites are not visible here; confirm against the parser."""
    pass
|
|
||||||
|
|
||||||
class ParsingSyntaxError(ParserException):
    """Base class for syntax errors found while parsing a rulebook."""
    pass
|
|
||||||
|
|
||||||
class RulebookIndentationError(ParsingSyntaxError):
    """Presumably raised for invalid indentation in a rulebook --
    raise sites are not visible here; confirm against the parser."""
    pass
|
|
||||||
|
|
||||||
class MissingParenthesesError(ParsingSyntaxError):
    """Raised when a rulebook expression has unbalanced parentheses."""
    pass
|
|
||||||
|
|
||||||
class InvalidOperatorError(ParsingSyntaxError):
    """Presumably raised for an unrecognized operator in an
    expression -- raise sites are not visible here; confirm."""
    pass
|
|
||||||
|
|
||||||
class EvaluationError(ParserException):
    """Base class for errors raised while evaluating parsed rules."""
    pass
|
|
||||||
|
|
||||||
class ScopeError(EvaluationError):
    """Presumably raised for a variable referenced outside its scope
    during evaluation -- raise sites are not visible here; confirm."""
    pass
|
|
||||||
|
|
||||||
class AttributeNameError(EvaluationError):
    """Presumably raised for an unknown attribute name during
    evaluation -- raise sites are not visible here; confirm."""
    pass
|
|
@ -1,339 +0,0 @@
|
|||||||
from constants import *
|
|
||||||
from itertools import groupby
|
|
||||||
from collections import defaultdict
|
|
||||||
from exceptions import *
|
|
||||||
|
|
||||||
def parse(input_):
    """Parse a filter rule string into an expression tree.

    Returns a single FilterExpression, or a FilterExpressionGroup rooting the
    whole expression when it contains more than one statement.  Raises
    MissingParenthesesError on unbalanced parentheses and
    MissingRootElementError when no statement could be found at all.
    """
    # Rules:
    # Boolean 'and' has precedence over 'or'
    # Enclosure in parentheses means creating a new FilterExpressionGroup
    # Having 'and' and 'or' operators in the same group means a new FilterExpressionGroup is created for every 'and' chain, to retain precedence
    # Variable accessors are prefixed with $
    # Strings are enclosed in "quotes"

    rule_length = len(input_)
    idx = 0
    buff = ""                 # characters of the statement currently being read
    in_expression = False     # True while inside a "quoted string" (parens/keywords ignored there)
    current_element = {}
    element_list = defaultdict(list)    # per-depth list of parsed elements/groups
    operator_list = defaultdict(list)   # per-depth list of AND/OR joining the elements
    current_depth = 0                   # parenthesis nesting depth

    while idx < rule_length:
        char = input_[idx]

        if char == "(" and in_expression == False:
            # New group encountered
            #print "START GROUP %d" % current_depth
            current_depth += 1
        elif char == ")" and in_expression == False:
            # End statement, Process list of elements
            element_list[current_depth].append(create_filter_expression(buff))
            # Add elements to group object
            group = create_group(element_list[current_depth], operator_list[current_depth])
            # The finished group becomes an element of the enclosing depth.
            element_list[current_depth - 1].append(group)

            element_list[current_depth] = []
            operator_list[current_depth] = [] # Clear out lists to prevent working with stale data

            #print "-- GR: %s" % group
            buff = ""
            current_depth -= 1
            #print "END GROUP %d" % current_depth
        elif char == '"':
            # Toggle quoted-string mode; the quote itself stays in the buffer.
            in_expression = not in_expression
            buff += '"'
        elif not in_expression and char == "o" and idx + 2 < rule_length and input_[idx+1:idx+2] == "r" and len(buff) > 0 and (buff[-1] == " " or buff[-1] == ")"):
            # End statement, Boolean OR
            # (buff may be blank when the OR directly follows a closing paren)
            if buff.strip() != "":
                element_list[current_depth].append(create_filter_expression(buff))
            operator_list[current_depth].append(OR)
            buff = ""
            idx += 1 # We read ahead one position extra
        elif not in_expression and char == "a" and idx + 3 < rule_length and input_[idx+1:idx+3] == "nd" and len(buff) > 0 and (buff[-1] == " " or buff[-1] == ")"):
            # End statement, Boolean AND
            if buff.strip() != "":
                element_list[current_depth].append(create_filter_expression(buff))
            operator_list[current_depth].append(AND)
            buff = ""
            idx += 2 # We read ahead two positions extra
        else:
            buff += char

        idx += 1

    if current_depth > 0:
        raise MissingParenthesesError("Missing %d closing parenthese(s)." % current_depth)
    elif current_depth < 0:
        raise MissingParenthesesError("Missing %d opening parenthese(s)." % (0 - current_depth))

    # If there's anything left in the buffer, it's probably a statement we still need to process.
    if buff.strip() != "":
        element_list[current_depth].append(create_filter_expression(buff))

    if len(element_list[current_depth]) > 1:
        # Multiple elements, need to encapsulate in a group
        root_element = create_group(element_list[current_depth], operator_list[current_depth])
    elif len(element_list[current_depth]) == 1:
        root_element = element_list[current_depth][0]
    else:
        raise MissingRootElementError("No root element could be determined in the expression.")

    return root_element
|
|
||||||
|
|
||||||
def create_group(elements, operators):
    """Build a FilterExpressionGroup from parallel element/operator lists.

    `operators` joins consecutive entries of `elements` (so it has one entry
    fewer).  When both AND and OR occur in the same list, every run of ANDed
    elements is pushed into a nested child group so that AND keeps precedence
    over OR in the resulting tree.
    """
    group = FilterExpressionGroup()

    # Process operators
    if len(elements) > 1:
        # Check if the operators vary
        operator_discrepancy = (len(set(operators)) > 1)

        if operator_discrepancy:
            # We'll need to find the 'and' chains and push them into separate child groups
            idx = 0
            # sieve[i] is True while elements[i] has not yet been emitted.
            sieve = [True for x in xrange(0, len(elements))]
            final_list = []

            for operator, items in groupby(operators):
                items = list(items)

                # A run of k identical operators spans k+1 elements.
                start = idx
                end = idx + len(items) + 1
                relevant_elements = elements[start:end]

                if operator == AND:
                    for i in xrange(start, end):
                        # Mark as processed
                        sieve[i] = False
                    # Emit any still-pending elements that precede this AND chain,
                    # keeping overall left-to-right order.
                    for i in [x for x in xrange(0, end) if sieve[x] is True]:
                        final_list.append(elements[i])
                        sieve[i] = False
                    # The AND chain itself becomes one nested child group.
                    final_list.append(create_group(relevant_elements, [AND for x in xrange(0, end - start)]))

                idx += len(items)

            # Add the remaining OR items after the last AND chain
            for i in [x for x in xrange(0, len(elements)) if sieve[x] is True]:
                final_list.append(elements[i])

            for element in final_list:
                group.add(element)

            group.relation = OR # Hardcoded, because all AND chains are taken care of above...
        else:
            for element in elements:
                group.add(element)
            group.relation = operators[0]
    else:
        # Single element: wrapped as-is.
        # NOTE(review): group.relation stays at its initial NONE here, which
        # FilterExpressionGroup.evaluate rejects -- confirm single-element
        # groups are never evaluated directly.
        group.add(elements[0])

    return group
|
|
||||||
|
|
||||||
def create_filter_expression(buff):
    """Parse one `<operand> <operator> <operand>` statement into a FilterExpression.

    Operands are either "quoted strings" or $variables (optionally
    `$scope[name]`).  Raises InvalidOperandError for any other operand form
    and InvalidOperatorError for an unknown operator token.
    """
    # TODO: Use shlex split because of spaces in strings?
    left, operator, right = [x.strip() for x in buff.split(None, 2)]

    # The two operands are parsed identically; the duplicated inline parsing
    # has been factored into _parse_operand.
    left_obj = _parse_operand(left)
    right_obj = _parse_operand(right)

    # Operator token -> operator constant from `constants`.
    operators = {
        "=": EQUALS,
        "==": EQUALS,
        "!=": NOT_EQUALS,
        ">": MORE_THAN,
        "<": LESS_THAN,
        ">=": MORE_THAN_OR_EQUALS,
        "<=": LESS_THAN_OR_EQUALS,
        "has": HAS
    }

    try:
        operator_type = operators[operator]
    except KeyError:
        raise InvalidOperatorError("Unrecognized operator.")

    expression = FilterExpression(left_obj, operator_type, right_obj)
    return expression


def _parse_operand(token):
    """Convert a single operand token into its FilterExpressionElement.

    NOTE: the original inline version used locals named (name, scope) that
    were swapped relative to FilterExpressionVariable(scope, name=None); the
    positional call made the values line up anyway.  The labels below match
    the constructor, with behavior unchanged.
    """
    if token[0] == '"' and token[-1] == '"':
        # Quoted string literal (quotes stripped).
        return FilterExpressionString(token[1:-1])
    elif token[0] == "$":
        if "[" in token[1:] and token[-1] == "]":
            # $scope[name] form, e.g. $attr[command]
            scope, name = token[1:-1].split("[", 1)
        else:
            # Bare $scope form, e.g. $type
            scope = token[1:]
            name = None
        return FilterExpressionVariable(scope, name)
    else:
        raise InvalidOperandError("Unrecognized operand type.") # No other types supported yet...
|
|
||||||
|
|
||||||
class FilterExpression(object):
    """A single `left <operator> right` comparison against a message."""

    def __init__(self, left, operator, right):
        self.left = left          # FilterExpressionElement ($variable or string literal)
        self.operator = operator  # operator constant from `constants`
        self.right = right        # FilterExpressionElement

    def evaluate(self, message):
        """Return the boolean result of applying the operator to both operand values.

        Raises EvaluationError for an operator constant it does not know.
        """
        if self.operator == EQUALS:
            return (self.left.value(message) == self.right.value(message))
        elif self.operator == NOT_EQUALS:
            return (self.left.value(message) != self.right.value(message))
        elif self.operator == LESS_THAN:
            return (self.left.value(message) < self.right.value(message))
        elif self.operator == MORE_THAN:
            return (self.left.value(message) > self.right.value(message))
        elif self.operator == LESS_THAN_OR_EQUALS:
            return (self.left.value(message) <= self.right.value(message))
        elif self.operator == MORE_THAN_OR_EQUALS:
            return (self.left.value(message) >= self.right.value(message))
        elif self.operator == HAS:
            # BUGFIX: this branch called the undefined name `is_instance`
            # (NameError on every HAS evaluation) and tested the operand
            # *object* against basestring, which could never be true.  Per the
            # original branch comments, a string-literal left operand means a
            # substring check and anything else means an in-array check.
            if isinstance(self.left, FilterExpressionString):
                return (self.right.value(message) in self.left.value(message)) # Substring comparison
            else:
                return (self.right.value(message) in self.left.values(message)) # In-array check
        else:
            raise EvaluationError("Unhandled operator encountered during evaluation.")

    def __repr__(self):
        return "<FE %s [%s] %s>" % (repr(self.left), self.get_opname(), repr(self.right))

    def get_opname(self):
        """Return a human-readable name for this expression's operator."""
        names = {
            EQUALS: "EQUALS",
            NOT_EQUALS: "NOT EQUALS",
            LESS_THAN: "LESS THAN",
            MORE_THAN: "MORE THAN",
            LESS_THAN_OR_EQUALS: "LESS THAN OR EQUAL",
            MORE_THAN_OR_EQUALS: "MORE THAN OR EQUAL",
            HAS: "HAS",
        }
        return names.get(self.operator, "?")

    def pretty_print(self, level=0):
        """Print this expression indented by `level` tab stops."""
        prefix = "\t" * level
        # Single-argument print(...) form works identically under Python 2 and 3.
        print(prefix + "%s %s %s" % (repr(self.left), self.get_opname(), repr(self.right)))
|
|
||||||
|
|
||||||
class FilterExpressionGroup(object):
    """An AND- or OR-joined collection of expressions and nested sub-groups."""

    def __init__(self):
        self.elements = []    # child FilterExpression / FilterExpressionGroup objects
        self.relation = NONE  # AND or OR once the group is populated

    def add(self, element):
        """Append one child expression or sub-group."""
        self.elements.append(element)

    def evaluate(self, message):
        """Combine the children's results according to this group's relation."""
        if self.relation == AND:
            return all(member.evaluate(message) == True for member in self.elements)
        elif self.relation == OR:
            return any(member.evaluate(message) == True for member in self.elements)
        raise EvaluationError("Unhandled group relationship encountered during evaluation.")

    def __repr__(self):
        inner = ", ".join(repr(member) for member in self.elements)
        return "<FEGroup %s (%s)>" % (self.get_relname(), inner)

    def get_relname(self):
        """Return a human-readable name for this group's relation."""
        if self.relation == AND:
            return "AND"
        if self.relation == OR:
            return "OR"
        return "?"

    def pretty_print(self, level=0):
        """Print the group and, one level deeper, each of its children."""
        indent = "\t" * level

        print(indent + "group[%s] (" % self.get_relname())
        for member in self.elements:
            member.pretty_print(level=(level + 1))
        print(indent + ")")
|
|
||||||
|
|
||||||
class FilterExpressionElement(object):
    """Base class for filter operands; provides message value lookup helpers."""

    def select_value(self, message, scope, name, multiple=False):
        # Resolve a $variable against the message: `scope` selects the message
        # field, `name` is only used for attribute lookups ($attr[...]).
        if scope == "tags":
            return_value = message.tags
        elif scope == "type":
            return_value = message.type_
        elif scope == "source":
            return_value = message.source
        elif scope == "chain":
            return_value = message.chain
        elif scope == "attr":
            return_value = self.select_attribute(message, name, multiple=multiple)
        else:
            raise ScopeError("Invalid scope specified.")

        if isinstance(return_value, basestring):
            # Strings are returned as-is, even when `multiple` was requested.
            return return_value
        elif len(return_value) > 0:
            # Presumably list-like here -- TODO confirm message field types.
            if multiple == False:
                # Callers wanting a single value get the first entry.
                return return_value[0]
            else:
                return return_value
        else:
            raise EvaluationError("No valid value could be found.")

    def select_attribute(self, message, query, multiple=False):
        # Walk a "/"-separated path (e.g. "a/b/c") through the message's data dict.
        segments = query.split("/")
        current_object = message.data

        for segment in segments:
            try:
                current_object = current_object[segment]
            except KeyError, e:
                raise AttributeNameError("Invalid attribute specified.")

        return current_object
|
|
||||||
|
|
||||||
class FilterExpressionVariable(FilterExpressionElement):
    """A $variable operand, e.g. `$type` or `$attr[command]`."""

    def __init__(self, scope, name=None):
        # NOTE(review): create_filter_expression passes these positionally
        # from locals named (name, scope); the values line up correctly with
        # (scope, name) here, but the call-site naming is swapped -- worth
        # confirming/renaming there.
        self.scope = scope  # message field to read ("tags", "type", "attr", ...)
        self.name = name    # attribute path, only meaningful for the "attr" scope
        # TODO: name path parsing

    def value(self, message):
        # Single-value form of the lookup.
        return self.select_value(message, self.scope, self.name, multiple=False)

    def values(self, message):
        # List form of the lookup, used for in-array (HAS) checks.
        return self.select_value(message, self.scope, self.name, multiple=True)

    def __repr__(self):
        return "<FEVar %s/%s>" % (self.scope, self.name)
|
|
||||||
|
|
||||||
class FilterExpressionString(FilterExpressionElement):
    """A literal string operand; its value does not depend on the message."""

    def __init__(self, string):
        self.string = string

    def value(self, message):
        # The literal ignores the message entirely.
        return self.string

    def values(self, message):
        # Single-element list form, used for in-array (HAS) checks.
        return [self.string]

    def __repr__(self):
        return '<FEString "%s">' % self.string
|
|
@ -1,226 +0,0 @@
|
|||||||
from constants import *
|
|
||||||
import expression
|
|
||||||
|
|
||||||
def parse(input_):
    """Parse a tab-indented rulebook string into its top-level bins.

    Returns a dict mapping bin name -> Bin.  Each deeper, tab-indented line
    becomes a rule chained under the element one indentation level up; `=>`
    chains several rules on one line.  Raises RulebookIndentationError when a
    line is indented more than one level past its parent.
    """
    rulebook_length = len(input_)

    # Main parsing loop
    idx = 0
    tab_count = 0              # tabs seen at the start of the current line
    current_level = 0          # indentation level of the last processed line
    current_rule = {}          # most recent element per indentation level
    target_rule = None
    statement_cache = None     # previous rule on the same line (for `=>` chains)
    new_line = True            # True until the first non-tab character of a line
    multiple_statements = False  # True once a `=>` chain has started on this line
    buff = ""                  # characters of the statement currently being read
    bins = {}                  # top-level bins, keyed by name

    while idx < rulebook_length:
        char = input_[idx]

        if char == "\t":
            if buff == "":
                # Leading tab: counts towards this line's indentation.
                new_line = True
                tab_count += 1
            else:
                # Tab inside a statement is kept as-is.
                buff += char
        else:
            if new_line == True:
                if tab_count > current_level + 1:
                    raise RulebookIndentationError("Incorrect indentation encountered.")

                if input_[idx:idx+2] == "=>":
                    # Skip over this, it's optional at the start of a line
                    idx += 2
                    continue

                new_line = False

            if char == "\r":
                idx += 1
                continue # Ignore, we don't want carriage returns
            elif char == "\n":
                # Process
                if buff.strip() == "":
                    # Skip empty lines, we don't care about them
                    idx += 1
                    tab_count = 0
                    continue

                current_level = tab_count
                tab_count = 0

                if current_level == 0:
                    # Unindented line: a new top-level bin.
                    bin_name = buff.strip()
                    new_bin = Bin(bin_name)
                    current_rule[current_level] = new_bin
                    bins[bin_name] = new_bin
                else:
                    # Indented line: a rule chained under the element one level up
                    # (or under the last `=>` rule on this same line).
                    if multiple_statements == True:
                        new_rule = create_rule(buff, statement_cache)
                    else:
                        new_rule = create_rule(buff, current_rule[current_level - 1])

                    current_rule[current_level] = new_rule

                buff = ""
                new_line = True
                multiple_statements = False
            elif char == "=" and input_[idx + 1] == ">":
                # Next rule, same line!
                if multiple_statements == True:
                    statement_cache = create_rule(buff, statement_cache)
                else:
                    multiple_statements = True
                    # NOTE(review): uses tab_count here, while the newline branch
                    # uses current_level -- confirm this is intentional.
                    statement_cache = create_rule(buff, current_rule[tab_count - 1])

                buff = ""
                idx += 1 # We read one extra character ahead
            else:
                # TODO: add entire chunks at once for speed
                buff += char

        idx += 1

    return bins
|
|
||||||
|
|
||||||
def create_rule(buff, input_):
    """Turn one rulebook statement into its rule object and attach it to `input_`.

    The statement's first character selects the rule type: `*` node reference,
    `@` method call, `#` bin reference, `:` distributor (optionally with an
    argument list); anything else is treated as a filter expression.  The new
    rule is appended to input_.outputs and returned.
    """
    text = buff.strip()
    marker = text[0]

    if marker == "*":
        # Node reference
        rule = NodeReference(input_, text[1:])
    elif marker == "@":
        # Method call
        rule = MethodReference(input_, text[1:])
    elif marker == "#":
        # Bin reference
        rule = BinReference(input_, text[1:])
    elif marker == ":":
        # Distributor, optionally with arguments: `:Name(a, b)`
        if "(" in text and text[-1:] == ")":
            name, arglist = text[1:-1].split("(", 1)
            args = [part.strip() for part in arglist.split(",")]
        else:
            name = text[1:]
            args = []
        rule = DistributorReference(input_, name, args)
    else:
        # Filter expression
        rule = Filter(input_, text)

    input_.outputs.append(rule)
    return rule
|
|
||||||
|
|
||||||
class Element(object):
    """Common base for every node in the parsed rulebook graph."""

    def __init__(self):
        # Downstream elements this one feeds into.
        self.outputs = []

    def display(self, indent):
        """Print this element, then its children one tab stop deeper."""
        print(("\t" * indent) + self.get_description())
        for child in self.outputs:
            child.display(indent + 1)
|
|
||||||
|
|
||||||
class Bin(Element):
    """A named top-level collection point that messages pass through."""

    def __init__(self, name):
        Element.__init__(self)
        self.name = name

    def get_description(self):
        return "[Bin] %s" % self.name

    def process(self, message):
        # A bin does no filtering of its own; it just passes messages on.
        self.forward(message)

    def forward(self, message):
        for receiver in self.outputs:
            receiver.process(message)
|
|
||||||
|
|
||||||
class Rule(Element):
    """Base class for all non-bin rulebook elements; remembers its input element."""

    def __init__(self, input_):
        Element.__init__(self)
        self.input_ = input_

    def process(self, message):
        # Default behaviour: accept everything and pass it along.
        self.forward(message)

    def forward(self, message):
        for receiver in self.outputs:
            receiver.process(message)
|
|
||||||
|
|
||||||
class Filter(Rule):
    """A rule that forwards a message only when its filter expression matches."""

    def __init__(self, input_, rule):
        Rule.__init__(self, input_)
        self.rule = rule  # original expression source text, kept for display
        self.root = expression.parse(rule)  # parsed expression tree root

    def get_description(self):
        return "[Filter] %s" % self.rule

    def evaluate(self, message):
        # Delegate to the root of the parsed expression tree.
        return self.root.evaluate(message)

    def process(self, message):
        # Only matching messages continue down the chain.
        if self.evaluate(message):
            self.forward(message)
|
|
||||||
|
|
||||||
class BinReference(Rule):
    """A rule that deposits matching messages into a named bin."""

    def __init__(self, input_, name):
        Rule.__init__(self, input_)
        self.bin_name = name

    def get(self):
        """Return the referenced bin, creating and registering it on first use.

        NOTE(review): `bins` is a local variable of parse(), not a module-level
        name, so this lookup raises NameError as written -- confirm where the
        bin registry is supposed to live.
        """
        try:
            return bins[self.bin_name]
        except KeyError:
            new_bin = Bin(self.bin_name)
            bins[self.bin_name] = new_bin
            return new_bin

    def get_description(self):
        return "[BinRef] %s" % self.bin_name

    def process(self, message):
        # TODO: Actually deposit into bin
        # BUGFIX: this previously read `self.name`, which BinReference never
        # sets (the constructor stores `bin_name`), so process() always raised
        # AttributeError.
        print("DEPOSITED INTO BIN %s" % self.bin_name)
|
|
||||||
|
|
||||||
class NodeReference(Rule):
    """A rule that forwards matching messages to a specific node."""

    def __init__(self, input_, name):
        Rule.__init__(self, input_)
        self.node_name = name

    def get_description(self):
        return "[NodeRef] %s" % self.node_name

    def process(self, message):
        # TODO: Actually forward to node
        print("FORWARDED TO NODE %s" % self.node_name)
|
|
||||||
|
|
||||||
class MethodReference(Rule):
    """A rule that hands matching messages to a named extension method."""

    def __init__(self, input_, name):
        Rule.__init__(self, input_)
        # TODO: Support arguments?
        self.method_name = name

    def get_description(self):
        return "[MethodRef] %s" % self.method_name

    def process(self, message):
        # TODO: Actually pass to method
        print("PASSED TO METHOD %s" % self.method_name)
|
|
||||||
|
|
||||||
class DistributorReference(Rule):
    """A rule that hands matching messages to a named distributor with arguments."""

    def __init__(self, input_, name, args):
        Rule.__init__(self, input_)
        self.distributor_name = name
        self.args = args
        # TODO: Parse outputs
        # Perhaps have special syntax for counting group as one entity?

    def get_description(self):
        arg_text = ", ".join(self.args)
        return "[DistRef] %s (%s)" % (self.distributor_name, arg_text)

    def process(self, message):
        # TODO: Actually distribute
        # TODO: Parse args
        print("DISTRIBUTED TO %s" % self.get_description())
|
|
@ -1,32 +0,0 @@
|
|||||||
remote
|
|
||||||
=> $type = "error"
|
|
||||||
=> $scope = "software"
|
|
||||||
=> @DoSomethingAboutError
|
|
||||||
=> #errors
|
|
||||||
=> $type = "task"
|
|
||||||
=> $attr[command] = "convert"
|
|
||||||
=> $attr[original_filetype] = "ogg"
|
|
||||||
=> :RoundRobin
|
|
||||||
=> *abcde-fghij-00000-00001
|
|
||||||
=> *abcde-fghij-00000-00002
|
|
||||||
=> $attr[category] = "video" and ($attr[original_filetype] = "mpeg" or $attr[original_filetype] = "mpg")
|
|
||||||
=> :RoundRobin(*mpeg_nodes)
|
|
||||||
=> $attr[herp1] = "derp1" or $attr[herp2] = "derp2" and $attr[herp3] = "derp3" or $attr[herp4] = "derp4" or $attr[herp5] = "derp5" and $attr[herp6] = "derp6" or $attr[herp7] = "derp7"
|
|
||||||
=> $attr[command] = "whois"
|
|
||||||
=> :WhoisDistributor(*whois_nodes)
|
|
||||||
=> $attr[command] = "dummy"
|
|
||||||
=> :RoundRobin
|
|
||||||
=> @ExtensionMethodOne => #dummy_results
|
|
||||||
=> @ExtensionMethodTwo => #dummy_results
|
|
||||||
=> $tags has "flag"
|
|
||||||
=> @LogFlag
|
|
||||||
|
|
||||||
self
|
|
||||||
=> $type = "error" or $type = "warning"
|
|
||||||
=> #errors
|
|
||||||
|
|
||||||
errors
|
|
||||||
=> *logging_nodes
|
|
||||||
|
|
||||||
dummy_results
|
|
||||||
=> @SaveToDatabase
|
|
@ -1 +0,0 @@
|
|||||||
Testing/development files (such as node databases) can be stored here. They're automatically ignored by Git.
|
|
@ -1,12 +0,0 @@
|
|||||||
self:
|
|
||||||
uuid: 9fb523e5-3948-425c-8642-01c586f699c7
|
|
||||||
port: 3001
|
|
||||||
pubkey: placeholder
|
|
||||||
privkey: placeholder
|
|
||||||
database: test/data/node1.db
|
|
||||||
|
|
||||||
nodes:
|
|
||||||
abcd:
|
|
||||||
host: db149623-5c75-484d-8cd3-fcabf7717008
|
|
||||||
port: 3002
|
|
||||||
pubkey: placeholder
|
|
@ -1,12 +0,0 @@
|
|||||||
self:
|
|
||||||
uuid: db149623-5c75-484d-8cd3-fcabf7717008
|
|
||||||
port: 3002
|
|
||||||
pubkey: placeholder
|
|
||||||
privkey: placeholder
|
|
||||||
database: test/data/node2.db
|
|
||||||
|
|
||||||
nodes:
|
|
||||||
abcd:
|
|
||||||
host: 9fb523e5-3948-425c-8642-01c586f699c7
|
|
||||||
port: 3001
|
|
||||||
pubkey: placeholder
|
|
@ -1,37 +0,0 @@
|
|||||||
import application
|
|
||||||
import threading, logging
|
|
||||||
|
|
||||||
class ApplicationThread(threading.Thread):
    """Runs one application instance (application.run) on its own thread."""

    def __init__(self, name, args, hook):
        threading.Thread.__init__(self)
        self._name = name  # instance label for this application
        self._args = args  # argv-style argument list handed to application.run
        self._hook = hook  # second argument passed through to application.run

    def run(self):
        application.run(self._args, self._hook)
|
|
||||||
|
|
||||||
class Hook(object):
    """A file-like sink that prefixes each written line with an instance name."""

    def __init__(self, name="hook"):
        # BUGFIX: write() reads self._name, but no constructor ever set it, so
        # every write raised AttributeError.  The label is now taken at
        # construction time; the default keeps the old no-argument Hook() call
        # working.
        self._name = name

    def write(self, data, *args, **kwargs):
        """Print each non-blank line of `data` prefixed with "[name] "."""
        lines = data.strip().split("\n")
        for line in lines:
            print("[%s] %s" % (self._name, line))
|
|
||||||
|
|
||||||
def logger_hook(level, format_, target):
    """Route root-logger records at `level` and above to the `target` stream.

    Installs a StreamHandler with the given format on the root logger and
    lowers the root logger's level to match.
    """
    root = logging.getLogger()
    root.setLevel(level)

    stream_handler = logging.StreamHandler(stream=target)
    stream_handler.setLevel(level)
    stream_handler.setFormatter(logging.Formatter(format_))

    root.addHandler(stream_handler)
|
|
||||||
|
|
||||||
#hook = Hook()
#logger_hook(logging.DEBUG, "[%(name)s] %(asctime)s - %(levelname)s - %(message)s", hook)

# Spin up two independent node instances, each with its own config file, so
# their interaction can be exercised inside a single process.
t1 = ApplicationThread("node1", ["--debug", "--config", "test/node1.yaml"], "node1")
t2 = ApplicationThread("node2", ["--debug", "--config", "test/node2.yaml"], "node2")

t1.start()
t2.start()
|
|
Loading…
Reference in New Issue