@@ -12,8 +12,8 @@ from sets import Set
import re
import datetime
import Queue
-
-
+from threading import Thread
+import time
class Analysis:
    def __init__(self):
        self.logs = []
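
Note: this module is Python 2 code (`from sets import Set`, the `Queue` module, and the `print` statement further down all predate Python 3); the new `from threading import Thread` and `import time` lines support the `Monitor` thread added below. For reference only, `Queue` was renamed `queue` in Python 3, so a compatibility import would look like this (illustrative, not part of this change):

    try:
        import Queue            # Python 2 module name, as used throughout this file
    except ImportError:
        import queue as Queue   # Python 3 renamed the module to 'queue'
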
@@ -96,7 +96,8 @@ class Sandbox(Analysis):
        n = len(Set(required_modules) - Set(sandbox_modules))
        value = 1 - (n/N)
        missing = list(Set(required_modules) - Set(sandbox_modules))
-        return {"value":value,"missing":missing}
+
+        return dict(self.now,**{"value":value,"missing":missing})
"""
    This class performs the analysis of a list of processes and determines
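
Note: `dict(self.now, **r)` is the Python 2 idiom for merging two dicts into a new one, and the same pattern is applied to the `ProcessCounter` and `DetailProcess` return values below. It presumably stamps every result with the timestamp fields held in `self.now`, which is assumed to be a plain dict set up by the `Analysis` base class (not shown in this diff). A minimal sketch of the idiom, with made-up values:

    now = {"year": 2016, "month": 1, "day": 14}   # hypothetical contents of self.now
    r = {"value": 0.75, "missing": ["numpy"]}
    stamped = dict(now, **r)                      # copy of now, overlaid with r's keys
    # stamped == {"year": 2016, "month": 1, "day": 14, "value": 0.75, "missing": ["numpy"]}
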
@@ -121,7 +122,7 @@ class ProcessCounter(Analysis):
        #N = len(r)
        #n = sum(r)
        #return n/N
-        return r
+        return dict(self.now,**r)
"""
    This class returns both an application's memory and cpu usage
"""
@@ -170,7 +171,7 @@ class DetailProcess(Analysis):
        r= {"memory_usage":row[0],"cpu_usage":row[1],"memory_available":row[2]/1000,"label":row[3]}
        status = self.status(r)
        r['status'] = status
-        return r
+        return dict(self.now,**r)
    def composite(self):
        #value = self.evaluate(self.name)
        #row= {"memory_usage":value[0],"cpu_usage":value[1]}
@@ -178,17 +179,84 @@ class DetailProcess(Analysis):
        #ma = [self.evaluate(name) for name in self.names]
        ma = []
        for name in self.names:
+
            matrix = self.evaluate(name)
+
            ma += [self.format(row) for row in matrix]
        #return [{"memory_usage":row[0],"cpu_usage":row[1],"memory_available":row[2]/1000,"label":row[3]} for row in ma]
        return ma

-class QListener(Thread)
-    def __init__(self,handlers):
-        self.handlers = handlers
-        self.queue = Queue.LifoQueue()
-    def post(self) :
-        for handler in self.handlers:
-            self.queue.put(handler.)
+class Monitor (Thread):
+    def __init__(self,pConfig,pQueue,id='processes') :
+        Thread.__init__(self)
+
+        self.config = pConfig[id]
+        self.queue = pQueue
+        self.logs = []
+        self.handler = self.config['class']
+        self.mconfig = self.config['config']
+    def run(self):
+        r = {}
+        while True:
+            for label in self.mconfig:
+
+
+                self.handler.init(self.mconfig[label])
+                r[label] = self.handler.composite()
+            self.logs.append(r)
+            self.queue.put(r)
+            self.prune()
+            self.queue.task_done()
+
+            time.sleep(10)
+    def prune(self) :
+        MAX_ENTRIES = 1000
+        if len(self.logs) > MAX_ENTRIES :
+            BEG = len(self.logs) - MAX_ENTRIES - 1
+            self.logs = self.logs[BEG:]
+
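
Note: `Monitor` is a producer thread: every 10 seconds it runs each configured handler, keeps a capped local history in `self.logs` (via `prune()`), and publishes the snapshot to the shared queue. As written, `run()` also calls `task_done()` to balance its own `put()`, so a consumer should only call `get()`. A hypothetical wiring sketch, assuming the config shape implied above (`'class'` holding a handler instance with the `init()`/`composite()` pair `run()` relies on, `'config'` mapping labels to per-label settings) and that `DetailProcess()` can be constructed without arguments:

    handler = DetailProcess()                                 # assumed no-arg constructor
    config = {'processes': {'class': handler, 'config': {'apache': ['apache']}}}  # assumed shape
    q = Queue.Queue()
    m = Monitor(config, q)
    m.daemon = True       # the polling loop never exits; don't block interpreter shutdown
    m.start()
    snapshot = q.get()    # blocks until Monitor publishes its first reading
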
+class mapreducer:
+    def __init__(self):
+        self.store = {}
+    def filter(self,key,dataset):
+        return [row[key] for row in dataset if key in row]
+    def run(self,dataset,mapper,reducer):
+        r = None
+        if mapper is not None:
+            if isinstance(dataset,list) :
+                [mapper(row,self.emit) for row in dataset]
+
+
+        if reducer is not None:
+            r = [reducer(self.store[key]) for key in self.store]
+        else:
+            r = self.store
+        return r
+    def mapper(self,row,emit):
+        [emit(item['label'],item) for item in row]
+    def reducer(self,values):
+        return values
+
+    def emit(self,key,content):
+        if key not in self.store:
+            self.store[key] = []
+        self.store[key].append(content)
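
Note: `mapreducer` is a small in-memory map/reduce: `run()` applies `mapper` to every row, `emit()` groups the mapped items by key in `self.store`, and `reducer` (if given) folds each group. A usage sketch with made-up data, using the default `mapper` (each row must be a list of dicts carrying a `label` key):

    mr = mapreducer()
    dataset = [[{'label':'apache','cpu_usage':0.8}], [{'label':'apache','cpu_usage':1.2}]]
    grouped = mr.run(dataset, mr.mapper, None)    # no reducer: returns the grouped store
    # grouped == {'apache': [{'label':'apache','cpu_usage':0.8},
    #                        {'label':'apache','cpu_usage':1.2}]}
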
+# #
+# # We need to generate the appropriate dataset here
+# # map/reduce is a well documented technique for generating datasets
+# #
+# def map(self,key,id,rows):
+
+#     #r = [row[key] for row in rows if key in row]
+#     for row in rows:
+#         if key in row :
+#             for xr in row[key]:
+#                 self.emit(xr['label'],xr)
+
+
+
+# def reduce(keys,values):
+#     print values[0]
+#     return r