|
@@ -32,9 +32,10 @@ Usage :
|
|
|
from healthcareio.params import SYS_ARGS
|
|
|
from transport import factory
|
|
|
import requests
|
|
|
-
|
|
|
from healthcareio import analytics
|
|
|
from healthcareio import server
|
|
|
+
|
|
|
+
|
|
|
from healthcareio.parser import get_content
|
|
|
import os
|
|
|
import json
|
|
@@ -43,6 +44,10 @@ import numpy as np
|
|
|
from multiprocessing import Process
|
|
|
import time
|
|
|
from healthcareio import x12
|
|
|
+from healthcareio.export import export
|
|
|
+import smart
|
|
|
+from healthcareio.server import proxy
|
|
|
+import pandas as pd
|
|
|
|
|
|
PATH = os.sep.join([os.environ['HOME'],'.healthcareio'])
|
|
|
OUTPUT_FOLDER = os.sep.join([os.environ['HOME'],'healthcare-io'])
|
|
@@ -53,10 +58,28 @@ if not os.path.exists(PATH) :
|
|
|
import platform
|
|
|
import sqlite3 as lite
|
|
|
# PATH = os.sep.join([os.environ['HOME'],'.edi-parser'])
|
|
|
-def register (**args) :
|
|
|
# Command-line usage text. Printed whenever the program is invoked with
# missing/invalid arguments (e.g. a bad --export config path) or with no
# recognized action at all (final else branch of __main__).
HELP_MESSAGE = """
      cli:

      healthcare-io.py --<[signup|init]> <email> --store <sqlite|mongo> [--batch <value>]
      healthcare-io.py --parse --folder <path> [--batch <value>] [--resume]
      healthcare-io.py --check-update
      healthcare-io.py --export <835|837> --config <config-path>
      action :
      --signup|init signup user and get configuration file
      --parse starts parsing
      --check checks for updates
      --export export data of a 835 or 837 into another database
      parameters :
      --<[signup|init]> signup or get a configuration file from a parsing server
      --folder location of the files (the program will recursively traverse it)
      --store data store mongo or sqlite or mongodb
      --resume will attempt to resume if there was an interruption
      """
|
|
|
+def signup (**args) :
|
|
|
"""
|
|
|
:email user's email address
|
|
|
- :url url of the provider to register
|
|
|
+ :url url of the provider to signup
|
|
|
"""
|
|
|
|
|
|
email = args['email']
|
|
@@ -121,77 +144,77 @@ def init():
|
|
|
#
|
|
|
# Global variables that load the configuration files
|
|
|
|
|
|
-def parse(**args):
|
|
|
- """
|
|
|
- This function will parse the content of a claim or remittance (x12 format) give the following parameters
|
|
|
- :filename absolute path of the file to be parsed
|
|
|
- :type claims|remits in x12 format
|
|
|
- """
|
|
|
- global INFO
|
|
|
- if not INFO :
|
|
|
- INFO = init()
|
|
|
- if args['type'] == 'claims' :
|
|
|
- CONFIG = INFO['parser']['837']
|
|
|
- elif args['type'] == 'remits' :
|
|
|
- CONFIG = INFO['parser']['835']
|
|
|
- else:
|
|
|
- CONFIG = None
|
|
|
- if CONFIG :
|
|
|
- # CONFIG = CONFIG[-1] if 'version' not in args and (args['version'] < len(CONFIG)) else CONFIG[0]
|
|
|
- CONFIG = CONFIG[int(args['version'])-1] if 'version' in SYS_ARGS and int(SYS_ARGS['version']) < len(CONFIG) else CONFIG[-1]
|
|
|
- SECTION = CONFIG['SECTION']
|
|
|
- os.environ['HEALTHCAREIO_SALT'] = INFO['owner']
|
|
|
+# def parse(**args):
|
|
|
+# """
|
|
|
+# This function will parse the content of a claim or remittance (x12 format) give the following parameters
|
|
|
+# :filename absolute path of the file to be parsed
|
|
|
+# :type claims|remits in x12 format
|
|
|
+# """
|
|
|
+# global INFO
|
|
|
+# if not INFO :
|
|
|
+# INFO = init()
|
|
|
+# if args['type'] == 'claims' :
|
|
|
+# CONFIG = INFO['parser']['837']
|
|
|
+# elif args['type'] == 'remits' :
|
|
|
+# CONFIG = INFO['parser']['835']
|
|
|
+# else:
|
|
|
+# CONFIG = None
|
|
|
+# if CONFIG :
|
|
|
+# # CONFIG = CONFIG[-1] if 'version' not in args and (args['version'] < len(CONFIG)) else CONFIG[0]
|
|
|
+# CONFIG = CONFIG[int(args['version'])-1] if 'version' in SYS_ARGS and int(SYS_ARGS['version']) < len(CONFIG) else CONFIG[-1]
|
|
|
+# SECTION = CONFIG['SECTION']
|
|
|
+# os.environ['HEALTHCAREIO_SALT'] = INFO['owner']
|
|
|
|
|
|
|
|
|
- return get_content(args['filename'],CONFIG,SECTION)
|
|
|
-def resume (files,id,config):
|
|
|
- _args = config['store'].copy()
|
|
|
- if 'mongo' in config['store']['type'] :
|
|
|
- _args['type'] = 'mongo.MongoReader'
|
|
|
- reader = factory.instance(**_args)
|
|
|
- _files = []
|
|
|
- if 'resume' in config['analytics'] :
|
|
|
- _args = config['analytics']['resume'][id]
|
|
|
- _files = reader.read(**_args)
|
|
|
- _files = [item['name'] for item in _files if item['name'] != None]
|
|
|
- return list(set(files) - set(_files))
|
|
|
+# return get_content(args['filename'],CONFIG,SECTION)
|
|
|
+# def resume (files,id,config):
|
|
|
+# _args = config['store'].copy()
|
|
|
+# if 'mongo' in config['store']['type'] :
|
|
|
+# _args['type'] = 'mongo.MongoReader'
|
|
|
+# reader = factory.instance(**_args)
|
|
|
+# _files = []
|
|
|
+# if 'resume' in config['analytics'] :
|
|
|
+# _args = config['analytics']['resume'][id]
|
|
|
+# _files = reader.read(**_args)
|
|
|
+# _files = [item['name'] for item in _files if item['name'] != None]
|
|
|
+# return list(set(files) - set(_files))
|
|
|
|
|
|
- return files
|
|
|
- pass
|
|
|
-def apply(files,store_info,logger_info=None):
|
|
|
- """
|
|
|
- :files list of files to be processed in this given thread/process
|
|
|
- :store_info information about data-store, for now disk isn't thread safe
|
|
|
- :logger_info information about where to store the logs
|
|
|
- """
|
|
|
+ # return files
|
|
|
+ # pass
|
|
|
+# def apply(files,store_info,logger_info=None):
|
|
|
+# """
|
|
|
+# :files list of files to be processed in this given thread/process
|
|
|
+# :store_info information about data-store, for now disk isn't thread safe
|
|
|
+# :logger_info information about where to store the logs
|
|
|
+# """
|
|
|
|
|
|
- if not logger_info :
|
|
|
- logger = factory.instance(type='disk.DiskWriter',args={'path':os.sep.join([info['out-folder'],SYS_ARGS['parse']+'.log'])})
|
|
|
- else:
|
|
|
- logger = factory.instance(**logger_info)
|
|
|
+# if not logger_info :
|
|
|
+# logger = factory.instance(type='disk.DiskWriter',args={'path':os.sep.join([info['out-folder'],SYS_ARGS['parse']+'.log'])})
|
|
|
+# else:
|
|
|
+# logger = factory.instance(**logger_info)
|
|
|
|
|
|
- writer = factory.instance(**store_info)
|
|
|
- for filename in files :
|
|
|
+# writer = factory.instance(**store_info)
|
|
|
+# for filename in files :
|
|
|
|
|
|
- if filename.strip() == '':
|
|
|
- continue
|
|
|
- # content,logs = get_content(filename,CONFIG,CONFIG['SECTION'])
|
|
|
- #
|
|
|
- try:
|
|
|
- content,logs = parse(filename = filename,type=SYS_ARGS['parse'])
|
|
|
+# if filename.strip() == '':
|
|
|
+# continue
|
|
|
+# # content,logs = get_content(filename,CONFIG,CONFIG['SECTION'])
|
|
|
+# #
|
|
|
+# try:
|
|
|
+# content,logs = parse(filename = filename,type=SYS_ARGS['parse'])
|
|
|
|
|
|
- if content :
|
|
|
- writer.write(content)
|
|
|
- if logs :
|
|
|
- [logger.write(dict(_row,**{"parse":SYS_ARGS['parse']})) for _row in logs]
|
|
|
- else:
|
|
|
- logger.write({"parse":SYS_ARGS['parse'],"name":filename,"completed":True,"rows":len(content)})
|
|
|
- except Exception as e:
|
|
|
+# if content :
|
|
|
+# writer.write(content)
|
|
|
+# if logs :
|
|
|
+# [logger.write(dict(_row,**{"parse":SYS_ARGS['parse']})) for _row in logs]
|
|
|
+# else:
|
|
|
+# logger.write({"parse":SYS_ARGS['parse'],"name":filename,"completed":True,"rows":len(content)})
|
|
|
+# except Exception as e:
|
|
|
|
|
|
- logger.write({"parse":SYS_ARGS['parse'],"filename":filename,"completed":False,"rows":-1,"msg":e.args[0]})
|
|
|
- # print ([filename,len(content)])
|
|
|
- #
|
|
|
- # @TODO: forward this data to the writer and log engine
|
|
|
+# logger.write({"parse":SYS_ARGS['parse'],"filename":filename,"completed":False,"rows":-1,"msg":e.args[0]})
|
|
|
+# # print ([filename,len(content)])
|
|
|
+# #
|
|
|
+# # @TODO: forward this data to the writer and log engine
|
|
|
#
|
|
|
def upgrade(**args):
|
|
|
"""
|
|
@@ -200,13 +223,28 @@ def upgrade(**args):
|
|
|
"""
|
|
|
url = args['url'] if 'url' in args else URL+"/upgrade"
|
|
|
headers = {"key":args['key'],"email":args["email"],"url":url}
|
|
|
+def check(**_args):
|
|
|
+ """
|
|
|
+ This function will check if there is an update available (versions are in the configuration file)
|
|
|
+ :param url
|
|
|
+ """
|
|
|
+ url = _args['url'][:-1] if _args['url'].endswith('/') else _args['url']
|
|
|
+ url = url + "/version"
|
|
|
+ if 'version' not in _args :
|
|
|
+ version = {"_id":"version","current":0.0}
|
|
|
+ else:
|
|
|
+ version = _args['version']
|
|
|
+ http = requests.session()
|
|
|
+ r = http.get(url)
|
|
|
+ return r.json()
|
|
|
|
|
|
if __name__ == '__main__' :
|
|
|
info = init()
|
|
|
|
|
|
if 'out-folder' in SYS_ARGS :
|
|
|
OUTPUT_FOLDER = SYS_ARGS['out-folder']
|
|
|
-
|
|
|
+ SYS_ARGS['url'] = SYS_ARGS['url'] if 'url' in SYS_ARGS else URL
|
|
|
+
|
|
|
if set(list(SYS_ARGS.keys())) & set(['signup','init']):
|
|
|
#
|
|
|
# This command will essentially get a new copy of the configurations
|
|
@@ -214,10 +252,10 @@ if __name__ == '__main__' :
|
|
|
#
|
|
|
|
|
|
email = SYS_ARGS['signup'].strip() if 'signup' in SYS_ARGS else SYS_ARGS['init']
|
|
|
- url = SYS_ARGS['url'] if 'url' in SYS_ARGS else 'https://healthcareio.the-phi.com'
|
|
|
+ url = SYS_ARGS['url'] if 'url' in SYS_ARGS else URL
|
|
|
store = SYS_ARGS['store'] if 'store' in SYS_ARGS else 'sqlite'
|
|
|
db='healthcareio' if 'db' not in SYS_ARGS else SYS_ARGS['db']
|
|
|
- register(email=email,url=url,store=store,db=db)
|
|
|
+ signup(email=email,url=url,store=store,db=db)
|
|
|
# else:
|
|
|
# m = """
|
|
|
# usage:
|
|
@@ -241,51 +279,31 @@ if __name__ == '__main__' :
|
|
|
if 'file' in SYS_ARGS :
|
|
|
files = [SYS_ARGS['file']] if not os.path.isdir(SYS_ARGS['file']) else []
|
|
|
if 'folder' in SYS_ARGS and os.path.exists(SYS_ARGS['folder']):
|
|
|
- names = os.listdir(SYS_ARGS['folder'])
|
|
|
- files += [os.sep.join([SYS_ARGS['folder'],name]) for name in names if not os.path.isdir(os.sep.join([SYS_ARGS['folder'],name]))]
|
|
|
+ for root,_dir,f in os.walk(SYS_ARGS['folder']) :
|
|
|
+
|
|
|
+ if f :
|
|
|
+ files += [os.sep.join([root,name]) for name in f]
|
|
|
+
|
|
|
+ # names = os.listdir(SYS_ARGS['folder'])
|
|
|
+ # files += [os.sep.join([SYS_ARGS['folder'],name]) for name in names if not os.path.isdir(os.sep.join([SYS_ARGS['folder'],name]))]
|
|
|
else:
|
|
|
#
|
|
|
- # raise an erro
|
|
|
+ # raise an error
|
|
|
+
|
|
|
pass
|
|
|
#
|
|
|
# if the user has specified to resume, we should look into the logs and pull the files processed and those that haven't
|
|
|
#
|
|
|
if 'resume' in SYS_ARGS :
|
|
|
- files = resume(files,SYS_ARGS['parse'],info)
|
|
|
- print (["Found ",len(files)," files unprocessed"])
|
|
|
+ store_config = json.loads( (open(os.sep.join([PATH,'config.json']))).read() )
|
|
|
+ files = proxy.get.resume(files,store_config )
|
|
|
+ # print (["Found ",len(files)," files unprocessed"])
|
|
|
#
|
|
|
# @TODO: Log this here so we know what is being processed or not
|
|
|
SCOPE = None
|
|
|
|
|
|
if files : #and ('claims' in SYS_ARGS['parse'] or 'remits' in SYS_ARGS['parse']):
|
|
|
- # logger = factory.instance(type='disk.DiskWriter',args={'path':os.sep.join([info['out-folder'],SYS_ARGS['parse']+'.log'])})
|
|
|
- # if info['store']['type'] == 'disk.DiskWriter' :
|
|
|
- # info['store']['args']['path'] += (os.sep + 'healthcare-io.json')
|
|
|
- # elif info['store']['type'] == 'disk.SQLiteWriter' :
|
|
|
- # # info['store']['args']['path'] += (os.sep + 'healthcare-io.db3')
|
|
|
- # pass
|
|
|
-
|
|
|
-
|
|
|
- # if info['store']['type'] == 'disk.SQLiteWriter' :
|
|
|
- # info['store']['args']['table'] = SYS_ARGS['parse'].strip().lower()
|
|
|
- # _info = json.loads(json.dumps(info['store']))
|
|
|
- # _info['args']['table']='logs'
|
|
|
- # else:
|
|
|
- # #
|
|
|
- # # if we are working with no-sql we will put the logs in it (performance )?
|
|
|
-
|
|
|
- # info['store']['args']['doc'] = SYS_ARGS['parse'].strip().lower()
|
|
|
- # _info = json.loads(json.dumps(info['store']))
|
|
|
- # _info['args']['doc'] = 'logs'
|
|
|
- # logger = factory.instance(**_info)
|
|
|
|
|
|
- # writer = factory.instance(**info['store'])
|
|
|
-
|
|
|
- #
|
|
|
- # we need to have batches ready for this in order to run some of these queries in parallel
|
|
|
- # @TODO: Make sure it is with a persistence storage (not disk .. not thread/process safe yet)
|
|
|
- # - Make sure we can leverage this on n-cores later on, for now the assumption is a single core
|
|
|
- #
|
|
|
BATCH_COUNT = 1 if 'batch' not in SYS_ARGS else int (SYS_ARGS['batch'])
|
|
|
|
|
|
files = np.array_split(files,BATCH_COUNT)
|
|
@@ -304,27 +322,7 @@ if __name__ == '__main__' :
|
|
|
while len(procs) > 0 :
|
|
|
procs = [proc for proc in procs if proc.is_alive()]
|
|
|
time.sleep(2)
|
|
|
- # for filename in files :
|
|
|
-
|
|
|
- # if filename.strip() == '':
|
|
|
- # continue
|
|
|
- # # content,logs = get_content(filename,CONFIG,CONFIG['SECTION'])
|
|
|
- # #
|
|
|
- # try:
|
|
|
- # content,logs = parse(filename = filename,type=SYS_ARGS['parse'])
|
|
|
- # if content :
|
|
|
- # writer.write(content)
|
|
|
- # if logs :
|
|
|
- # [logger.write(dict(_row,**{"parse":SYS_ARGS['parse']})) for _row in logs]
|
|
|
- # else:
|
|
|
- # logger.write({"parse":SYS_ARGS['parse'],"name":filename,"completed":True,"rows":len(content)})
|
|
|
- # except Exception as e:
|
|
|
- # logger.write({"parse":SYS_ARGS['parse'],"filename":filename,"completed":False,"rows":-1,"msg":e.args[0]})
|
|
|
- # # print ([filename,len(content)])
|
|
|
- # #
|
|
|
- # # @TODO: forward this data to the writer and log engine
|
|
|
- # #
|
|
|
-
|
|
|
+
|
|
|
|
|
|
|
|
|
pass
|
|
@@ -337,103 +335,71 @@ if __name__ == '__main__' :
|
|
|
|
|
|
# PATH= SYS_ARGS['config'] if 'config' in SYS_ARGS else os.sep.join([os.environ['HOME'],'.healthcareio','config.json'])
|
|
|
|
|
|
- e = analytics.engine(os.sep.join([PATH,'config.json'])) #--@TODO: make the configuration file globally accessible
|
|
|
- e.apply(type='claims',serialize=True)
|
|
|
- SYS_ARGS['engine'] = e
|
|
|
+ if os.path.exists(os.sep.join([PATH,'config.json'])) :
|
|
|
+ e = analytics.engine(os.sep.join([PATH,'config.json'])) #--@TODO: make the configuration file globally accessible
|
|
|
+ e.apply(type='claims',serialize=True)
|
|
|
+ SYS_ARGS['engine'] = e
|
|
|
+ SYS_ARGS['config'] = json.loads(open(os.sep.join([PATH,'config.json'])).read())
|
|
|
+ else:
|
|
|
+ SYS_ARGS['config'] = {"owner":None,"store":None}
|
|
|
+
|
|
|
+ if 'args' not in SYS_ARGS['config'] :
|
|
|
+ SYS_ARGS['config']["args"] = {"batch":1,"resume":True,"folder":"/data"}
|
|
|
|
|
|
+ me = pd.DataFrame(smart.top.read(name='healthcare-io.py')).args.unique().tolist()
|
|
|
+ SYS_ARGS['me'] = me[0] #-- This key will identify the current process
|
|
|
+
|
|
|
pointer = lambda : server.app.run(host='0.0.0.0',port=PORT,debug=DEBUG,threaded=False)
|
|
|
pthread = Process(target=pointer,args=())
|
|
|
pthread.start()
|
|
|
+ elif 'check-update' in SYS_ARGS :
|
|
|
+ _args = {"url":SYS_ARGS['url']}
|
|
|
+ try:
|
|
|
+ if os.path.exists(os.sep.join([PATH,'config.json'])) :
|
|
|
+ SYS_ARGS['config'] = json.loads((open(os.sep.join([PATH,'config.json']))).read())
|
|
|
+ else:
|
|
|
+ SYS_ARGS['config'] = {}
|
|
|
+ if 'version' in SYS_ARGS['config'] :
|
|
|
+ _args['version'] = SYS_ARGS['config']['version']
|
|
|
+ version = check(**_args)
|
|
|
+ _version = {"current":0.0}if 'version' not in SYS_ARGS['config'] else SYS_ARGS['config']['version']
|
|
|
+ if _version['current'] != version['current'] :
|
|
|
+ print ()
|
|
|
+ print ("You need to upgrade your system to version to ",version['current'])
|
|
|
+ print ("\t- signup (for new configuration)")
|
|
|
+ print ("\t- use pip to upgrade the codebase")
|
|
|
+ else:
|
|
|
+ print ()
|
|
|
+ print ("You are running the current configuraiton version ",_version['current'])
|
|
|
+ except Exception as e:
|
|
|
+ print (e)
|
|
|
+ pass
|
|
|
|
|
|
elif 'export' in SYS_ARGS:
|
|
|
#
|
|
|
# this function is designed to export the data to csv
|
|
|
#
|
|
|
- format = SYS_ARGS['format'] if 'format' in SYS_ARGS else 'csv'
|
|
|
- format = format.lower()
|
|
|
- if set([format]) not in ['xls','csv'] :
|
|
|
- format = 'csv'
|
|
|
-
|
|
|
- else:
|
|
|
- msg = """
|
|
|
- cli:
|
|
|
-
|
|
|
- healthcare-io.py --<[signup|init]> <email> --store <sqlite|mongo> [--batch <value>]
|
|
|
- healthcare-io.py --parse claims --folder <path> [--batch <value>]
|
|
|
- healthcare-io.py --parse remits --folder <path> [--batch <value>] [--resume]
|
|
|
-
|
|
|
- parameters :
|
|
|
- --<[signup|init]> signup or get a configuration file from a parsing server
|
|
|
- --store data store mongo or sqlite or mongodb
|
|
|
- --resume will attempt to resume if there was an interruption
|
|
|
- """
|
|
|
- print(msg)
|
|
|
- pass
|
|
|
- # """
|
|
|
- # The program was called from the command line thus we are expecting
|
|
|
- # parse in [claims,remits]
|
|
|
- # config os.sep.path.exists(path)
|
|
|
- # folder os.sep.path.exists(path)
|
|
|
- # store store ()
|
|
|
- # """
|
|
|
- # p = len( set(['store','config','folder']) & set(SYS_ARGS.keys())) == 3 and ('db' in SYS_ARGS or 'path' in SYS_ARGS)
|
|
|
- # TYPE = {
|
|
|
- # 'mongo':'mongo.MongoWriter',
|
|
|
- # 'couch':'couch.CouchWriter',
|
|
|
- # 'disk':'disk.DiskWriter'
|
|
|
- # }
|
|
|
- # INFO = {
|
|
|
- # '837':{'scope':'claims','section':'HL'},
|
|
|
- # '835':{'scope':'remits','section':'CLP'}
|
|
|
- # }
|
|
|
- # if p :
|
|
|
- # args = {}
|
|
|
- # scope = SYS_ARGS['config'][:-5].split(os.sep)[-1]
|
|
|
- # CONTEXT = INFO[scope]['scope']
|
|
|
- # #
|
|
|
- # # @NOTE:
|
|
|
- # # improve how database and data stores are handled.
|
|
|
- # if SYS_ARGS['store'] == 'couch' :
|
|
|
- # args = {'url': SYS_ARGS['url'] if 'url' in SYS_ARGS else 'http://localhost:5984'}
|
|
|
- # args['dbname'] = SYS_ARGS['db']
|
|
|
+ path = SYS_ARGS['config']
|
|
|
+ TYPE = SYS_ARGS['export'] if 'export' in SYS_ARGS else '835'
|
|
|
+ if not os.path.exists(path) or TYPE not in ['835','837']:
|
|
|
+ print (HELP_MESSAGE)
|
|
|
+ else:
|
|
|
+ #
|
|
|
+ # Let's run the export function ..., This will push files into a data-store of choice Redshift, PostgreSQL, MySQL ...
|
|
|
+ #
|
|
|
+ _store = {"type":"sql.SQLWriter","args":json.loads( (open(path) ).read())}
|
|
|
|
|
|
- # elif SYS_ARGS ['store'] == 'mongo':
|
|
|
- # args = {'host':SYS_ARGS['host']if 'host' in SYS_ARGS else 'localhost:27017'}
|
|
|
- # if SYS_ARGS['store'] in ['mongo','couch']:
|
|
|
- # args['dbname'] = SYS_ARGS['db'] if 'db' in SYS_ARGS else 'claims_outcomes'
|
|
|
- # args['doc'] = CONTEXT
|
|
|
+ pipes = export.Factory.instance(type=TYPE,write_store=_store) #"inspect":0,"cast":0}})
|
|
|
+ # pipes[0].run()
|
|
|
+ for thread in pipes:
|
|
|
+ thread.start()
|
|
|
+ time.sleep(1)
|
|
|
+ while pipes :
|
|
|
+ pipes = [thread for thread in pipes if thread.is_alive()]
|
|
|
+ time.sleep(1)
|
|
|
+
|
|
|
|
|
|
- # TYPE = TYPE[SYS_ARGS['store']]
|
|
|
- # writer = factory.instance(type=TYPE,args=args)
|
|
|
- # if SYS_ARGS['store'] == 'disk':
|
|
|
- # writer.init(path = 'output-claims.json')
|
|
|
- # logger = factory.instance(type=TYPE,args= dict(args,**{"doc":"logs"}))
|
|
|
- # files = os.listdir(SYS_ARGS['folder'])
|
|
|
- # CONFIG = json.loads(open(SYS_ARGS['config']).read())
|
|
|
- # SECTION = INFO[scope]['section']
|
|
|
|
|
|
- # for file in files :
|
|
|
- # if 'limit' in SYS_ARGS and files.index(file) == int(SYS_ARGS['limit']) :
|
|
|
- # break
|
|
|
- # else:
|
|
|
- # filename = os.sep.join([SYS_ARGS['folder'],file])
|
|
|
-
|
|
|
- # try:
|
|
|
- # content,logs = get_content(filename,CONFIG,SECTION)
|
|
|
- # except Exception as e:
|
|
|
- # if sys.version_info[0] > 2 :
|
|
|
- # logs = [{"filename":filename,"msg":e.args[0]}]
|
|
|
- # else:
|
|
|
- # logs = [{"filename":filename,"msg":e.message}]
|
|
|
- # content = None
|
|
|
- # if content :
|
|
|
-
|
|
|
- # writer.write(content)
|
|
|
- # if logs:
|
|
|
-
|
|
|
- # logger.write(logs)
|
|
|
-
|
|
|
-
|
|
|
- # pass
|
|
|
- # else:
|
|
|
- # print (__doc__)
|
|
|
+ else:
|
|
|
+
|
|
|
+ print(HELP_MESSAGE)
|