@@ -19,13 +19,13 @@ class BasicParser (Process) :
         super().__init__()
         self._plugins = _args['plugins']
         self._parents = _args['parents']
-        self._files = _args['files']
+        self._files = _args['files']
         self._store = _args['store']

     def apply(self,**_args):
-        _content = _args['content']
-        _filetype = _args['x12']
-        _doc = _args['document'] #{}
+        _content = _args['content']
+        _filetype = _args['x12']
+        _doc = _args['document'] #{}
         _documentHandler = x12.util.document.Builder(plugins = self._plugins,parents=self._parents)
         try:

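For reference, `BasicParser` is configured entirely through keyword arguments (`plugins`, `parents`, `files`, `store`), and `apply` likewise reads its inputs from keyword arguments (`content`, `x12`, `document`) before handing plugin and parent metadata to `x12.util.document.Builder`. A minimal calling sketch, assuming only the lookups visible in this hunk; the file path, the '837' label, and the empty registries below are hypothetical placeholders:

    # Sketch only: the keyword names mirror the _args lookups in the hunk above.
    # The path, the '837' label, and the empty plugin/parent registries are
    # placeholders; the expected shape of `content` (raw text vs. pre-split
    # segments) is not shown in this hunk.
    parser = BasicParser(plugins={}, parents={}, files=['/data/claims/sample.837'], store=None)
    with open('/data/claims/sample.837') as f:
        raw = f.read()
    parser.apply(content=raw, x12='837', document={})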
@@ -116,30 +116,30 @@ class X12Parser(BasicParser):
         _writer.close()


-def instance (**_args):
-    """
-    :path
-    """
-    # _files = x12.util.Files.get(_args['file'])
+# def instance (**_args):
+#     """
+#     :path
+#     """
+#     # _files = x12.util.Files.get(_args['file'])

-    # #
-    # # We can split these files (multi-processing)
-    # #
-    # _jobCount = 1 if 'jobs' not in _args else int (_args['jobs'])
-    # _files = np.array_split(_files,_jobCount)
-    # PATH = os.sep.join([os.environ['HOME'],'.healthcareio','config.json'])
-    # if 'config' in _args :
-    # PATH = _args['config']
-    # f = open(PATH)
-    # _config = json.loads(f.read())
-    # f.close()
-    # jobs = []
-    # for _batch in _files :
-    # pthread = Parser(files=_batch,config=_config)
-    # pthread.start()
-    # jobs.append(pthread)
-    # time.sleep(1)
-    pass
+#     # #
+#     # # We can split these files (multi-processing)
+#     # #
+#     # _jobCount = 1 if 'jobs' not in _args else int (_args['jobs'])
+#     # _files = np.array_split(_files,_jobCount)
+#     # PATH = os.sep.join([os.environ['HOME'],'.healthcareio','config.json'])
+#     # if 'config' in _args :
+#     # PATH = _args['config']
+#     # f = open(PATH)
+#     # _config = json.loads(f.read())
+#     # f.close()
+#     # jobs = []
+#     # for _batch in _files :
+#     # pthread = Parser(files=_batch,config=_config)
+#     # pthread.start()
+#     # jobs.append(pthread)
+#     # time.sleep(1)
+#     pass


 # class parser (Process) :
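The helper being commented out here sketched a fan-out pattern: split the input files into `jobs` batches with `np.array_split`, load a JSON config (defaulting to `~/.healthcareio/config.json`), and start one parser process per batch. A minimal, self-contained sketch of that pattern; the `BatchWorker` class and `run_batches` helper are hypothetical stand-ins, not part of this codebase:

    import json
    import os
    from multiprocessing import Process

    import numpy as np

    class BatchWorker(Process):
        # Hypothetical stand-in for the Parser(files=..., config=...) process
        # referenced in the commented-out block.
        def __init__(self, **_args):
            super().__init__()
            self._files = _args['files']
            self._config = _args['config']

        def run(self):
            for _path in self._files:
                pass  # parse each file here

    def run_batches(files, jobs=1, config_path=None):
        # Default config location mirrors the commented-out code above.
        config_path = config_path or os.sep.join([os.environ['HOME'], '.healthcareio', 'config.json'])
        with open(config_path) as f:
            _config = json.loads(f.read())
        workers = []
        for _batch in np.array_split(files, int(jobs)):  # one batch of file paths per job
            worker = BatchWorker(files=list(_batch), config=_config)
            worker.start()
            workers.append(worker)
        return workers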