diff --git a/run_workflow_phaeoexplorer.py b/run_workflow_phaeoexplorer.py
index 661ea13ca3b60fd6a14b9082b355b16f6141b025..9ac3f23dccabeb70a2c73fc333d94c9678c186e8 100755
--- a/run_workflow_phaeoexplorer.py
+++ b/run_workflow_phaeoexplorer.py
@@ -987,12 +987,13 @@ if __name__ == "__main__":
 
     args = parser.parse_args()
 
+    bioblend_logger = logging.getLogger("bioblend")
     if args.verbose:
         logging.basicConfig(level=logging.DEBUG)
+        bioblend_logger.setLevel(logging.DEBUG)
     else:
         logging.basicConfig(level=logging.INFO)
-        logging.getLogger("urllib3").setLevel(logging.INFO)
-        logging.getLogger("bioblend").setLevel(logging.INFO)
+        bioblend_logger.setLevel(logging.INFO)
 
     # Parsing the config file if provided, using the default config otherwise
     if not args.config:
@@ -1000,6 +1001,12 @@ if __name__ == "__main__":
     else:
         args.config = os.path.abspath(args.config)
 
+    if args.config:
+        config_file = os.path.abspath(args.config)
+    else:
+        config_file = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), constants.DEFAULT_CONFIG)
+    config = utilities.parse_config(config_file)
+
     if not args.main_directory:
         args.main_directory = os.getcwd()
     else:
@@ -1190,7 +1197,7 @@ if __name__ == "__main__":
             logging.warning("Error finding workflow %s" % workflow_name)
 
         # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-        # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+        instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
         logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
 
@@ -1426,7 +1433,7 @@ if __name__ == "__main__":
             logging.warning("Error finding workflow %s" % workflow_name)
 
         # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-        # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+        instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
         logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
 
@@ -1541,7 +1548,7 @@ if __name__ == "__main__":
             logging.warning("Error finding workflow %s" % workflow_name)
 
         # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-        # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+        instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
         logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
 
@@ -1695,7 +1702,7 @@ if __name__ == "__main__":
             logging.warning("Error finding workflow %s" % workflow_name)
 
         # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-        # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+        instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
         logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
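For context, the patch re-enables the previously commented-out `instance.workflows.invoke_workflow(...)` call in four places. Below is a minimal, standalone sketch of how that bioblend call is typically driven; the Galaxy URL, API key, workflow/history/dataset IDs and the datamap keys are illustrative placeholders, not values from this repository or this script.

```python
# Minimal sketch of the bioblend call the patch re-enables.
# All IDs, the URL and the API key below are placeholders (assumptions),
# not values taken from run_workflow_phaeoexplorer.py.
from bioblend.galaxy import GalaxyInstance

instance = GalaxyInstance(url="http://localhost:8080", key="<galaxy-api-key>")

workflow_id = "<workflow-id>"   # e.g. looked up via instance.workflows.get_workflows()
history_id = "<history-id>"     # e.g. looked up via instance.histories.get_histories()

# Map workflow input steps (by step index) to datasets already in a history ("hda")
datamap = {
    "0": {"src": "hda", "id": "<dataset-id>"},
}
# Per-step tool parameter overrides, keyed by step id (empty if none)
workflow_parameters = {}

invocation = instance.workflows.invoke_workflow(
    workflow_id=workflow_id,
    history_id=history_id,
    params=workflow_parameters,
    inputs=datamap,
    allow_tool_state_corrections=True,
)
print(invocation["id"])  # invocation id, useful for polling job state afterwards
```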