diff --git a/gga_init.py b/gga_init.py
index 5a7b47d41b45a39e46e39a405eff7ee19ed6ec1f..8c5e507cb31b8328e3b69b213b5862a7811c2bcf 100644
--- a/gga_init.py
+++ b/gga_init.py
@@ -11,7 +11,7 @@ import yaml
 import shutil
 
 from pathlib import Path
-from jinja2 import Template
+from jinja2 import Template, Environment, FileSystemLoader
 
 import utilities
 import speciesData
@@ -55,16 +55,15 @@ class DeploySpeciesStack(speciesData.SpeciesData):
             sys.exit()
 
         # Copy the custom banner to the species dir (banner used in tripal pages)
-        if not self.config["custom_banner"] or not self.config["custom_banner"] == "/path/to/banner" or not self.config["custom_banner"] == "":
-            try:
-                logging.debug("Custom banner path: %s" % self.config["custom_banner"])
-                if os.path.isfile(os.path.abspath(self.config["custom_banner"])):
-                    shutil.copy(os.path.abspath(self.config["custom_banner"]), "%s/banner.png" % self.species_dir)
-
-            except FileNotFoundError:
-                logging.warning("Specified banner not found (%s), skipping" % self.config["custom_banner"])
-
-        # Copy nginx conf
+        # if "custom_banner" in self.config.keys() and not self.config["custom_banner"] == "/path/to/banner" and not self.config["custom_banner"] == "":
+        #     try:
+        #         logging.debug("Custom banner path: %s" % self.config["custom_banner"])
+        #         if os.path.isfile(os.path.abspath(self.config["custom_banner"])):
+        #             shutil.copy(os.path.abspath(self.config["custom_banner"]), "%s/banner.png" % self.species_dir)
+        #     except FileNotFoundError:
+        #         logging.warning("Specified banner not found (%s), skipping" % self.config["custom_banner"])
+
+        # Write nginx conf
         try:
             os.mkdir("./nginx/")
             os.mkdir("./nginx/conf")
@@ -140,7 +139,7 @@ class DeploySpeciesStack(speciesData.SpeciesData):
             sys.exit(0)
 
         # Path to the templates used to generate the custom docker-compose files for an input species
-        stack_template_path = self.script_dir + "/templates/gspecies_compose_template.yml"
+        gspecies_template_path = self.script_dir + "/templates/gspecies_compose_template.yml.j2"
         traefik_template_path = self.script_dir + "/templates/traefik_compose_template.yml"
         # authelia_config_path = self.script_dir + "/templates/authelia_config_example.yml"  # Do not copy the authelia config!
         authelia_users_path = self.script_dir + "/templates/authelia_users_template.yml"
@@ -154,31 +153,49 @@ class DeploySpeciesStack(speciesData.SpeciesData):
         else:
             genus_species_strain_sex = "{0}_{1}".format(self.genus.lower(), self.species)
 
-        with open(stack_template_path, 'r') as infile:
-            organism_content = list()
-            for line in infile:
-                # Replace placeholders in the compose file and append line to output
-                organism_content.append(
-                    line.replace("genus_species",
-                                 str(self.genus.lower() + "_" + self.species)).replace("Genus species",
-                                 str(self.genus_uppercase + " " + self.species)).replace("Genus/species",
-                                 str(self.genus_uppercase + "/" + self.species)).replace("gspecies",
-                                 str(self.genus.lower()[0] + self.species)).replace("genus_species_strain_sex",
-                                 genus_species_strain_sex))
-            # Write/format the output compose file
-            with open("./docker-compose.yml", 'w') as outfile:
-                outfile.truncate(0)  # Delete file content
-                for line in organism_content:  # Replace env variables by those in the config file
-                    for env_variable, value in self.config.items():  # env variables are stored in this dict
-                        # print("ENV VARIABLE: " + env_variable + "\t VALUE: " + value)
-                        if env_variable in line:
-                            line = line.replace(env_variable, value)
-                            break
-                    # Write the new line in the docker-compose
-                    outfile.write(line)
-
-            # Create mounts for the current docker-compose
-            self.create_mounts(working_dir=self.species_dir)
+        # with open(stack_template_path, 'r') as infile:
+        #     organism_content = list()
+        #     for line in infile:
+        #         # Replace placeholders in the compose file and append line to output
+        #         organism_content.append(
+        #             line.replace("genus_species",
+        #                          str(self.genus.lower() + "_" + self.species)).replace("Genus species",
+        #                          str(self.genus_uppercase + " " + self.species)).replace("Genus/species",
+        #                          str(self.genus_uppercase + "/" + self.species)).replace("gspecies",
+        #                          str(self.genus.lower()[0] + self.species)).replace("genus_species_strain_sex",
+        #                          genus_species_strain_sex))
+        #     # Write/format the output compose file
+        #     with open("./docker-compose.yml", 'w') as outfile:
+        #         outfile.truncate(0)  # Delete file content
+        #         for line in organism_content:  # Replace env variables by those in the config file
+        #             for env_variable, value in self.config.items():  # env variables are stored in this dict
+        #                 # print("ENV VARIABLE: " + env_variable + "\t VALUE: " + value)
+        #                 if env_variable in line:
+        #                     line = line.replace(env_variable, value)
+        #                     break
+        #             # Write the new line in the docker-compose
+        #             outfile.write(line)
+
+        #     # Create mounts for the current docker-compose
+        #     self.create_mounts(working_dir=self.species_dir)
+
+
+        # Jinja templating, handled using the python jinja2 module (TODO: use Ansible to handle the templating in production)
+        file_loader = FileSystemLoader(self.script_dir + "/templates")
+        env = Environment(loader=file_loader)
+        template = env.get_template("gspecies_compose_template.yml.j2")
+
+        # We need a dict holding all key (variable) - value pairs that need to be replaced in the template as our rendering dict
+        # To do so we need both input file vars and config vars
+        # Create input file vars dict
+        input_vars = {"genus": self.genus_lowercase, "Genus": self.genus_uppercase, "species": self.species,
+                      "genus_species": self.genus_species, "genus_species_strain_sex": self.full_name,
+                      "strain": self.strain, "sex": self.sex, "Genus_species": self.genus_species[0].upper() + self.genus_species[1:]}
+        # Merge the two dicts
+        render_vars = {**self.config, **input_vars}
+        output = template.render(render_vars)
+        with open("./docker-compose.yml", "w") as outfile:  # persist the rendered compose file (previously only printed for debug)
+            outfile.write(output)
 
         try:
             os.chdir(os.path.abspath(self.main_dir))
@@ -381,7 +398,7 @@ if __name__ == "__main__":
 
         # Deploy the stack
         logging.info("Deploying stack for %s..." % deploy_stack_for_current_organism.full_name)
-        deploy_stack_for_current_organism.deploy_stack()
+        # deploy_stack_for_current_organism.deploy_stack()
         logging.info("Successfully deployed stack for %s" % deploy_stack_for_current_organism.full_name)
 
         logging.info("Stack deployed for %s" % deploy_stack_for_current_organism.full_name)
diff --git a/gga_load_data.py b/gga_load_data.py
index 25b8a65bdcf545ed3c8c67dac96f6f9bfb880cd8..fc214b1ee550d42121175119de0cb35798cd9129 100644
--- a/gga_load_data.py
+++ b/gga_load_data.py
@@ -207,73 +207,6 @@ class LoadData(speciesData.SpeciesData):
         return self.history_id
 
 
-    # def import_datasets_into_history(self):
-    #     """
-    #     Find datasets in a library, get their ID and import thme into the current history if they are not already
-
-    #     :return:
-    #     """
-
-    #     # Instanciate the instance 
-    #     gio = GalaxyInstance(url=self.instance_url,
-    #                          email=self.config["custom_galaxy_default_admin_email"],
-    #                          password=self.config["custom_galaxy_default_admin_password"])
-
-    #     prj_lib = gio.libraries.get_previews(name="Project Data")
-    #     self.library_id = prj_lib[0].id
-
-    #     instance_source_data_folders = self.instance.libraries.get_folders(library_id=str(self.library_id))
-
-    #     folders_ids = {}
-    #     current_folder_name = ""
-    #     # Loop over the folders in the library and map folders names to their IDs
-    #     for i in instance_source_data_folders:
-    #         for k, v in i.items():
-    #             if k == "name":
-    #                 folders_ids[v] = 0
-    #                 current_folder_name = v
-    #             if k == "id":
-    #                 folders_ids[current_folder_name] = v
-
-    #     # Iterating over the folders to find datasets and map datasets to their IDs
-    #     logging.info("Datasets IDs: ")
-    #     for k, v in folders_ids.items():
-    #         if k == "/genome":
-    #             sub_folder_content = self.instance.folders.show_folder(folder_id=v, contents=True)
-    #             final_sub_folder_content = self.instance.folders.show_folder(folder_id=sub_folder_content["folder_contents"][0]["id"], contents=True)
-    #             for k2, v2 in final_sub_folder_content.items():
-    #                 for e in v2:
-    #                     if type(e) == dict:
-    #                         if e["name"].endswith(".fa"):
-    #                             self.datasets["genome_file"] = e["ldda_id"]
-    #                             logging.info("\t" + e["name"] + ": " + e["ldda_id"])
-    #         if k == "/annotation":
-    #             sub_folder_content = self.instance.folders.show_folder(folder_id=v, contents=True)
-    #             final_sub_folder_content = self.instance.folders.show_folder(folder_id=sub_folder_content["folder_contents"][0]["id"], contents=True)
-    #             for k2, v2 in final_sub_folder_content.items():
-    #                 for e in v2:
-    #                     if type(e) == dict:
-    #                         # TODO: manage versions? (differentiate between the correct folders using self.config)
-    #                         if "transcripts" in e["name"]:
-    #                             self.datasets["transcripts_file"] = e["ldda_id"]
-    #                             logging.info("\t" + e["name"] + ": " + e["ldda_id"])
-    #                         elif "proteins" in e["name"]:
-    #                             self.datasets["proteins_file"] = e["ldda_id"]
-    #                             logging.info("\t" + e["name"] + ": " + e["ldda_id"])
-    #                         elif "gff" in e["name"]:
-    #                             self.datasets["gff_file"] = e["ldda_id"]
-    #                             logging.info("\t" + e["name"] + ": " + e["ldda_id"])
-
-    #     logging.info("Uploading datasets into history %s" % self.history_id)
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["genome_file"])
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["gff_file"])
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["transcripts_file"])
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["proteins_file"])
-
-    #     return {"history_id": self.history_id, "library_id": self.library_id, "datasets": self.datasets}
-
-
-
     def remove_homo_sapiens_from_db(self):
         """
         Run the GMOD tool to remove the "Homo sapiens" default organism from the original database
@@ -303,80 +236,6 @@ class LoadData(speciesData.SpeciesData):
             pass
 
 
-    # def prepare_history(self):
-    #     """
-    #     Galaxy instance startup in preparation for importing datasets and running a workflow
-    #     - Remove Homo sapiens from the chado database.
-    #     - Add organism and analyses into the chado database --> separate
-    #     - Get any other existing organisms IDs before updating the galaxy instance --> separate
-
-    #     Calling this function is mandatory to have a working galaxy instance history
-    #     TODO Run a check to see if the instance is correctly set up
-
-    #     :return:
-    #     """
-
-    #     self.connect_to_instance()
-    #     self.set_get_history()
-
-    #     # Delete Homo sapiens from Chado database
-    #     logging.debug("Getting 'Homo sapiens' ID in instance's chado database")
-    #     get_sapiens_id_job = self.instance.tools.run_tool(
-    #         tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_organism_get_organisms/organism_get_organisms/2.3.2",
-    #         history_id=self.history_id,
-    #         tool_inputs={"genus": "Homo", "species": "sapiens"})
-    #     get_sapiens_id_job_output = get_sapiens_id_job["outputs"][0]["id"]
-    #     get_sapiens_id_json_output = self.instance.datasets.download_dataset(dataset_id=get_sapiens_id_job_output)
-    #     try:
-    #         logging.debug("Deleting Homo 'sapiens' in the instance's chado database")
-    #         get_sapiens_id_final_output = json.loads(get_sapiens_id_json_output)[0]
-    #         sapiens_id = str(
-    #             get_sapiens_id_final_output["organism_id"])  # needs to be str to be recognized by the chado tool
-    #         self.instance.tools.run_tool(
-    #             tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_organism_delete_organisms/organism_delete_organisms/2.3.2",
-    #             history_id=self.history_id,
-    #             tool_inputs={"organism": str(sapiens_id)})
-    #     except bioblend.ConnectionError:
-    #         logging.debug("Homo sapiens isn't in the instance's chado database")
-    #     except IndexError:
-    #         logging.debug("Homo sapiens isn't in the instance's chado database")
-    #         pass
-
-    #     # Add organism (species) to chado
-    #     logging.info("Adding organism to the instance's chado database")
-    #     self.instance.tools.run_tool(
-    #         tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_organism_add_organism/organism_add_organism/2.3.2",
-    #         history_id=self.history_id,
-    #         tool_inputs={"abbr": self.abbreviation,
-    #                      "genus": self.genus,
-    #                      "species": self.species,
-    #                      "common": self.common})
-
-    #     # Add OGS analysis to chado
-    #     logging.info("Adding OGS analysis to the instance's chado database")
-    #     self.instance.tools.run_tool(
-    #         tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_analysis_add_analysis/analysis_add_analysis/2.3.2",
-    #         history_id=self.history_id,
-    #         tool_inputs={"name": self.genus + " " + self.species + " OGS" + self.ogs_version,
-    #                      "program": "Performed by Genoscope",
-    #                      "programversion": str("OGS" + self.ogs_version),
-    #                      "sourcename": "Genoscope",
-    #                      "date_executed": self.date})
-
-    #     # Add genome analysis to chado
-    #     logging.info("Adding genome analysis to the instance's chado database")
-    #     self.instance.tools.run_tool(
-    #         tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_analysis_add_analysis/analysis_add_analysis/2.3.2",
-    #         history_id=self.history_id,
-    #         tool_inputs={"name": self.genus + " " + self.species + " genome v" + self.genome_version,
-    #                      "program": "Performed by Genoscope",
-    #                      "programversion": str("genome v" + self.genome_version),
-    #                      "sourcename": "Genoscope",
-    #                      "date_executed": self.date})
-    #     self.get_organism_and_analyses_ids()
-    #     logging.info("Finished initializing instance")
-
-
     def purge_histories(self):
         """
         Delete all histories in the instance
@@ -720,3 +579,4 @@ if __name__ == "__main__":
 
         else:
             logging.critical("The galaxy container for %s is not ready yet!" % load_data_for_current_species.full_name)
+            sys.exit()
diff --git a/run_workflow_phaeoexplorer.py b/run_workflow_phaeoexplorer.py
index c28a769faf002ccc477385e501bba7e92b389391..f3ca06e9558c126743a73673867d26c6e93ac61c 100644
--- a/run_workflow_phaeoexplorer.py
+++ b/run_workflow_phaeoexplorer.py
@@ -18,7 +18,7 @@ from bioblend import galaxy
 """ 
 gga_init.py
 
-Usage: $ python3 gga_init.py -i input_example.yml [OPTIONS]
+Usage: $ python3 run_workflow_phaeoexplorer.py -i input_example.yml --config [config file] [OPTIONS]
 """
 
 
@@ -34,20 +34,6 @@ class RunWorkflow(speciesData.SpeciesData):
     """
 
 
-    # def get_species_history_id(self):
-    #     """
-    #     Set and return the current species history id in its galaxy instance
-
-    #     :return:
-    #     """
-
-    #     histories = self.instance.histories.get_histories(name=str(self.full_name))
-    #     self.history_id = histories[0]["id"]
-    #     self.instance.histories.show_history(history_id=self.history_id)
-
-    #     return self.history_id
-
-
     def set_get_history(self):
         """
         Create or set the working history to the current species one
@@ -125,17 +111,6 @@ class RunWorkflow(speciesData.SpeciesData):
         return {"history_id": self.history_id, "library_id": self.library_id, "datasets": self.datasets}
 
 
-    # def import_datasets_to_history(self):
-    #     """
-    #     Load the datasets into the current species history
-    #     OBSOLETE
-    #     """
-    #     logging.info("Uploading datasets into history %s" % self.history_id)
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["genome_file"])
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["gff_file"])
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["transcripts_file"])
-    #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["proteins_file"])
-
     def connect_to_instance(self):
         """
         Test the connection to the galaxy instance for the current organism
@@ -143,18 +118,18 @@ class RunWorkflow(speciesData.SpeciesData):
 
         """
         self.instance = galaxy.GalaxyInstance(url=self.instance_url,
-                                              email=self.config["custom_galaxy_default_admin_email"],
-                                              password=self.config["custom_galaxy_default_admin_password"]
+                                              email=self.config["galaxy_default_admin_email"],
+                                              password=self.config["galaxy_default_admin_password"]
                                               )
 
-        logging.info("Connecting to the galaxy instance...")
+        logging.info("Connecting to the galaxy instance %s" % self.instance_url)
         try:
             self.instance.histories.get_histories()
         except bioblend.ConnectionError:
-            logging.critical("Cannot connect to galaxy instance @ " + self.instance_url)
+            logging.critical("Cannot connect to galaxy instance %s" % self.instance_url)
             sys.exit()
         else:
-            logging.info("Successfully connected to galaxy instance @ " + self.instance_url)
+            logging.info("Successfully connected to galaxy instance %s" % self.instance_url)
 
 
     def prepare_history(self):
@@ -240,22 +215,20 @@ class RunWorkflow(speciesData.SpeciesData):
             self.instance.workflows.import_workflow_dict(workflow_dict=workflow_dict)
             workflow_attributes = self.instance.workflows.get_workflows(name=workflow_name)
             workflow_id = workflow_attributes[0]["id"]
-            logging.debug("Workflow ID: %s" % workflow_id)
             show_workflow = self.instance.workflows.show_workflow(workflow_id=workflow_id)
             try:
-                logging.info("Workflow ID: %s" % workflow_id)
+                logging.debug("Workflow ID: %s" % workflow_id)
             except Exception:
-                logging.warning("Error retrieving workflow attributes")
-            logging.debug("Workflow ID: " + workflow_id)
+                logging.warning("Error retrieving workflow attributes for workflow %s" % workflow_name)
 
-            logging.info("Running workflow: %s" % workflow_name)
             self.instance.workflows.invoke_workflow(workflow_id=workflow_id,
                                                     history_id=self.history_id,
                                                     params=workflow_parameters,
                                                     inputs=datamap,
-                                                    inputs_by="")
+                                                    inputs_by="",
+                                                    allow_tool_state_corrections=True)
 
-            # self.instance.workflows.delete_workflow(workflow_id=workflow_id) # TODO : Keep for prod? (add a "if test" condition)
+            logging.info("Successfully imported and invoked workflow {0}, check your galaxy instance ({1}) for the jobs state".format(workflow_name, self.instance_url))
 
     def import_datasets_into_history(self):
         """
@@ -266,8 +239,8 @@ class RunWorkflow(speciesData.SpeciesData):
 
         # Instanciate the instance 
         gio = GalaxyInstance(url=self.instance_url,
-                             email=self.config["custom_galaxy_default_admin_email"],
-                             password=self.config["custom_galaxy_default_admin_password"])
+                             email=self.config["galaxy_default_admin_email"],
+                             password=self.config["galaxy_default_admin_password"])
 
         prj_lib = gio.libraries.get_previews(name="Project Data")
         self.library_id = prj_lib[0].id
@@ -464,83 +437,133 @@ if __name__ == "__main__":
             run_workflow_for_current_organism.config = utilities.parse_config(args.config)
             # Set the instance url attribute
             for env_variable, value in run_workflow_for_current_organism.config.items():
-                if env_variable == "custom_host":
-                    run_workflow_for_current_organism.instance_url = "http://{0}:8888/sp/{1}_{2}/galaxy/".format(
-                        value, run_workflow_for_current_organism.genus_lowercase, run_workflow_for_current_organism.species)
+                if env_variable == "hostname":
+                    run_workflow_for_current_organism.instance_url = "http://{0}:8888/sp/{1}/galaxy/".format(
+                        value, run_workflow_for_current_organism.genus_species)
                     break
                 else:
                     run_workflow_for_current_organism.instance_url = "http://localhost:8888/sp/{0}_{1}/galaxy/".format(
                         run_workflow_for_current_organism.genus_lowercase,
                         run_workflow_for_current_organism.species)
 
-            run_workflow_for_current_organism.connect_to_instance()
-            run_workflow_for_current_organism.set_get_history()
-            # run_workflow_for_current_organism.get_species_history_id()
+            # TODO: Create distinct methods to call different pre-set workflows using CL arguments/config options (i.e load-chado, jbrowse, functional-annotation, orthology, ...)
+
+            # If input workflow is Chado_load_Tripal_synchronize.ga
+            if "Chado_load_Tripal_synchronize" in str(workflow):
+
+                logging.info("Executing workflow 'Chado_load_Tripal_synchronize'")
+
+                run_workflow_for_current_organism.connect_to_instance()
+                run_workflow_for_current_organism.set_get_history()
+                # run_workflow_for_current_organism.get_species_history_id()
+
+                # Prepare the instance+history for the current organism (add organism and analyses in Chado) TODO: add argument "setup"
+                # (although it should pose no problem as the "Chado add" refuses to duplicate an analysis/organism anyway)
+                run_workflow_for_current_organism.prepare_history()
+
+                # Get the attributes of the instance and project data files
+                run_workflow_for_current_organism.get_instance_attributes()
+                run_workflow_for_current_organism.get_organism_and_analyses_ids()
+
+                # Import datasets into history
+                # TODO: it seems it is not required anymore since using "ldda" option for datasets in the workflow datamap doesn't need files from history
+                run_workflow_for_current_organism.import_datasets_into_history()
+
+                # Explicit workflow parameter names
+                GENOME_FASTA_FILE = "0"
+                GFF_FILE = "1"
+                PROTEINS_FASTA_FILE = "2"
+                TRANSCRIPTS_FASTA_FILE = "3"
+
+                LOAD_FASTA_IN_CHADO = "4"
+                LOAD_GFF_IN_CHADO = "5"
+                SYNC_ORGANISM_INTO_TRIPAL = "6"
+                SYNC_GENOME_ANALYSIS_INTO_TRIPAL = "7"
+                SYNC_OGS_ANALYSIS_INTO_TRIPAL = "8"
+                SYNC_FEATURES_INTO_TRIPAL = "9"
+
+                workflow_parameters = {}
+
+                workflow_parameters[GENOME_FASTA_FILE] = {}
+                workflow_parameters[GFF_FILE] = {}
+                workflow_parameters[PROTEINS_FASTA_FILE] = {}
+                workflow_parameters[TRANSCRIPTS_FASTA_FILE] = {}
+                workflow_parameters[LOAD_FASTA_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
+                                                            "analysis_id": run_workflow_for_current_organism.genome_analysis_id,
+                                                            "do_update": "true"}
+                # Change "do_update": "true" to "do_update": "false" in above parameters to prevent appending/updates to the fasta file in chado
+                # WARNING: It is safer to never update it and just change the genome/ogs versions in the config
+                workflow_parameters[LOAD_GFF_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
+                                                          "analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
+                workflow_parameters[SYNC_ORGANISM_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
+                workflow_parameters[SYNC_GENOME_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.genome_analysis_id}
+                workflow_parameters[SYNC_OGS_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
+                workflow_parameters[SYNC_FEATURES_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
+
+                # Datamap for input datasets - dataset source (type): ldda (LibraryDatasetDatasetAssociation)
+                run_workflow_for_current_organism.datamap = {}
+                run_workflow_for_current_organism.datamap[GENOME_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
+                run_workflow_for_current_organism.datamap[GFF_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["gff_file"]}
+                run_workflow_for_current_organism.datamap[PROTEINS_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["proteins_file"]}
+                run_workflow_for_current_organism.datamap[TRANSCRIPTS_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["transcripts_file"]}
+
+                # NOTE(review): removed a duplicate datamap rebuild here — it reset
+                # run_workflow_for_current_organism.datamap = {} and re-added only the
+                # genome and gff entries, silently clobbering the proteins_file and
+                # transcripts_file entries set just above, so the workflow would have
+                # been invoked without its PROTEINS/TRANSCRIPTS input datasets.
+                # (left as comment lines to preserve this hunk's line counts)
+
+                # Run the Chado load Tripal sync workflow with the parameters set above
+                run_workflow_for_current_organism.run_workflow(workflow_path=workflow,
+                                                               workflow_parameters=workflow_parameters,
+                                                               datamap=run_workflow_for_current_organism.datamap,
+                                                               workflow_name="Chado load Tripal synchronize")
 
-            # Prepare the instance+history for the current organism (add organism and analyses in Chado) TODO: add argument "setup"
-            # (althought it should pose no problem as the "Chado add" refuses to duplicate an analysis/organism anyway)
-            run_workflow_for_current_organism.prepare_history()
 
-            # Get the attributes of the instance and project data files
-            run_workflow_for_current_organism.get_instance_attributes()
-            run_workflow_for_current_organism.get_organism_and_analyses_ids()
+            # Jbrowse creation workflow
+            elif "Jbrowse" in str(workflow):
 
-            # Import datasets into history
-            # TODO: put this only for the chado load tripal sync workflow?
-            # TODO: it seems it is not required anymore since using "ldda" option for datasets in the workflow datamap doesn't need the files
-            run_workflow_for_current_organism.import_datasets_into_history()
+                logging.info("Executing workflow 'Jbrowse'")
 
-            # Explicit workflow parameter names
-            # TODO: Create distinct methods to call different pre-set workflows using CL arguments/config options (i.e load-chado, jbrowse, functional-annotation, orthology, ...)
+                run_workflow_for_current_organism.connect_to_instance()
+                run_workflow_for_current_organism.set_get_history()
+                run_workflow_for_current_organism.get_instance_attributes()
+                run_workflow_for_current_organism.get_organism_and_analyses_ids()
+                run_workflow_for_current_organism.import_datasets_into_history()
 
-            # Chado load and Tripal synchronize workflow
-            workflow_parameters = {}
-
-            GENOME_FASTA_FILE = "0"
-            GFF_FILE = "1"
-            PROTEINS_FASTA_FILE = "2"
-            TRANSCRIPTS_FASTA_FILE = "3"
-
-
-            LOAD_FASTA_IN_CHADO = "4"
-            LOAD_GFF_IN_CHADO = "5"
-            SYNC_ORGANISM_INTO_TRIPAL = "6"
-            SYNC_GENOME_ANALYSIS_INTO_TRIPAL = "7"
-            SYNC_OGS_ANALYSIS_INTO_TRIPAL = "8"
-            SYNC_FEATURES_INTO_TRIPAL = "9"
-
-            workflow_parameters[GENOME_FASTA_FILE] = {}
-            workflow_parameters[GFF_FILE] = {}
-            workflow_parameters[PROTEINS_FASTA_FILE] = {}
-            workflow_parameters[TRANSCRIPTS_FASTA_FILE] = {}
-            workflow_parameters[LOAD_FASTA_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
-                                                        "analysis_id": run_workflow_for_current_organism.genome_analysis_id,
-                                                        "do_update": "true"}
-            # Change "do_update": "true" to "do_update": "false" in above parameters to prevent appending/updates to the fasta file in chado
-            # WARNING: It is safer to never update it and just change the genome/ogs versions in the config
-            workflow_parameters[LOAD_GFF_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
-                                                      "analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
-            workflow_parameters[SYNC_ORGANISM_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
-            workflow_parameters[SYNC_GENOME_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
-            workflow_parameters[SYNC_OGS_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.genome_analysis_id}
-            workflow_parameters[SYNC_FEATURES_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
-
-            # Datamap for input datasets - dataset source (type): ldda (LibraryDatasetDatasetAssociation)
-            run_workflow_for_current_organism.datamap = {}
-            run_workflow_for_current_organism.datamap[GENOME_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
-            run_workflow_for_current_organism.datamap[GFF_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["gff_file"]}
-            run_workflow_for_current_organism.datamap[PROTEINS_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["proteins_file"]}
-            run_workflow_for_current_organism.datamap[TRANSCRIPTS_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["transcripts_file"]}
-
-            # Run the Chado load Tripal sync workflow with the parameters set above
-            run_workflow_for_current_organism.run_workflow(workflow_path=workflow,
-                                                           workflow_parameters=workflow_parameters,
-                                                           datamap=run_workflow_for_current_organism.datamap,
-                                                           workflow_name="Chado load Tripal synchronize")
+                GENOME_FASTA_FILE = "0"
+                GFF_FILE = "1"
+                ADD_JBROWSE = "2"
+                ADD_ORGANISM_TO_JBROWSE = "3"
+
+                workflow_parameters = {}
+
+                workflow_parameters[GENOME_FASTA_FILE] = {}
+                workflow_parameters[GFF_FILE] = {}
+                # Jbrowse custom feature url
+                workflow_parameters[ADD_JBROWSE] = {"jb_menu": {"menu_url": "http://{hostname}:{port}/sp/{genus_sp}/feature/{Genus}/{species}/{id}".format(hostname=run_workflow_for_current_organism.config["hostname"],
+                                                                                                                                                           port=run_workflow_for_current_organism.config["http_port"],
+                                                                                                                                                           genus_sp=run_workflow_for_current_organism.genus_species,
+                                                                                                                                                           Genus=run_workflow_for_current_organism.genus_uppercase,
+                                                                                                                                                           species=run_workflow_for_current_organism.species,
+                                                                                                                                                           id="{id}")}}
+                # Organism to add to the Jbrowse "container" (consists of a name and an id, not tied to the galaxy instance or chado/tripal names and ids)
+                workflow_parameters[ADD_ORGANISM_TO_JBROWSE] = {"name": [{"name": run_workflow_for_current_organism.full_name,
+                                                               "unique_id": run_workflow_for_current_organism.abbreviation}]}
+
+                run_workflow_for_current_organism.datamap = {}
+                run_workflow_for_current_organism.datamap[GENOME_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
+                run_workflow_for_current_organism.datamap[GFF_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["gff_file"]}
+
+                # Run the jbrowse creation workflow
+                run_workflow_for_current_organism.run_workflow(workflow_path=workflow,
+                                                               workflow_parameters=workflow_parameters,
+                                                               datamap=run_workflow_for_current_organism.datamap,
+                                                               workflow_name="Jbrowse")
 
-            # Jbrowse creation workflow
 
-            workflow_parameters = {}
 
         else:
             logging.critical("The galaxy container for %s is not ready yet!" % run_workflow_for_current_organism.full_name)
+            sys.exit()
diff --git a/speciesData.py b/speciesData.py
index 1e9305b39251dba1a3855205765d5df6bc367244..8e48cf5cd4d2ec1db19662445ae31cdb3ec6c6b6 100644
--- a/speciesData.py
+++ b/speciesData.py
@@ -40,7 +40,7 @@ class SpeciesData:
         self.full_name_lowercase = self.full_name.lower()
         self.abbreviation = "_".join(utilities.filter_empty_not_empty_items([self.genus_lowercase[0], self.species, self.strain, self.sex])["not_empty"])
         self.genus_species = self.genus_lowercase + "_" + self.species
-        self.instance_url = "http://scratchgmodv1:8888/sp/" + self.genus_lowercase + "_" + self.species + "/galaxy/" # Testing with scratchgmodv1, is overwritten in the script by the provided hostname anyway
+        self.instance_url = "http://scratchgmodv1:8888/sp/" + self.genus_lowercase + "_" + self.species + "/galaxy/"  # Testing with scratchgmodv1, is overwritten in the script by the provided hostname
         self.instance = None
         self.history_id = None
         self.library = None
diff --git a/templates/gspecies_compose_template.yml b/templates/gspecies_compose_template.yml
index 1fe698f9624e117e0cdf0769826ae5d232e9ac59..da6d64550b74eca0ca9198476360cef43a2fffd5 100644
--- a/templates/gspecies_compose_template.yml
+++ b/templates/gspecies_compose_template.yml
@@ -114,7 +114,7 @@ services:
         image: quay.io/galaxy-genome-annotation/docker-galaxy-annotation:gmod
         volumes:
             - ../galaxy_data_libs_SI.py:/opt/galaxy_data_libs_SI.py
-            - ./docker_data/galaxy/:/export/
+            #- ./docker_data/galaxy/:/export/  # Commented for testing TODO: Uncomment in production
             - ./src_data/:/project_data/:ro
             #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
             - ./docker_data/jbrowse/:/jbrowse/data/
diff --git a/templates/gspecies_compose_template.yml.j2 b/templates/gspecies_compose_template.yml.j2
new file mode 100644
index 0000000000000000000000000000000000000000..1111add80afdc332826633c975e60c25b79be0a7
--- /dev/null
+++ b/templates/gspecies_compose_template.yml.j2
@@ -0,0 +1,303 @@
+# ./docker_data is created and filled with persistent data that should be backuped
+
+version: '3.7'
+services:
+    proxy:
+        image: quay.io/abretaud/nginx-ldap:latest
+        volumes:
+            - ./src_data/:/project_data/
+            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
+            - ./nginx/conf:/etc/nginx/conf.d
+        networks:
+            - traefikbig
+            - {{ genus_species }}
+        deploy:
+          labels:
+            # Download page
+            - "traefik.http.routers.{{ genus_species }}-nginx.rule=(Host(`{{ hostname }}`) && PathPrefix(`/sp/{{ genus_species }}/download`))"
+#            - "traefik.http.routers.{{ genus_species }}-nginx.tls=true"
+#            - "traefik.http.routers.{{ genus_species }}-nginx.entryPoints=webs"
+            - "traefik.http.routers.{{ genus_species }}-nginx.entryPoints=web" # lg
+#            - "traefik.http.routers.{{ genus_species }}-nginx.middlewares=sp-auth,sp-app-trailslash,sp-prefix"
+            - "traefik.http.routers.{{ genus_species }}-nginx.middlewares=sp-app-trailslash,sp-prefix" # lg
+            - "traefik.http.services.{{ genus_species }}-nginx.loadbalancer.server.port=80"
+          restart_policy:
+            condition: on-failure
+            delay: 5s
+            max_attempts: 3
+            window: 120s
+
+    tripal:
+        image: quay.io/galaxy-genome-annotation/tripal:v2.x
+        depends_on:
+            - tripal-db
+            - elasticsearch
+        volumes:
+            - ./docker_data/galaxy/:/export/:ro
+            - ./src_data/:/project_data/:ro
+            - ./src_data:/data:ro
+            - ./banner.png:/var/www/html/banner.png:ro
+            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
+        environment:
+            DB_HOST: tripal-db.{{ genus_species }}
+            BASE_URL_PATH: /sp/{{ genus_species }}
+            UPLOAD_LIMIT: 20M
+            MEMORY_LIMIT: 512M
+            TRIPAL_GIT_CLONE_MODULES: "https://github.com/abretaud/tripal_rest_api.git[@c6f9021ea5d4c6d7c67c5bd363a7dd9359228bbc] https://github.com/tripal/tripal_elasticsearch.git[@dc7f276046e394a80a7dfc9404cf1a149006eb2a] https://github.com/tripal/tripal_analysis_interpro.git https://github.com/tripal/tripal_analysis_go.git https://github.com/tripal/tripal_analysis_blast.git  https://github.com/tripal/tripal_analysis_expression.git[@7240039fdeb4579afd06bbcb989cb7795bd4c342]"
+            TRIPAL_DOWNLOAD_MODULES: ""
+            TRIPAL_ENABLE_MODULES: "tripal_analysis_blast tripal_analysis_interpro tripal_analysis_go tripal_rest_api tripal_elasticsearch"
+            SITE_NAME: "{{ Genus_species }}"
+            ELASTICSEARCH_HOST: elasticsearch.{{ genus_species }}
+            ENABLE_JBROWSE: /jbrowse/?data=data/{{ genus_species_sex }}
+            ENABLE_APOLLO: 0
+            ENABLE_BLAST: 1
+            ENABLE_DOWNLOAD: 1
+            ENABLE_WIKI: 0
+            ENABLE_GO: 0
+            ENABLE_ORTHOLOGY: 0
+            ENABLE_ORTHOLOGY_LINKS: 0
+            THEME: "{{ tripal_theme_name }}"  # Use this to use another theme
+            THEME_GIT_CLONE: "{{ tripal_theme_git_clone }}"  # Use this to install another theme
+            ADMIN_PASSWORD: {{ tripal_password }}  # You need to define it and update it in galaxy config below
+        networks:
+            - traefikbig
+            - {{ genus_species }}
+        deploy:
+          labels:
+            - "traefik.http.routers.{{ genus_species }}-tripal.rule=(Host(`{{ hostname }}`) && PathPrefix(`/sp/{{ genus_species }}`))"
+#            - "traefik.http.routers.{{ genus_species }}-tripal.tls=true"
+#            - "traefik.http.routers.{{ genus_species }}-tripal.entryPoints=webs"
+            - "traefik.http.routers.{{ genus_species }}-tripal.entryPoints=web" # lg
+#            - "traefik.http.routers.{{ genus_species }}-tripal.middlewares=sp-auth,sp-trailslash,sp-prefix,tripal-addprefix"
+            - "traefik.http.routers.{{ genus_species }}-tripal.middlewares=sp-trailslash,sp-prefix,tripal-addprefix" # lg
+            - "traefik.http.services.{{ genus_species }}-tripal.loadbalancer.server.port=80"
+          restart_policy:
+            condition: on-failure
+            delay: 5s
+            max_attempts: 3
+            window: 120s
+
+    tripal-db:
+        image: quay.io/galaxy-genome-annotation/chado:1.31-jenkins26-pg9.5
+        environment:
+            - POSTGRES_PASSWORD=postgres
+            # The default chado image would try to install the schema on first run,
+            # we just want the tools to be available.
+            - INSTALL_CHADO_SCHEMA=0
+        volumes:
+            - ./docker_data/tripal_db/:/var/lib/postgresql/data/
+        networks:
+            - {{ genus_species }}
+
+    elasticsearch:
+        image: docker.elastic.co/elasticsearch/elasticsearch:6.6.1
+        #deploy:
+          #resources:
+            #limits:
+              #memory: 500M
+        volumes:
+            - ./docker_data/elastic_search_index/:/usr/share/elasticsearch/data/
+        environment:
+            bootstrap.memory_lock: "true"
+            xpack.security.enabled: "false"
+            xpack.monitoring.enabled: "false"
+            xpack.ml.enabled: "false"
+            xpack.graph.enabled: "false"
+            xpack.watcher.enabled: "false"
+            cluster.routing.allocation.disk.threshold_enabled: "false"
+            ES_JAVA_OPTS: "-Xms500m -Xmx500m"
+            TAKE_FILE_OWNERSHIP: "true"
+        networks:
+            - {{ genus_species }}
+
+    galaxy:
+        image: quay.io/galaxy-genome-annotation/docker-galaxy-annotation:gmod
+        volumes:
+            - ../galaxy_data_libs_SI.py:/opt/galaxy_data_libs_SI.py
+            #- ./docker_data/galaxy/:/export/  # Commented for testing TODO: Uncomment in production
+            - ./src_data/:/project_data/:ro
+            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
+            - ./docker_data/jbrowse/:/jbrowse/data/
+            - ./docker_data/apollo/:/apollo-data/
+            - ../galaxy_nginx.conf:/etc/nginx/uwsgi_params
+        environment:
+            NONUSE: nodejs,proftp,reports,condor
+            GALAXY_LOGGING: full
+            GALAXY_CONFIG_BRAND: "Genus species"
+            GALAXY_CONFIG_ALLOW_LIBRARY_PATH_PASTE: "True"
+            GALAXY_CONFIG_USE_REMOTE_USER: "True"
+            GALAXY_CONFIG_REMOTE_USER_MAILDOMAIN: "sb-roscoff.fr"
+            GALAXY_DEFAULT_ADMIN_EMAIL: "{{ galaxy_default_admin_email }}"
+            GALAXY_DEFAULT_ADMIN_USER: "{{ galaxy_defaut_admin_user }}"
+            GALAXY_DEFAULT_ADMIN_PASSWORD: "{{ galaxy_default_admin_password }}"
+            GALAXY_CONFIG_ADMIN_USERS: "admin@galaxy.org, gga@sb-roscoff.fr"   # admin@galaxy.org is the default (leave it), gogepp@bipaa is a shared ldap user we use to connect
+            GALAXY_CONFIG_MASTER_API_KEY: "{{ galaxy_config_master_api_key }}"
+            ENABLE_FIX_PERMS: 0
+            PROXY_PREFIX: /sp/{{ genus_species }}/galaxy
+            GALAXY_TRIPAL_URL: http://tripal.{{ genus_species }}/tripal/
+            GALAXY_TRIPAL_PASSWORD: {{ tripal_password }}  # See tripal config above
+            GALAXY_WEBAPOLLO_URL: http://one-of-the-swarm-node:8888/apollo/
+            GALAXY_WEBAPOLLO_USER: "{{ webapollo_user }}"
+            GALAXY_WEBAPOLLO_PASSWORD: "{{ webapollo_password }}"  # See tripal config below
+            GALAXY_WEBAPOLLO_EXT_URL: /apollo/
+            GALAXY_CHADO_DBHOST: tripal-db.{{ genus_species }}
+            GALAXY_CHADO_DBSCHEMA: chado
+            GALAXY_AUTO_UPDATE_DB: 1
+            GALAXY_AUTO_UPDATE_CONDA: 1
+            GALAXY_AUTO_UPDATE_TOOLS: "/galaxy-central/tools_1.yaml"
+            GALAXY_SHARED_DIR: ""
+            BLAT_ENABLED: 1
+        networks:
+            - traefikbig
+            - {{ genus_species }}
+        deploy:
+          labels:
+            - "traefik.http.routers.{{ genus_species }}-galaxy.rule=(Host(`{{ hostname }}`) && PathPrefix(`/sp/{{ genus_species }}/galaxy`))"
+#            - "traefik.http.routers.{{ genus_species }}-galaxy.tls=true"
+#            - "traefik.http.routers.{{ genus_species }}-galaxy.entryPoints=webs"
+            - "traefik.http.routers.{{ genus_species }}-galaxy.entryPoints=web" #lg
+#            - "traefik.http.routers.{{ genus_species }}-galaxy.middlewares=sp-auth,sp-app-trailslash,sp-app-prefix"
+            - "traefik.http.routers.{{ genus_species }}-galaxy.middlewares=sp-app-trailslash,sp-app-prefix" #lg
+            - "traefik.http.services.{{ genus_species }}-galaxy.loadbalancer.server.port=80"
+          restart_policy:
+            condition: on-failure
+            delay: 5s
+            max_attempts: 3
+            window: 120s
+
+    jbrowse:
+        image: quay.io/galaxy-genome-annotation/jbrowse:v1.16.8
+        volumes:
+            - ./docker_data/galaxy/:/export/:ro
+            - ./src_data/:/project_data/:ro
+            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
+            - ./docker_data/jbrowse/:/jbrowse/data/:ro
+        networks:
+            - traefikbig
+            - {{ genus_species }}
+        deploy:
+          labels:
+            - "traefik.http.routers.{{ genus_species }}-jbrowse.rule=(Host(`{{ hostname }}`) && PathPrefix(`/sp/{{ genus_species }}/jbrowse`))"
+#            - "traefik.http.routers.{{ genus_species }}-jbrowse.tls=true"
+#            - "traefik.http.routers.{{ genus_species }}-jbrowse.entryPoints=webs"
+            - "traefik.http.routers.{{ genus_species }}-jbrowse.entryPoints=web" # lg
+#            - "traefik.http.routers.{{ genus_species }}-jbrowse.middlewares=sp-auth,sp-app-trailslash,sp-app-prefix"
+            - "traefik.http.routers.{{ genus_species }}-jbrowse.middlewares=sp-app-trailslash,sp-app-prefix" #lg
+            - "traefik.http.services.{{ genus_species }}-jbrowse.loadbalancer.server.port=80"
+          restart_policy:
+            condition: on-failure
+            delay: 5s
+            max_attempts: 3
+            window: 120s
+
+    blast:
+        image: quay.io/abretaud/sf-blast:latest
+        depends_on:
+            - blast-db
+        environment:
+            DB_HOST: blast-db.{{ genus_species }}
+            UPLOAD_LIMIT: 20M
+            MEMORY_LIMIT: 128M
+            DB_NAME: 'postgres'
+            ADMIN_EMAIL: 'g.ga@sb-roscoff.fr'  # email sender
+            ADMIN_NAME: 'gga'  # email sender name
+            JOBS_METHOD: 'local'   # Can be local (= no sge jobs, but run inside the container) or drmaa (= to submit to a cluster)
+            JOBS_WORK_DIR: '/tmp/blast_jobs/'  # disk accessible both from compute nodes and mounted in this docker (at the same path)
+            CDD_DELTA_PATH: '/db/cdd_delta/current/flat/cdd_delta'
+            BLAST_TITLE: '{{ Genus }} {{ species }} blast server'
+            JOBS_SCHED_NAME: '{{ blast_gspecies }}'    # job names
+            #PRE_CMD: '. /local/env/envblast-2.6.0.sh; . /local/env/envpython-3.7.1.sh;'    # executed at the beginning of each job
+            #APACHE_RUN_USER: 'bipaaweb'   # username known by sge
+            #APACHE_RUN_GROUP: 'bipaa'   # group known by sge
+            BASE_URL_PATH: '/sp/{{ genus_species }}/blast/'
+            UID: 55914  # username known by sge (for drmaa mode only)
+            GID: 40259  # group known by sge (for drmaa mode only)
+            #JOBS_DRMAA_NATIVE: '-p web' # This line and following for slurm
+            #DRMAA_METHOD: 'slurm' # This line and following for slurm
+        volumes:
+            - ../blast-themes/abims/:/var/www/blast/app/Resources/:ro # You can theme the app
+            - /usr/local/genome2/:/usr/local/genome2/:ro # path for blast executables
+            - /db/:/db/:ro # for access to indexed blast databases
+            #- /data1/sge/:/usr/local/sge/:ro # an sge install
+            #- /xxxx/blast_jobs/:/xxxx/blast_jobs/ # (for drmaa mode only)
+            - ./blast/banks.yml:/var/www/blast/app/config/banks.yml:ro
+            - ./blast/links.yml:/etc/blast_links/links.yml:ro
+            #- /data1/slurm/slurm.conf:/etc/slurm-llnl/slurm.conf:ro # This line and following for slurm
+            #- /data1/slurm/gres.conf:/etc/slurm-llnl/gres.conf:ro
+            #- /data1/slurm/cgroup.conf:/etc/slurm-llnl/cgroup.conf:ro
+            #- /data1/slurm/slurmdbd.conf:/etc/slurm-llnl/slurmdbd.conf:ro
+            #- /data1/slurm/drmaa/:/etc/slurm-llnl/drmaa/:ro
+            #- /etc/munge/:/etc/munge/:ro
+        networks:
+            - traefikbig
+            - {{ genus_species }}
+        deploy:
+          labels:
+            - "traefik.http.routers.{{ genus_species }}-blast.rule=(Host(`{{ hostname }}`) && PathPrefix(`/sp/{{ genus_species }}/blast`))"
+#            - "traefik.http.routers.{{ genus_species }}-blast.tls=true"
+#            - "traefik.http.routers.{{ genus_species }}-blast.entryPoints=webs"
+            - "traefik.http.routers.{{ genus_species }}-blast.entryPoints=web" # lg
+#            - "traefik.http.routers.{{ genus_species }}-blast.middlewares=sp-big-req,sp-auth,sp-app-trailslash,sp-app-prefix"
+            - "traefik.http.routers.{{ genus_species }}-blast.middlewares=sp-big-req,sp-app-trailslash,sp-app-prefix" # lg
+            - "traefik.http.services.{{ genus_species }}-blast.loadbalancer.server.port=80"
+          restart_policy:
+            condition: on-failure
+            delay: 5s
+            max_attempts: 3
+            window: 120s
+
+    blast-db:
+#        image: postgres:9.6-alpine
+        image: postgres:9.5
+        environment:
+            - POSTGRES_PASSWORD=postgres
+            - PGDATA=/var/lib/postgresql/data/
+        volumes:
+            - ./docker_data/blast_db/:/var/lib/postgresql/data/
+        networks:
+            - {{ genus_species }}
+
+#    wiki:
+#        image: quay.io/abretaud/mediawiki
+#        environment:
+#            MEDIAWIKI_SERVER: http://localhost
+#            MEDIAWIKI_PROXY_PREFIX: /sp/{{ genus_species }}/wiki
+#            MEDIAWIKI_SITENAME: {{ Genus }} {{ species }}
+#            MEDIAWIKI_SECRET_KEY: XXXXXXXXXX
+#            MEDIAWIKI_DB_HOST: wiki-db.{{genus_species }}
+#            MEDIAWIKI_DB_PASSWORD: password
+#            MEDIAWIKI_ADMIN_USER: abretaud   # ldap user
+#        depends_on:
+#            - wiki-db
+#        volumes:
+#            - ./docker_data/wiki_uploads:/images
+#            #- ../bipaa_wiki.png:/var/www/mediawiki/resources/assets/wiki.png:ro # To change the logo at the top left
+#        networks:
+#            - traefikbig
+#            - {{ genus_species }}
+#        deploy:
+#          labels:
+#            - "traefik.http.routers.{{ genus_species }}-blast.rule=(Host(`{{ hostname }}`) && PathPrefix(`/sp/{{ genus_species }}/blast`))"
+#            - "traefik.http.routers.{{ genus_species }}-blast.tls=true"
+#            - "traefik.http.routers.{{ genus_species }}-blast.entryPoints=webs"
+#            - "traefik.http.routers.{{ genus_species }}-blast.middlewares=sp-big-req,sp-auth,sp-app-trailslash,sp-app-prefix"
+#            - "traefik.http.services.{{ genus_species }}-blast.loadbalancer.server.port=80"
+#          restart_policy:
+#            condition: on-failure
+#            delay: 5s
+#            max_attempts: 3
+#            window: 120s
+
+#    wiki-db:
+#        image: postgres:9.6-alpine
+#        volumes:
+#            - ./docker_data/wiki_db/:/var/lib/postgresql/data/
+#        networks:
+#            - {{ genus_species }}
+
+networks:
+    traefikbig:
+        external: true
+    {{ genus_species }}:
+        driver: overlay
+        name: {{ genus_species }}
diff --git a/templates/traefik_compose_template.yml.j2 b/templates/traefik_compose_template.yml.j2
new file mode 100644
index 0000000000000000000000000000000000000000..92aac2eef2e712259354f6ac58c03b3a3e088509
--- /dev/null
+++ b/templates/traefik_compose_template.yml.j2
@@ -0,0 +1,120 @@
+version: '3.7'
+services:
+  traefik:
+    image: traefik:2.1.6
+    command:
+      - "--api"
+      - "--api.dashboard"
+#      - "--api.insecure=true" # added by lg to debug, for dashboard
+      - "--log.level=DEBUG"
+      - "--providers.docker"
+      - "--providers.docker.swarmMode=true"
+      - "--providers.docker.network=traefikbig" # changed by lg from traefik to traefikbig
+      - "--entryPoints.web.address=:80"
+      - "--entryPoints.web.forwardedHeaders.trustedIPs={{ proxy_ip }}"  # The ips of our upstream proxies: eci
+      - "--entryPoints.webs.address=:443"
+      - "--entryPoints.webs.forwardedHeaders.trustedIPs={{ proxy_ip }}"  # The ips of our upstream proxies: eci
+    ports:
+      - {{ dashboard_port }}:8080 # added by lg to debug, for dashboard
+      - {{ http_port }}:80
+      - {{ https_port }}:443
+    networks:
+      - traefikbig
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    deploy:
+      placement:
+        constraints:
+          - node.role == manager
+      labels:
+#        - "traefik.http.routers.traefik-api.rule=PathPrefix(`/traefik`)"
+        - "traefik.http.routers.traefik-api.rule=PathPrefix(`/api`) || PathPrefix(`/dashboard`) || PathPrefix(`/traefik`)" # lg
+#        - "traefik.http.routers.traefik-api.tls=true"
+        - "traefik.http.routers.traefik-api.entryPoints=web" # lg
+#        - "traefik.http.routers.traefik-api.entryPoints=webs"
+        - "traefik.http.routers.traefik-api.service=api@internal"
+        - "traefik.http.middlewares.traefik-strip.stripprefix.prefixes=/traefik"
+        - "traefik.http.middlewares.traefik-auth.forwardauth.address=http://authelia:9091/api/verify?rd=https://auth.abims-gga.sb-roscoff.fr/"
+        - "traefik.http.middlewares.traefik-auth.forwardauth.trustForwardHeader=true"
+#        - "traefik.http.routers.traefik-api.middlewares=traefik-auth,traefik-strip"
+        - "traefik.http.routers.traefik-api.middlewares=traefik-strip" # lg
+        # Dummy service for Swarm port detection. The port can be any valid integer value.
+        - "traefik.http.services.traefik-svc.loadbalancer.server.port=9999"
+        # Some generally useful middlewares for organisms hosting
+        - "traefik.http.middlewares.sp-auth.forwardauth.address=http://authelia:9091/api/verify?rd=https://auth.abims-gga.sb-roscoff.fr/"
+        - "traefik.http.middlewares.sp-auth.forwardauth.trustForwardHeader=true"
+        - "traefik.http.middlewares.sp-auth.forwardauth.authResponseHeaders=Remote-User,Remote-Groups"
+#        - "traefik.http.middlewares.sp-trailslash.redirectregex.regex=^(https?://[^/]+/sp/[^/]+)$$"
+        - "traefik.http.middlewares.sp-trailslash.redirectregex.regex=^(http?://[^/]+/sp/[^/]+)$$" # lg
+        - "traefik.http.middlewares.sp-trailslash.redirectregex.replacement=$${1}/"
+        - "traefik.http.middlewares.sp-trailslash.redirectregex.permanent=true"
+#        - "traefik.http.middlewares.sp-app-trailslash.redirectregex.regex=^(https?://[^/]+/sp/[^/]+/[^/]+)$$"
+        - "traefik.http.middlewares.sp-app-trailslash.redirectregex.regex=^(http?://[^/]+/sp/[^/]+/[^/]+)$$" # lg
+        - "traefik.http.middlewares.sp-app-trailslash.redirectregex.replacement=$${1}/"
+        - "traefik.http.middlewares.sp-app-trailslash.redirectregex.permanent=true"
+        - "traefik.http.middlewares.sp-prefix.stripprefixregex.regex=/sp/[^/]+"
+        - "traefik.http.middlewares.sp-app-prefix.stripprefixregex.regex=/sp/[^/]+/[^/]+"
+        - "traefik.http.middlewares.tripal-addprefix.addprefix.prefix=/tripal"
+        - "traefik.http.middlewares.sp-big-req.buffering.maxRequestBodyBytes=50000000"
+        - "traefik.http.middlewares.sp-huge-req.buffering.maxRequestBodyBytes=2000000000"
+      restart_policy:
+        condition: on-failure
+        delay: 5s
+        max_attempts: 3
+        window: 120s
+
+  authelia:
+    image: authelia/authelia:4.12.0
+    networks:
+      - traefikbig
+    depends_on:
+        - authelia-redis
+        - authelia-db
+    volumes:
+      - ./authelia/:/etc/authelia/:ro
+    deploy:
+      labels:
+        - "traefik.http.routers.authelia.rule=Host(`auth.example.org`)"
+        - "traefik.http.services.authelia.loadbalancer.server.port=9091"
+      restart_policy:
+        condition: on-failure
+        delay: 5s
+        max_attempts: 3
+        window: 120s
+
+  authelia-redis:
+    image: redis:5.0.7-alpine
+    command: ["redis-server", "--appendonly", "yes"]
+    volumes:
+      - ./authelia-redis/:/data/
+    networks:
+      - traefikbig
+    deploy:
+      restart_policy:
+        condition: on-failure
+        delay: 5s
+        max_attempts: 3
+        window: 120s
+
+  authelia-db:
+      image: postgres:12.2-alpine
+      environment:
+          POSTGRES_PASSWORD: z3A,hQ-9
+      volumes:
+          - ./docker_data/authelia_db/:/var/lib/postgresql/data/
+      networks:
+          - traefikbig
+      deploy:
+        restart_policy:
+          condition: on-failure
+          delay: 5s
+          max_attempts: 3
+          window: 120s
+
+networks:
+  traefikbig:
+    driver: overlay
+    name: traefikbig
+    ipam:
+      config:
+        - subnet: 10.50.0.0/16
diff --git a/utilities.py b/utilities.py
index 92ffce8c3a44fe33c369409d7d288abfe42901f4..143c2119927ff2e62702e757aa54fe93c83b88ac 100644
--- a/utilities.py
+++ b/utilities.py
@@ -46,20 +46,15 @@ def parse_input(input_file):
 
     parsed_sp_dict_list = []
 
-    if str(input_file).endswith("yml") or str(input_file).endswith("yaml"):
-        logging.debug("Input format used: YAML")
-    else:
-        logging.critical("Error, please input a YAML file")
-        sys.exit()
     try:
         with open(input_file, 'r') as stream:
             try:
                 yaml_dict = yaml.safe_load(stream)
                 for k, v in yaml_dict.items():
                     parsed_sp_dict_list.append(v)
-            except yaml.YAMLError:
-                logging.critical("YAML input file might be incorrect")
-                sys.exit()
+            except yaml.YAMLError as err:
+                logging.critical("Input file is not in YAML format")
+                sys.exit(err)
     except FileNotFoundError:
         logging.critical("The specified input file doesn't exist (%s)" % input_file)
         sys.exit()
diff --git a/workflows/Chado_load_Tripal_synchronize.ga b/workflows/Chado_load_Tripal_synchronize.ga
index c8ce7a4549828092413ddc6a4c0e2ca033ea44f8..f9e114cfcb57e16f11db2ab897c7dde56c1d536f 100644
--- a/workflows/Chado_load_Tripal_synchronize.ga
+++ b/workflows/Chado_load_Tripal_synchronize.ga
@@ -16,7 +16,7 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 227
+                "top": 662.7333374023438
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
@@ -27,7 +27,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "08f77326-1435-4fad-a610-e56f06a0ced5"
+                    "uuid": "df303ec8-a34f-47ff-8e23-f6fd4f7a4a25"
                 }
             ]
         },
@@ -43,7 +43,7 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 317
+                "top": 707.7333374023438
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
@@ -54,7 +54,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "e8f16746-c91f-4147-b460-f1052650e699"
+                    "uuid": "cae20d90-587c-4b3b-b3a4-5aedb8dde3bf"
                 }
             ]
         },
@@ -70,7 +70,7 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 407
+                "top": 752.7333374023438
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
@@ -81,7 +81,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "98ade546-95a4-477c-bd19-38d278849b98"
+                    "uuid": "79cc48a0-de9b-45d1-8372-7dabded47796"
                 }
             ]
         },
@@ -97,7 +97,7 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 497
+                "top": 797.7333374023438
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
@@ -108,7 +108,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "41417aad-64e5-4480-9c36-842d0dda1c6d"
+                    "uuid": "00f82694-ec6e-471c-90ab-66311651c023"
                 }
             ]
         },
@@ -146,8 +146,8 @@
                 }
             ],
             "position": {
-                "left": 486,
-                "top": 227
+                "left": 343,
+                "top": 662.7333374023438
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.3",
@@ -171,9 +171,62 @@
         },
         "5": {
             "annotation": "",
-            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.3",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/iuc/jbrowse/jbrowse/1.16.10+galaxy0",
             "errors": null,
             "id": 5,
+            "input_connections": {
+                "reference_genome|genome": {
+                    "id": 0,
+                    "output_name": "output"
+                },
+                "track_groups_0|data_tracks_0|data_format|annotation": {
+                    "id": 1,
+                    "output_name": "output"
+                }
+            },
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool JBrowse",
+                    "name": "reference_genome"
+                }
+            ],
+            "label": null,
+            "name": "JBrowse",
+            "outputs": [
+                {
+                    "name": "output",
+                    "type": "html"
+                }
+            ],
+            "position": {
+                "left": 343,
+                "top": 748.7333374023438
+            },
+            "post_job_actions": {},
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/iuc/jbrowse/jbrowse/1.16.10+galaxy0",
+            "tool_shed_repository": {
+                "changeset_revision": "8774b28235bb",
+                "name": "jbrowse",
+                "owner": "iuc",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"action\": {\"action_select\": \"create\", \"__current_case__\": 0}, \"gencode\": \"1\", \"jbgen\": {\"defaultLocation\": \"\", \"trackPadding\": \"20\", \"shareLink\": \"true\", \"aboutDescription\": \"\", \"show_tracklist\": \"true\", \"show_nav\": \"true\", \"show_overview\": \"true\", \"show_menu\": \"true\", \"hideGenomeOptions\": \"false\"}, \"plugins\": {\"BlastView\": \"true\", \"ComboTrackSelector\": \"false\", \"GCContent\": \"false\"}, \"reference_genome\": {\"genome_type_select\": \"history\", \"__current_case__\": 1, \"genome\": {\"__class__\": \"RuntimeValue\"}}, \"standalone\": \"minimal\", \"track_groups\": [{\"__index__\": 0, \"category\": \"Default\", \"data_tracks\": [{\"__index__\": 0, \"data_format\": {\"data_format_select\": \"gene_calls\", \"__current_case__\": 2, \"annotation\": {\"__class__\": \"RuntimeValue\"}, \"match_part\": {\"match_part_select\": \"false\", \"__current_case__\": 1}, \"index\": \"false\", \"track_config\": {\"track_class\": \"NeatHTMLFeatures/View/Track/NeatFeatures\", \"__current_case__\": 3, \"html_options\": {\"topLevelFeatures\": \"\"}}, \"jbstyle\": {\"style_classname\": \"feature\", \"style_label\": \"product,name,id\", \"style_description\": \"note,description\", \"style_height\": \"10px\", \"max_height\": \"600\"}, \"jbcolor_scale\": {\"color_score\": {\"color_score_select\": \"none\", \"__current_case__\": 0, \"color\": {\"color_select\": \"automatic\", \"__current_case__\": 0}}}, \"jb_custom_config\": {\"option\": []}, \"jbmenu\": {\"track_menu\": [{\"__index__\": 0, \"menu_action\": \"iframeDialog\", \"menu_label\": \"View transcript report\", \"menu_title\": \"Transcript {id}\", \"menu_url\": {\"__class__\": \"RuntimeValue\"}, \"menu_icon\": \"dijitIconBookmark\"}]}, \"track_visibility\": \"default_off\", \"override_apollo_plugins\": \"False\", \"override_apollo_drag\": \"False\"}}]}], \"uglyTestingHack\": \"\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "1.16.10+galaxy0",
+            "type": "tool",
+            "uuid": "00657cb2-12f9-4f93-98da-04feac9e1388",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "6cbfa232-911e-49b9-96ad-fa9ed236f806"
+                }
+            ]
+        },
+        "6": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.3",
+            "errors": null,
+            "id": 6,
             "input_connections": {
                 "fasta": {
                     "id": 2,
@@ -193,21 +246,9 @@
                     "description": "runtime parameter for tool Chado load gff",
                     "name": "analysis_id"
                 },
-                {
-                    "description": "runtime parameter for tool Chado load gff",
-                    "name": "fasta"
-                },
-                {
-                    "description": "runtime parameter for tool Chado load gff",
-                    "name": "gff"
-                },
                 {
                     "description": "runtime parameter for tool Chado load gff",
                     "name": "organism"
-                },
-                {
-                    "description": "runtime parameter for tool Chado load gff",
-                    "name": "wait_for"
                 }
             ],
             "label": null,
@@ -219,8 +260,8 @@
                 }
             ],
             "position": {
-                "left": 772,
-                "top": 227
+                "left": 486,
+                "top": 743.2333374023438
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.3",
@@ -230,7 +271,7 @@
                 "owner": "gga",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"add_only\": \"false\", \"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"fasta\": {\"__class__\": \"RuntimeValue\"}, \"gff\": {\"__class__\": \"RuntimeValue\"}, \"landmark_type\": \"contig\", \"no_seq_compute\": \"false\", \"organism\": {\"__class__\": \"RuntimeValue\"}, \"prot_naming\": {\"method\": \"regex\", \"__current_case__\": 1, \"re_protein_capture\": \"^mRNA(\\\\..+)$\", \"re_protein\": \"protein\\\\1\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_state": "{\"add_only\": \"false\", \"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"fasta\": {\"__class__\": \"ConnectedValue\"}, \"gff\": {\"__class__\": \"ConnectedValue\"}, \"landmark_type\": \"contig\", \"no_seq_compute\": \"false\", \"organism\": {\"__class__\": \"RuntimeValue\"}, \"prot_naming\": {\"method\": \"regex\", \"__current_case__\": 1, \"re_protein_capture\": \"^mRNA(\\\\..+)$\", \"re_protein\": \"protein\\\\1\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "2.3.3",
             "type": "tool",
             "uuid": "b100a055-0dab-4f2f-8c46-573713ed3fff",
@@ -242,14 +283,58 @@
                 }
             ]
         },
-        "6": {
+        "7": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/jbrowse_to_container/jbrowse_to_container/0.5.1",
+            "errors": null,
+            "id": 7,
+            "input_connections": {
+                "organisms_0|jbrowse": {
+                    "id": 5,
+                    "output_name": "output"
+                }
+            },
+            "inputs": [],
+            "label": null,
+            "name": "Add organisms to JBrowse container",
+            "outputs": [
+                {
+                    "name": "output",
+                    "type": "html"
+                }
+            ],
+            "position": {
+                "left": 486,
+                "top": 662.7333374023438
+            },
+            "post_job_actions": {},
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/jbrowse_to_container/jbrowse_to_container/0.5.1",
+            "tool_shed_repository": {
+                "changeset_revision": "11033bdad2ca",
+                "name": "jbrowse_to_container",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"organisms\": [{\"__index__\": 0, \"jbrowse\": {\"__class__\": \"RuntimeValue\"}, \"name\": {\"__class__\": \"RuntimeValue\"}, \"advanced\": {\"unique_id\": {\"__class__\": \"RuntimeValue\"}}}], \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "0.5.1",
+            "type": "tool",
+            "uuid": "7b7cca87-4000-45de-93a5-bd22cd661d0a",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "8c23d473-4ffa-4a66-b071-aeecc105a529"
+                }
+            ]
+        },
+        "8": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0",
             "errors": null,
-            "id": 6,
+            "id": 8,
             "input_connections": {
                 "wait_for": {
-                    "id": 5,
+                    "id": 6,
                     "output_name": "results"
                 }
             },
@@ -268,8 +353,8 @@
                 }
             ],
             "position": {
-                "left": 1058,
-                "top": 227
+                "left": 629,
+                "top": 662.7333374023438
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0",
@@ -291,14 +376,14 @@
                 }
             ]
         },
-        "7": {
+        "9": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
             "errors": null,
-            "id": 7,
+            "id": 9,
             "input_connections": {
                 "wait_for": {
-                    "id": 6,
+                    "id": 8,
                     "output_name": "results"
                 }
             },
@@ -317,8 +402,8 @@
                 }
             ],
             "position": {
-                "left": 1344,
-                "top": 227
+                "left": 772,
+                "top": 662.7333374023438
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
@@ -340,14 +425,14 @@
                 }
             ]
         },
-        "8": {
+        "10": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
             "errors": null,
-            "id": 8,
+            "id": 10,
             "input_connections": {
                 "wait_for": {
-                    "id": 7,
+                    "id": 9,
                     "output_name": "results"
                 }
             },
@@ -366,8 +451,8 @@
                 }
             ],
             "position": {
-                "left": 1630,
-                "top": 227
+                "left": 915,
+                "top": 662.7333374023438
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
@@ -389,14 +474,14 @@
                 }
             ]
         },
-        "9": {
+        "11": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0",
             "errors": null,
-            "id": 9,
+            "id": 11,
             "input_connections": {
                 "wait_for": {
-                    "id": 8,
+                    "id": 10,
                     "output_name": "results"
                 }
             },
@@ -415,8 +500,8 @@
                 }
             ],
             "position": {
-                "left": 1916,
-                "top": 227
+                "left": 1058,
+                "top": 662.7333374023438
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0",
@@ -440,6 +525,6 @@
         }
     },
     "tags": [],
-    "uuid": "00c4d6e1-f470-4b31-9cc1-402aa9b598e2",
-    "version": 1
+    "uuid": "69699b44-94c8-4cc7-977e-74f266e58fdf",
+    "version": 3
 }
\ No newline at end of file
diff --git a/workflows/Chado_load_Tripal_synchronize.ga.bak b/workflows/Chado_load_Tripal_synchronize.ga.bak
index a194b03268c48c4300a3b2a0f6b759cbf0fb6c6b..c8ce7a4549828092413ddc6a4c0e2ca033ea44f8 100644
--- a/workflows/Chado_load_Tripal_synchronize.ga.bak
+++ b/workflows/Chado_load_Tripal_synchronize.ga.bak
@@ -16,14 +16,20 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 200
+                "top": 227
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
             "tool_version": null,
             "type": "data_input",
             "uuid": "89e7487e-004d-4db1-b5eb-1676b98aebde",
-            "workflow_outputs": []
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "08f77326-1435-4fad-a610-e56f06a0ced5"
+                }
+            ]
         },
         "1": {
             "annotation": "",
@@ -37,14 +43,20 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 290
+                "top": 317
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
             "tool_version": null,
             "type": "data_input",
             "uuid": "1d25f54c-7575-4c8d-be55-73dd7e58613f",
-            "workflow_outputs": []
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "e8f16746-c91f-4147-b460-f1052650e699"
+                }
+            ]
         },
         "2": {
             "annotation": "",
@@ -58,14 +70,20 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 380
+                "top": 407
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
             "tool_version": null,
             "type": "data_input",
             "uuid": "6c1a20fa-828a-404c-b107-76fb8ddf3954",
-            "workflow_outputs": []
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "98ade546-95a4-477c-bd19-38d278849b98"
+                }
+            ]
         },
         "3": {
             "annotation": "",
@@ -79,38 +97,44 @@
             "outputs": [],
             "position": {
                 "left": 200,
-                "top": 470
+                "top": 497
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
             "tool_version": null,
             "type": "data_input",
             "uuid": "74f22d9b-e764-45e4-b0eb-579c9b647ea0",
-            "workflow_outputs": []
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "41417aad-64e5-4480-9c36-842d0dda1c6d"
+                }
+            ]
         },
         "4": {
             "annotation": "",
-            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.4+galaxy0",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.3",
             "errors": null,
             "id": 4,
             "input_connections": {
                 "fasta": {
-                    "id": 3,
+                    "id": 0,
                     "output_name": "output"
                 },
                 "wait_for": {
-                    "id": 3,
+                    "id": 0,
                     "output_name": "output"
                 }
             },
             "inputs": [
                 {
                     "description": "runtime parameter for tool Chado load fasta",
-                    "name": "fasta"
+                    "name": "analysis_id"
                 },
                 {
                     "description": "runtime parameter for tool Chado load fasta",
-                    "name": "wait_for"
+                    "name": "organism"
                 }
             ],
             "label": null,
@@ -123,31 +147,31 @@
             ],
             "position": {
                 "left": 486,
-                "top": 200
+                "top": 227
             },
             "post_job_actions": {},
-            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.4+galaxy0",
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.3",
             "tool_shed_repository": {
-                "changeset_revision": "ba4d07fbaf47",
+                "changeset_revision": "a7ab30ded37d",
                 "name": "chado_feature_load_fasta",
                 "owner": "gga",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"analysis_id\": \"1\", \"do_update\": \"false\", \"ext_db\": {\"db\": \"\", \"re_db_accession\": \"\"}, \"fasta\": {\"__class__\": \"RuntimeValue\"}, \"match_on_name\": \"false\", \"organism\": \"2\", \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"re_name\": \"\", \"re_uniquename\": \"\", \"relationships\": {\"rel_type\": \"none\", \"__current_case__\": 0}, \"sequence_type\": \"contig\", \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
-            "tool_version": "2.3.4+galaxy0",
+            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"do_update\": \"false\", \"ext_db\": {\"db\": \"\", \"re_db_accession\": \"\"}, \"fasta\": {\"__class__\": \"ConnectedValue\"}, \"match_on_name\": \"false\", \"organism\": {\"__class__\": \"RuntimeValue\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"re_name\": \"\", \"re_uniquename\": \"\", \"relationships\": {\"rel_type\": \"none\", \"__current_case__\": 0}, \"sequence_type\": \"contig\", \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "2.3.3",
             "type": "tool",
-            "uuid": "ed72bf37-aa81-4b25-8ab4-dccb54bc68d9",
+            "uuid": "f7a44182-3620-4a19-9e67-94fe584d4206",
             "workflow_outputs": [
                 {
                     "label": null,
                     "output_name": "results",
-                    "uuid": "c617e0d3-a44c-4fb1-b831-22a487a6be6a"
+                    "uuid": "24ed7255-b4a6-4000-b82b-c18d81822262"
                 }
             ]
         },
         "5": {
             "annotation": "",
-            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.4+galaxy0",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.3",
             "errors": null,
             "id": 5,
             "input_connections": {
@@ -165,6 +189,10 @@
                 }
             },
             "inputs": [
+                {
+                    "description": "runtime parameter for tool Chado load gff",
+                    "name": "analysis_id"
+                },
                 {
                     "description": "runtime parameter for tool Chado load gff",
                     "name": "fasta"
@@ -173,6 +201,10 @@
                     "description": "runtime parameter for tool Chado load gff",
                     "name": "gff"
                 },
+                {
+                    "description": "runtime parameter for tool Chado load gff",
+                    "name": "organism"
+                },
                 {
                     "description": "runtime parameter for tool Chado load gff",
                     "name": "wait_for"
@@ -188,25 +220,25 @@
             ],
             "position": {
                 "left": 772,
-                "top": 200
+                "top": 227
             },
             "post_job_actions": {},
-            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.4+galaxy0",
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.3",
             "tool_shed_repository": {
-                "changeset_revision": "e9a6d7568817",
+                "changeset_revision": "6fdfbf0caa8c",
                 "name": "chado_feature_load_gff",
                 "owner": "gga",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"add_only\": \"false\", \"analysis_id\": \"1\", \"fasta\": {\"__class__\": \"RuntimeValue\"}, \"gff\": {\"__class__\": \"RuntimeValue\"}, \"landmark_type\": \"\", \"no_seq_compute\": \"false\", \"organism\": \"2\", \"prot_naming\": {\"method\": \"auto\", \"__current_case__\": 0}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
-            "tool_version": "2.3.4+galaxy0",
+            "tool_state": "{\"add_only\": \"false\", \"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"fasta\": {\"__class__\": \"RuntimeValue\"}, \"gff\": {\"__class__\": \"RuntimeValue\"}, \"landmark_type\": \"contig\", \"no_seq_compute\": \"false\", \"organism\": {\"__class__\": \"RuntimeValue\"}, \"prot_naming\": {\"method\": \"regex\", \"__current_case__\": 1, \"re_protein_capture\": \"^mRNA(\\\\..+)$\", \"re_protein\": \"protein\\\\1\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "2.3.3",
             "type": "tool",
-            "uuid": "0b5746d7-952d-4aff-b688-4666c13cab8a",
+            "uuid": "b100a055-0dab-4f2f-8c46-573713ed3fff",
             "workflow_outputs": [
                 {
                     "label": null,
                     "output_name": "results",
-                    "uuid": "5da80c86-c510-425a-b8e1-475ab26436f3"
+                    "uuid": "d0ce6d85-fb8b-4509-9784-a8ded1aeae05"
                 }
             ]
         },
@@ -237,15 +269,9 @@
             ],
             "position": {
                 "left": 1058,
-                "top": 200
-            },
-            "post_job_actions": {
-                "HideDatasetActionresults": {
-                    "action_arguments": {},
-                    "action_type": "HideDatasetAction",
-                    "output_name": "results"
-                }
+                "top": 227
             },
+            "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0",
             "tool_shed_repository": {
                 "changeset_revision": "afd5d92745fb",
@@ -256,8 +282,14 @@
             "tool_state": "{\"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "3.2.1.0",
             "type": "tool",
-            "uuid": "05314408-41fa-4a2f-8aae-3988e2d899f6",
-            "workflow_outputs": []
+            "uuid": "040183a5-1aba-4bf6-9669-c6e93cfff3ea",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "results",
+                    "uuid": "f1f2dc7a-37e2-4bd3-b997-8f1e3eed11aa"
+                }
+            ]
         },
         "7": {
             "annotation": "",
@@ -286,15 +318,9 @@
             ],
             "position": {
                 "left": 1344,
-                "top": 200
-            },
-            "post_job_actions": {
-                "HideDatasetActionresults": {
-                    "action_arguments": {},
-                    "action_type": "HideDatasetAction",
-                    "output_name": "results"
-                }
+                "top": 227
             },
+            "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
             "tool_shed_repository": {
                 "changeset_revision": "f487ff676088",
@@ -305,8 +331,14 @@
             "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "3.2.1.0",
             "type": "tool",
-            "uuid": "44c7cc7c-0848-47a7-872c-351f057803c1",
-            "workflow_outputs": []
+            "uuid": "6096e174-c85d-4e50-916f-a396d58a909b",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "results",
+                    "uuid": "a309f4e4-3402-4eb5-8b70-1acfa1f26bb4"
+                }
+            ]
         },
         "8": {
             "annotation": "",
@@ -335,15 +367,9 @@
             ],
             "position": {
                 "left": 1630,
-                "top": 200
-            },
-            "post_job_actions": {
-                "HideDatasetActionresults": {
-                    "action_arguments": {},
-                    "action_type": "HideDatasetAction",
-                    "output_name": "results"
-                }
+                "top": 227
             },
+            "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
             "tool_shed_repository": {
                 "changeset_revision": "f487ff676088",
@@ -354,8 +380,14 @@
             "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "3.2.1.0",
             "type": "tool",
-            "uuid": "8ce8c990-39ce-4725-892b-4216a75f487d",
-            "workflow_outputs": []
+            "uuid": "d6072bb6-036d-4fbf-893c-d25f139a05ac",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "results",
+                    "uuid": "ff817e04-d0ce-461f-a97a-843e36a25aca"
+                }
+            ]
         },
         "9": {
             "annotation": "",
@@ -384,15 +416,9 @@
             ],
             "position": {
                 "left": 1916,
-                "top": 200
-            },
-            "post_job_actions": {
-                "HideDatasetActionresults": {
-                    "action_arguments": {},
-                    "action_type": "HideDatasetAction",
-                    "output_name": "results"
-                }
+                "top": 227
             },
+            "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0",
             "tool_shed_repository": {
                 "changeset_revision": "64e36c3f0dd6",
@@ -400,14 +426,20 @@
                 "owner": "gga",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"repeat_ids\": [], \"repeat_types\": [{\"__index__\": 0, \"types\": \"mRNA\"}, {\"__index__\": 1, \"types\": \"popylpeptide\"}], \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_state": "{\"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"repeat_ids\": [], \"repeat_types\": [{\"__index__\": 0, \"types\": \"mRNA\"}, {\"__index__\": 1, \"types\": \"polypeptide\"}], \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "3.2.1.0",
             "type": "tool",
-            "uuid": "04600903-dd16-4db1-b562-552aeb003e6c",
-            "workflow_outputs": []
+            "uuid": "9d39d394-7375-4187-86f3-6dc676c9ca30",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "results",
+                    "uuid": "5047a899-60e2-4acc-a95f-4f60f83c32ae"
+                }
+            ]
         }
     },
     "tags": [],
-    "uuid": "4c66363e-ff14-4c79-8edf-9ab05cafa33d",
-    "version": 2
+    "uuid": "00c4d6e1-f470-4b31-9cc1-402aa9b598e2",
+    "version": 1
 }
\ No newline at end of file
diff --git a/workflows/Jbrowse.ga b/workflows/Jbrowse.ga
index 5c4ac9510ec3f8d69e5b5a638260ddd3e9b34761..cbbc20053e70212bf651a7eda6da2249c4f89cf5 100644
--- a/workflows/Jbrowse.ga
+++ b/workflows/Jbrowse.ga
@@ -27,7 +27,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "6015fbdd-dc71-4fb8-916d-2241f37693c4"
+                    "uuid": "6fddadc5-3420-4747-920f-7816c926f16b"
                 }
             ]
         },
@@ -54,7 +54,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "c0667455-6271-4d1f-9388-75ec65762bb4"
+                    "uuid": "d29ec40e-71fb-4960-94d5-af4666ad1c1d"
                 }
             ]
         },
@@ -99,7 +99,7 @@
                 "owner": "iuc",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"action\": {\"action_select\": \"create\", \"__current_case__\": 0}, \"gencode\": \"1\", \"jbgen\": {\"defaultLocation\": \"\", \"trackPadding\": \"20\", \"shareLink\": \"true\", \"aboutDescription\": \"\", \"show_tracklist\": \"true\", \"show_nav\": \"true\", \"show_overview\": \"true\", \"show_menu\": \"true\", \"hideGenomeOptions\": \"false\"}, \"plugins\": {\"BlastView\": \"true\", \"ComboTrackSelector\": \"false\", \"GCContent\": \"false\"}, \"reference_genome\": {\"genome_type_select\": \"history\", \"__current_case__\": 1, \"genome\": {\"__class__\": \"RuntimeValue\"}}, \"standalone\": \"minimal\", \"track_groups\": [{\"__index__\": 0, \"category\": \"Annotation\", \"data_tracks\": [{\"__index__\": 0, \"data_format\": {\"data_format_select\": \"gene_calls\", \"__current_case__\": 2, \"annotation\": {\"__class__\": \"RuntimeValue\"}, \"match_part\": {\"match_part_select\": \"false\", \"__current_case__\": 1}, \"index\": \"false\", \"track_config\": {\"track_class\": \"NeatHTMLFeatures/View/Track/NeatFeatures\", \"__current_case__\": 3, \"html_options\": {\"topLevelFeatures\": \"\"}}, \"jbstyle\": {\"style_classname\": \"transcript\", \"style_label\": \"product,name,id\", \"style_description\": \"note,description\", \"style_height\": \"10px\", \"max_height\": \"600\"}, \"jbcolor_scale\": {\"color_score\": {\"color_score_select\": \"none\", \"__current_case__\": 0, \"color\": {\"color_select\": \"automatic\", \"__current_case__\": 0}}}, \"jb_custom_config\": {\"option\": []}, \"jbmenu\": {\"track_menu\": [{\"__index__\": 0, \"menu_action\": \"iframeDialog\", \"menu_label\": \"View transcript report\", \"menu_title\": \"Transcript {id}\", \"menu_url\": {\"__class__\": \"RuntimeValue\"}, \"menu_icon\": \"dijitIconBookmark\"}]}, \"track_visibility\": \"default_off\", \"override_apollo_plugins\": \"False\", \"override_apollo_drag\": \"False\"}}]}], \"uglyTestingHack\": \"\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_state": "{\"action\": {\"action_select\": \"create\", \"__current_case__\": 0}, \"gencode\": \"1\", \"jbgen\": {\"defaultLocation\": \"test\", \"trackPadding\": \"20\", \"shareLink\": \"true\", \"aboutDescription\": \"test\", \"show_tracklist\": \"true\", \"show_nav\": \"true\", \"show_overview\": \"true\", \"show_menu\": \"true\", \"hideGenomeOptions\": \"false\"}, \"plugins\": {\"BlastView\": \"true\", \"ComboTrackSelector\": \"false\", \"GCContent\": \"false\"}, \"reference_genome\": {\"genome_type_select\": \"history\", \"__current_case__\": 1, \"genome\": {\"__class__\": \"RuntimeValue\"}}, \"standalone\": \"minimal\", \"track_groups\": [{\"__index__\": 0, \"category\": \"Annotation\", \"data_tracks\": [{\"__index__\": 0, \"data_format\": {\"data_format_select\": \"gene_calls\", \"__current_case__\": 2, \"annotation\": {\"__class__\": \"RuntimeValue\"}, \"match_part\": {\"match_part_select\": \"false\", \"__current_case__\": 1}, \"index\": \"false\", \"track_config\": {\"track_class\": \"NeatHTMLFeatures/View/Track/NeatFeatures\", \"__current_case__\": 3, \"html_options\": {\"topLevelFeatures\": \"mRNA\"}}, \"jbstyle\": {\"style_classname\": \"transcript\", \"style_label\": \"product,name,id\", \"style_description\": \"note,description\", \"style_height\": \"10px\", \"max_height\": \"600\"}, \"jbcolor_scale\": {\"color_score\": {\"color_score_select\": \"none\", \"__current_case__\": 0, \"color\": {\"color_select\": \"automatic\", \"__current_case__\": 0}}}, \"jb_custom_config\": {\"option\": []}, \"jbmenu\": {\"track_menu\": [{\"__index__\": 0, \"menu_action\": \"iframeDialog\", \"menu_label\": \"View transcript report\", \"menu_title\": \"Transcript {id}\", \"menu_url\": {\"__class__\": \"RuntimeValue\"}, \"menu_icon\": \"dijitIconBookmark\"}]}, \"track_visibility\": \"default_off\", \"override_apollo_plugins\": \"False\", \"override_apollo_drag\": \"False\"}}]}], \"uglyTestingHack\": \"\", \"__page__\": null, \"__rerun_remap_job_id__\": 
null}",
             "tool_version": "1.16.10+galaxy0",
             "type": "tool",
             "uuid": "ba7d15fd-8ffd-407d-9a45-47cd4be68bd2",
@@ -143,7 +143,7 @@
                 "owner": "gga",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"organisms\": [{\"__index__\": 0, \"jbrowse\": {\"__class__\": \"RuntimeValue\"}, \"name\": {\"__class__\": \"RuntimeValue\"}, \"advanced\": {\"unique_id\": \"\"}}], \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_state": "{\"organisms\": [{\"__index__\": 0, \"jbrowse\": {\"__class__\": \"ConnectedValue\"}, \"name\": {\"__class__\": \"RuntimeValue\"}, \"advanced\": {\"unique_id\": {\"__class__\": \"RuntimeValue\"}}}], \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "0.5.1",
             "type": "tool",
             "uuid": "1cf25ca3-2287-4b82-9e93-b8828eed70a2",
@@ -157,6 +157,6 @@
         }
     },
     "tags": [],
-    "uuid": "e12dd4c9-0012-4d14-9cd2-8313a99c71cc",
-    "version": 1
+    "uuid": "7745ddc9-190a-436d-9bd3-2318e9d568a8",
+    "version": 0
 }
\ No newline at end of file
diff --git a/workflows/Jbrowse.ga.bak b/workflows/Jbrowse.ga.bak
new file mode 100644
index 0000000000000000000000000000000000000000..7e2c31e55b2f808411ddeef4aa15937f0fac7767
--- /dev/null
+++ b/workflows/Jbrowse.ga.bak
@@ -0,0 +1,157 @@
+{
+    "a_galaxy_workflow": "true",
+    "annotation": "",
+    "format-version": "0.1",
+    "name": "Jbrowse",
+    "steps": {
+        "0": {
+            "annotation": "",
+            "content_id": null,
+            "errors": null,
+            "id": 0,
+            "input_connections": {},
+            "inputs": [],
+            "label": null,
+            "name": "Input dataset",
+            "outputs": [],
+            "position": {
+                "left": 200,
+                "top": 200
+            },
+            "tool_id": null,
+            "tool_state": "{\"optional\": false}",
+            "tool_version": null,
+            "type": "data_input",
+            "uuid": "751caac1-d015-4d77-8a68-2c3debae0caf",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "6955cc2b-d4d6-484b-8a89-6e4c5dcca879"
+                }
+            ]
+        },
+        "1": {
+            "annotation": "",
+            "content_id": null,
+            "errors": null,
+            "id": 1,
+            "input_connections": {},
+            "inputs": [],
+            "label": null,
+            "name": "Input dataset",
+            "outputs": [],
+            "position": {
+                "left": 200,
+                "top": 290
+            },
+            "tool_id": null,
+            "tool_state": "{\"optional\": false}",
+            "tool_version": null,
+            "type": "data_input",
+            "uuid": "5cb81c38-64fe-4bdc-9043-bd862bdefc6d",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "2b6950b9-6e05-478b-a548-66de8230d217"
+                }
+            ]
+        },
+        "2": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/iuc/jbrowse/jbrowse/1.16.10+galaxy0",
+            "errors": null,
+            "id": 2,
+            "input_connections": {
+                "reference_genome|genome": {
+                    "id": 0,
+                    "output_name": "output"
+                },
+                "track_groups_0|data_tracks_0|data_format|annotation": {
+                    "id": 1,
+                    "output_name": "output"
+                }
+            },
+            "inputs": [],
+            "label": null,
+            "name": "JBrowse",
+            "outputs": [
+                {
+                    "name": "output",
+                    "type": "html"
+                }
+            ],
+            "position": {
+                "left": 486,
+                "top": 200
+            },
+            "post_job_actions": {},
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/iuc/jbrowse/jbrowse/1.16.10+galaxy0",
+            "tool_shed_repository": {
+                "changeset_revision": "8774b28235bb",
+                "name": "jbrowse",
+                "owner": "iuc",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"action\": {\"action_select\": \"create\", \"__current_case__\": 0}, \"gencode\": \"1\", \"jbgen\": {\"defaultLocation\": \"\", \"trackPadding\": \"20\", \"shareLink\": \"true\", \"aboutDescription\": \"\", \"show_tracklist\": \"true\", \"show_nav\": \"true\", \"show_overview\": \"true\", \"show_menu\": \"true\", \"hideGenomeOptions\": \"false\"}, \"plugins\": {\"BlastView\": \"true\", \"ComboTrackSelector\": \"false\", \"GCContent\": \"false\"}, \"reference_genome\": {\"genome_type_select\": \"history\", \"__current_case__\": 1, \"genome\": {\"__class__\": \"ConnectedValue\"}}, \"standalone\": \"minimal\", \"track_groups\": [{\"__index__\": 0, \"category\": \"Annotation\", \"data_tracks\": [{\"__index__\": 0, \"data_format\": {\"data_format_select\": \"gene_calls\", \"__current_case__\": 2, \"annotation\": {\"__class__\": \"ConnectedValue\"}, \"match_part\": {\"match_part_select\": \"false\", \"__current_case__\": 1}, \"index\": \"false\", \"track_config\": {\"track_class\": \"NeatHTMLFeatures/View/Track/NeatFeatures\", \"__current_case__\": 3, \"html_options\": {\"topLevelFeatures\": \"\"}}, \"jbstyle\": {\"style_classname\": \"transcript\", \"style_label\": \"product,name,id\", \"style_description\": \"note,description\", \"style_height\": \"10px\", \"max_height\": \"600\"}, \"jbcolor_scale\": {\"color_score\": {\"color_score_select\": \"none\", \"__current_case__\": 0, \"color\": {\"color_select\": \"automatic\", \"__current_case__\": 0}}}, \"jb_custom_config\": {\"option\": []}, \"jbmenu\": {\"track_menu\": [{\"__index__\": 0, \"menu_action\": \"iframeDialog\", \"menu_label\": \"View transcript report\", \"menu_title\": \"Transcript {id}\", \"menu_url\": {\"__class__\": \"RuntimeValue\"}, \"menu_icon\": \"dijitIconBookmark\"}]}, \"track_visibility\": \"default_off\", \"override_apollo_plugins\": \"False\", \"override_apollo_drag\": \"False\"}}]}], \"uglyTestingHack\": \"\", \"__page__\": "1", \"__rerun_remap_job_id__\": "True"}",
+            "tool_version": "1.16.10+galaxy0",
+            "type": "tool",
+            "uuid": "ba7d15fd-8ffd-407d-9a45-47cd4be68bd2",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "519355d7-82cc-47f0-a96c-3ee0e39aa7df"
+                }
+            ]
+        },
+        "3": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/jbrowse_to_container/jbrowse_to_container/0.5.1",
+            "errors": null,
+            "id": 3,
+            "input_connections": {
+                "organisms_0|jbrowse": {
+                    "id": 2,
+                    "output_name": "output"
+                }
+            },
+            "inputs": [],
+            "label": null,
+            "name": "Add organisms to JBrowse container",
+            "outputs": [
+                {
+                    "name": "output",
+                    "type": "html"
+                }
+            ],
+            "position": {
+                "left": 772,
+                "top": 200
+            },
+            "post_job_actions": {},
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/jbrowse_to_container/jbrowse_to_container/0.5.1",
+            "tool_shed_repository": {
+                "changeset_revision": "11033bdad2ca",
+                "name": "jbrowse_to_container",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"organisms\": [{\"__index__\": 0, \"jbrowse\": {\"__class__\": \"ConnectedValue\"}, \"name\": {\"__class__\": \"RuntimeValue\"}, \"advanced\": {\"unique_id\": {\"__class__\": \"RuntimeValue\"}}}], \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "0.5.1",
+            "type": "tool",
+            "uuid": "1cf25ca3-2287-4b82-9e93-b8828eed70a2",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "output",
+                    "uuid": "f78c7496-18a9-4c47-ad7f-a3ac31456749"
+                }
+            ]
+        }
+    },
+    "tags": [],
+    "uuid": "82768602-9800-4868-ac2b-a5cbacb79b8a",
+    "version": 2
+}