diff --git a/examples/input_demo.yaml b/examples/input_demo.yaml
index 9948a4b02e2028c844774d16b53a9d451788aeb0..79aaaa467ed75781049149e13819a8132d05e7b1 100644
--- a/examples/input_demo.yaml
+++ b/examples/input_demo.yaml
@@ -38,23 +38,4 @@ ectocarpus_sp2_male:  # Dummy value the user gives to designate the species (isn
     genome_version: "1.0"
     # Same as genome version, but for the analysis
     ogs_version: ""
-    performed_by: ""
-
-# Second example without the comments doc
-ectocarpus_sp2_female:
-  description:
-    genus: "ectocarpus"
-    species: "sp4"
-    sex: "female"
-    strain: ""
-    common_name: ""
-    origin: ""
-  data:
-    parent_directory: "/path/to/closest/parent/dir"
-    genome_path: "/path/to/fasta"
-    transcripts_path: "/path/to/fasta"
-    proteins_path: "/path/to/fasta"
-    gff_path: "/path/to/gff"
-    genome_version: "1.0"
-    ogs_version: "1.0"
-    performed_by: ""
+    performed_by: ""
\ No newline at end of file
diff --git a/examples/example.yml b/examples/input_example.yml
similarity index 100%
rename from examples/example.yml
rename to examples/input_example.yml
diff --git a/gga_init.py b/gga_init.py
index 393e5ccd4f487d772f360bf3290b7459a96bce25..b77251c406ec4e091f1e98fe63501dc4dcab2cd7 100644
--- a/gga_init.py
+++ b/gga_init.py
@@ -54,7 +54,7 @@ class DeploySpeciesStack(speciesData.SpeciesData):
             sys.exit()
 
         # Copy the custom banner to the species dir (banner used in tripal pages)
-        if not self.config["custom_banner"] or self.config["custom_banner"] == "/path/to/banner" or self.config["custom_banner"] == "":
+        if self.config["custom_banner"] and self.config["custom_banner"] != "/path/to/banner" and self.config["custom_banner"] != "":
             try:
                 if os.path.isfile(os.path.abspath(self.config["custom_banner"])):
                     shutil.copy(os.path.abspath(self.config["custom_banner"]), "%s/banner.png" % self.species_dir)
diff --git a/gga_load_data.py b/gga_load_data.py
index ea604318827eca75e3515be37f5a2c74c7c1db83..945d85d3df23c391d0879b00ce56b51b89d7120e 100644
--- a/gga_load_data.py
+++ b/gga_load_data.py
@@ -124,6 +124,8 @@ class LoadData(speciesData.SpeciesData):
         Find source data files in the parent_directory
         Link data files
 
+        TODO: implement search/tests for individual file paths
+
         :return:
         """
 
@@ -483,7 +485,7 @@ class LoadData(speciesData.SpeciesData):
                         # Need to do it AFTER the datasets import is finished, otherwise the new names are not kept by galaxy
                         # (erased by metadata generation I guess)
 
-                        # ALB: Doesn't work for some reason (LibraryDataset not subscriptable, __getitem__() not implemented)
+                        # Doesn't work for some reason (LibraryDataset not subscriptable, __getitem__() not implemented)
                         # post_renaming[datasets[0]] = clean_name
 
                     time.sleep(1)
@@ -508,7 +510,7 @@ class LoadData(speciesData.SpeciesData):
 
         time.sleep(10)
 
-        # ALB: Batch renaming --> Throws a critical error at the moment
+        # Batch renaming --> Throws a critical error at the moment
         # logging.info("Import finished, now renaming datasets with pretty names")
         # for dataset in post_renaming:
         #     dataset.update(name=post_renaming[dataset])
@@ -703,7 +705,7 @@ if __name__ == "__main__":
             load_data_for_current_species.setup_library()
             logging.info("Successfully set up library in galaxy for %s" % load_data_for_current_species.full_name)
 
-            # # Set or get the history for the current organism
+            # Set or get the history for the current organism
             load_data_for_current_species.set_get_history()
             
             # Remove H. sapiens from database if here TODO: set a dedicated history for removing H. sapiens (instead of doing it into a species history)
diff --git a/run_workflow_phaeoexplorer.py b/run_workflow_phaeoexplorer.py
index 5485c6cd207015faa8544cbef30bee1bfd731df9..984c00ffded5ce11cf032f10f01b8a481da14cea 100644
--- a/run_workflow_phaeoexplorer.py
+++ b/run_workflow_phaeoexplorer.py
@@ -8,6 +8,7 @@ import os
 import logging
 import sys
 import json
+import time
 import utilities
 import speciesData
 
@@ -218,16 +219,11 @@ class RunWorkflow(speciesData.SpeciesData):
         logging.info("Finished initializing instance")
 
 
-    def run_workflow(self, workflow_path, workflow_parameters, datamap):
+    def run_workflow(self, workflow_path, workflow_parameters, workflow_name, datamap):
         """
-        Run the "main" workflow in the galaxy instance
-        - import data to library
-        - load fasta and gff
-        - sync with tripal
-        - add jbrowse + organism
-        - fill in the tripal views
-
-        TODO: map tool name to step id
+        Run a workflow in galaxy
+        Requires the .ga file to be loaded as a dictionary (optionally could be uploaded as a raw file)
+
         :param workflow_name:
         :param workflow_parameters:
         :param datamap:
@@ -235,45 +231,21 @@ class RunWorkflow(speciesData.SpeciesData):
         """
 
         logging.info("Importing workflow: " + str(workflow_path))
-        workflow_name = "demo"  # for workflow demo
         workflow_ga_file = workflow_path
 
-        # Name the workflow in galaxy
-        if self.strain != "":
-            custom_ga_file = "_".join([self.genus, self.species, self.strain]) + "_workflow.ga"
-            custom_ga_file_path = os.path.abspath(custom_ga_file)
-        elif self.sex != "":
-            custom_ga_file = "_".join([self.genus, self.species, self.sex]) + "_workflow.ga"
-            custom_ga_file_path = os.path.abspath(custom_ga_file)
-        else:
-            custom_ga_file = "_".join([self.genus, self.species]) + "_workflow.ga"
-            custom_ga_file_path = os.path.abspath(custom_ga_file)
-
-        # Solving format issues in the .ga (encoding errors when importing the file via bioblend)
         with open(workflow_ga_file, 'r') as ga_in_file:
-            # workflow = str(ga_in_file.readlines())
-            # # Ugly fix for the jbrowse parameters (formatting) --> TODO: OBSOLETE (everything set at runtime)
-            # workflow = workflow.replace('{\\\\\\\\\\\\"unique_id\\\\\\\\\\\\": \\\\\\\\\\\\"UNIQUE_ID\\\\\\\\\\\\"}',
-            #                             str('{\\\\\\\\\\\\"unique_id\\\\\\\\\\\\": \\\\\\\\\\\\"' + self.genus + " " + self.species) + '\\\\\\\\\\\\"')
-            # workflow = workflow.replace('\\\\\\\\\\\\"name\\\\\\\\\\\\": \\\\\\\\\\\\"NAME\\\\\\\\\\\\"',
-            #                             str('\\\\\\\\\\\\"name\\\\\\\\\\\\": \\\\\\\\\\\\"' + self.genus.lower()[0] + self.species) + '\\\\\\\\\\\\"')
-            # workflow = workflow.replace("\\\\", "\\")  # to restore the correct amount of backslashes in the workflow string before import
-            
-            # # OBSOLETE
-            # workflow = workflow.replace('http://localhost/sp/genus_species/feature/Genus/species/mRNA/{id}',
-            #                             "http://" + self.config["custom_host"] + ":8888/sp/" + self.genus_lowercase+ "_" + self.species + "/feature/" + self.genus + "/mRNA/{id}")
-
-            # # The json dict might sometimes turn to be invalid for unknown reasons and the json module will fail to decode it (galaxy export error) 
-            # workflow = workflow[2:-2]  # if the line under doesn't output a correct json
-            # # workflow = workflow[:-2]  # if the line above doesn't output a correct json
-
             # Store the decoded json dictionary
             workflow_dict = json.load(ga_in_file)
 
             self.instance.workflows.import_workflow_dict(workflow_dict=workflow_dict)
             workflow_attributes = self.instance.workflows.get_workflows(name=workflow_name)
             workflow_id = workflow_attributes[0]["id"]
+            logging.debug("Workflow ID: %s" % workflow_id)
             show_workflow = self.instance.workflows.show_workflow(workflow_id=workflow_id)
+            if show_workflow is None:
+                logging.warning("Error retrieving workflow attributes for workflow %s" % workflow_name)
+            else:
+                logging.debug("Workflow attributes: %s" % show_workflow)
             logging.debug("Workflow ID: " + workflow_id)
 
             logging.info("Running workflow: %s" % workflow_name)
@@ -368,6 +340,16 @@ class RunWorkflow(speciesData.SpeciesData):
                          "genus": self.genus_uppercase,
                          "species": self.chado_species_name,
                          "common": self.common})
+        time.sleep(3)
+        # Run tool again (sometimes the tool doesn't return anything despite the organism already being in the db)
+        org = self.instance.tools.run_tool(
+            tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_organism_get_organisms/organism_get_organisms/2.3.3",
+            history_id=self.history_id,
+            tool_inputs={"abbr": self.abbreviation,
+                         "genus": self.genus_uppercase,
+                         "species": self.chado_species_name,
+                         "common": self.common})
+        time.sleep(10)
         org_job_out = org["outputs"][0]["id"]
         org_json_output = self.instance.datasets.download_dataset(dataset_id=org_job_out)
         try:
@@ -478,7 +460,6 @@ if __name__ == "__main__":
         # Set the instance url attribute
         for env_variable, value in run_workflow_for_current_organism.config.items():
             if env_variable == "custom_host":
-                # TODO:
                 run_workflow_for_current_organism.instance_url = "http://{0}:8888/sp/{1}_{2}/galaxy/".format(
                     value, run_workflow_for_current_organism.genus_lowercase, run_workflow_for_current_organism.species)
                 break
@@ -491,95 +472,60 @@ if __name__ == "__main__":
         run_workflow_for_current_organism.set_get_history()
         # run_workflow_for_current_organism.get_species_history_id()
 
-        # Prepare the instance+history for the current organism (add organism and analyses in Chado) --> add argument? 
-        # (althought there is no risk as chado refuses to duplicate an analysis/organism)
-        # run_workflow_for_current_organism.prepare_history()
+        # Prepare the instance+history for the current organism (add organism and analyses in Chado) TODO: add argument "setup"
+        # (although it should pose no problem as the "Chado add" refuses to duplicate an analysis/organism anyway)
+        run_workflow_for_current_organism.prepare_history()
 
         # Get the attributes of the instance and project data files
         run_workflow_for_current_organism.get_instance_attributes()
         run_workflow_for_current_organism.get_organism_and_analyses_ids()
 
-        # Import datasets into history (needs to be done in gga_load_data??)
-        # run_workflow_for_current_organism.import_datasets_into_history()
+        # Import datasets into history TODO: put this only for the chado load tripal sync workflow?
+        run_workflow_for_current_organism.import_datasets_into_history()
 
+        # Explicit workflow parameter names
+        # TODO: Create distinct methods to call different pre-set workflows using CL arguments/config options (i.e load-chado, jbrowse, functional-annotation, orthology, ...)
 
         workflow_parameters = dict()
 
-        # Explicit workflow parameter names
-        # TODO: Use an external mapping file instead?
-
-        # DEMO WORKFLOW
-        PARAM_LOAD_FASTA_IN_HISTORY = "0"
-        PARAM_LOAD_FASTA_IN_CHADO = "1"
-
-        # Workflow inputs
-        workflow_parameters[PARAM_LOAD_FASTA_IN_HISTORY] = {}
-        workflow_parameters[PARAM_LOAD_FASTA_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
-                                                          "analysis_id": run_workflow_for_current_organism.genome_analysis_id,
-                                                          "do_update": "true"}
-        # Change "do_update": "true" to "do_update": "false" to prevent appending to the fasta file in chado
-        # It is safer to never update it and completely delete and restart the galaxy+tripal services instead (no workaround at the moment)
-        run_workflow_for_current_organism.datamap = dict()
+        GENOME_FASTA_FILE = "0"
+        GFF_FILE = "1"
+        PROTEINS_FASTA_FILE = "2"
+        TRANSCRIPTS_FASTA_FILE = "3"
+
+
+        LOAD_FASTA_IN_CHADO = "4"
+        LOAD_GFF_IN_CHADO = "5"
+        SYNC_ORGANISM_INTO_TRIPAL = "6"
+        SYNC_GENOME_ANALYSIS_INTO_TRIPAL = "7"
+        SYNC_OGS_ANALYSIS_INTO_TRIPAL = "8"
+        SYNC_FEATURES_INTO_TRIPAL = "9"
+
+        workflow_parameters[GENOME_FASTA_FILE] = {}
+        workflow_parameters[GFF_FILE] = {}
+        workflow_parameters[PROTEINS_FASTA_FILE] = {}
+        workflow_parameters[TRANSCRIPTS_FASTA_FILE] = {}
+        workflow_parameters[LOAD_FASTA_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
+                                    "analysis_id": run_workflow_for_current_organism.genome_analysis_id,
+                                    "do_update": "true"}
+        # Change "do_update": "true" to "do_update": "false" in above parameters to prevent appending to the fasta file in chado
+        # WARNING: It is safer to never update it and just change the genome/ogs versions in the config
+        workflow_parameters[LOAD_GFF_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
+                                    "analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
+        workflow_parameters[SYNC_ORGANISM_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
+        workflow_parameters[SYNC_GENOME_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.genome_analysis_id}
+        workflow_parameters[SYNC_OGS_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
+        workflow_parameters[SYNC_FEATURES_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
 
         # Datamap for input datasets - dataset source (type): ldda (LibraryDatasetDatasetAssociation)
-        run_workflow_for_current_organism.datamap[PARAM_LOAD_FASTA_IN_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
-
-
-        """COMMENTED FOR THE DEMO"""
-        # # Base worflow (loading data in chado and first sync into tripal)
-        # PARAM_LOAD_FILE1_INTO_HISTORY, PARAM_LOAD_FILE2_INTO_HISTORY, PARAM_LOAD_FILE3_INTO_HISTORY, PARAM_LOAD_FILE4_INTO_HISTORY = "0", "1", "2", "3"
-        # PARAM_LOAD_FASTA_IN_CHADO = "4"
-        # PARAM_LOAD_GFF_IN_CHADO = "5"
-        # PARAM_SYNC_ORGANISM_INTO_TRIPAL = "6"
-        # PARAM_SYNC_GENOME_ANALYSIS_INTO_TRIPAL = "7"
-        # PARAM_SYNC_OGS_ANALYSIS_INTO_TRIPAL = "8"
-        # PARAM_SYNC_FEATURES_INTO_TRIPAL = "9"
-
-
-        # workflow_parameters[PARAM_LOAD_FILE1_INTO_HISTORY] = {}
-        # workflow_parameters[PARAM_LOAD_FILE2_INTO_HISTORY] = {}
-        # workflow_parameters[PARAM_LOAD_FILE3_INTO_HISTORY] = {}
-        # workflow_parameters[PARAM_LOAD_FILE4_INTO_HISTORY] = {}
-        # workflow_parameters[PARAM_LOAD_FASTA_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
-        #                             "analysis_id": run_workflow_for_current_organism.genome_analysis_id,
-        #                             "do_update": "true"}
-        # workflow_parameters[PARAM_LOAD_GFF_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
-        #                             "analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
-        # workflow_parameters[PARAM_SYNC_ORGANISM_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
-        # workflow_parameters[PARAM_SYNC_GENOME_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
-        # workflow_parameters[PARAM_SYNC_OGS_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.genome_analysis_id}
-        # workflow_parameters[PARAM_SYNC_FEATURES_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
-
-        # Loading files into history works a bit different than the others as it's not a GMOD tool but a standard Galaxy tool
-        # It requires this additional "datamap" (conveniently named "datamap" here), requiring the source type of the file and its corresponding ID (unique)
-        # The comments taken from the bioblend docs:
-        # """
-        # A mapping of workflow inputs to datasets and dataset collections.
-        #                        The datasets source can be a LibraryDatasetDatasetAssociation (``ldda``),
-        #                        LibraryDataset (``ld``), HistoryDatasetAssociation (``hda``), or
-        #                        HistoryDatasetCollectionAssociation (``hdca``).
-        #
-        #                        The map must be in the following format:
-        #                        ``{'<input_index>': {'id': <encoded dataset ID>, 'src': '[ldda, ld, hda, hdca]'}}``
-        #                        (e.g. ``{'2': {'id': '29beef4fadeed09f', 'src': 'hda'}}``)
-        #
-        #                        This map may also be indexed by the UUIDs of the workflow steps,
-        #                        as indicated by the ``uuid`` property of steps returned from the
-        #                        Galaxy API. Alternatively workflow steps may be addressed by
-        #                        the label that can be set in the workflow editor. If using
-        #                        uuid or label you need to also set the ``inputs_by`` parameter
-        #                        to ``step_uuid`` or ``name``.
-        # """
-        # run_workflow_for_current_organism.datamap = dict()
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE1_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE2_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["gff_file"]}
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE3_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["proteins_file"]}
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE4_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["transcripts_file"]}
-
-        # Run the workflow with the parameters set above
+        run_workflow_for_current_organism.datamap = dict()
+        run_workflow_for_current_organism.datamap[GENOME_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
+        run_workflow_for_current_organism.datamap[GFF_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["gff_file"]}
+        run_workflow_for_current_organism.datamap[PROTEINS_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["proteins_file"]}
+        run_workflow_for_current_organism.datamap[TRANSCRIPTS_FASTA_FILE] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["transcripts_file"]}
+
+        # Run the Chado load Tripal sync workflow with the parameters set above
         run_workflow_for_current_organism.run_workflow(workflow_path=workflow,
                                                        workflow_parameters=workflow_parameters,
-                                                       datamap=run_workflow_for_current_organism.datamap)
-
-        # WIP: metadata
-        # metadata[genus_species_strain_sex]["workflows_run"] = metadata[genus_species_strain_sex]["workflows_run"].append("fooS")
+                                                       datamap=run_workflow_for_current_organism.datamap,
+                                                       workflow_name="Chado load Tripal synchronize")
diff --git a/templates/nginx_apollo_template.conf b/templates/nginx_apollo.conf
similarity index 100%
rename from templates/nginx_apollo_template.conf
rename to templates/nginx_apollo.conf
diff --git a/templates/nginx_download_template.conf b/templates/nginx_download.conf
similarity index 100%
rename from templates/nginx_download_template.conf
rename to templates/nginx_download.conf
diff --git a/workflows/Chado_load_Tripal_synchronize.ga b/workflows/Chado_load_Tripal_synchronize.ga
new file mode 100644
index 0000000000000000000000000000000000000000..a194b03268c48c4300a3b2a0f6b759cbf0fb6c6b
--- /dev/null
+++ b/workflows/Chado_load_Tripal_synchronize.ga
@@ -0,0 +1,413 @@
+{
+    "a_galaxy_workflow": "true",
+    "annotation": "",
+    "format-version": "0.1",
+    "name": "Chado load Tripal synchronize",
+    "steps": {
+        "0": {
+            "annotation": "",
+            "content_id": null,
+            "errors": null,
+            "id": 0,
+            "input_connections": {},
+            "inputs": [],
+            "label": null,
+            "name": "Input dataset",
+            "outputs": [],
+            "position": {
+                "left": 200,
+                "top": 200
+            },
+            "tool_id": null,
+            "tool_state": "{\"optional\": false}",
+            "tool_version": null,
+            "type": "data_input",
+            "uuid": "89e7487e-004d-4db1-b5eb-1676b98aebde",
+            "workflow_outputs": []
+        },
+        "1": {
+            "annotation": "",
+            "content_id": null,
+            "errors": null,
+            "id": 1,
+            "input_connections": {},
+            "inputs": [],
+            "label": null,
+            "name": "Input dataset",
+            "outputs": [],
+            "position": {
+                "left": 200,
+                "top": 290
+            },
+            "tool_id": null,
+            "tool_state": "{\"optional\": false}",
+            "tool_version": null,
+            "type": "data_input",
+            "uuid": "1d25f54c-7575-4c8d-be55-73dd7e58613f",
+            "workflow_outputs": []
+        },
+        "2": {
+            "annotation": "",
+            "content_id": null,
+            "errors": null,
+            "id": 2,
+            "input_connections": {},
+            "inputs": [],
+            "label": null,
+            "name": "Input dataset",
+            "outputs": [],
+            "position": {
+                "left": 200,
+                "top": 380
+            },
+            "tool_id": null,
+            "tool_state": "{\"optional\": false}",
+            "tool_version": null,
+            "type": "data_input",
+            "uuid": "6c1a20fa-828a-404c-b107-76fb8ddf3954",
+            "workflow_outputs": []
+        },
+        "3": {
+            "annotation": "",
+            "content_id": null,
+            "errors": null,
+            "id": 3,
+            "input_connections": {},
+            "inputs": [],
+            "label": null,
+            "name": "Input dataset",
+            "outputs": [],
+            "position": {
+                "left": 200,
+                "top": 470
+            },
+            "tool_id": null,
+            "tool_state": "{\"optional\": false}",
+            "tool_version": null,
+            "type": "data_input",
+            "uuid": "74f22d9b-e764-45e4-b0eb-579c9b647ea0",
+            "workflow_outputs": []
+        },
+        "4": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.4+galaxy0",
+            "errors": null,
+            "id": 4,
+            "input_connections": {
+                "fasta": {
+                    "id": 3,
+                    "output_name": "output"
+                },
+                "wait_for": {
+                    "id": 3,
+                    "output_name": "output"
+                }
+            },
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Chado load fasta",
+                    "name": "fasta"
+                },
+                {
+                    "description": "runtime parameter for tool Chado load fasta",
+                    "name": "wait_for"
+                }
+            ],
+            "label": null,
+            "name": "Chado load fasta",
+            "outputs": [
+                {
+                    "name": "results",
+                    "type": "json"
+                }
+            ],
+            "position": {
+                "left": 486,
+                "top": 200
+            },
+            "post_job_actions": {},
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.4+galaxy0",
+            "tool_shed_repository": {
+                "changeset_revision": "ba4d07fbaf47",
+                "name": "chado_feature_load_fasta",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"analysis_id\": \"1\", \"do_update\": \"false\", \"ext_db\": {\"db\": \"\", \"re_db_accession\": \"\"}, \"fasta\": {\"__class__\": \"RuntimeValue\"}, \"match_on_name\": \"false\", \"organism\": \"2\", \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"re_name\": \"\", \"re_uniquename\": \"\", \"relationships\": {\"rel_type\": \"none\", \"__current_case__\": 0}, \"sequence_type\": \"contig\", \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "2.3.4+galaxy0",
+            "type": "tool",
+            "uuid": "ed72bf37-aa81-4b25-8ab4-dccb54bc68d9",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "results",
+                    "uuid": "c617e0d3-a44c-4fb1-b831-22a487a6be6a"
+                }
+            ]
+        },
+        "5": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.4+galaxy0",
+            "errors": null,
+            "id": 5,
+            "input_connections": {
+                "fasta": {
+                    "id": 2,
+                    "output_name": "output"
+                },
+                "gff": {
+                    "id": 1,
+                    "output_name": "output"
+                },
+                "wait_for": {
+                    "id": 4,
+                    "output_name": "results"
+                }
+            },
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Chado load gff",
+                    "name": "fasta"
+                },
+                {
+                    "description": "runtime parameter for tool Chado load gff",
+                    "name": "gff"
+                },
+                {
+                    "description": "runtime parameter for tool Chado load gff",
+                    "name": "wait_for"
+                }
+            ],
+            "label": null,
+            "name": "Chado load gff",
+            "outputs": [
+                {
+                    "name": "results",
+                    "type": "txt"
+                }
+            ],
+            "position": {
+                "left": 772,
+                "top": 200
+            },
+            "post_job_actions": {},
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.4+galaxy0",
+            "tool_shed_repository": {
+                "changeset_revision": "e9a6d7568817",
+                "name": "chado_feature_load_gff",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"add_only\": \"false\", \"analysis_id\": \"1\", \"fasta\": {\"__class__\": \"RuntimeValue\"}, \"gff\": {\"__class__\": \"RuntimeValue\"}, \"landmark_type\": \"\", \"no_seq_compute\": \"false\", \"organism\": \"2\", \"prot_naming\": {\"method\": \"auto\", \"__current_case__\": 0}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "2.3.4+galaxy0",
+            "type": "tool",
+            "uuid": "0b5746d7-952d-4aff-b688-4666c13cab8a",
+            "workflow_outputs": [
+                {
+                    "label": null,
+                    "output_name": "results",
+                    "uuid": "5da80c86-c510-425a-b8e1-475ab26436f3"
+                }
+            ]
+        },
+        "6": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0",
+            "errors": null,
+            "id": 6,
+            "input_connections": {
+                "wait_for": {
+                    "id": 5,
+                    "output_name": "results"
+                }
+            },
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Synchronize an organism",
+                    "name": "organism_id"
+                }
+            ],
+            "label": null,
+            "name": "Synchronize an organism",
+            "outputs": [
+                {
+                    "name": "results",
+                    "type": "txt"
+                }
+            ],
+            "position": {
+                "left": 1058,
+                "top": 200
+            },
+            "post_job_actions": {
+                "HideDatasetActionresults": {
+                    "action_arguments": {},
+                    "action_type": "HideDatasetAction",
+                    "output_name": "results"
+                }
+            },
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0",
+            "tool_shed_repository": {
+                "changeset_revision": "afd5d92745fb",
+                "name": "tripal_organism_sync",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "3.2.1.0",
+            "type": "tool",
+            "uuid": "05314408-41fa-4a2f-8aae-3988e2d899f6",
+            "workflow_outputs": []
+        },
+        "7": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+            "errors": null,
+            "id": 7,
+            "input_connections": {
+                "wait_for": {
+                    "id": 6,
+                    "output_name": "results"
+                }
+            },
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Synchronize an analysis",
+                    "name": "analysis_id"
+                }
+            ],
+            "label": null,
+            "name": "Synchronize an analysis",
+            "outputs": [
+                {
+                    "name": "results",
+                    "type": "json"
+                }
+            ],
+            "position": {
+                "left": 1344,
+                "top": 200
+            },
+            "post_job_actions": {
+                "HideDatasetActionresults": {
+                    "action_arguments": {},
+                    "action_type": "HideDatasetAction",
+                    "output_name": "results"
+                }
+            },
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+            "tool_shed_repository": {
+                "changeset_revision": "f487ff676088",
+                "name": "tripal_analysis_sync",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "3.2.1.0",
+            "type": "tool",
+            "uuid": "44c7cc7c-0848-47a7-872c-351f057803c1",
+            "workflow_outputs": []
+        },
+        "8": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+            "errors": null,
+            "id": 8,
+            "input_connections": {
+                "wait_for": {
+                    "id": 7,
+                    "output_name": "results"
+                }
+            },
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Synchronize an analysis",
+                    "name": "analysis_id"
+                }
+            ],
+            "label": null,
+            "name": "Synchronize an analysis",
+            "outputs": [
+                {
+                    "name": "results",
+                    "type": "json"
+                }
+            ],
+            "position": {
+                "left": 1630,
+                "top": 200
+            },
+            "post_job_actions": {
+                "HideDatasetActionresults": {
+                    "action_arguments": {},
+                    "action_type": "HideDatasetAction",
+                    "output_name": "results"
+                }
+            },
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+            "tool_shed_repository": {
+                "changeset_revision": "f487ff676088",
+                "name": "tripal_analysis_sync",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "3.2.1.0",
+            "type": "tool",
+            "uuid": "8ce8c990-39ce-4725-892b-4216a75f487d",
+            "workflow_outputs": []
+        },
+        "9": {
+            "annotation": "",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0",
+            "errors": null,
+            "id": 9,
+            "input_connections": {
+                "wait_for": {
+                    "id": 8,
+                    "output_name": "results"
+                }
+            },
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Synchronize features",
+                    "name": "organism_id"
+                }
+            ],
+            "label": null,
+            "name": "Synchronize features",
+            "outputs": [
+                {
+                    "name": "results",
+                    "type": "txt"
+                }
+            ],
+            "position": {
+                "left": 1916,
+                "top": 200
+            },
+            "post_job_actions": {
+                "HideDatasetActionresults": {
+                    "action_arguments": {},
+                    "action_type": "HideDatasetAction",
+                    "output_name": "results"
+                }
+            },
+            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0",
+            "tool_shed_repository": {
+                "changeset_revision": "64e36c3f0dd6",
+                "name": "tripal_feature_sync",
+                "owner": "gga",
+                "tool_shed": "toolshed.g2.bx.psu.edu"
+            },
+            "tool_state": "{\"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"repeat_ids\": [], \"repeat_types\": [{\"__index__\": 0, \"types\": \"mRNA\"}, {\"__index__\": 1, \"types\": \"polypeptide\"}], \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_version": "3.2.1.0",
+            "type": "tool",
+            "uuid": "04600903-dd16-4db1-b562-552aeb003e6c",
+            "workflow_outputs": []
+        }
+    },
+    "tags": [],
+    "uuid": "4c66363e-ff14-4c79-8edf-9ab05cafa33d",
+    "version": 2
+}
\ No newline at end of file
diff --git a/workflows/Galaxy-Workflow-demo.ga b/workflows/Demo.ga
similarity index 100%
rename from workflows/Galaxy-Workflow-demo.ga
rename to workflows/Demo.ga
diff --git a/workflows/Galaxy-Workflow-preset_workflow.ga b/workflows/Galaxy-Workflow-preset_workflow.ga
deleted file mode 100644
index 5291ff05d3d4c08323ef767cc80bbed4a3f4942b..0000000000000000000000000000000000000000
--- a/workflows/Galaxy-Workflow-preset_workflow.ga
+++ /dev/null
@@ -1 +0,0 @@
-{"uuid": "7ebc1035-728c-4bca-a1c3-abd1c01bc064", "tags": [], "format-version": "0.1", "name": "preset_workflow", "version": 1, "steps": {"0": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "6956ef7f-7fec-402b-a8ea-f054a819f351", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 0, "uuid": "74f22d9b-e764-45e4-b0eb-579c9b647ea0", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 343.433349609375, "left": 201.33331298828125}, "annotation": "", "content_id": null, "type": "data_input"}, "1": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "efc230d5-5570-4446-b56b-c0213bef9ef0", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 1, "uuid": "6c1a20fa-828a-404c-b107-76fb8ddf3954", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 340.41668701171875, "left": 334.816650390625}, "annotation": "", "content_id": null, "type": "data_input"}, "2": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "90864336-6fc2-49fa-8f16-ccf11c64dc9a", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 2, "uuid": "1d25f54c-7575-4c8d-be55-73dd7e58613f", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 340.41668701171875, "left": 467.6333312988281}, "annotation": "", "content_id": null, "type": "data_input"}, "3": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "9e3d04a8-20f6-4f20-bfac-5a8b7df54557", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 3, "uuid": "89e7487e-004d-4db1-b5eb-1676b98aebde", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 337.6166687011719, "left": 600.4166717529297}, "annotation": "", "content_id": 
null, "type": "data_input"}, "4": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.2", "tool_version": "2.3.2", "outputs": [{"type": "json", "name": "results"}], "workflow_outputs": [], "input_connections": {"fasta": {"output_name": "output", "id": 0}, "wait_for": {"output_name": "output", "id": 0}}, "tool_state": "{\"do_update\": \"\\\"false\\\"\", \"relationships\": \"{\\\"__current_case__\\\": 0, \\\"rel_type\\\": \\\"none\\\"}\", \"ext_db\": \"{\\\"db\\\": \\\"\\\", \\\"re_db_accession\\\": \\\"\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"re_uniquename\": \"\\\"\\\"\", \"match_on_name\": \"\\\"false\\\"\", \"__page__\": null, \"__rerun_remap_job_id__\": null, \"psql_target\": \"{\\\"__current_case__\\\": 0, \\\"method\\\": \\\"remote\\\"}\", \"re_name\": \"\\\"\\\"\", \"fasta\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"organism\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"sequence_type\": \"\\\"contig\\\"\"}", "id": 4, "tool_shed_repository": {"owner": "gga", "changeset_revision": "1421dbc33a92", "name": "chado_feature_load_fasta", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "f3655d26-08b8-408e-bfef-6e8a4aaab355", "errors": null, "name": "Chado load fasta", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Chado load fasta"}, {"name": "organism", "description": "runtime parameter for tool Chado load fasta"}], "position": {"top": 303.58331298828125, "left": 745.2333374023438}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.2", "type": "tool"}, "5": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.2", "tool_version": "2.3.2", "outputs": [{"type": "txt", "name": "results"}], "workflow_outputs": [], "input_connections": {"fasta": 
{"output_name": "output", "id": 1}, "wait_for": {"output_name": "results", "id": 4}, "gff": {"output_name": "output", "id": 2}}, "tool_state": "{\"prot_naming\": \"{\\\"__current_case__\\\": 1, \\\"method\\\": \\\"regex\\\", \\\"re_protein\\\": \\\"protein\\\\\\\\1\\\", \\\"re_protein_capture\\\": \\\"^mRNA(\\\\\\\\..+)$\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"__page__\": null, \"gff\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"__rerun_remap_job_id__\": null, \"no_seq_compute\": \"\\\"false\\\"\", \"psql_target\": \"{\\\"__current_case__\\\": 0, \\\"method\\\": \\\"remote\\\"}\", \"add_only\": \"\\\"false\\\"\", \"fasta\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"organism\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"landmark_type\": \"\\\"contig\\\"\"}", "id": 5, "tool_shed_repository": {"owner": "gga", "changeset_revision": "fb0651ee6d33", "name": "chado_feature_load_gff", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "236254d3-121e-4910-bcba-146d208a59a5", "errors": null, "name": "Chado load gff", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Chado load gff"}, {"name": "organism", "description": "runtime parameter for tool Chado load gff"}], "position": {"top": 285.20001220703125, "left": 957.2333374023438}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.2", "type": "tool"}, "6": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "txt", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 5}}, "tool_state": "{\"__page__\": null, \"__rerun_remap_job_id__\": null, \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"organism_id\": \"{\\\"__class__\\\": 
\\\"RuntimeValue\\\"}\"}", "id": 6, "tool_shed_repository": {"owner": "gga", "changeset_revision": "afd5d92745fb", "name": "tripal_organism_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "05314408-41fa-4a2f-8aae-3988e2d899f6", "errors": null, "name": "Synchronize an organism", "post_job_actions": {}, "label": null, "inputs": [{"name": "organism_id", "description": "runtime parameter for tool Synchronize an organism"}], "position": {"top": 322, "left": 1168}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0", "type": "tool"}, "7": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "json", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 6}}, "tool_state": "{\"__page__\": null, \"__rerun_remap_job_id__\": null, \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", "id": 7, "tool_shed_repository": {"owner": "gga", "changeset_revision": "f487ff676088", "name": "tripal_analysis_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "44c7cc7c-0848-47a7-872c-351f057803c1", "errors": null, "name": "Synchronize an analysis", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Synchronize an analysis"}], "position": {"top": 323.58331298828125, "left": 1375.63330078125}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "type": "tool"}, "8": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "json", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 7}}, "tool_state": "{\"__page__\": null, \"__rerun_remap_job_id__\": 
null, \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", "id": 8, "tool_shed_repository": {"owner": "gga", "changeset_revision": "f487ff676088", "name": "tripal_analysis_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "8ce8c990-39ce-4725-892b-4216a75f487d", "errors": null, "name": "Synchronize an analysis", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Synchronize an analysis"}], "position": {"top": 321.20001220703125, "left": 1583.63330078125}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "type": "tool"}, "9": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "txt", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 8}}, "tool_state": "{\"__page__\": null, \"repeat_types\": \"[{\\\"__index__\\\": 0, \\\"types\\\": \\\"mRNA\\\"}, {\\\"__index__\\\": 1, \\\"types\\\": \\\"popylpeptide\\\"}]\", \"__rerun_remap_job_id__\": null, \"organism_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"repeat_ids\": \"[]\", \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\"}", "id": 9, "tool_shed_repository": {"owner": "gga", "changeset_revision": "64e36c3f0dd6", "name": "tripal_feature_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "04600903-dd16-4db1-b562-552aeb003e6c", "errors": null, "name": "Synchronize features", "post_job_actions": {}, "label": null, "inputs": [{"name": "organism_id", "description": "runtime parameter for tool Synchronize features"}], "position": {"top": 321.20001220703125, "left": 1794.0333251953125}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0", "type": "tool"}}, "annotation": "", "a_galaxy_workflow": "true"}
\ No newline at end of file
diff --git a/workflows/Galaxy-Workflow-jbrowse.ga b/workflows/Jbrowse.ga
similarity index 100%
rename from workflows/Galaxy-Workflow-jbrowse.ga
rename to workflows/Jbrowse.ga