diff --git a/constants.py b/constants.py
index 9fa387629fe35ec4352e2eedf8b27e912d18a91c..70724c3eb93de567dabf2855a0872c50bbb0cd83 100644
--- a/constants.py
+++ b/constants.py
@@ -7,6 +7,7 @@ ORG_PARAM_DESC_SEX = "sex"
 ORG_PARAM_DESC_STRAIN = "strain"
 ORG_PARAM_DESC_COMMON_NAME = "common_name"
 ORG_PARAM_DESC_ORIGIN = "origin"
+ORG_PARAM_DESC_PICTURE_PATH = "picture_path"
 ORG_PARAM_DESC_MAIN_SPECIES = "main_species"
 ORG_PARAM_DATA = "data"
 ORG_PARAM_DATA_GENOME_PATH = "genome_path"
@@ -23,6 +24,7 @@ ORG_PARAM_DATA_PERFORMED_BY = "performed_by"
 ORG_PARAM_SERVICES = "services"
 ORG_PARAM_SERVICES_BLAST = "blast"
 
+
 # Constants used in the config yaml file
 CONF_ALL_HOSTNAME = "hostname"
 CONF_ALL_HTTP_PORT = "http_port"
diff --git a/gga_init.py b/gga_init.py
index 1ae0ce4bb7f9916336fe3646ac8512bf3f84dcb8..e630db1238f396cdd0e294c8d38d02058bb77561 100755
--- a/gga_init.py
+++ b/gga_init.py
@@ -76,6 +76,20 @@ class DeploySpeciesStack(speciesData.SpeciesData):
             logging.debug("Using default banner for Tripal pages")
             self.config.pop(constants.CONF_TRIPAL_BANNER_PATH, None)
 
+        # Copy the organism picture for Tripal if one was specified in the input species file
+        if self.picture_path is not None:
+            if os.path.isfile(self.picture_path):
+                picture_path_basename = os.path.basename(self.picture_path)
+                picture_path_filename, picture_path_extension = os.path.splitext(picture_path_basename)
+                if picture_path_extension == ".png" or picture_path_extension == ".jpg":
+                    picture_dest_name = "species%s" % picture_path_extension
+                    picture_dest_path = os.path.join(self.species_dir, picture_dest_name)
+                    shutil.copy(self.picture_path, picture_dest_path)
+                else:
+                    logging.error("Specified organism picture has an invalid extension (must be '.png' or '.jpg'): {0}".format(self.picture_path))
+            else:
+                logging.error("Specified organism picture not found at {0} for {1}".format(self.picture_path, self.genus_uppercase + " " + self.species))
+
         # Create nginx dirs and write/re-write nginx conf
         make_dirs(dir_paths_li=["./nginx", "./nginx/conf"])
         try:
@@ -111,11 +125,12 @@ class DeploySpeciesStack(speciesData.SpeciesData):
         # We need a dict holding all key (variables) - values that needs to be replaced in the template as our rendering dict
         # To do so we need both input file vars and config vars
         # Create input file vars dict
+
         input_vars = {"genus": self.genus_lowercase, "Genus": self.genus_uppercase, "species": self.species,
-                      "genus_species": self.genus_species, "genus_species_strain_sex": self.species_folder_name,
-                      "genus_species_sex": "{0}_{1}_{2}".format(self.genus_lowercase, self.species.lower(), self.sex),
-                      "strain": self.strain, "sex": self.sex, "Genus_species": self.genus_species[0].upper() + self.genus_species[1:],
-                      "blast": self.blast}
+                      "genus_species": self.genus_species, "genus_species_strain_sex": self.species_folder_name,
+                      "genus_species_sex": "{0}_{1}_{2}".format(self.genus_lowercase, self.species.lower(), self.sex),
+                      "strain": self.strain, "sex": self.sex, "Genus_species": self.genus_species[0].upper() + self.genus_species[1:],
+                      "blast": self.blast, "picture_path": self.picture_path}
         if (len(self.config.keys()) == 0):
             logging.error("Empty config dictionary")
         # Merge the two dicts
diff --git a/run_workflow_phaeoexplorer.py b/run_workflow_phaeoexplorer.py
index bff96313c37cd1f177594630f386f03fc02cce54..661ea13ca3b60fd6a14b9082b355b16f6141b025 100755
--- a/run_workflow_phaeoexplorer.py
+++ b/run_workflow_phaeoexplorer.py
@@ -71,40 +71,6 @@ class RunWorkflow(speciesData.SpeciesData):
         logging.debug("Library ID: %s" % self.library_id)
         instance_source_data_folders = self.instance.libraries.get_folders(library_id=library_id)
 
-        # # Access folders via their absolute path
-        # genome_folder = self.instance.libraries.get_folders(library_id=library_id, name="/genome/" + str(self.species_folder_name) + "/v" + str(self.genome_version))
-        # annotation_folder = self.instance.libraries.get_folders(library_id=library_id, name="/annotation/" + str(self.species_folder_name) + "/OGS" + str(self.ogs_version))
-        
-        # # Get their IDs
-        # genome_folder_id = genome_folder[0]["id"]
-        # annotation_folder_id = annotation_folder[0]["id"]
-
-        # # Get the content of the folders
-        # genome_folder_content = self.instance.folders.show_folder(folder_id=genome_folder_id, contents=True)
-        # annotation_folder_content = self.instance.folders.show_folder(folder_id=annotation_folder_id, contents=True)
-
-        # # Find genome folder datasets
-        # genome_fasta_ldda_id = genome_folder_content["folder_contents"][0]["ldda_id"]
-
-        # annotation_gff_ldda_id, annotation_proteins_ldda_id, annotation_transcripts_ldda_id = None, None, None
-
-        # # Several dicts in the annotation folder content (one dict = one file)
-        # for k, v in annotation_folder_content.items():
-        #     if k == "folder_contents":
-        #         for d in v:
-        #             if "proteins" in d["name"]:
-        #                 annotation_proteins_ldda_id = d["ldda_id"]
-        #             if "transcripts" in d["name"]:
-        #                 annotation_transcripts_ldda_id = d["ldda_id"]
-        #             if ".gff" in d["name"]:
-        #                 annotation_gff_ldda_id = d["ldda_id"]
-
-        # # Minimum datasets to populate tripal views --> will not work if these files are not assigned in the input file
-        # self.datasets["genome_file"] = genome_fasta_ldda_id
-        # self.datasets["gff_file"] = annotation_gff_ldda_id
-        # self.datasets["proteins_file"] = annotation_proteins_ldda_id
-        # self.datasets["transcripts_file"] = annotation_transcripts_ldda_id
-
         return {"history_id": self.history_id, "library_id": library_id}
 
 
@@ -222,7 +188,42 @@ class RunWorkflow(speciesData.SpeciesData):
                                                                install_repository_dependencies=False,
                                                                install_resolver_dependencies=True)
 
-        logging.info("Individual tools versions and changesets validated")
+
+        sync_analysis_tool = self.instance.tools.show_tool("toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0")
+        sync_organism_tool = self.instance.tools.show_tool("toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0")
+
+        if sync_analysis_tool["version"] != "3.2.1.0":
+            toolshed_dict = sync_analysis_tool["tool_shed_repository"]
+            logging.warning("Changeset for %s is not installed" % toolshed_dict["name"])
+            changeset_revision = "f487ff676088"
+            name = toolshed_dict["name"]
+            owner = toolshed_dict["owner"]
+            toolshed = "https://" + toolshed_dict["tool_shed"]
+            logging.warning("Installing changeset revision {0} for {1}".format(changeset_revision, name))
+
+            self.instance.toolshed.install_repository_revision(tool_shed_url=toolshed, name=name, owner=owner, 
+                                                               changeset_revision=changeset_revision,
+                                                               install_tool_dependencies=True,
+                                                               install_repository_dependencies=False,
+                                                               install_resolver_dependencies=True)
+
+        if sync_organism_tool["version"] != "3.2.1.0":
+            toolshed_dict = sync_organism_tool["tool_shed_repository"]
+            logging.warning("Changeset for %s is not installed" % toolshed_dict["name"])
+            changeset_revision = "afd5d92745fb"
+            name = toolshed_dict["name"]
+            owner = toolshed_dict["owner"]
+            toolshed = "https://" + toolshed_dict["tool_shed"]
+            logging.warning("Installing changeset revision {0} for {1}".format(changeset_revision, name))
+
+            self.instance.toolshed.install_repository_revision(tool_shed_url=toolshed, name=name, owner=owner, 
+                                                               changeset_revision=changeset_revision,
+                                                               install_tool_dependencies=True,
+                                                               install_repository_dependencies=False,
+                                                               install_resolver_dependencies=True)
+
+
+        logging.info("Success: individual tool versions and changesets validated")
 
 
 
@@ -300,7 +301,15 @@ class RunWorkflow(speciesData.SpeciesData):
                 org_output = json.loads(org_json_output)
                 org_id = str(org_output["organism_id"])  # id needs to be a str to be recognized by chado tools
 
+        # Synchronize newly added organism in Tripal
+        logging.info("Synchronizing organism %s in Tripal" % self.full_name)
+        time.sleep(60)
+        org_sync = self.instance.tools.run_tool(tool_id="toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0",
+                                                history_id=self.history_id,
+                                                tool_inputs={"organism_id": org_id})
 
+
+        # Analyses (genome + OGS)
         get_analyses = self.instance.tools.run_tool(
             tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_analysis_get_analyses/analysis_get_analyses/%s" % tool_version,
             history_id=self.history_id,
@@ -336,7 +345,14 @@ class RunWorkflow(speciesData.SpeciesData):
             analysis_job_out_id = analysis_outputs[0]["id"]
             analysis_json_output = self.instance.datasets.download_dataset(dataset_id=analysis_job_out_id)
             analysis_output = json.loads(analysis_json_output)
-            ogs_analysis_id = str(analysis_output["analysis_id"]) 
+            ogs_analysis_id = str(analysis_output["analysis_id"])
+
+        # Synchronize OGS analysis in Tripal
+        logging.info("Synchronizing OGS%s analysis in Tripal" % self.ogs_version)
+        time.sleep(60)
+        ogs_analysis_sync = self.instance.tools.run_tool(tool_id="toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+                                                         history_id=self.history_id,
+                                                         tool_inputs={"analysis_id": ogs_analysis_id})
                     
         if genome_analysis_id is None:
             add_genome_analysis_job = self.instance.tools.run_tool(
@@ -352,6 +368,13 @@ class RunWorkflow(speciesData.SpeciesData):
             analysis_json_output = self.instance.datasets.download_dataset(dataset_id=analysis_job_out_id)
             analysis_output = json.loads(analysis_json_output)
             genome_analysis_id = str(analysis_output["analysis_id"])
+
+        # Synchronize genome analysis in Tripal
+        logging.info("Synchronizing genome v%s analysis in Tripal" % self.genome_version)
+        time.sleep(60)
+        genome_analysis_sync = self.instance.tools.run_tool(tool_id="toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+                                                            history_id=self.history_id,
+                                                            tool_inputs={"analysis_id": genome_analysis_id})
 
         # print({"org_id": org_id, "genome_analysis_id": genome_analysis_id, "ogs_analysis_id": ogs_analysis_id})
         return({"org_id": org_id, "genome_analysis_id": genome_analysis_id, "ogs_analysis_id": ogs_analysis_id})
@@ -420,6 +443,13 @@ class RunWorkflow(speciesData.SpeciesData):
                 org_output = json.loads(org_json_output)
                 org_id = str(org_output["organism_id"])  # id needs to be a str to be recognized by chado tools
 
+            # Synchronize newly added organism in Tripal
+            logging.info("Synchronizing organism %s in Tripal" % self.full_name)
+            time.sleep(60)
+            org_sync = self.instance.tools.run_tool(tool_id="toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0",
+                                                    history_id=self.history_id,
+                                                    tool_inputs={"organism_id": org_id})
+
 
         get_analyses = self.instance.tools.run_tool(
             tool_id="toolshed.g2.bx.psu.edu/repos/gga/chado_analysis_get_analyses/analysis_get_analyses/%s" % tool_version,
@@ -455,6 +485,13 @@ class RunWorkflow(speciesData.SpeciesData):
             analysis_output = json.loads(analysis_json_output)
             blastp_analysis_id = str(analysis_output["analysis_id"])
 
+        # Synchronize blastp analysis
+        logging.info("Synchronizing Diamond blastp OGS%s analysis in Tripal" % self.ogs_version)
+        time.sleep(60)
+        blastp_analysis_sync = self.instance.tools.run_tool(tool_id="toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+                                                            history_id=self.history_id,
+                                                            tool_inputs={"analysis_id": blastp_analysis_id})
+
         # print({"org_id": org_id, "genome_analysis_id": genome_analysis_id, "ogs_analysis_id": ogs_analysis_id})
         return({"org_id": org_id, "blastp_analysis_id": blastp_analysis_id})
 
@@ -494,60 +531,6 @@ class RunWorkflow(speciesData.SpeciesData):
 
         return self.interpro_analysis_id
 
-    def run_workflow(self, workflow_path, workflow_parameters, workflow_name, datamap):
-        """
-        Run a workflow in galaxy
-        Requires the .ga file to be loaded as a dictionary (optionally could be uploaded as a raw file)
-
-        :param workflow_name:
-        :param workflow_parameters:
-        :param datamap:
-        :return:
-        """
-
-        logging.info("Importing workflow %s" % str(workflow_path))
-
-        # Load the workflow file (.ga) in a buffer
-        with open(workflow_path, 'r') as ga_in_file:
-
-            # Then store the decoded json dictionary
-            workflow_dict = json.load(ga_in_file)
-
-            # In case of the Jbrowse workflow, we unfortunately have to manually edit the parameters instead of setting them
-            # as runtime values, using runtime parameters makes the tool throw an internal critical error ("replace not found" error)
-            # Scratchgmod test: need "http" (or "https"), the hostname (+ port)
-            if "jbrowse_menu_url" not in self.config.keys():
-                jbrowse_menu_url = "https://{hostname}/sp/{genus_sp}/feature/{Genus}/{species}/mRNA/{id}".format(hostname=self.config["hostname"], genus_sp=self.genus_species, Genus=self.genus_uppercase, species=self.species, id="{id}")
-            else:
-                jbrowse_menu_url = self.config["jbrowse_menu_url"]
-            if workflow_name == "Jbrowse":
-                workflow_dict["steps"]["2"]["tool_state"] = workflow_dict["steps"]["2"]["tool_state"].replace("__MENU_URL__", jbrowse_menu_url)
-                # The UNIQUE_ID is specific to a combination genus_species_strain_sex so every combination should have its unique workflow
-                # in galaxy --> define a naming method for these workflows
-                workflow_dict["steps"]["3"]["tool_state"] = workflow_dict["steps"]["3"]["tool_state"].replace("__FULL_NAME__", self.full_name).replace("__UNIQUE_ID__", self.species_folder_name)
-
-            # Import the workflow in galaxy as a dict
-            self.instance.workflows.import_workflow_dict(workflow_dict=workflow_dict)
-
-            # Get its attributes
-            workflow_attributes = self.instance.workflows.get_workflows(name=workflow_name)
-            # Then get its ID (required to invoke the workflow)
-            workflow_id = workflow_attributes[0]["id"]  # Index 0 is the most recently imported workflow (the one we want)
-            show_workflow = self.instance.workflows.show_workflow(workflow_id=workflow_id)
-            # Check if the workflow is found
-            try:
-                logging.debug("Workflow ID: %s" % workflow_id)
-            except bioblend.ConnectionError:
-                logging.warning("Error retrieving workflow attributes for workflow %s" % workflow_name)
-
-            # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-            self.instance.workflows.invoke_workflow(workflow_id=workflow_id,
-                                                    history_id=self.history_id,
-                                                    params=workflow_parameters,
-                                                    inputs=datamap,
-                                                    allow_tool_state_corrections=True)
-
-            logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, self.instance_url))
 
     def get_invocation_report(self, workflow_name):
         """
@@ -570,6 +553,7 @@ class RunWorkflow(speciesData.SpeciesData):
 
         return invocation_report
 
+
     def import_datasets_into_history(self):
         """
         Find datasets in a library, get their ID and import them into the current history if they are not already
@@ -822,9 +806,6 @@ def create_sp_workflow_dict(sp_dict, main_dir, config, workflow_type):
                                     species=run_workflow_for_current_organism.species,
                                     script_dir=run_workflow_for_current_organism.script_dir):
 
-        # Starting
-        logging.info("run_workflow.py called for %s" % run_workflow_for_current_organism.full_name)
-
         # Setting some of the instance attributes
         run_workflow_for_current_organism.main_dir = main_dir
         run_workflow_for_current_organism.species_dir = os.path.join(run_workflow_for_current_organism.main_dir,
@@ -1063,7 +1044,6 @@ if __name__ == "__main__":
             else:
                 all_sp_workflow_dict[current_sp_key][current_sp_strain_sex_key] = current_sp_strain_sex_value
 
-
         for k, v in all_sp_workflow_dict.items():
             if len(list(v.keys())) == 1:
                 logging.info("Input organism %s: 1 species detected in input dictionary" % k)
@@ -1086,7 +1066,10 @@ if __name__ == "__main__":
                 install_changesets_revisions_from_workflow(workflow_path=workflow_path, instance=instance)
 
                 organism_key_name = list(v.keys())
-                org_dict = v[organisms_key_names[0]]
+                org_dict = v[organisms_key_name[0]]
+
+                # print("\n")
+                # print(org_dict)
 
                 history_id = org_dict["history_id"]
 
@@ -1207,7 +1190,7 @@ if __name__ == "__main__":
                         logging.warning("Error finding workflow %s" % workflow_name)
 
                     # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-                    instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+                    # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
                     logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
 
@@ -1443,7 +1426,7 @@ if __name__ == "__main__":
                         logging.warning("Error finding workflow %s" % workflow_name)
 
                     # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-                    instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+                    # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
                     logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
 
@@ -1488,7 +1471,7 @@ if __name__ == "__main__":
             install_changesets_revisions_from_workflow(workflow_path=workflow_path, instance=instance)
 
             organism_key_name = list(v.keys())
-            org_dict = v[organisms_key_names[0]]
+            org_dict = v[organisms_key_name[0]]
 
             history_id = org_dict["history_id"]
 
@@ -1526,15 +1509,13 @@ if __name__ == "__main__":
 
             BLASTP_FILE = "0"
             LOAD_BLASTP_FILE = "1"
-            SYNC_BLASTP_ANALYSIS = "2"
-            POPULATE_MAT_VIEWS = "3"
-            INDEX_TRIPAL_DATA = "4"
+            POPULATE_MAT_VIEWS = "2"
+            INDEX_TRIPAL_DATA = "3"
 
             # Set the workflow parameters (individual tools runtime parameters in the workflow)
             workflow_parameters = {}
             workflow_parameters[BLASTP_FILE] = {}
             workflow_parameters[LOAD_BLASTP_FILE] = {"analysis_id": org_blastp_analysis_id, "organism_id": org_org_id}
-            workflow_parameters[SYNC_BLASTP_ANALYSIS] = {"analysis_id":  org_blastp_analysis_id}
             workflow_parameters[POPULATE_MAT_VIEWS] = {}
             workflow_parameters[INDEX_TRIPAL_DATA] = {}
 
@@ -1560,7 +1541,7 @@ if __name__ == "__main__":
                     logging.warning("Error finding workflow %s" % workflow_name)
 
                 # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-                instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+                # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
                 logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
 
@@ -1665,10 +1646,8 @@ if __name__ == "__main__":
             BLASTP_FILE_ORG2 = "1"
             LOAD_BLASTP_FILE_ORG1 = "2"
             LOAD_BLASTP_FILE_ORG2 = "3"
-            SYNC_BLASTP_ANALYSIS_ORG1 = "4"
-            SYNC_BLASTP_ANALYSIS_ORG2 = "5"
-            POPULATE_MAT_VIEWS = "6"
-            INDEX_TRIPAL_DATA = "7"
+            POPULATE_MAT_VIEWS = "4"
+            INDEX_TRIPAL_DATA = "5"
 
             # Set the workflow parameters (individual tools runtime parameters in the workflow)
             workflow_parameters = {}
@@ -1680,12 +1659,10 @@ if __name__ == "__main__":
             # Organism 1
             workflow_parameters[LOAD_BLASTP_FILE_ORG1] = {"organism_id": org1_org_id,
                                                           "analysis_id": org1_blastp_analysis_id}
-            workflow_parameters[SYNC_BLASTP_ANALYSIS_ORG1] = {"analysis_id":  org1_blastp_analysis_id}
 
             # Organism 2
             workflow_parameters[LOAD_BLASTP_FILE_ORG2] = {"organism_id": org2_org_id,
                                                           "analysis_id": org2_blastp_analysis_id}
-            workflow_parameters[SYNC_BLASTP_ANALYSIS_ORG2] = {"analysis_id":  org2_blastp_analysis_id}
 
             workflow_parameters[POPULATE_MAT_VIEWS] = {}
             workflow_parameters[INDEX_TRIPAL_DATA] = {}
@@ -1718,7 +1695,7 @@ if __name__ == "__main__":
                     logging.warning("Error finding workflow %s" % workflow_name)
 
                 # Finally, invoke the workflow alogn with its datamap, parameters and the history in which to invoke it
-                instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
+                # instance.workflows.invoke_workflow(workflow_id=workflow_id, history_id=history_id, params=workflow_parameters, inputs=datamap, allow_tool_state_corrections=True)
 
                 logging.info("Successfully imported and invoked workflow {0}, check the galaxy instance ({1}) for the jobs state".format(workflow_name, instance_url))
 
diff --git a/speciesData.py b/speciesData.py
index 4d4b58aeb3c1f107ebc779f3a43e8bd7b1671042..49ffed5416ede69e0f49fd295608ebd20cc735e7 100755
--- a/speciesData.py
+++ b/speciesData.py
@@ -88,6 +88,11 @@ class SpeciesData:
         else:
             self.blast = "0"
 
+        if constants.ORG_PARAM_DESC_PICTURE_PATH in parameters_dictionary_description.keys():
+            self.picture_path = parameters_dictionary_description[constants.ORG_PARAM_DESC_PICTURE_PATH]
+        else:
+            self.picture_path = None
+
         self.genus_lowercase = self.genus[0].lower() + self.genus[1:]
         self.genus_uppercase = self.genus[0].upper() + self.genus[1:]
         self.chado_species_name = "{0} {1}".format(self.species, self.sex)
diff --git a/templates/gspecies_compose.yml.j2 b/templates/gspecies_compose.yml.j2
index b1f4c6f9fe5cf6ab245be4edb8f42aba7abad9f1..9b8c3f60f5f6532312a707f2d6d7c090fa6e0b02 100644
--- a/templates/gspecies_compose.yml.j2
+++ b/templates/gspecies_compose.yml.j2
@@ -42,6 +42,14 @@ services:
             - ./banner.png:/var/www/html/banner.png:ro
           {% endif %}
             #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
+          {% if picture_path is defined and picture_path is not none %}
+            {% if 'png' in picture_path %}
+            - ./species.png:/var/www/html/species.png:ro
+            {% endif %}
+            {% if 'jpg' in picture_path %}
+            - ./species.jpg:/var/www/html/species.jpg:ro
+            {% endif %}
+          {% endif %}
         environment:
             DB_HOST: tripal-db.{{ genus_species }}
             BASE_URL_PATH: /sp/{{ genus_species }}
diff --git a/templates/organisms.yml.j2 b/templates/organisms.yml.j2
index 34f3c01293984e585f7fdf7cc5882844e5f28cd3..9a05ccfc4c8abfee341532cf0e2464714a8882cc 100644
--- a/templates/organisms.yml.j2
+++ b/templates/organisms.yml.j2
@@ -6,6 +6,7 @@
     {{ org_param_desc_strain }}: {{ org_param_desc_strain_value }}
     {{ org_param_desc_common_name }}: {{ org_param_desc_common_name_value }}
     {{ org_param_desc_origin }}: {{ org_param_desc_origin_value }}
+    {{ org_param_desc_picture_path }}: {{ org_param_desc_picture_path_value }}
     {% if org_param_desc_main_species_value is defined and org_param_desc_main_species_value is sameas true %}
     {{ org_param_desc_main_species }}: yes
     {% endif %}
diff --git a/utilities.py b/utilities.py
index 3d734f24a9c9bdd82f60f6127a051602493b861b..bae4e0241df54ecf4340fba978044f608b4ac992 100755
--- a/utilities.py
+++ b/utilities.py
@@ -206,6 +206,7 @@ def create_org_param_dict_from_constants():
     org_param_dict["org_param_desc_strain"] = constants.ORG_PARAM_DESC_STRAIN
     org_param_dict["org_param_desc_common_name"] = constants.ORG_PARAM_DESC_COMMON_NAME
     org_param_dict["org_param_desc_origin"] = constants.ORG_PARAM_DESC_ORIGIN
+    org_param_dict["org_param_desc_picture_path"] = constants.ORG_PARAM_DESC_PICTURE_PATH
     org_param_dict["org_param_desc_main_species"] = constants.ORG_PARAM_DESC_MAIN_SPECIES
     org_param_dict["org_param_data"] = constants.ORG_PARAM_DATA
     org_param_dict["org_param_data_genome_path"] = constants.ORG_PARAM_DATA_GENOME_PATH
diff --git a/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_1org_v1.ga b/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_1org_v1.ga
index db4e9537e0094dd39932dc823f5fe789911d3fb6..d17c30286ad67f9383761fceac0798a4cf2e629c 100644
--- a/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_1org_v1.ga
+++ b/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_1org_v1.ga
@@ -20,14 +20,14 @@
             "name": "Input dataset",
             "outputs": [],
             "position": {
-                "bottom": 416.1999969482422,
-                "height": 82.19999694824219,
-                "left": 410,
-                "right": 610,
-                "top": 334,
+                "bottom": 309.1999969482422,
+                "height": 61.19999694824219,
+                "left": 130.5,
+                "right": 330.5,
+                "top": 248,
                 "width": 200,
-                "x": 410,
-                "y": 334
+                "x": 130.5,
+                "y": 248
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
@@ -38,7 +38,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "f602d234-8cea-4db9-ab77-678cdc0d2101"
+                    "uuid": "d62b79b0-370d-4c89-9c55-ea78b7979ed7"
                 }
             ]
         },
@@ -58,10 +58,6 @@
                     "description": "runtime parameter for tool Chado load Blast results",
                     "name": "analysis_id"
                 },
-                {
-                    "description": "runtime parameter for tool Chado load Blast results",
-                    "name": "input"
-                },
                 {
                     "description": "runtime parameter for tool Chado load Blast results",
                     "name": "organism_id"
@@ -80,14 +76,14 @@
                 }
             ],
             "position": {
-                "bottom": 457.3999938964844,
-                "height": 164.39999389648438,
-                "left": 711,
-                "right": 911,
-                "top": 293,
+                "bottom": 364.6000061035156,
+                "height": 163.60000610351562,
+                "left": 384.5,
+                "right": 584.5,
+                "top": 201,
                 "width": 200,
-                "x": 711,
-                "y": 293
+                "x": 384.5,
+                "y": 201
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_load_blast/load_blast/2.3.4+galaxy0",
@@ -97,7 +93,7 @@
                 "owner": "gga",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"blastdb_id\": \"21\", \"input\": {\"__class__\": \"RuntimeValue\"}, \"match_on_name\": \"false\", \"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"query_type\": \"polypeptide\", \"re_name\": \"\", \"skip_missing\": \"false\", \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"blastdb_id\": \"21\", \"input\": {\"__class__\": \"ConnectedValue\"}, \"match_on_name\": \"false\", \"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"query_type\": \"polypeptide\", \"re_name\": \"\", \"skip_missing\": \"false\", \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "2.3.4+galaxy0",
             "type": "tool",
             "uuid": "10144cf8-f121-45f3-ba64-9f4d66bf1e56",
@@ -105,13 +101,13 @@
                 {
                     "label": null,
                     "output_name": "results",
-                    "uuid": "95708895-8439-4257-bff6-96e4c51a0725"
+                    "uuid": "6012c557-b288-4920-b6fa-ad3843aa836c"
                 }
             ]
         },
         "2": {
             "annotation": "",
-            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
+            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_populate_mviews/db_populate_mviews/3.2.1.0",
             "errors": null,
             "id": 2,
             "input_connections": {
@@ -120,61 +116,6 @@
                     "output_name": "results"
                 }
             },
-            "inputs": [
-                {
-                    "description": "runtime parameter for tool Synchronize an analysis",
-                    "name": "analysis_id"
-                }
-            ],
-            "label": "sync blast analysis org1",
-            "name": "Synchronize an analysis",
-            "outputs": [
-                {
-                    "name": "results",
-                    "type": "json"
-                }
-            ],
-            "position": {
-                "bottom": 451.3999938964844,
-                "height": 154.39999389648438,
-                "left": 1010,
-                "right": 1210,
-                "top": 297,
-                "width": 200,
-                "x": 1010,
-                "y": 297
-            },
-            "post_job_actions": {},
-            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
-            "tool_shed_repository": {
-                "changeset_revision": "f487ff676088",
-                "name": "tripal_analysis_sync",
-                "owner": "gga",
-                "tool_shed": "toolshed.g2.bx.psu.edu"
-            },
-            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
-            "tool_version": "3.2.1.0",
-            "type": "tool",
-            "uuid": "99e7496d-ac32-467d-8c09-2efd48d0231a",
-            "workflow_outputs": [
-                {
-                    "label": "Synchronize Analysis into Tripal",
-                    "output_name": "results",
-                    "uuid": "1fb6db92-90a2-4e33-beec-f2f974e369e9"
-                }
-            ]
-        },
-        "3": {
-            "annotation": "",
-            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_populate_mviews/db_populate_mviews/3.2.1.0",
-            "errors": null,
-            "id": 3,
-            "input_connections": {
-                "wait_for": {
-                    "id": 2,
-                    "output_name": "results"
-                }
-            },
             "inputs": [],
             "label": "populate mat views",
             "name": "Populate materialized views",
@@ -185,14 +126,14 @@
                 }
             ],
             "position": {
-                "bottom": 452.3999938964844,
-                "height": 154.39999389648438,
-                "left": 1295,
-                "right": 1495,
-                "top": 298,
+                "bottom": 359.3999938964844,
+                "height": 153.59999084472656,
+                "left": 640.5,
+                "right": 840.5,
+                "top": 205.8000030517578,
                 "width": 200,
-                "x": 1295,
-                "y": 298
+                "x": 640.5,
+                "y": 205.8000030517578
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_populate_mviews/db_populate_mviews/3.2.1.0",
@@ -210,18 +151,18 @@
                 {
                     "label": "Populate Tripal materialized view(s)",
                     "output_name": "results",
-                    "uuid": "0a0c9fa7-3a3c-459d-b5c7-b7a5a11459f3"
+                    "uuid": "44db552b-b5bc-4439-a0b2-4d7aeef17740"
                 }
             ]
         },
-        "4": {
+        "3": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_index/db_index/3.2.1.1",
             "errors": null,
-            "id": 4,
+            "id": 3,
             "input_connections": {
                 "wait_for": {
-                    "id": 3,
+                    "id": 2,
                     "output_name": "results"
                 }
             },
@@ -235,14 +176,14 @@
                 }
             ],
             "position": {
-                "bottom": 433.6000061035156,
-                "height": 113.60000610351562,
-                "left": 1570,
-                "right": 1770,
-                "top": 320,
+                "bottom": 337.6000061035156,
+                "height": 112.80000305175781,
+                "left": 888.5,
+                "right": 1088.5,
+                "top": 224.8000030517578,
                 "width": 200,
-                "x": 1570,
-                "y": 320
+                "x": 888.5,
+                "y": 224.8000030517578
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_index/db_index/3.2.1.1",
@@ -260,12 +201,12 @@
                 {
                     "label": "Index Tripal data",
                     "output_name": "results",
-                    "uuid": "5c0f0431-acb0-4e40-a7e4-8a562933fd97"
+                    "uuid": "d43089e2-546e-446e-9967-e2bb38e482d9"
                 }
             ]
         }
     },
     "tags": [],
-    "uuid": "80e32784-e39e-48ce-a6e3-7627de734ca6",
-    "version": 4
+    "uuid": "69787ba6-546e-4e63-a5cd-0a26bf970c64",
+    "version": 5
 }
\ No newline at end of file
diff --git a/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_2org_v1.ga b/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_2org_v1.ga
index ba2591c8dbd09e02b0ac52dcaf979709f2587bdb..e43fbf1754ba2cb1baf0a077b43c739d4e300d2f 100644
--- a/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_2org_v1.ga
+++ b/workflows_phaeoexplorer/Galaxy-Workflow-load_blast_results_2org_v1.ga
@@ -20,14 +20,14 @@
             "name": "Input dataset",
             "outputs": [],
             "position": {
-                "bottom": 230.39999389648438,
-                "height": 61.19999694824219,
-                "left": 97.5,
-                "right": 297.5,
-                "top": 169.1999969482422,
+                "bottom": 294.6000061035156,
+                "height": 61.20001220703125,
+                "left": 261,
+                "right": 461,
+                "top": 233.39999389648438,
                 "width": 200,
-                "x": 97.5,
-                "y": 169.1999969482422
+                "x": 261,
+                "y": 233.39999389648438
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
@@ -38,7 +38,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "72006174-6297-4777-95bd-ca427b9ea729"
+                    "uuid": "05659ec9-9e54-4a65-a9b2-955f4a9a78d9"
                 }
             ]
         },
@@ -58,14 +58,14 @@
             "name": "Input dataset",
             "outputs": [],
             "position": {
-                "bottom": 341.40000915527344,
+                "bottom": 376.8000030517578,
                 "height": 61.19999694824219,
-                "left": 129.5,
-                "right": 329.5,
-                "top": 280.20001220703125,
+                "left": 335,
+                "right": 535,
+                "top": 315.6000061035156,
                 "width": 200,
-                "x": 129.5,
-                "y": 280.20001220703125
+                "x": 335,
+                "y": 315.6000061035156
             },
             "tool_id": null,
             "tool_state": "{\"optional\": false}",
@@ -76,7 +76,7 @@
                 {
                     "label": null,
                     "output_name": "output",
-                    "uuid": "45971e82-4e85-4993-a9cb-9a4608e9def7"
+                    "uuid": "c0da107e-0954-4e64-b15f-dab9cec95ca9"
                 }
             ]
         },
@@ -114,14 +114,14 @@
                 }
             ],
             "position": {
-                "bottom": 255.8000030517578,
-                "height": 163.60000610351562,
-                "left": 457.5,
-                "right": 657.5,
-                "top": 92.19999694824219,
+                "bottom": 393.3999938964844,
+                "height": 163.59999084472656,
+                "left": 617,
+                "right": 817,
+                "top": 229.8000030517578,
                 "width": 200,
-                "x": 457.5,
-                "y": 92.19999694824219
+                "x": 617,
+                "y": 229.8000030517578
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_load_blast/load_blast/2.3.4+galaxy0",
@@ -139,81 +139,22 @@
                 {
                     "label": null,
                     "output_name": "results",
-                    "uuid": "cb238779-29f4-4f22-b6f3-6a8cc84857d1"
+                    "uuid": "f1e80214-1783-4201-affc-fad2d0504537"
                 }
             ]
         },
         "3": {
-            "annotation": "",
-            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
-            "errors": null,
-            "id": 3,
-            "input_connections": {
-                "wait_for": {
-                    "id": 2,
-                    "output_name": "results"
-                }
-            },
-            "inputs": [
-                {
-                    "description": "runtime parameter for tool Synchronize an analysis",
-                    "name": "analysis_id"
-                },
-                {
-                    "description": "runtime parameter for tool Synchronize an analysis",
-                    "name": "wait_for"
-                }
-            ],
-            "label": "sync blast analysis org1",
-            "name": "Synchronize an analysis",
-            "outputs": [
-                {
-                    "name": "results",
-                    "type": "json"
-                }
-            ],
-            "position": {
-                "bottom": 254.8000030517578,
-                "height": 153.60000610351562,
-                "left": 787.5,
-                "right": 987.5,
-                "top": 101.19999694824219,
-                "width": 200,
-                "x": 787.5,
-                "y": 101.19999694824219
-            },
-            "post_job_actions": {},
-            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
-            "tool_shed_repository": {
-                "changeset_revision": "f487ff676088",
-                "name": "tripal_analysis_sync",
-                "owner": "gga",
-                "tool_shed": "toolshed.g2.bx.psu.edu"
-            },
-            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
-            "tool_version": "3.2.1.0",
-            "type": "tool",
-            "uuid": "c98dedf6-8857-4d23-be94-fe6630f245d7",
-            "workflow_outputs": [
-                {
-                    "label": "Synchronize Analysis into Tripal",
-                    "output_name": "results",
-                    "uuid": "1ff4b1db-b6bf-4c48-a0ab-0a8513683999"
-                }
-            ]
-        },
-        "4": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_load_blast/load_blast/2.3.4+galaxy0",
             "errors": null,
-            "id": 4,
+            "id": 3,
             "input_connections": {
                 "input": {
                     "id": 1,
                     "output_name": "output"
                 },
                 "wait_for": {
-                    "id": 3,
+                    "id": 2,
                     "output_name": "results"
                 }
             },
@@ -222,17 +163,9 @@
                     "description": "runtime parameter for tool Chado load Blast results",
                     "name": "analysis_id"
                 },
-                {
-                    "description": "runtime parameter for tool Chado load Blast results",
-                    "name": "input"
-                },
                 {
                     "description": "runtime parameter for tool Chado load Blast results",
                     "name": "organism_id"
-                },
-                {
-                    "description": "runtime parameter for tool Chado load Blast results",
-                    "name": "wait_for"
                 }
             ],
             "label": "load blast results org2",
@@ -244,14 +177,14 @@
                 }
             ],
             "position": {
-                "bottom": 439.8000183105469,
-                "height": 163.60000610351562,
-                "left": 520.5,
-                "right": 720.5,
-                "top": 276.20001220703125,
+                "bottom": 394.3999938964844,
+                "height": 163.59999084472656,
+                "left": 879,
+                "right": 1079,
+                "top": 230.8000030517578,
                 "width": 200,
-                "x": 520.5,
-                "y": 276.20001220703125
+                "x": 879,
+                "y": 230.8000030517578
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_load_blast/load_blast/2.3.4+galaxy0",
@@ -261,7 +194,7 @@
                 "owner": "gga",
                 "tool_shed": "toolshed.g2.bx.psu.edu"
             },
-            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"blastdb_id\": \"21\", \"input\": {\"__class__\": \"RuntimeValue\"}, \"match_on_name\": \"false\", \"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"query_type\": \"polypeptide\", \"re_name\": \"\", \"skip_missing\": \"false\", \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"blastdb_id\": \"21\", \"input\": {\"__class__\": \"ConnectedValue\"}, \"match_on_name\": \"false\", \"organism_id\": {\"__class__\": \"RuntimeValue\"}, \"psql_target\": {\"method\": \"remote\", \"__current_case__\": 0}, \"query_type\": \"polypeptide\", \"re_name\": \"\", \"skip_missing\": \"false\", \"wait_for\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
             "tool_version": "2.3.4+galaxy0",
             "type": "tool",
             "uuid": "a7ec5c91-7cef-4b9f-95a0-ed5542b8e142",
@@ -269,77 +202,18 @@
                 {
                     "label": null,
                     "output_name": "results",
-                    "uuid": "119f219e-3d80-4b42-bb38-d07d4583048c"
+                    "uuid": "0c8e7a1b-e8ca-4164-a048-64d4f47bcbc6"
                 }
             ]
         },
-        "5": {
-            "annotation": "",
-            "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
-            "errors": null,
-            "id": 5,
-            "input_connections": {
-                "wait_for": {
-                    "id": 4,
-                    "output_name": "results"
-                }
-            },
-            "inputs": [
-                {
-                    "description": "runtime parameter for tool Synchronize an analysis",
-                    "name": "analysis_id"
-                },
-                {
-                    "description": "runtime parameter for tool Synchronize an analysis",
-                    "name": "wait_for"
-                }
-            ],
-            "label": "sync blast analysis org2",
-            "name": "Synchronize an analysis",
-            "outputs": [
-                {
-                    "name": "results",
-                    "type": "json"
-                }
-            ],
-            "position": {
-                "bottom": 440.8000183105469,
-                "height": 153.60000610351562,
-                "left": 828.5,
-                "right": 1028.5,
-                "top": 287.20001220703125,
-                "width": 200,
-                "x": 828.5,
-                "y": 287.20001220703125
-            },
-            "post_job_actions": {},
-            "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0",
-            "tool_shed_repository": {
-                "changeset_revision": "f487ff676088",
-                "name": "tripal_analysis_sync",
-                "owner": "gga",
-                "tool_shed": "toolshed.g2.bx.psu.edu"
-            },
-            "tool_state": "{\"analysis_id\": {\"__class__\": \"RuntimeValue\"}, \"wait_for\": {\"__class__\": \"RuntimeValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
-            "tool_version": "3.2.1.0",
-            "type": "tool",
-            "uuid": "2fff7637-7904-46ff-87e1-ce2721727e75",
-            "workflow_outputs": [
-                {
-                    "label": "Synchronize Analysis into Tripal",
-                    "output_name": "results",
-                    "uuid": "924991f3-6dd4-4752-9ce2-3832d72dff57"
-                }
-            ]
-        },
-        "6": {
+        "4": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_populate_mviews/db_populate_mviews/3.2.1.0",
             "errors": null,
-            "id": 6,
+            "id": 4,
             "input_connections": {
                 "wait_for": {
-                    "id": 5,
+                    "id": 3,
                     "output_name": "results"
                 }
             },
@@ -353,14 +227,14 @@
                 }
             ],
             "position": {
-                "bottom": 368.8000030517578,
-                "height": 153.60000610351562,
-                "left": 1103.5,
-                "right": 1303.5,
-                "top": 215.1999969482422,
+                "bottom": 388.1999969482422,
+                "height": 153.59999084472656,
+                "left": 1128,
+                "right": 1328,
+                "top": 234.60000610351562,
                 "width": 200,
-                "x": 1103.5,
-                "y": 215.1999969482422
+                "x": 1128,
+                "y": 234.60000610351562
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_populate_mviews/db_populate_mviews/3.2.1.0",
@@ -378,18 +252,18 @@
                 {
                     "label": "Populate Tripal materialized view(s)",
                     "output_name": "results",
-                    "uuid": "dc519305-8c27-4c53-9150-7dd37b5090cd"
+                    "uuid": "1ad6452f-457d-4f80-8659-9f8fc8ca0e48"
                 }
             ]
         },
-        "7": {
+        "5": {
             "annotation": "",
             "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_index/db_index/3.2.1.1",
             "errors": null,
-            "id": 7,
+            "id": 5,
             "input_connections": {
                 "wait_for": {
-                    "id": 6,
+                    "id": 4,
                     "output_name": "results"
                 }
             },
@@ -403,14 +277,14 @@
                 }
             ],
             "position": {
-                "bottom": 349,
+                "bottom": 359.6000061035156,
                 "height": 112.80000305175781,
-                "left": 1373.5,
-                "right": 1573.5,
-                "top": 236.1999969482422,
+                "left": 1383,
+                "right": 1583,
+                "top": 246.8000030517578,
                 "width": 200,
-                "x": 1373.5,
-                "y": 236.1999969482422
+                "x": 1383,
+                "y": 246.8000030517578
             },
             "post_job_actions": {},
             "tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_db_index/db_index/3.2.1.1",
@@ -428,12 +302,12 @@
                 {
                     "label": "Index Tripal data",
                     "output_name": "results",
-                    "uuid": "e2911922-2412-4618-97fe-bcc783bb0865"
+                    "uuid": "b4ab82f0-8227-4ed0-8b5f-5b9e99c408c9"
                 }
             ]
         }
     },
     "tags": [],
-    "uuid": "ffae97b5-698a-41a5-8561-470300594544",
-    "version": 6
+    "uuid": "64693f78-b92c-48c3-93c9-a3b0ee9e1071",
+    "version": 7
 }
\ No newline at end of file