diff --git a/Dockerfile b/Dockerfile
index 21a0cc9c5e578173a214a8a35f78f7b28ae2f549..e95d78f288fe3de7ca9f81d2681f83fe9c9bcb19 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -19,7 +19,7 @@ RUN git clone -b 2023-1-pre-release https://schemaforge:glpat-6vGMyFgBcCn2ZtQkjk
    && rm -fr .git
 #RUN git clone -b main https://schemaforge2:glpat-YKU2NQditme95H-nrr21@git.dcc.sib.swiss/sphn-semantic-framework/dataset2rdf.git && cd dataset2rdf && git checkout aabf7d9cc5e705b9e7adb572fadb681bed82dbcf \
 #   && rm -fr .git
-RUN git clone -b proejctspecifics https://schemaforge2:glpat-YKU2NQditme95H-nrr21@git.dcc.sib.swiss/sphn-semantic-framework/dataset2rdf.git && cd dataset2rdf && git checkout \
+RUN git clone -b main https://schemaforge2:glpat-YKU2NQditme95H-nrr21@git.dcc.sib.swiss/sphn-semantic-framework/dataset2rdf.git && cd dataset2rdf && git checkout \
    && rm -fr .git
 RUN mv sphn-ontology-documentation-visualization sphn_ontology_documentation_visualization \
    && mv sphn-shacl-generator sphn_shacl_generator
diff --git a/forms_new.py b/forms_new.py
index 58c8694de1af19d91e05e97ffbc91391a5f1017b..3fe8484dc729d74bfe152726956fc258ec870df3 100644
--- a/forms_new.py
+++ b/forms_new.py
@@ -1,14 +1,18 @@
 import os
 import sys
+import time
+import threading
+import logging
+import asyncio
 sys.path.append('./sphn_ontology_documentation_visualization')
 sys.path.append('./sphn_shacl_generator')
 sys.path.append('./dataset2rdf')
 
-from flask import Flask, render_template, url_for, redirect, request, flash, session, current_app, send_file, jsonify
+from flask import Flask, render_template, url_for, redirect, request, flash, session, current_app, send_file, jsonify, make_response
 from flask_dropzone import Dropzone
 
 import yaml
-from datetime import datetime
+from datetime import datetime, timedelta
 from zipfile import ZipFile
 from shutil import copy, rmtree
 from uuid import uuid4
@@ -17,6 +21,7 @@ from pylode_main import load_external_terminologies, pylode_and_sparqler
 from sphn_ontology_documentation_visualization.generate_image_links_file import main as generate_image_links
 from sphn_shacl_generator.shacler import produceSphnShacl, produceProjectShacl
 from dataset2rdf.cli import main as dataset2rdf_fct
+from threading import Thread
 
 
 
@@ -27,6 +32,7 @@ ALLOWED_EXTENSIONS = {'xlsx', 'ttl', 'json', 'png'}
 
 app = Flask(__name__)
 dropzone = Dropzone(app)
+queue = []
 
 app.config.from_object(__name__)
 app.config['SECRET_KEY'] = "15486712346568"
@@ -48,7 +54,7 @@ def allowed_file(filename):
 
 
 
-def csv_to_ttl(ontology_filename):
+def csv_to_ttl(ontology_filename, userID):
 
     # get version from config.yaml
     with open('dataset2rdf/dataset2rdf/config.yaml', 'r') as f:
@@ -61,12 +67,16 @@ def csv_to_ttl(ontology_filename):
         ps_output, ps_path = None, None
     else:
         sphn_output = 'sphn_ontology_' + version_year + '.ttl'
+        app.logger.info(ontology_filename)
+        app.logger.info(ontology_filename.split("."))
+        app.logger.info(os.listdir(app.config['UPLOAD_FOLDER'] + userID + '/'))
+        app.logger.info(os.listdir(app.config['UPLOAD_FOLDER'] + userID + '/'))
         ps_output = ontology_filename.split(".")[0] + '.ttl'
-        ps_path = os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ps_output)
+        ps_path = os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ps_output)
 
     try:
-        dataset2rdf_fct(dataset=os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ontology_filename), 
-            output=os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_output), 
+        dataset2rdf_fct(dataset=os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ontology_filename), 
+            output=os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_output), 
             config="dataset2rdf/dataset2rdf/config.yaml", 
             extras=["dataset2rdf/input/SPHN_replaces_annotation.ttl"],
             project_output=ps_path)
@@ -80,9 +90,9 @@ def csv_to_ttl(ontology_filename):
 
 
 
-def load_pylode_sparqler(ontology_filename, zipObj_pylode, zipObj_sparql, project_specific_ontology_filename=None, pylode=False, sparqler=False, images=False):
+def load_pylode_sparqler(ontology_filename, userID, zipObj_pylode, zipObj_sparql, project_specific_ontology_filename=None, pylode=False, sparqler=False, images=False):
     
-    folder_path = os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/')
+    folder_path = os.path.join(app.config['UPLOAD_FOLDER'], userID + '/')
     queries_path = folder_path + "sparql-queries/"
 
     try:
@@ -168,20 +178,20 @@ def load_pylode_sparqler(ontology_filename, zipObj_pylode, zipObj_sparql, projec
 
 
 
-def load_shacl(ontology_filename, shacler_exc=None, project_specific_ontology_filename=None):
+def load_shacl(ontology_filename, userID, shacler_exc=None, project_specific_ontology_filename=None):
 
     try:
         if project_specific_ontology_filename:
             filename = project_specific_ontology_filename[:-4] + '_shacl.ttl'
-            produceProjectShacl(project_ontology=os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + project_specific_ontology_filename),
-                sphn_ontology=os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ontology_filename),
-                shacl_output=os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + filename),
+            produceProjectShacl(project_ontology=os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + project_specific_ontology_filename),
+                sphn_ontology=os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ontology_filename),
+                shacl_output=os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + filename),
                 exception=shacler_exc
                 )
         else:
             filename = ontology_filename[:-4] + '_shacl.ttl'
-            produceSphnShacl(sphn_ontology=os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ontology_filename),
-                shacl_output=os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + filename),
+            produceSphnShacl(sphn_ontology=os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ontology_filename),
+                shacl_output=os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + filename),
                 exception=shacler_exc
                 )
         return False
@@ -190,12 +200,124 @@ def load_shacl(ontology_filename, shacler_exc=None, project_specific_ontology_fi
         # return error for pylode (for messaging and not display download button)
         return True
 
+@app.before_first_request
+def processing_files(): # write this also maybe test with only queueing 
+    def processing():
+        while True:
+            if queue:
+                try:
+
+                    userID = queue[0]
+                    run_dataset2rdf = False
+                    cnt_xlsx, cnt_shacl, cnt_sphn, cnt_o = 0,0,0,0
+                    shacl_exception = None
+                    ontology = None
+                    sphn_ontology = None
+                    pylode_images = False
+
+                    # check for false uploads
+                    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], userID)):
+                        if f.endswith('.xlsx'):
+                            run_dataset2rdf = True
+                            xlsx_file = f
+                            cnt_xlsx += 1
+                        if f.endswith('.json'):
+                            shacl_exception = f
+                            cnt_shacl += 1
+                        if f.endswith('.ttl'):
+                            if f.startswith('sphn'):
+                                sphn_ontology = f
+                                cnt_sphn += 1
+                            else:
+                                ontology = f
+                                cnt_o += 1
+
+                    pylode_images = False
+                    if os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/documentation/html/images')):
+                        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/documentation/html/images')):
+                            if f.endswith('.png'):
+                                pylode_images = True
+
+                    # if dataset is provided run tools from beginning
+                    if run_dataset2rdf: #file ends in xlsx
+                        # run dataset2rdf
+                        sphn_output, ps_output, error_dataset2rdf = csv_to_ttl(xlsx_file, userID)
+
+                        # store output and run pylode, sparqler with dataset2rdf output
+                        if ps_output:
+                            zipObj = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/schemas.zip' ), 'w')
+                            zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ps_output), arcname=ps_output)
+                            zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_output), arcname=sphn_output)
+                            zipObj.close()
+
+                            zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ps_output[:-4] + '_pylode.zip'), 'w')
+                            zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ps_output[:-4] + '_sparql.zip'), 'w')
+                        else:          
+                            zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_output[:-4] + '_pylode.zip'), 'w')
+                            zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_output[:-4] + '_sparql.zip'), 'w')
+
+                        error_pylode = load_pylode_sparqler(sphn_output, userID, zipObj_pylode, zipObj_sparql, project_specific_ontology_filename=ps_output, pylode=True, sparqler=True, images=pylode_images)
+
+                        # run shacler
+                        error_shacl = load_shacl(sphn_output, userID, shacler_exc=shacl_exception, project_specific_ontology_filename=ps_output)
+
+                    # if ttl is provided skip dataset2rdf
+                    else:
+                        # run pylode, sparqler
+                        if ontology:
+                            zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ontology[:-4] + '_pylode.zip'), 'w')
+                            zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ontology[:-4] + '_sparql.zip'), 'w')
+                        else:
+                            zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_ontology[:-4] + '_pylode.zip'), 'w')
+                            zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_ontology[:-4] + '_sparql.zip'), 'w')
+
+                        error_dataset2rdf = False
+
+                        error_pylode = load_pylode_sparqler(sphn_ontology, userID, zipObj_pylode, zipObj_sparql, project_specific_ontology_filename=ontology, pylode=True, sparqler=True, images=pylode_images)
+
+                        # run shacler
+                        error_shacl = load_shacl(sphn_ontology, userID, shacler_exc=shacl_exception, project_specific_ontology_filename=ontology)
+
+                    zipObj_pylode.close()
+                    zipObj_sparql.close()
+
+                    # if ps ontology ttl, store ps and sphn in zip
+                    if ontology:
+                        zipObj = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/schemas.zip' ), 'w')
+                        zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ontology),
+                            arcname=ontology)
+                        zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_ontology),
+                            arcname=sphn_ontology)
+                        zipObj.close()
+
+                        os.remove(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + ontology))
+                        os.remove(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/' + sphn_ontology))
+
+                    with open(os.path.join(app.config['UPLOAD_FOLDER'], userID + '/success.txt'), 'w') as f:
+                        f.write('Pipeline completed successfully!')
+
+                    app.logger.info(os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], userID)))
+                    time.sleep(5)
+                    queue.pop(0)
+                except Exception as e:
+                    userID = queue.pop(0)
+                    app.logger.info("Request failed: " + userID)
+                    app.logger.info(e)
+            else:
+                app.logger.info('Empty Queue')
+                app.logger.info(queue)
+                time.sleep(5)
+
+    thread = Thread(target=processing)
+    thread.start()
+
+    # This won't be executed; we're staying in the loop above
+    return '', 202
 
 
 # runs before each request, check if session expired
 @app.before_request
 def before_request():
-
     if 'id' not in session:
         session['id'] = str(uuid4())
         session['upload_started'] = False
@@ -218,26 +340,13 @@ def before_request():
         pass
 
     # update session last_active each request
-    session['last_active'] = now
+    session['last_active'] = now # TODO: change to uid
 
 
 
 # set session id when app is started
 @app.route("/", methods=['GET'])
 def start():
-
-    session['id'] = str(uuid4())
-    session['upload_started'] = False
-
-    return redirect(url_for('Upload'))
-
-
-
-# tab upload files and run tools
-@app.route("/Upload", methods=['GET', 'POST'])
-def Upload():
-
-    # reset dict for render_template
     t_dict = {}
     t_dict["download"] = False
     t_dict["remove_uploads"] = False
@@ -248,157 +357,190 @@ def Upload():
     t_dict["error_dataset2rdf"] = False
     t_dict["error_pylode"] = False
     t_dict["error_shacl"] = False
+    session['upload_started'] = False
 
-    # upload files
-    if request.method == 'POST':
+    expire_date = datetime.now() + timedelta(days=2)
+    if request.cookies.get('userID'):
+        session['userID'] = request.cookies.get('userID')
+    else:
+        session['userID'] = session['id']
+    
+    resp = make_response(render_template('Upload.html', t_dict=t_dict))
+    resp.set_cookie('userID', session['userID'], expires=expire_date)            
 
-        # add upload folder with unique session id
-        if not os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
-            os.mkdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id']))
+    return resp
 
-        # store files that are uploaded with dropzone 
-        for key, f in request.files.items():
-            if key.startswith('file') and allowed_file(f.filename):
 
-                if f.filename.endswith('.png'):
-                    if not os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/documentation/html/images')):
-                        os.makedirs(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/documentation/html/images'))
+# Endpoint that saves the files
+@app.route("/Upload", methods=['POST'])
+def Upload():
+    session['userID'] = request.cookies.get('userID')
+    # add upload folder with unique session id
+    if not os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
+        os.mkdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID']))
+    else:
+        # Could automatically remove multiple uploads here but Run has a check that ensures exactly 1 file
+        pass
 
-                    f.save(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/documentation/html/images/' + f.filename))
-                else:
-                    f.save(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f.filename))
+    # store files that are uploaded with dropzone 
+    for key, f in request.files.items():
+        if key.startswith('file') and allowed_file(f.filename):
 
-        session['upload_started'] = True
+            if f.filename.endswith('.png'):
+                if not os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/documentation/html/images')):
+                    os.makedirs(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/documentation/html/images'))
 
-    # make sure there are no old uploaded files from last download and inform user to upload before running
-    if request.method == 'GET' and os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])) and not session ['upload_started']:
-        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
-                os.remove(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f))
+                f.save(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/documentation/html/images/' + f.filename))
+            else:
+                f.save(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f.filename))
 
-        t_dict["nothing_uploaded"] = True
+    session['upload_started'] = True
+    return '', 204
 
-    #run tools
-    if request.method == 'GET' and session['upload_started']:
 
-        session['upload_started'] = False
-        
-        run_dataset2rdf = False
-        cnt_xlsx, cnt_shacl, cnt_sphn, cnt_o = 0,0,0,0
-        shacl_exception = None
-        ontology = None
-        sphn_ontology = None
-        pylode_images = False
-
-        # check for false uploads
-        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
-            if f.endswith('.xlsx'):
-                run_dataset2rdf = True
-                xlsx_file = f
-                cnt_xlsx += 1
-            if f.endswith('.json'):
-                shacl_exception = f
-                cnt_shacl += 1
-            if f.endswith('.ttl'):
-                if f.startswith('sphn'):
-                    sphn_ontology = f
-                    cnt_sphn += 1
-                else:
-                    ontology = f
-                    cnt_o += 1
-
-        if os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/documentation/html/images')):
-            for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/documentation/html/images')):
-                if f.endswith('.png'):
-                    pylode_images = True
-
-        # Through message if no .xlsx or .ttl
-        if ((not run_dataset2rdf and not sphn_ontology)
-            or (run_dataset2rdf and sphn_ontology)
-            ):
-            t_dict["missing"] = True
-            return render_template('Upload.html', t_dict=t_dict)
-
-        # Through message if too many files are uploaded
-        if cnt_o > 1: 
-            t_dict["cnt"] = "project-specific ontology (.ttl)"
-            return render_template('Upload.html', t_dict=t_dict)
-        if cnt_sphn > 1: 
-            t_dict["cnt"] = "SPHN RDF schema (.ttl)"
-            return render_template('Upload.html', t_dict=t_dict)
-        if cnt_shacl > 1: 
-            t_dict["cnt"] = "SHACL exceptions (.json)"
-            return render_template('Upload.html', t_dict=t_dict)
-        if cnt_xlsx > 1: 
-            t_dict["cnt"] = "SPHN dataset (.xlsx)"
-            return render_template('Upload.html', t_dict=t_dict)
-
-        # if dataset is provided run tools from beginning
-        if run_dataset2rdf:
-            # run dataset2rdf
-            sphn_output, ps_output, error_dataset2rdf = csv_to_ttl(xlsx_file)
-
-            # store output and run pylode, sparqler with dataset2rdf output
-            if ps_output:
-                zipObj = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/schemas.zip' ), 'w')
-                zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ps_output), arcname=ps_output)
-                zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_output), arcname=sphn_output)
-                zipObj.close()
-
-                zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ps_output[:-4] + '_pylode.zip'), 'w')
-                zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ps_output[:-4] + '_sparql.zip'), 'w')
-            else:          
-                zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_output[:-4] + '_pylode.zip'), 'w')
-                zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_output[:-4] + '_sparql.zip'), 'w')
-
-            error_pylode = load_pylode_sparqler(sphn_output, zipObj_pylode, zipObj_sparql, project_specific_ontology_filename=ps_output, pylode=True, sparqler=True, images=pylode_images)
-
-            # run shacler
-            error_shacl = load_shacl(sphn_output, shacler_exc=shacl_exception, project_specific_ontology_filename=ps_output)
-
-        # if ttl is provided skip dataset2rdf
-        else:
-            # run pylode, sparqler
-            if ontology:
-                zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ontology[:-4] + '_pylode.zip'), 'w')
-                zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ontology[:-4] + '_sparql.zip'), 'w')
-            else:
-                zipObj_pylode = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_ontology[:-4] + '_pylode.zip'), 'w')
-                zipObj_sparql = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_ontology[:-4] + '_sparql.zip'), 'w')
+# Endpoint that is used to trigger the calculation of the Backend
+@app.route("/Run", methods=['GET'])
+def Run():
+    t_dict = {}
+    t_dict["download"] = False
+    t_dict["remove_uploads"] = False
+    t_dict["files_not_found"] = False
+    t_dict["nothing_uploaded"] = False
+    t_dict["missing"] = False
+    t_dict["cnt"] = ""
+    t_dict["error_dataset2rdf"] = False
+    t_dict["error_pylode"] = False
+    t_dict["error_shacl"] = False
+    t_dict["get_status"] = False
+    t_dict["running"] = False
 
-            error_dataset2rdf = False
+    session['userID'] = request.cookies.get('userID')
 
-            error_pylode = load_pylode_sparqler(sphn_ontology, zipObj_pylode, zipObj_sparql, project_specific_ontology_filename=ontology, pylode=True, sparqler=True, images=pylode_images)
+    run_dataset2rdf = False
+    cnt_xlsx, cnt_shacl, cnt_sphn, cnt_o = 0,0,0,0
+    shacl_exception = None
+    ontology = None
+    sphn_ontology = None
+    pylode_images = False
+    already_calc = False
 
-            # run shacler
-            error_shacl = load_shacl(sphn_ontology, shacler_exc=shacl_exception, project_specific_ontology_filename=ontology)
 
-        zipObj_pylode.close()
-        zipObj_sparql.close()
+    # check for no uploads
+    if not os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
+        app.logger.info("checking for uploads")
+        t_dict["missing"] = True
+        return render_template('Upload.html', t_dict=t_dict)
 
-        # if ps ontology ttl, store ps and sphn in zip
-        if ontology:
-            zipObj = ZipFile(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/schemas.zip' ), 'w')
-            zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ontology),
-                arcname=ontology)
-            zipObj.write(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_ontology),
-                arcname=sphn_ontology)
-            zipObj.close()
+    # check for false uploads
+    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
+        if f.endswith('.xlsx'):
+            run_dataset2rdf = True
+            xlsx_file = f
+            cnt_xlsx += 1
+        if f.endswith('.json'):
+            shacl_exception = f
+            cnt_shacl += 1
+        if f.endswith('.ttl'):
+            if f.startswith('sphn'):
+                sphn_ontology = f
+                cnt_sphn += 1
+            else:
+                ontology = f
+                cnt_o += 1
+        if f == "success.txt":
+            already_calc = True
+
+    # check for png images
+    if os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/documentation/html/images')):
+        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/documentation/html/images')):
+            if f.endswith('.png'):
+                pylode_images = True
+
+    # Throw message if no .xlsx or .ttl
+    if (not run_dataset2rdf and not sphn_ontology):
+        t_dict["missing"] = True
+        return render_template('Upload.html', t_dict=t_dict)
+    elif already_calc:
+        t_dict["generic"] = "Please remove uploads before re-running"
+        t_dict["get_status"] = True
+        return render_template('Upload.html', t_dict=t_dict)
+    elif cnt_o > 1: 
+        t_dict["cnt"] = "project-specific ontology (.ttl)"
+        return render_template('Upload.html', t_dict=t_dict)
+    elif cnt_sphn > 1: 
+        t_dict["cnt"] = "SPHN RDF schema (.ttl)"
+        return render_template('Upload.html', t_dict=t_dict)
+    elif cnt_shacl > 1: 
+        t_dict["cnt"] = "SHACL exceptions (.json)"
+        return render_template('Upload.html', t_dict=t_dict)
+    elif cnt_xlsx > 1: 
+        t_dict["cnt"] = "SPHN dataset (.xlsx)"
+        return render_template('Upload.html', t_dict=t_dict)
+
+    # Check for requeueing error
+    elif session['userID'] in queue: 
+        app.logger.info('UID already in queue: ' + session['userID'])
+        app.logger.info('Queue: ')
+        app.logger.info(queue)
+        t_dict["running"] = True
+        t_dict["get_status"] = True
+        return render_template('Upload.html', t_dict=t_dict) 
 
-            os.remove(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + ontology))
-            os.remove(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + sphn_ontology))
+    # The only correct run, just queue the UID
+    else:
+        t_dict["get_status"] = True
+        t_dict["success"] = True
+        app.logger.info('Queued') 
+        queue.append(session['userID'])
+        app.logger.info(queue)
+        return render_template('Upload.html', t_dict=t_dict)
+    return '', 204
+
+# Endpoint polled by the frontend to check processing status for the current user
+@app.route("/data", methods=['GET'])
+def get_Status():
+    session['userID'] = request.cookies.get('userID')
+    if session['userID'] in queue:
+        app.logger.info('Request still running found in queue: ' + session['userID'])
+        app.logger.info('Queue: ')
+        app.logger.info(queue)
+        return "Running"
+    else:
+        def file_error(array, file):
+            for f in array:
+                if f.endswith(file):
+                    return False
+            return True
 
+        t_dict = {}
         t_dict["download"] = True
-        t_dict["error_dataset2rdf"] = error_dataset2rdf
-        t_dict["error_pylode"] = error_pylode
-        t_dict["error_shacl"] = error_shacl
 
-    return render_template('Upload.html', t_dict=t_dict)
+        # Checking for present files
+        files = os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID']))
+        t_dict["error_pylode"] = file_error(files, "pylode.zip")
+        t_dict["error_shacl"] = file_error(files, "shacl.ttl")
+        t_dict["error_sparql"] = file_error(files, "sparql.zip")
+        t_dict["error_logs"] = file_error(files, "app.log")
+        
+        app.logger.info(t_dict)
+        # Logging output 
+        app.logger.info('Request done: ' + session['userID'])
+        app.logger.info('Queue: ')
+        app.logger.info(queue)
 
+        return render_template('buttons.html', t_dict=t_dict)
+    return '', 204
 
 
 @app.route("/remove_uploads")
 def remove_uploads():
 
+    if session['userID'] in queue:
+        t_dict = {}
+        t_dict["get_status"] = True
+        t_dict["generic"] = "Please don't delete the files while the queue is running"
+        return render_template('Upload.html', t_dict=t_dict)
+
     # reset dict for render_template
     t_dict = {}
     t_dict["download"] = False
@@ -411,17 +553,19 @@ def remove_uploads():
     t_dict["error_dataset2rdf"] = False
     t_dict["error_pylode"] = False
     t_dict["error_shacl"] = False
+    session['userID'] = request.cookies.get('userID')
 
     files_found = False
+    
 
     # remove uploaded files
-    if os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
-        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
+    if os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
+        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
             files_found = True
-            if os.path.isdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f)):
-                rmtree(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f))
+            if os.path.isdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f)):
+                rmtree(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f))
             else:
-                os.remove(os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f))
+                os.remove(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f))
 
     session['upload_started'] = False
 
@@ -436,23 +580,24 @@ def remove_uploads():
 
 @app.route("/download_ttl")
 def download_ttl():
+    session['userID'] = request.cookies.get('userID')
 
     zipf = False
     ttlf = False
 
-    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
+    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
         if f.endswith('schemas.zip'):
             zipf = True
             name = f
-            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f)
+            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f)
 
     if not zipf:
-        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
+        for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
             if f.endswith('.ttl'):
                 if not f.endswith('shacl.ttl'):
                     ttlf = True
                     name = f
-                    file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f)
+                    file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f)
 
     if zipf:
         return send_file(
@@ -475,14 +620,15 @@ def download_ttl():
 
 @app.route("/download_html")
 def download_html():
+    session['userID'] = request.cookies.get('userID')
 
     html = False
 
-    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
+    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
         if f.endswith('pylode.zip'):
             html = True
             name = f
-            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f)
+            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f)
 
     if html:
         return send_file(
@@ -498,14 +644,15 @@ def download_html():
 
 @app.route("/download_shacl")
 def download_shacl():
+    session['userID'] = request.cookies.get('userID')  # NOTE(review): may be None and is attacker-controlled; validate before using as a path component
 
     shacl = False
 
-    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
+    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
         if f.endswith('shacl.ttl'):
             shacl = True
             name = f
-            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f)
+            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f)
 
     if shacl:
         return send_file(
@@ -521,14 +668,15 @@ def download_shacl():
     
 @app.route("/download_sparql")
 def download_sparql():
+    session['userID'] = request.cookies.get('userID')  # NOTE(review): may be None and is attacker-controlled; validate before using as a path component
 
     sparql = False
 
-    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['id'])):
+    for f in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'], session['userID'])):
         if f.endswith('sparql.zip'):
             sparql = True
             name = f
-            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['id'] + '/' + f)
+            file_path = os.path.join(app.config['UPLOAD_FOLDER'], session['userID'] + '/' + f)
 
     if sparql:
         return send_file(
@@ -541,13 +689,12 @@ def download_sparql():
     return redirect(url_for('Upload'))
 
 
-
+logging.basicConfig(level=logging.INFO)  # NOTE(review): runs on import; DEBUG would leak request details in production logs
 if __name__ == "__main__":
-
     # load ext terminologies for pylode before requests
     global external_g
     print("Load external terminology labels for pyLODE")
     external_g = load_external_terminologies()
 
     # start app
-    app.run(host='0.0.0.0', port=9090, debug=False)
+    app.run(host='0.0.0.0', port=9090, debug=False)  # SECURITY: debug=True on 0.0.0.0 exposes the Werkzeug debugger (remote code execution)
diff --git a/requirements.txt b/requirements.txt
index e152930266906e09a50e1d09aa869c9677ba1f97..43d7cc4bce44e7cf24bbe360e6f93454b2a01215 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,3 +12,5 @@ isodate==0.6.1
 pyparsing==3.0.7
 six==1.16.0
 pyaml
+# NOTE(review): 'threading' and 'time' are Python stdlib modules; the 'threaded'
+# and 'python-time' pip packages are unrelated (typosquat risk) and were removed.
diff --git a/templates/Upload.html b/templates/Upload.html
index e28223e670397b8aac4c41a03a096b9f1318f3b7..2ef265e66c82f9fab47de86f4a6cc02962943f24 100644
--- a/templates/Upload.html
+++ b/templates/Upload.html
@@ -67,7 +67,6 @@
   {{ dropzone.create(action='Upload') }}
   {{ dropzone.config() }}
 
-  <div id="Running"  class="Running" style="display:none; border-radius:10px" >SUCCESS: Tools are running. This can take 2-5 minutes.</div>
 
   {%- if t_dict["files_not_found"] -%}
   <div id="Removed"  class="Removed" style="display:block;border-radius:10px" >SUCCESS: No uploaded files found. Nothing to remove.</div>
@@ -103,11 +102,53 @@
 
   {%- if t_dict["cnt"] -%}
   <div id="Missing"  class="Missing" style="display:block;border-radius:10px" >ERROR: You uploaded more than one {{ t_dict["cnt"] }} file. Please remove your uploaded files and try again.</div>
+  {%- endif -%}    
+  
+  {%- if t_dict["running"] -%}
+  <div id="Missing"  class="Missing" style="display:block;border-radius:10px" >ERROR: A SchemaForge run is already in progress for your session. Please wait for it to finish before starting another.</div>
   {%- endif -%}  
 
-  <div style="vertical-align: middle;"><a class="btn upload" href="{{url_for('Upload')}}" onclick="showDiv()">Run</a></div>
+  {%- if t_dict["runtime_error"] -%}
+  <div id="Missing"  class="Missing" style="display:block;border-radius:10px" >ERROR: There has been an internal server error on your request, behold:  <br>{{ t_dict["runtime_error"] }}  </div>
+  {%- endif -%}
+  
+  {%- if t_dict["success"] -%}
+  <div id="Running"  class="Running" style="display:block; border-radius:10px" >SUCCESS: Tools are running. This can take 2-5 minutes.</div>
+  {%- endif -%} 
+
+  {%- if "generic" in t_dict -%}
+  <div id="Missing"  class="Missing" style="display:block; border-radius:10px" >ERROR: {{t_dict["generic"]}}.</div>
+  {%- endif -%} 
+
+
+
+  <div style="vertical-align: middle;"><a class="btn upload" href="{{ url_for('Run') }}" onclick="showDiv()">Run</a></div>
   <div style="vertical-align: middle;"><a class="btn remove" href="{{url_for('remove_uploads')}}" >Remove uploaded files</a></div>
 
+  {%- if t_dict["get_status"] -%}
+  <div id="data"></div>
+  <script>
+    function requestData() {
+      // Send an HTTP request to the server
+      fetch('/data')
+        .then(response => response.text())
+        .then(data => {
+          // Update the UI with the latest data
+          if (data == "Running"){
+            document.getElementById('data').innerHTML = ""; //"Avoid displaying here";
+          } else {
+            document.getElementById('data').innerHTML = data;
+          }
+          // Schedule the next request
+          setTimeout(requestData, 15000);
+        });
+    }
+
+    // Start the first request
+    requestData();
+  </script>
+  {%- endif -%}  
+
   {%- if t_dict["download"] -%}
   {%- if not t_dict["error_dataset2rdf"] -%}
   <h4>Download options:</h4>
diff --git a/templates/base_new.html b/templates/base_new.html
index e5f98cd649f19080ef8f7620f9f15a46af45dd4e..9af3551547419cff4a438f94d5cb410c959476cd 100644
--- a/templates/base_new.html
+++ b/templates/base_new.html
@@ -26,6 +26,7 @@
 <body>
 <nav class="navbar navbar-default navbar-fixed-top">
     <div class="container"> 
+        <!-- FIXME(review): this navbar collapse toggle is reported broken; likely missing bootstrap.js (data-toggle handler) -->
         <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar"
                   aria-expanded="false" aria-controls="navbar">
             <span class="sr-only">Toggle navigation</span>
diff --git a/templates/buttons.html b/templates/buttons.html
new file mode 100644
index 0000000000000000000000000000000000000000..c8b6c883c501d3c999434e28104e9b637c2e427a
--- /dev/null
+++ b/templates/buttons.html
@@ -0,0 +1,14 @@
+{%- if t_dict["download"] -%}
+{%- if not t_dict["error_dataset2rdf"] -%}
+<h4>Download options:</h4>
+<div class="row">
+  <a class="btn download" href="{{url_for('download_ttl')}}" >TTL File(s)</a>
+  {%- if not t_dict["error_pylode"] -%}<a class="btn download" href="{{url_for('download_html')}}" >HTML Documentation</a>{%- endif -%}  
+</div>
+<div class="row">
+  {%- if not t_dict["error_shacl"] -%}<a class="btn download" href="{{url_for('download_shacl')}}" >SHACL constraints</a>{%- endif -%}  
+  {%- if not t_dict["error_pylode"] -%}<a class="btn download" href="{{url_for('download_sparql')}}" >SPARQL Queries</a>{%- endif -%}  
+</div> 
+<br>
+{%- endif -%}
+{%- endif -%}
\ No newline at end of file