diff --git a/files/create_partition.sh b/files/create_partition.sh
deleted file mode 100644
index c1061d84337fda989fb3acd80165cda686ddea73..0000000000000000000000000000000000000000
--- a/files/create_partition.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-apt-get -y install e2fsprogs parted apt-file
-
-# only create partition on second disk if it doesn't exist already
-
-# alternatively use condition: if [[ ! $( ls -l /dev/disk/by-label | grep "SUBAPP" ) ]]; then
-if	( [[ ! $( lsblk -o label | grep "SUBAPP" ) ]] && \
-	  [[ ! $( mount | grep "/dev/sdb" ) ]] && \
-	  [[ ! $( fdisk -l | grep "/dev/sdb[0-9]" ) ]] ); then
-	echo "partition 'SUBAPP' doesn't exist yet, creating it now."
-	parted --script /dev/sdb \
-		mklabel msdos \
-		mkpart primary ext4 1MiB 100MiB
-	[[ $? -eq 0 ]] && echo "Partition created." || exit 1
-	mkfs.ext4 /dev/sdb1
-	[[ $? -eq 0 ]] && echo "Partition formatted." || exit 1
-	e2label /dev/sdb1 SUBAPP
-	[[ $? -eq 0 ]] && echo "Label assigned." || exit 1
-else
-	echo "partition 'SUBAPP' already exists, doing nothing."
-fi
diff --git a/meta/main.yml b/meta/main.yml
index d6367f4de56bce36bc1dde854372e29eb1796f69..22f22e89fe726b6375b3ed57becce8e5e97c32f2 100644
--- a/meta/main.yml
+++ b/meta/main.yml
@@ -19,8 +19,8 @@ galaxy_info:
   platforms:
     - name: Debian
       versions:
-        - 9
         - 10
+        #- 11
   galaxy_tags: []
     # List tags for your role here, one per line. A tag is a keyword that describes and categorizes the role. Users find roles by searching for tags. Be sure to remove the '[]' above, if you 
     # add tags to this list.
diff --git a/tasks/cleanup_legacy.yml b/tasks/cleanup_legacy.yml
new file mode 100644
index 0000000000000000000000000000000000000000..4f1a9aed29e1f259ce10db94c364fff4b3664ef2
--- /dev/null
+++ b/tasks/cleanup_legacy.yml
@@ -0,0 +1,73 @@
+---
+- name: remove SubApp-CMK-plugin
+  file:
+    path: "/usr/lib/check_mk_agent/plugins/checkmk_submission_application.sh"
+    state: absent
+
+- name: remove packages
+  apt:
+    name: [
+      'libarchive-zip-perl',
+    ]
+    state: absent
+
+- name: clean up after SubApp migration
+  block:
+    - name: find service unit file
+      find:
+        path: "/etc/systemd/user/"
+        pattern: "subapp_bagit.service"
+      register: res_service
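+    # the legacy subapp_bagit.service only exists on hosts that still run the
+    # old METS-based SubApp; skip the stop/disable step everywhere else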
+    - name: stop legacy units
+      systemd:
+        name: "{{ item }}"
+        state: stopped
+        enabled: false
+      loop:
+        - "subapp_bagit.service"
+        - "subapp.service"
+      when: res_service.matched > 0
+    - name: remove unit file & .gsa config
+      file:
+        path: "{{ item }}"
+        state: absent
+      loop:
+        - "/etc/systemd/user/subapp_bagit.service"
+        - "{{ gsa_vars.files.gsa.path }}"
+        - "/home/{{ vault_subapp_user }}/.gsa"
+    - name: find list files
+      find:
+        path: "/home/{{ vault_subapp_user }}/.subapp/"
+        patterns: "usa_{{ item }}_file.yam*"
+      register: listfiles
+      loop:
+        - "bookmark"
+        - "dependency"
+        - "isarchived"
+        - "mdupdate"
+    - name: remove list files
+      file:
+        path: "{{ item.path }}"
+        state: absent
+      loop: "{{ listfiles.results | map(attribute='files') | flatten }}"
+    - name: remove old SubApp config file (path doesn't adhere to FHS 3.0)
+      file:
+        path: "/home/{{ vault_subapp_user }}/.subapp/.subapp.cfg"
+        state: absent
+    - name: find old Stichprobe services
+      find:
+        path: "/etc/systemd/user/"
+        patterns: "stichprobe-*-daily-report*"
+      register: stichprobe
+    - name: remove old Stichprobe services
+      file:
+        path: "{{ item.path }}"
+        state: absent
+      loop: "{{ stichprobe.files }}"
+    - name: reload systemd daemon
+      systemd:
+        daemon_reload: "true"
diff --git a/tasks/configure_iptables_externalusers.yml b/tasks/configure_iptables_externalusers.yml
index f2ae8f2df141dfd020522295fabe239b75755922..8102bc43cb89ce170220488f94f68092adacc4a8 100644
--- a/tasks/configure_iptables_externalusers.yml
+++ b/tasks/configure_iptables_externalusers.yml
@@ -44,28 +44,6 @@
     - save iptables rules
   tags: [iptables]
 
-#- name: iptables-Regeln setzen (WLB)
-#  iptables:
-#    action: append
-#    chain: INPUT
-#    comment: "{{ item.comment }}"
-#    destination: "{{ item.dest | default(omit) }}"
-#    destination_port: "{{ item.dest_port | default(omit) }}"
-#    jump: ACCEPT
-##    limit: 100/s
-##    limit_burst: 1000/s
-#    protocol: tcp
-##    rule_num: 1
-#    source: "{{ item.src | default(omit) }}"
-#    source_port: "{{ item.src_port | default(omit) }}"
-#    state: "{{ item.state | default('present') }}"
-#    table: filter
-#  loop: "{{ vault_iptables_wlb|flatten(levels=1) }}"
-#  when: ansible_hostname is search("wlb")
-#  notify:
-#    - save iptables rules
-#  tags: [iptables]
-
 - name: iptables-Regeln setzen (Mediathek)
   iptables:
     action: append
diff --git a/tasks/configure_nfs_mounts.yml b/tasks/configure_nfs_mounts.yml
index c73513e0349f286cc73a20322896ca060c6efaab..8c2d49f511a9a40b69b142826faeaf10b0d0fe31 100644
--- a/tasks/configure_nfs_mounts.yml
+++ b/tasks/configure_nfs_mounts.yml
@@ -1,14 +1,4 @@
 ---
-#- name: DEBUG
-#  debug:
-#    #msg: "{{ nfs_mounts_subapp.nfs_opts.v3 | default ( nfs_mounts_subapp.nfs_opts.v4 )}} )"
-#    #msg: "{{ nfs_mounts_subapp.hosts.{{ ansible_hostname }} | default ( nfs_mounts_subapp.nfs_opts.v4 )}} )"
-#    #msg: "{% for item in root[ansible_hostname]['key'] %}{{ item }}{% endfor %}"
-#    msg: "{{ nfs_mounts_subapp.hosts[ansible_hostname]['access']['nfs_share'] }}"
-#  tags: [always]
-
-
-
 # We want to manually create mountpoint directories in case they don't exist,
 # because then we get to set permissions that deny ANY write access as long as
 # there's no NFS share mounted. This will help with cases where the share
@@ -25,9 +15,6 @@
     - "{{ nfs_mounts_subapp.hosts[ansible_hostname]['sftp_download']['path'] | default('/home/import/download') }}"
   register: stat_result
   tags: [nfs]
-- debug:
-    var: stat_result.results
-  tags: [nfs]
 - name: if dir doesn't exist, create it with correct permissions
   file:
     path: "{{ item.item }}"
diff --git a/tasks/configure_processing_user.yml b/tasks/configure_processing_user.yml
index d2a74a2b9c5d8a0739104beb65bc69368354d1a0..0587e536fe78aa50652de674deecf97e148cec63 100644
--- a/tasks/configure_processing_user.yml
+++ b/tasks/configure_processing_user.yml
@@ -1,10 +1,15 @@
 ---
 ### VIM SKEL FÜR PROCESSING USER ###
+- name: check if vim skel exists
+  stat:
+    path: "/etc/skel/.vimrc"
+  register: vimrc_skel
 - name: copy vim skel to processing users
   copy:
     remote_src: true
     src: "/etc/skel/.vimrc"
     dest: "/home/{{ vault_subapp_user }}/.vimrc"
+  when: vimrc_skel.stat.exists
   tags: [users]
 
 ### ALIASES ERSTELLEN ###
diff --git a/tasks/create_users_groups.yml b/tasks/create_users_groups.yml
index 5460cb3a34bb35c3905b13434b59c92de3ce9dd0..0967040ef36cee6d50ece06f2a49cf6f91e6c369 100644
--- a/tasks/create_users_groups.yml
+++ b/tasks/create_users_groups.yml
@@ -1,19 +1,4 @@
 ---
-### GRUPPEN & BENUTZER ERSTELLEN ###
-
-#  - name: DEBUG output
-#    debug:
-#      msg: "Key: {{ item.key }}, Values: {{ item.value }}END"
-#      verbosity: 1
-#    with_dict: "{{ vault_groups }}"
-#    tags: [ always ]
-#  - name: DEBUG output
-#    debug:
-#      msg: "{{ item }}END"
-#      verbosity: 1
-#    with_dict: "{{ users }}"
-#    tags: [ always ]
-
 - name: create groups
   group:
     name: "{{ item.key }}"
@@ -30,16 +15,6 @@
   loop: "{{ vault_human_users }}"
   tags: [users]
 
-# do NOT run this before skel configuration has been rolled out!
-#  - name: create individual primary user group
-#    group:
-#      name: "{{ item.key }}"
-#      state: present
-#      gid: "{{ item.value.uid }}"
-#    with_dict: "{{ users }}"
-#    tags: [users]
-
-
 #  - name: Service webservice_status_SLUBarchiv.service stoppen, um Modifikationen am User zu ermöglichen
 #    command: systemctl stop webservice_status_SLUBarchiv.service
 #    tags: [users]
@@ -48,7 +23,7 @@
 #    command: systemctl stop chmod_sip_uploads.service
 #    tags: [users]
 
-- name: create users and set groups
+- name: create ROBOT users and set groups
   user:
     comment: "{{ item.value.comment | default(omit) }}"
     create_home: "yes"
diff --git a/tasks/install_checkmk_plugins.yml b/tasks/install_checkmk_plugins.yml
index 62e31844a879dda9dd0667c1722555d1667ac543..3c11f3c4dab2fe77784836a8adf4f1831d4d6d6c 100644
--- a/tasks/install_checkmk_plugins.yml
+++ b/tasks/install_checkmk_plugins.yml
@@ -20,11 +20,4 @@
     mode: "0750"
   loop:
     - "check_subapp_ws_status.sh"
-#    - "checkmk_submission_application.sh"
-  tags: [monitoring]
-
-- name: remove SubApp-CMK-plugin
-  file:
-    path: "/usr/lib/check_mk_agent/plugins/checkmk_submission_application.sh"
-    state: absent
   tags: [monitoring]
diff --git a/tasks/install_packages.yml b/tasks/install_packages.yml
index 2128ad741f499c5732a9152ca65afb3323a291fc..b8c0dda4ac17e498b499ff1bc177e08228da4d9c 100644
--- a/tasks/install_packages.yml
+++ b/tasks/install_packages.yml
@@ -48,11 +48,3 @@
     ]
     state: present
   tags: [apt, subapp]
-
-- name: remove packages
-  apt:
-    name: [
-      'libarchive-zip-perl',
-    ]
-    state: absent
-  tags: [apt, subapp]
diff --git a/tasks/install_subapp.yml b/tasks/install_subapp.yml
index 443212414100ce0c90a724709bf8348120c827e2..1b5a9683fbe98bf4536eab92229b6255d53e057d 100644
--- a/tasks/install_subapp.yml
+++ b/tasks/install_subapp.yml
@@ -1,22 +1,5 @@
 ---
-- name: Mountpoints anlegen
-  file:
-    path: "/home/{{ vault_subapp_user }}/.subapp/"
-    state: directory
-    owner: "{{ vault_subapp_user }}"
-    group: "{{ vault_subapp_group }}"
-    mode: "0555"
-  tags: [subapp]
-
-- name: mount SubApp configuration working directory
-  mount:
-    name: "/home/{{ vault_subapp_user }}/.subapp/"
-    src: "LABEL=SUBAPP"
-    state: mounted
-    fstype: "ext4"
-  tags: [subapp]
-
-- name: Berechtigungen für NFS-Share ".subapp" korrigieren
+- name: fix permissions for block device ".subapp"
   file:
     path: "/home/{{ vault_subapp_user }}/.subapp/"
     state: directory
@@ -35,6 +18,12 @@
   tags: [subapp]
 
 # erst nach der Erstellung der User/Gruppen durchführen!
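+# refresh the APT cache so the install below resolves the most recently
+# published submissionapplication4rosetta package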
+- name: update Sources
+  apt:
+    update_cache: "yes"
+  tags: [subapp]
 - name: Submission Application installieren
   apt:
     name: "submissionapplication4rosetta"
@@ -42,7 +29,7 @@
     allow_unauthenticated: "true"
   tags: [subapp]
 
-- name: Systemd-Unitfiles installieren
+- name: install systemd unit files (templates)
   template:
     src: "etc/systemd/user/{{ item }}.j2"
     dest: "/etc/systemd/user/{{ item }}"
@@ -54,35 +41,7 @@
     - "subapp.service"
   tags: [systemd, subapp]
 
-- name: bereinigen
-  block:
-    - name: find service unit file
-      find:
-        path: "/etc/systemd/user/"
-        pattern: "subapp_bagit.service"
-      register: res_service
-    - debug:
-        msg: "{{ res_service }}"
-    - name: Unit stoppen
-      systemd:
-        name: "{{ item }}"
-        state: stopped
-        enabled: false
-      loop:
-        - "subapp_bagit.service"
-        - "subapp.service"
-      when: res_service.matched > 0
-    - name: Unitfile & .gsa-Config entfernen
-      file:
-        path: "{{ item }}"
-        state: absent
-      loop:
-        - "/etc/systemd/user/subapp_bagit.service"
-        - "{{ gsa_vars.files.gsa.path }}"
-        - "/home/{{ vault_subapp_user }}/.gsa"
-  tags: [subapp]
-
-- name: Systemd-Unitfiles installieren
+- name: install systemd unit files (files)
   copy:
     src: "etc/systemd/user/{{ item }}"
     dest: "/etc/systemd/user/{{ item }}"
@@ -117,55 +76,8 @@
     - "chown_dip_access.service"
   tags: [systemd]
 
-- name: update Sources
-  apt:
-    update_cache: "yes"
-  tags: [subapp]
-
-- name: block to get SubApp status
-  block:
-    - name: see if SubApp is upgradeable
-      shell: 'apt list --upgradable | grep submission'
-      failed_when: subapp_upgradeable.rc != 0 and subapp_upgradeable.rc != 1
-      register: subapp_upgradeable
-      tags: [subapp]
-    - name: register SubApp runstatus
-      command: 'systemctl status subapp.service'        # noqa 303
-      register: subapp_runstatus
-      failed_when: subapp_runstatus.rc != 0 and subapp_runstatus.rc != 3
-      tags: [subapp]
-
-- name: block to stop Services
-  block:
-    - name: stop Webservice
-      systemd:
-        name: "webservice_status_SLUBarchiv.service"
-        state: stopped
-      tags: [subapp]
-    - name: stop SubApp
-      systemd:
-        name: "subapp.service"
-        state: stopped
-      tags: [subapp]
-  when:
-    - ( subapp_runstatus.rc == 0 )
-    - ( subapp_upgradeable.rc == 0 )
-  tags: [subapp]
-
-- name: update SubApp
-  apt:
-    name: "submissionapplication4rosetta"
-    state: latest        # noqa 403
-    allow_unauthenticated: "yes"
-  when:
-    - ( subapp_upgradeable.rc == 0 )
-  notify:
-    - restart subapp
-    - restart webservice_status_SLUBarchiv
-  tags: [subapp]
-
 ### KONFIGURATIONEN FÜR SUBMISSION APPLICATION INSTALLIEREN ###
-- name: GSA-Config einspielen
+- name: install SubApp config
   block:
     - name: create blacklist file if it doesn't exist
       file:
@@ -175,26 +87,17 @@
         mode: "{{ vault_subapp_vars.files.blacklist.mode }}"
         state: touch
       tags: [subapp]
-#      - debug:
-#          msg: "Hostnamepath: '{{ vault_subapp_vars.hosts[ansible_hostname] }}'"
-#        tags: [ always ]
     - name: write new SubApp config file
       template:
-        src: ".subapp.cfg.j2"
+        src: "subapp.cfg.j2"
         dest: "{{ vault_subapp_vars.files.subapp.path }}"
         owner: "{{ vault_subapp_vars.files.subapp.owner }}"
         group: "{{ vault_subapp_vars.files.subapp.group }}"
         mode: "{{ vault_subapp_vars.files.subapp.mode }}"
       with_items: "{{ vault_subapp_vars.hosts[ansible_hostname] | default(vault_subapp_vars.hosts.molecule-test) }}"
       tags: [subapp]
-    - name: remove old SubApp config file (path doesn't adhere to FHS 3.0) - TEMP TASK
-      file:
-        path: "/home/{{ vault_subapp_user }}/.subapp/.subapp.cfg"
-        state: absent
-      tags: [subapp]
   tags: [subapp, gsa]
 
-### VORBEREITUNGEN SUBMISSION APPLICATION ###
 - name: Quarantaeneverzeichnis & Lockverzeichnis anlegen
   file:
     path: "{{ item }}"
@@ -235,7 +138,7 @@
 - name: Stichprobenpruefung Timer
   template:
     src: "stichprobe-daily-report.timer.j2"
-    dest: "/etc/systemd/user/stichprobe-{{ item.value.institute_name }}-daily-report.timer"
+    dest: "/etc/systemd/user/stichprobe-daily-report.timer"
     owner: "root"
     group: "root"
     mode: "0644"
@@ -247,7 +150,7 @@
 - name: Stichprobenpruefung Service
   template:
     src: "stichprobe-daily-report.service.j2"
-    dest: "/etc/systemd/user/stichprobe-{{ item.value.institute_name }}-daily-report.service"
+    dest: "/etc/systemd/user/stichprobe-daily-report.service"
     owner: "root"
     group: "root"
     mode: "0644"
@@ -268,39 +171,25 @@
   no_log: true
   tags: [subapp]
 
-- name: enable Service
-  command: "systemctl enable /etc/systemd/user/stichprobe-{{ item.value.institute_name }}-daily-report.service"        # noqa 303
+- name: enable Stichprobe Services
+  command: "systemctl enable /etc/systemd/user/stichprobe-daily-report.service"        # noqa 303
   loop: "{{ lookup('dict', vault_stichprobe_hosts) }}"
   when: ansible_hostname == item.key
   no_log: true
   tags: [subapp]
 
-- name: enable Timer
-  command: "systemctl enable /etc/systemd/user/stichprobe-{{ item.value.institute_name }}-daily-report.timer"        # noqa 303
+- name: enable Stichprobe Timers
+  command: "systemctl enable /etc/systemd/user/stichprobe-daily-report.timer"        # noqa 303
   loop: "{{ lookup('dict', vault_stichprobe_hosts) }}"
   when: ansible_hostname == item.key
   no_log: true
   tags: [subapp]
 
-- name: restart services & timers
+- name: restart Stichprobe Timers
   systemd:
-    name: "stichprobe-{{ item.value.institute_name }}-daily-report.timer"
+    name: "stichprobe-daily-report.timer"
     state: restarted
   loop: "{{ lookup('dict', vault_stichprobe_hosts) }}"
   when: ansible_hostname == item.key
   ignore_errors: "yes"
   tags: [subapp]
-
-
-
-### TEMPORÄR BIS INBETRIEBNAHME FOTOTHEK ###
-#- name: include vars Fotothek
-#  include_vars: "{{ role_path }}/../ansible_vaults/{{ role_name }}/TEMP_fotothek.vault"
-#- name: temporary corrections for Fotothek .gsa file
-#  lineinfile:
-#    path: "/home/{{ vault_subapp_user }}/.subapp/.gsa"
-#    regexp: "{{ item.regexp }}"
-#    line: "{{ item.line }}"
-#  loop: "{{ vault_fotothek_temp }}"
-#  when: ( 'sdvlzasubappfotothek' == ansible_hostname )
-#  tags: [subapp, gsa]
diff --git a/tasks/main.yml b/tasks/main.yml
index e04133de95fdb739875600e7e947bbb9d4e68a4d..f5a2dbfa31df47e962e097edc195f6d188397f97 100644
--- a/tasks/main.yml
+++ b/tasks/main.yml
@@ -26,14 +26,6 @@
   import_tasks: configure_kernel_parameters.yml
   tags: [subapp]
 
-- name: Partition anlegen und formatieren, falls nötig
-  script: create_partition.sh
-  tags: [subapp]
-
-# - name: Xalan-Bibliotheken laden
-#   command: ldconfig -v /usr/local/lib/        # noqa 301
-#   tags: [subapp]
-
 - name: configure Client HTTP Proxy
   import_tasks: configure_proxy.yml
   tags: [always]
@@ -69,3 +61,7 @@
 - name: iptables-Regeln für externe Produzenten setzen
   import_tasks: configure_iptables_externalusers.yml
   tags: [iptables]
+
+- name: clean up remnants of the METS-based SubApp
+  import_tasks: cleanup_legacy.yml
+  tags: [always]
diff --git a/templates/.subapp.cfg.j2 b/templates/.subapp.cfg.j2
deleted file mode 100644
index af46319f8e0a4fc78130500e8fe915ddaa2bda14..0000000000000000000000000000000000000000
--- a/templates/.subapp.cfg.j2
+++ /dev/null
@@ -1,38 +0,0 @@
-### OPTIONAL PARAMETERS
-blacklist_sip_file:/home/{{ vault_subapp_user }}/.subapp/usa_blacklist_file.csv
-pid_file:/run/subapp/subapp_bagit.pid
-
-### MANDATORY PARAMETERS
-bookmark_file:/home/{{ vault_subapp_user }}/.subapp/usa_bookmark_file.yaml
-database_file:/home/{{ vault_subapp_user }}/.subapp/subapp.db
-dependency_file:/home/{{ vault_subapp_user }}/.subapp/usa_dependency_file.yaml
-deposit_file_regex:^[S|s][I|i][P|p]\.xml$
-# HINT: only 'METS/MODS' for now
-deposit_file_type:METS/MODS
-directory_export:/mnt/{{ ansible_hostname }}_access/
-directory_lock:/home/{{ vault_subapp_user }}/.subapp/lockdir/
-directory_quarantine:/home/{{ vault_subapp_user }}/.subapp/quarantine/
-directory_shared_aipupdate:/mnt/{{ ansible_hostname }}_ingest/aipupdate/
-directory_shared_import:/mnt/import/
-directory_shared_ingest:/mnt/{{ ansible_hostname }}_ingest/
-fullname_workflow:{{ vault_subapp_vars.hosts[ansible_hostname].fullname_workflow | default("WORKFLOW_NAME_TEMPLATE") }}
-general_cleanup_age_in_seconds:1209600
-isarchived_file:/home/{{ vault_subapp_user }}/.subapp/usa_isarchived_file.yaml
-logger_producer_email:{{ vault_subapp_vars.hosts[ansible_hostname].logger_producer_email | default("LOGGER_PRODUCER_EMAIL_TEMPLATE") }}
-logger_staff_email:{{ vault_subapp_vars.hosts[ansible_hostname].logger_staff_email | default("LOGGER_STAFF_EMAIL_TEMPLATE") }}
-max_allowed_SIP_size_in_bytes:100000000000
-max_allowed_compressed_SIP_size_in_bytes:{{ vault_subapp_vars.hosts[ansible_hostname].max_allowed_compressed_SIP_size | default("268435456000") }}
-max_allowed_single_file_size_in_bytes:{{ vault_subapp_vars.hosts[ansible_hostname].max_allowed_single_file_size | default("268435456000") }}
-mdupdate_file:/home/{{ vault_subapp_user }}/.subapp/usa_mdupdate_file.yaml
-mets_filegroup_to_archive:LZA
-min_required_disk_space_in_bytes:{{ vault_subapp_vars.hosts[ansible_hostname].min_required_disk_space | default("268435456000") }}
-owner_group_export_dir:access
-owner_group_import_dir:import
-owner_group_ingest_dir:{{ vault_subapp_group }}
-owner_user_ingest_dir:{{ vault_subapp_user }}
-rosetta_host:{{ vault_subapp_vars.hosts[ansible_hostname].RosettaHost | default("ROSETTA_HOSTNAME_TEMPLATE") }}
-rosetta_institute:{{ vault_subapp_vars.hosts[ansible_hostname].Institute | default("INSTITUTE_NAME_TEMPLATE") }}
-rosetta_materialflowId:{{ vault_subapp_vars.hosts[ansible_hostname].MaterialFlowID | default("MATERIAL_FLOW_ID_TEMPLATE") }}
-rosetta_password:{{ vault_subapp_vars.hosts[ansible_hostname].Rosetta_Password }}
-rosetta_pdshost:{{ vault_subapp_vars.hosts[ansible_hostname].PdsHost | default("PDS_HOSTNAME_TEMPLATE") }}
-rosetta_user:{{ vault_subapp_vars.hosts[ansible_hostname].User | default("SUBMISSION_APPLICATION_USER_TEMPLATE") }}
diff --git a/templates/etc/systemd/user/subapp.service.j2 b/templates/etc/systemd/user/subapp.service.j2
index de03761e483af283539f45c55ac0c7b734cde5a1..bc4a4ca55cb4742a5742104c5fc742b47f0d6aeb 100644
--- a/templates/etc/systemd/user/subapp.service.j2
+++ b/templates/etc/systemd/user/subapp.service.j2
@@ -13,8 +13,11 @@ User={{ vault_subapp_user }}
 Group={{ vault_subapp_group }}
 # EXAMPLE: TimeoutSec=600
 TimeoutSec=infinity
-# PIDFile=subapp_bagit.pid
 PIDFile=/run/subapp/subapp_bagit.pid
+# DO NOT REMOVE!!! (based on SubApp Issue #68)
+# Do not kill any processes, so a safe shutdown leaves a valid SubApp database.
+KillMode=none
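+# NOTE: KillMode=none is deprecated in newer systemd (v247+); revisit when the base OS is upgraded.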
 
 ### Security features
 # documented at https://www.freedesktop.org/software/systemd/man/systemd.exec.html
diff --git a/templates/.subapp.cfg.j2_rc2020.2 b/templates/subapp.cfg.j2
similarity index 83%
rename from templates/.subapp.cfg.j2_rc2020.2
rename to templates/subapp.cfg.j2
index d2c575e00f2c1f0a2cd604a580ae9bb6df9ef531..4c43532352bb7b9a90e1779941d007413ecc4063 100644
--- a/templates/.subapp.cfg.j2_rc2020.2
+++ b/templates/subapp.cfg.j2
@@ -76,19 +76,6 @@ fullname_workflow:{{ vault_subapp_vars.hosts[ansible_hostname].fullname_workflow
 
 
 
-### disk space considerations
-
-# UNcompressed SIPs may not exceed the size in Bytes that is configured here.
-max_allowed_SIP_size_in_bytes:100000000000
-# COMPRESSED SIPs may not exceed the size in Bytes that is configured here.
-max_allowed_single_file_size_in_bytes:{{ vault_subapp_vars.hosts[ansible_hostname].max_allowed_single_file_size | default("268435456000") }}
-# Required free disk space on directory_shared_import. If the actual free disk space is below the configured value, then the submission application will not extract any further ZIPped SIPs.
-min_required_disk_space_in_bytes:{{ vault_subapp_vars.hosts[ansible_hostname].min_required_disk_space | default("268435456000") }}
-# time until old producer protocols are removed
-general_cleanup_age_in_seconds:1209600
-
-
-
 ### email notification configuration
 
 # notification email address for producers (high level error information)
@@ -102,11 +89,4 @@ logger_staff_email:{{ vault_subapp_vars.hosts[ansible_hostname].logger_staff_ema
 
 # Absolute path to SQLite database file for storing and loading message queues and SIP states
 # Hints: using /tmp is not allowed by SQLite, furthermore security requires the parent directory to be set to at least '750' (drwxr-x---)
-database_file:/home/processing/.subapp/subapp.db
-
-
-
-### deposit settings - LEGACY
-# 
-deposit_file_regex:^[S|s][I|i][P|p]\.xml$
-deposit_file_type:METS/MODS
+database_file:/home/{{ vault_subapp_user }}/.subapp/subapp.db
diff --git a/templates/usr/lib/check_mk_agent/plugins/checkmk_submission_application.sh.j2 b/templates/usr/lib/check_mk_agent/plugins/checkmk_submission_application.sh.j2
deleted file mode 100755
index fbb7848785cd10ec292ec0e3596b42bafd95ff45..0000000000000000000000000000000000000000
--- a/templates/usr/lib/check_mk_agent/plugins/checkmk_submission_application.sh.j2
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-# copy it to the "/usr/lib/check_mk_agent/plugin" directory
-# test it via: check_mk_agent | grep -v grep | grep -A 3 "<<<local>>>"
-LOCKFILE="/var/lock/check_subapp_status.lock"
-INC=$( /usr/bin/perl -e "print qq(@INC)" | sed -e "s/ / -I /g" )
-CMK_PERL_SCRIPT="/usr/local/bin/checkmk_submission_application.pl"
-CALL="/usr/bin/perl -I $INC -I /usr/local/perl/ ${CMK_PERL_SCRIPT}"
-
-if [[ ! $( dpkg -l | grep "submissionapplication4rosetta" ) ]]; then
-	echo "3 subapp - SubApp package not installed."
-elif [[ ! -e "${CMK_PERL_SCRIPT}" ]]; then
-	echo "3 subapp - SubApp package is installed, but '${CMK_PERL_SCRIPT}' couldn't be found."
-fi
-
-# IMPORTANT: Create lockfile using "flock", NOT "touch"!!! It's atomic and doesn't have to be cleared after the script ran.
-output=$( flock ${LOCKFILE} su {{ vault_subapp_user }} -s /bin/sh -c "cd ; $CALL" )
-echo "${output}"