Compare commits

...

11 commits

SHA1        Author          Date                        Message
2e98d7760e  Tobias Brunner  2024-01-10 09:31:20 +01:00  add pdftk hint
e873c1ff63  Tobias Brunner  2024-01-10 09:23:49 +01:00  add hacky script to generate einsatzrapporte
3753c1210b  Tobias Brunner  2021-12-09 20:51:02 +01:00  more robust mail parsing
63482d5f2e  Tobias Brunner  2021-12-09 20:42:59 +01:00  process einsatzrapport from webdav inbox
f9b86f3c8f  Tobias Brunner  2021-12-07 21:25:09 +01:00  handle Einsatzrapport without f_id
8419ef4973  Tobias Brunner  2021-08-24 21:33:15 +02:00  use kubernetes for drone build
15b46d19c6  Tobias Brunner  2021-07-13 20:00:41 +02:00  master of comparison failed [CI: build killed]
ad57bb9f9f  Tobias Brunner  2021-07-13 19:52:15 +02:00  f_id must not be shorter than 8
cadfe1aa55  Tobias Brunner  2021-03-09 19:52:34 +01:00  mark Einsatzrapport seen when processed
44d7a2f364  Tobias Brunner  2021-03-09 08:06:11 +01:00  hotfix einsatzrapport upload
5f8d2a7109  Tobias Brunner  2021-03-02 21:52:37 +01:00  rewrite email handling
9 changed files with 358 additions and 199 deletions


@@ -1,4 +1,5 @@
kind: pipeline
type: kubernetes
name: default
steps:

.gitignore

@@ -1,4 +1,6 @@
__pycache__/
.vscode/
.env
test.py
pylokid/temp_test.py
test.py
tmp/


@@ -0,0 +1,54 @@
import uno
from com.sun.star.beans import PropertyValue
from com.sun.star.uno import RuntimeException
def connect_to_libreoffice(port=2002):
localContext = uno.getComponentContext()
resolver = localContext.ServiceManager.createInstanceWithContext(
"com.sun.star.bridge.UnoUrlResolver", localContext)
try:
context = resolver.resolve(f"uno:socket,host=localhost,port={port};urp;StarOffice.ComponentContext")
return context.ServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", context)
except RuntimeException:
raise Exception("Make sure LibreOffice is running with a listening port (e.g., soffice --accept=\"socket,port=2002;urp;\" --norestore --nologo --nodefault)")
def export_to_pdf(doc, file_name):
pdf_export_properties = tuple([
PropertyValue(Name="FilterName", Value="writer_pdf_Export")
])
doc.storeToURL(f"file:///{file_name}", pdf_export_properties)
def replace_text(doc, old_text, new_text):
replaceable = doc.createReplaceDescriptor()
replaceable.setSearchString(old_text)
replaceable.setReplaceString(new_text)
doc.replaceAll(replaceable)
def main(start_counter, num_documents, document_path):
desktop = connect_to_libreoffice()
for i in range(num_documents):
counter = start_counter + i
load_props = PropertyValue(Name="Hidden", Value=True),
doc = desktop.loadComponentFromURL(f"file:///{document_path}", "_blank", 0, load_props)
replace_text(doc, "counter", str(counter))
file_name = f"/home/tobru/Documents/Feuerwehr/Stab/Fourier/EinsatzrapporteLeer/Einsatzrapport2024-{counter}.pdf"
export_to_pdf(doc, file_name)
doc.dispose()
if __name__ == "__main__":
document_path = "/home/tobru/Documents/Feuerwehr/Stab/Fourier/FW-Einsatzrapport FWU.odt"
start_counter = 1
num_documents = 100
# Start libreoffice with:
# soffice --accept="socket,port=2002;urp;" --norestore --nologo --nodefault
main(start_counter, num_documents, document_path)
# after generation, run:
# pdftk $(ls -v *.pdf) cat output Einsatzrapporte2024.pdf
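The script above expects a LibreOffice instance that is already listening on a UNO socket and leaves the final pdftk merge as a manual step. A small wrapper along the lines of the following sketch could automate both; the function name, the output_dir argument (which has to match the directory hard-coded in main()) and the fixed start-up wait are assumptions, not part of this commit, and the wrapper is assumed to live in the same file as main().

import subprocess
import time

def generate_and_merge(template_path, output_dir, start_counter=1, num_documents=100):
    # Start a headless LibreOffice with the UNO listener the script connects to
    soffice = subprocess.Popen([
        "soffice",
        "--accept=socket,port=2002;urp;",
        "--norestore",
        "--nologo",
        "--nodefault",
    ])
    time.sleep(10)  # crude wait until the UNO socket accepts connections
    try:
        main(start_counter, num_documents, template_path)
    finally:
        soffice.terminate()
    # Merge the numbered PDFs into one document, as hinted at the end of the script
    subprocess.run(
        f"pdftk $(ls -v {output_dir}/*.pdf) cat output {output_dir}/Einsatzrapporte2024.pdf",
        shell=True,
        check=True,
    )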


@@ -1,7 +1,3 @@
"""
Pylokid. From Mail to Lodur - all automated.
"""
__version__ = "3.0.2"
__git_version__ = "0"
__url__ = "https://github.com/tobru/pylokid"


@@ -14,7 +14,7 @@ _EMAIL_SUBJECTS = '(OR OR SUBJECT "Einsatzausdruck_FW" SUBJECT "Einsatzprotokoll'
class EmailHandling:
""" Email handling """
"""Email handling"""
def __init__(self, server, username, password, mailbox, tmp_dir):
self.logger = logging.getLogger(__name__)
@@ -34,7 +34,9 @@
self.logger.info("IMAP connection successful")
def search_emails(self):
""" searches for emails matching the configured subject """
"""searches for emails matching the configured subject"""
msg_ids = []
self.logger.info("Searching for messages matching: %s", _EMAIL_SUBJECTS)
try:
@@ -49,65 +51,91 @@
self.logger.error("IMAP search aborted - exiting: %s", str(err))
raise SystemExit(1)
num_messages = len(msg_ids[0].split())
self.logger.info("Found %s matching messages", str(num_messages))
return num_messages, msg_ids
def store_attachments(self, msg_ids):
""" stores the attachments to filesystem """
data = {}
for msg_id in msg_ids[0].split():
# download message from imap
typ, msg_data = self.imap.fetch(msg_id, "(BODY.PEEK[])")
msg_list = msg_ids[0].split()
self.logger.info("Found %s matching messages", str(len(msg_list)))
# Retrieve subjects
msg_id_subject = {}
for msg in msg_list:
msg_id = msg.decode("utf-8")
typ, msg_data = self.imap.fetch(msg, "(BODY.PEEK[HEADER.FIELDS (SUBJECT)])")
if typ != "OK":
self.logger.error("Error fetching message")
continue
# extract attachment
for response_part in msg_data:
if isinstance(response_part, tuple):
mail = email.message_from_string(str(response_part[1], "utf-8"))
self.logger.error("Error fetching subject")
msg_id_subject[msg_id] = "unknown"
else:
try:
mail = email.message_from_string(str(msg_data[0][1], "utf-8"))
subject = mail["subject"]
f_type, f_id = self.parse_subject(subject)
self.logger.info('[%s] Getting attachment from "%s"', f_id, subject)
for part in mail.walk():
file_name = part.get_filename()
if not file_name:
self.logger.debug(
"Most probably not an attachment as no filename found"
)
continue
self.logger.info("Message ID %s has subject '%s'", msg_id, subject)
msg_id_subject[msg_id] = subject
except TypeError:
self.logger.error("Could not decode mail - %s", msg_data[0][1])
# Deduplicate messages - usually the same message arrives multiple times
self.logger.info("Deduplicating messages")
temp = []
msg_id_subject_deduplicated = dict()
for key, val in msg_id_subject.items():
if val not in temp:
temp.append(val)
msg_id_subject_deduplicated[key] = val
self.logger.info(
"Adding Message ID %s '%s' to list to process", key, val
)
else:
self.mark_seen(key, key)
return msg_id_subject_deduplicated
def store_attachment(self, msg_id):
"""stores the attachment to filesystem"""
# download message from imap
typ, msg_data = self.imap.fetch(msg_id, "(BODY.PEEK[])")
if typ != "OK":
self.logger.error("Error fetching message")
return None, None
# extract attachment
for response_part in msg_data:
if isinstance(response_part, tuple):
mail = email.message_from_string(str(response_part[1], "utf-8"))
subject = mail["subject"]
f_type, f_id = self.parse_subject(subject)
self.logger.info('[%s] Getting attachment from "%s"', f_id, subject)
for part in mail.walk():
file_name = part.get_filename()
if not file_name:
self.logger.debug(
"Most probably not an attachment as no filename found"
)
continue
self.logger.info('[%s] Extracting attachment "%s"', f_id, file_name)
if bool(file_name):
f_type, _ = self.parse_subject(subject)
renamed_file_name = f_type + "_" + file_name
# save attachment to filesystem
file_path = os.path.join(self.tmp_dir, renamed_file_name)
self.logger.info(
'[%s] Extracting attachment "%s"', f_id, file_name
'[%s] Saving attachment to "%s"', f_id, file_path
)
if not os.path.isfile(file_path):
file = open(file_path, "wb")
file.write(part.get_payload(decode=True))
file.close()
if bool(file_name):
f_type, _ = self.parse_subject(subject)
renamed_file_name = f_type + "_" + file_name
# save attachment to filesystem
file_path = os.path.join(self.tmp_dir, renamed_file_name)
return renamed_file_name
self.logger.info(
'[%s] Saving attachment to "%s"', f_id, file_path
)
if not os.path.isfile(file_path):
file = open(file_path, "wb")
file.write(part.get_payload(decode=True))
file.close()
data[subject] = renamed_file_name
return data
def mark_seen(self, msg_id):
def mark_seen(self, msg_id, f_id):
self.logger.info("[%s] Marking E-Mail message as seen", f_id)
self.imap.store(msg_id, "+FLAGS", "(\\Seen)")
def parse_subject(self, subject):
""" extract f id and type from subject """
"""extract f id and type from subject"""
# This regex matches the subjects filtered already in IMAP search
parsed = re.search("([a-zA-Z_]*):? ?(F[0-9].*)?", subject)
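For reference, the search expression above splits the expected mail types roughly as follows. The concrete subject strings are invented examples (only the prefixes appear in the IMAP subject filter), and the real parse_subject() may post-process the groups further.

import re

_SUBJECT_RE = re.compile(r"([a-zA-Z_]*):? ?(F[0-9].*)?")

for subject in (
    "Einsatzausdruck_FW: F12345678",  # hypothetical alarm printout mail
    "Einsatzprotokoll: F12345678",    # hypothetical protocol mail
    "Einsatzrapport",                 # scan without an F ID in the subject
):
    f_type, f_id = _SUBJECT_RE.search(subject).groups()
    print(f_type, f_id)

# Einsatzausdruck_FW F12345678
# Einsatzprotokoll F12345678
# Einsatzrapport None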


@@ -192,7 +192,7 @@ class Lodur:
self.submit_form_einsatzrapport(lodur_data)
# Upload scan to Alarmdepesche
self.einsatzrapport_alarmdepesche(
self.upload_alarmdepesche(
f_id,
file_path,
webdav_client,


@@ -4,6 +4,7 @@
import os
import json
import re
from datetime import datetime
import logging
import asyncio
@@ -11,7 +12,7 @@ import aioeasywebdav
class WebDav:
""" WebDav Client """
"""WebDav Client"""
def __init__(self, url, username, password, webdav_basedir, tmp_dir):
self.logger = logging.getLogger(__name__)
@@ -32,7 +33,7 @@
self.logger.info("WebDAV connection successfull")
def upload(self, file_name, f_id, check_exists=True):
""" uploads a file to webdav - checks for existence before doing so """
"""uploads a file to webdav - checks for existence before doing so"""
# upload with webdav
if f_id == None:
@@ -63,8 +64,12 @@
)
self.logger.info('[%s] File "%s" uploaded', f_id, file_name)
def delete(self, file_name):
"""delete file on webdav"""
self.loop.run_until_complete(self.webdav.delete(file_name))
def einsatz_exists(self, f_id):
""" check if an einsatz is already created """
"""check if an einsatz is already created"""
remote_upload_dir = (
self.webdav_basedir + "/" + str(datetime.now().year) + "/" + f_id
@@ -75,8 +80,33 @@
else:
return False
def einsatzrapport_inbox_check(self, tmp_dir):
"""check if an einsatzrapport with an f_id exists in the WebDav Inbox and process it"""
rapporte_to_process = []
filelist = self.loop.run_until_complete(
self.webdav.ls(f"{self.webdav_basedir}/Inbox")
)
for file in filelist:
full_path = file[0]
parsed = re.search(".*Einsatzrapport_(F[0-9].*)\.pdf", full_path)
if parsed:
f_id = parsed.group(1)
self.logger.info("[%s] Found %s - Downloading", f_id, full_path)
# Download PDF for later processing
self.loop.run_until_complete(
self.webdav.download(
full_path, f"{tmp_dir}/Einsatzrapport_{f_id}.pdf"
)
)
rapporte_to_process.append(f_id)
return rapporte_to_process
def store_data(self, f_id, file_name, data):
""" stores data on webdav """
"""stores data on webdav"""
file_path = os.path.join(self.tmp_dir, file_name)
@@ -88,7 +118,7 @@
self.upload(file_name, f_id, False)
def get_lodur_data(self, f_id, filetype="_lodur.json"):
""" gets lodur data if it exists """
"""gets lodur data if it exists"""
file_name = f_id + filetype
file_path = os.path.join(self.tmp_dir, file_name)
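
To illustrate what einsatzrapport_inbox_check() picks up, the snippet below applies its filename pattern to an invented Inbox listing; the paths are made up, only the regular expression comes from the method above.

import re

_INBOX_PATTERN = re.compile(r".*Einsatzrapport_(F[0-9].*)\.pdf")

listing = [
    "/webdav/Feuerwehr/Inbox/Einsatzrapport_F01234567.pdf",  # matched, f_id "F01234567"
    "/webdav/Feuerwehr/Inbox/Scan_Depotdrucker.pdf",         # ignored, no F ID in the name
]

rapporte_to_process = [m.group(1) for m in map(_INBOX_PATTERN.search, listing) if m]
print(rapporte_to_process)  # ['F01234567']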


@@ -38,12 +38,12 @@ PUSHOVER_USER_KEY = os.getenv("PUSHOVER_USER_KEY")
def main():
""" main """
"""main"""
# Logging configuration
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
format="%(asctime)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger("pylokid")
logger.info("Starting pylokid version %s", version("pylokid"))
@@ -80,170 +80,218 @@ def main():
pdf = PDFParsing()
# Main Loop
logger.info("** Starting to process E-Mails **")
while True:
attachments = {}
num_messages, msg_ids = imap_client.search_emails()
if num_messages:
attachments = imap_client.store_attachments(msg_ids)
if attachments:
for subject in attachments:
f_type, f_id = imap_client.parse_subject(subject)
file_name = attachments[subject]
# Search for matchting E-Mails
msg_ids = imap_client.search_emails()
# Upload file to cloud
webdav_client.upload(file_name, f_id)
for msg, subject in msg_ids.items():
logger.info("Processing IMAP message ID %s", msg)
file_name = imap_client.store_attachment(msg)
# Take actions - depending on the type
if f_type == "Einsatzausdruck_FW":
logger.info("[%s] Processing type %s", f_id, f_type)
# If the message couldn't be parsed, skip to next message
if not file_name:
pass
# Check if the PDF isn't already parsed
if webdav_client.get_lodur_data(f_id, "_pdf.json"):
logger.info("[%s] PDF already parsed", f_id)
else:
# Extract information from PDF
pdf_data = pdf.extract_einsatzausdruck(
os.path.join(TMP_DIR, file_name),
f_id,
)
# Figure out event type and F ID by parsing the subject
f_type, f_id = imap_client.parse_subject(subject)
# publish Einsatz on Pushover
logger.info("[%s] Publishing message on Pushover", f_id)
pushover.send_message(
"<b>{}</b>\n\n* Ort: {}\n* Melder: {}\n* Hinweis: {}\n* {}\n\n{}\n\n{}".format(
pdf_data["einsatz"],
pdf_data["ort"],
pdf_data["melder"].replace("\n", " "),
pdf_data["hinweis"],
pdf_data["sondersignal"],
pdf_data["bemerkungen"],
pdf_data["disponierteeinheiten"],
),
title="Feuerwehr Einsatz - {}".format(f_id),
url="https://www.google.com/maps/search/?api=1&query={}".format(
pdf_data["ort"]
),
url_title="Ort auf Karte suchen",
html=1,
)
# Upload extracted attachment to cloud
webdav_client.upload(file_name, f_id)
# Upload extracted data to cloud
webdav_client.store_data(f_id, f_id + "_pdf.json", pdf_data)
# Take actions - depending on the type
if f_type == "Einsatzausdruck_FW":
logger.info("[%s] Processing type %s", f_id, f_type)
if webdav_client.get_lodur_data(f_id):
logger.info("[%s] Lodur data already retrieved", f_id)
else:
# Retrieve data from Lodur
lodur_id = lodur_client.get_einsatzrapport_id(f_id)
if lodur_id:
logger.info(
"[%s] Einsatzrapport available in Lodur with ID %s",
f_id,
lodur_id,
)
logger.info(
"%s?modul=36&what=144&event=%s&edit=1",
LODUR_BASE_URL,
lodur_id,
)
lodur_data = lodur_client.retrieve_form_data(lodur_id)
webdav_client.store_data(
f_id, f_id + "_lodur.json", lodur_data
)
# upload Alarmdepesche PDF to Lodur
lodur_client.upload_alarmdepesche(
f_id,
os.path.join(TMP_DIR, file_name),
webdav_client,
)
# Marking message as seen, no need to reprocess again
for msg_id in msg_ids:
logger.info("[%s] Marking E-Mail message as seen", f_id)
imap_client.mark_seen(msg_id)
else:
logger.warn("[%s] Einsatzrapport NOT found in Lodur", f_id)
elif f_type == "Einsatzprotokoll":
lodur_id = webdav_client.get_lodur_data(f_id)["event_id"]
pdf_data = webdav_client.get_lodur_data(f_id, "_pdf.json")
logger.info(
"[%s] Processing type %s with Lodur ID %s",
# Check if the PDF isn't already parsed
if webdav_client.get_lodur_data(f_id, "_pdf.json"):
logger.info("[%s] PDF already parsed", f_id)
else:
# Extract information from PDF
pdf_data = pdf.extract_einsatzausdruck(
os.path.join(TMP_DIR, file_name),
f_id,
f_type,
lodur_id,
)
# Retrieve Lodur data again and store it in Webdav
lodur_data = lodur_client.retrieve_form_data(lodur_id)
webdav_client.store_data(f_id, f_id + "_lodur.json", lodur_data)
# publish Einsatz on Pushover
logger.info("[%s] Publishing message on Pushover", f_id)
pushover.send_message(
"<b>{}</b>\n\n* Ort: {}\n* Melder: {}\n* Hinweis: {}\n* {}\n\n{}\n\n{}".format(
pdf_data["einsatz"],
pdf_data["ort"],
pdf_data["melder"].replace("\n", " "),
pdf_data["hinweis"],
pdf_data["sondersignal"],
pdf_data["bemerkungen"],
pdf_data["disponierteeinheiten"],
),
title="Feuerwehr Einsatz - {}".format(f_id),
url="https://www.google.com/maps/search/?api=1&query={}".format(
pdf_data["ort"]
),
url_title="Ort auf Karte suchen",
html=1,
)
if (
"aut_created_report" in lodur_data
and lodur_data["aut_created_report"] == "finished"
):
logger.info("[%s] Record in Lodur ready to be updated", f_id)
# Upload extracted data to cloud
webdav_client.store_data(f_id, f_id + "_pdf.json", pdf_data)
# Upload Einsatzprotokoll to Lodur
if webdav_client.get_lodur_data(f_id):
logger.info("[%s] Lodur data already retrieved", f_id)
# Marking message as seen, no need to reprocess again
imap_client.mark_seen(msg, f_id)
else:
# Retrieve data from Lodur
lodur_id = lodur_client.get_einsatzrapport_id(f_id)
if lodur_id:
logger.info(
"[%s] Einsatzrapport available in Lodur with ID %s",
f_id,
lodur_id,
)
logger.info(
"%s?modul=36&what=144&event=%s&edit=1",
LODUR_BASE_URL,
lodur_id,
)
lodur_data = lodur_client.retrieve_form_data(lodur_id)
webdav_client.store_data(f_id, f_id + "_lodur.json", lodur_data)
# upload Alarmdepesche PDF to Lodur
lodur_client.upload_alarmdepesche(
f_id,
os.path.join(TMP_DIR, file_name),
webdav_client,
)
# Update entry in Lodur
lodur_client.einsatzprotokoll(
f_id, lodur_data, pdf_data, webdav_client
)
# Einsatz finished - publish on pushover
logger.info("[%s] Publishing message on Pushover", f_id)
pushover.send_message(
"Einsatz beendet",
title="Feuerwehr Einsatz beendet - {}".format(f_id),
)
# Marking message as seen, no need to reprocess again
for msg_id in msg_ids:
logger.info("[%s] Marking E-Mail message as seen", f_id)
imap_client.mark_seen(msg_id)
imap_client.mark_seen(msg, f_id)
else:
logger.warn(
"[%s] Record in Lodur NOT ready yet to be updated", f_id
)
logger.warn("[%s] Einsatzrapport NOT found in Lodur", f_id)
# This is usually a scan from the Depot printer
elif f_type == "Einsatzrapport":
elif f_type == "Einsatzprotokoll":
logger.info("[%s] Processing type %s", f_id, f_type)
lodur_id = webdav_client.get_lodur_data(f_id)["event_id"]
pdf_data = webdav_client.get_lodur_data(f_id, "_pdf.json")
logger.info(
"[%s] Processing type %s with Lodur ID %s",
f_id,
f_type,
lodur_id,
)
# Attach scan in Lodur if f_id is available
# f_id can be empty when scan was misconfigured
if f_id != None:
lodur_id = webdav_client.get_lodur_data(f_id)["event_id"]
# Retrieve Lodur data again and store it in Webdav
lodur_data = lodur_client.retrieve_form_data(lodur_id)
webdav_client.store_data(f_id, f_id + "_lodur.json", lodur_data)
lodur_client.einsatzrapport_scan(
f_id,
lodur_data,
os.path.join(TMP_DIR, file_name),
webdav_client,
)
# Retrieve Lodur data again and store it in Webdav
lodur_data = lodur_client.retrieve_form_data(lodur_id)
webdav_client.store_data(f_id, f_id + "_lodur.json", lodur_data)
if (
"aut_created_report" in lodur_data
and lodur_data["aut_created_report"] == "finished"
):
logger.info("[%s] Record in Lodur ready to be updated", f_id)
# Upload Einsatzprotokoll to Lodur
lodur_client.upload_alarmdepesche(
f_id,
os.path.join(TMP_DIR, file_name),
webdav_client,
)
# Update entry in Lodur
lodur_client.einsatzprotokoll(
f_id, lodur_data, pdf_data, webdav_client
)
# Correct time on AdF to round up to one hour
# curl 'https://lodur-zh.ch/urdorf/tunnel.php?modul=36&what=1082'
# -H 'Referer: https://lodur-zh.ch/urdorf/index.php?modul=36&what=145&event=3485&edit=1'
# -H 'Cookie: PHPSESSID=85pnahp3q83apv7qsbi8hrj5g7'
# --data-raw 'dtv_d=03&dtv_m=04&dtv_y=2021&dtb_d=03&dtb_m=04&dtb_y=2021&ztv_h=18&ztv_m=26&ztb_h=19&ztb_m=26'
# ztb_m -> same as ztv_m
# Einsatz finished - publish on pushover
logger.info("[%s] Publishing message on Pushover", f_id)
pushover.send_message(
"Scan {} wurde bearbeitet und in Cloud geladen".format(f_id),
title="Feuerwehr Scan bearbeitet - {}".format(f_id),
"Einsatz beendet",
title="Feuerwehr Einsatz beendet - {}".format(f_id),
)
# Marking message as seen, no need to reprocess again
imap_client.mark_seen(msg, f_id)
else:
logger.warn(
"[%s] Record in Lodur NOT ready yet to be updated", f_id
)
# This is usually a scan from the Depot printer
elif f_type == "Einsatzrapport":
logger.info("[%s] Processing type %s", f_id, f_type)
# Attach scan in Lodur if f_id is available
# f_id can be empty when scan was misconfigured
if f_id != None and len(f_id) > 8:
lodur_id = webdav_client.get_lodur_data(f_id)["event_id"]
# Retrieve Lodur data again and store it in Webdav
lodur_data = lodur_client.retrieve_form_data(lodur_id)
webdav_client.store_data(f_id, f_id + "_lodur.json", lodur_data)
lodur_client.einsatzrapport_scan(
f_id,
lodur_data,
os.path.join(TMP_DIR, file_name),
webdav_client,
)
else:
logger.error("[%s] Unknown type: %s", f_id, f_type)
f_id = f_type
logger.info("[%s] Publishing message on Pushover", f_id)
pushover.send_message(
"Scan {} wurde bearbeitet und in Cloud geladen".format(f_id),
title="Feuerwehr Scan bearbeitet - {}".format(f_id),
)
# Marking message as seen, no need to reprocess again
imap_client.mark_seen(msg, f_id)
else:
logger.error("[%s] Unknown type: %s", f_id, f_type)
logger.info("Checking WebDav Inbox folder for Einsatzrapporte to process")
rapporte_to_process = webdav_client.einsatzrapport_inbox_check(TMP_DIR)
if rapporte_to_process:
for f_id_rapport in rapporte_to_process:
filename = f"Einsatzrapport_{f_id_rapport}.pdf"
local_file = os.path.join(TMP_DIR, filename)
# Upload to f_id folder
webdav_client.upload(filename, f_id_rapport)
# Process it for Lodur
lodur_id = webdav_client.get_lodur_data(f_id_rapport)["event_id"]
# Retrieve Lodur data again and store it in Webdav
lodur_data = lodur_client.retrieve_form_data(lodur_id)
webdav_client.store_data(
f_id_rapport, f_id_rapport + "_lodur.json", lodur_data
)
lodur_client.einsatzrapport_scan(
f_id_rapport,
lodur_data,
local_file,
webdav_client,
)
# Delete processed Einsatzrapport from Inbox and local temp dir
logger.info("Einsatzrapport processed - deleting file in Inbox")
webdav_client.delete(f"{WEBDAV_BASEDIR}/Inbox/{filename}")
os.remove(local_file)
pushover.send_message(
f"Einsatzrapport {f_id_rapport} wurde bearbeitet.",
title=f"Feuerwehr Einsatzrapport bearbeitet - {f_id_rapport}",
)
# send heartbeat
requests.get(HEARTBEAT_URL)
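
The Einsatzprotokoll branch above still carries a TODO comment with a raw curl call for rounding the AdF times up to a full hour. Translated to requests it might look like the sketch below; the function name, the session handling and the way the form values are obtained are assumptions, only the URL parameters and field names come from the comment.

import requests

def round_adf_time_to_full_hour(session, base_url, lodur_id, form):
    # The session is assumed to already carry the Lodur PHPSESSID cookie.
    # Re-send the date/time fields with ztb_m forced to the value of ztv_m,
    # as noted in the comment ("ztb_m -> same as ztv_m").
    payload = dict(form, ztb_m=form["ztv_m"])
    session.post(
        f"{base_url}/tunnel.php",
        params={"modul": 36, "what": 1082},
        headers={
            "Referer": f"{base_url}/index.php?modul=36&what=145&event={lodur_id}&edit=1"
        },
        data=payload,
    )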


@@ -1,6 +1,6 @@
[tool.poetry]
name = "pylokid"
version = "3.0.3"
version = "3.2.0"
description = ""
authors = ["Tobias Brunner <tobias@tobru.ch>"]
license = "MIT"