I have functions.py file that is executed every time my main.py file is run:
import subprocess
import pandas as pd
from move import move_to_obsidian
pd.options.mode.copy_on_write = False
OBSIDIAN_MD_PATH = "D://Gdrive//ObsidianVault//WatsonFrames//duzeltme1.md"
OBSIDIAN_PROCESSED_PATH = 'reports/4back_to_obsidian.md'
def time_formatting(df):
df['start'] = pd.to_datetime(df['start'], format="%d/%m/%y %H:%M:%S", errors="ignore")
df['stop'] = pd.to_datetime(df['stop'], format="%d/%m/%y %H:%M:%S", errors="ignore")
def get_logs_from_obsidian():
df = pd.read_table(OBSIDIAN_MD_PATH,
index_col=0, sep="|")
df = df.iloc[1: , :]
df = df.reset_index(drop=True)
try: df = df.drop(' ', axis=1)
except: pass
try: df = df.dropna(axis=1)
except: pass
try:
for i in range(0, 100, 1):
if 'Unnamed ' in df.columns[i]:
print(True)
df = df.drop(df.columns[i], axis=1)
except:
pass
df.columns = df.columns.str.strip()
time_formatting(df=df)
# print('-----Obsidian-----')
# print(df['start'].tail(2), '//', '\n', df['stop'].tail(2))
df.to_csv('reports/1obsidianlogs.csv', index=False)
df.to_html('reports/1obsidianlogs.html')
print('-----Obsidian-----' + '\n')
return df
# get_logs_from_obsidian()
def get_logs_from_watson():
subprocess.run(["watson", "log", "--all", "-s", ">", "D:/AUIVVII/Udemy/Inspired/Mildew/new_report.csv"], shell=True)
df = pd.read_csv("new_report.csv")
df['notes'] = ' - '
df['id'] = df['id'].apply(lambda x: f"[[WatsonFrames/ids/{x}]]")
time_formatting(df)
df1 = df.tail(1)
df1.reset_index(drop=True, inplace=True)
df1.columns = df.columns.str.strip()
# print('-----Watson-----')
# print(df1.tail(2))
df.to_csv('reports/2watson_notcut_logs.csv')
df1.to_csv('reports/2watsonlogs.csv', index=False)
df1.to_html('reports/2watsonlogs.html')
print('-----Watson-----')
return df1
# get_logs_from_watson()
def concatting_two_dfs(first_df=get_logs_from_obsidian(), appended_df=get_logs_from_watson()):
result = pd.concat([first_df, appended_df], axis=0)
result.reset_index(inplace=True, drop=True)
result.to_html('reports/3concatting_two_dfs.html')
print(result.tail(5))
return result
# concatting_two_dfs()
def back_to_md(df=concatting_two_dfs(), buf=OBSIDIAN_PROCESSED_PATH):
df.to_markdown(index=False, buf=buf)
df.to_html(index=False, buf='reports/4back_to_obsidian.html')
done = print(f'---- Result can be viewed in {buf} ----')
return done
def main():
back_to_md()
move_to_obsidian()
if __name__ == '__main__':
main()
and here is the main.py:
import sys
import time
import json
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import subprocess
def exec_main():
from functions import main
from backup import aws_backup
time.sleep(2)
aws_backup()
time.sleep(1)
main()
time.sleep(2)
aws_backup()
class MyHandler(FileSystemEventHandler):
def on_modified(self, event):
if event.is_directory:
return
if event.src_path.endswith("frames"):
with open(event.src_path, "r") as file:
frames = json.load(file)
print(f"New entries in frames.json: {frames[-1]}")
print('Waiting a few sec for logs to be created')
time.sleep(3)
exec_main()
# Perform your desired action here
if __name__ == "__main__":
path = "C://Users//sarpy//AppData//Roaming//watson"
event_handler = MyHandler()
observer = Observer()
observer.schedule(event_handler, path, recursive=False)
observer.start()
print('Observer started')
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
It concats the two pandas dataframes correctly the first time it is created, however after the first time, it is not reliable anymore and won't often work. Both dataframes are in same format and concat works fine if it's not run on loop. However I am going to schedule this to run every time my computer opens and forget about it. I've also added bunch of time.sleep() to see if it was logs not being created in time but that's not the case