-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathfswatch.py
More file actions
executable file
·213 lines (173 loc) · 6.77 KB
/
fswatch.py
File metadata and controls
executable file
·213 lines (173 loc) · 6.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
#!/usr/local/bin/python3
from collections import OrderedDict
import configparser
from datetime import datetime as dt
import ftplib
import json
import os
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from orgtools import get_org_table_as_json
# TODO: consider using a virtualenv
# get environment variables from cfg files, to simplify daemon configuration
Config = configparser.ConfigParser()
cwd = os.path.dirname(os.path.realpath(__file__))
# later files override earlier ones; ConfigParser.read silently skips missing files
Config.read([cwd + '/work-macbook.cfg', os.path.expanduser('~') + '/website.cfg'])
DROPBOX = Config.get('local', 'DROPBOX')  # local Dropbox root directory
SYNC = Config.get('local', 'SYNC')  # local sync root (contains the webmirror tree)
input_path = DROPBOX + '/txt'  # watched for txt/org source files to process
local_path = SYNC + '/webmirror'  # watched for files to mirror to the server via FTP
server_url = Config.get('website', 'URL')
user = Config.get('website', 'FTP_USERNAME')
password = Config.get('website', 'FTP_PASSWORD')
ignore_list = ['.DS_Store', '.git'] # ignore by simple substring match
def process_org_table(fin, fout, table_name, columns=None, rowfunc=None):
    """
    given input and output filenames, and a table name,
    read org table in input file, convert to json, and write to output file
    - if columns are specified, only use those columns, else use all
    - if json result is identical to current file, do not write (thus NOT triggering an ftp sync)
    - add some metadata
    - run rowfunc on each row
    """
    # TODO: try this out for all current lists
    # TODO: update backend handler to accept this common format
    # get table from org file
    table = get_org_table_as_json(fin, table_name)
    # use only selected columns, in specified order
    if columns is not None:
        subtable = [OrderedDict([(k, row[k]) for k in columns]) for row in table]
    else:
        subtable = table
    # BUG FIX: rowfunc was accepted and documented but never applied
    if rowfunc is not None:
        subtable = [rowfunc(row) for row in subtable]
    data = {
        'metadata': {
            'last-updated': dt.strftime(dt.now(), '%Y%m%d'),
        },
        'data': subtable,
    }
    # read the current output file, if any; tolerate a missing or invalid
    # file (e.g. first run), in which case we always write.
    try:
        with open(fout, 'r') as f:
            current = json.load(f)
    except (IOError, ValueError):
        current = None
    # BUG FIX: previously compared the whole file (including the timestamped
    # metadata) against the raw input table, which could never match, so the
    # no-change short-circuit never fired. Compare the data payload only.
    if current is not None and current.get('data') == subtable:
        print('no change to resulting json, not syncing')
        return
    with open(fout, 'w') as f:
        f.write(json.dumps(data, indent=2))
    print('processed %s -> %s' % (fin, fout))
class ProcessorHandler(FileSystemEventHandler):
    """Watchdog handler: converts org tables in watched txt files to json
    files in the webmirror tree (which a separate handler then uploads)."""

    def __init__(self):
        super().__init__()
        print('ProcessorHandler')

    def on_modified(self, event):
        """Dispatch a modified file to the matching table processor by filename substring."""
        print(event)
        filename = event.src_path
        if 'todo.txt' in filename:
            # TODO: define these in a list of json objects
            self.process_calls_table(filename)
            # TODO process_org_table(filename, table_name='calls', fout=SYNC + '/webmirror/data/calls.json')
        elif 'buy.txt' in filename:
            self.process_buy_table(filename)
            # TODO process_org_table(filename, table_name='buy', fout=SYNC + '/webmirror/data/buy.json', columns=['item', 'shops', 'tags', 'notes'])
        elif 'read.txt' in filename:
            self.process_books_table(filename)
        elif 'restaurants.txt' in filename:
            self.process_restaurants_table(filename)

    def process_restaurants_table(self, filename):
        # BUG FIX: was defined without `self`, so the dispatch call above
        # raised TypeError (two args passed to a one-arg function)
        pass

    def process_books_table(self, filename):
        # BUG FIX: was defined without `self` (same TypeError as above)
        # TODO add a checkbox for "want to buy" that filters out both "have" and "read"
        pass

    def process_buy_table(self, filename):
        """Convert the 'buy' org table in *filename* to data/buy.json."""
        table_name = 'buy'
        columns = ['item', 'shops', 'tags', 'notes']
        fout = SYNC + '/webmirror/data/buy.json'
        # get table from org file
        table = get_org_table_as_json(filename, table_name)
        # use only selected columns, in specified order
        subtable = [OrderedDict([(k, row[k]) for k in columns]) for row in table]
        data = {
            'metadata': {
                'last-updated': dt.strftime(dt.now(), '%Y%m%d'),
            },
            'buy': subtable,
        }
        with open(fout, 'w') as f:
            f.write(json.dumps(data, indent=2))
        print('buy.txt#buy -> data/buy.json')

    def process_calls_table(self, filename):
        """Convert the 'calls' org table in *filename* to data/calls.json,
        skipping the write (and thus the ftp sync) when nothing changed."""
        table_name = 'calls'
        fout = SYNC + '/webmirror/data/calls.json'
        # get table from org file
        table = get_org_table_as_json(filename, table_name)
        # key rows by 1-based position as strings, matching the json consumer
        table = {str(n): v for n, v in enumerate(table, 1)}
        # read the file currently in webmirror, only write if changed;
        # tolerate a missing/invalid file on first run (previously crashed)
        try:
            with open(fout, 'r') as f:
                current = json.load(f)
        except (IOError, ValueError):
            current = None
        if current == table:
            print('no change to calls table, not syncing')
            return
        # write to file in webmirror directory
        with open(fout, 'w') as f:
            f.write(json.dumps(table, indent=2))
        print('processed todo.txt#calls -> data/calls.json')
class WebSyncHandler(FileSystemEventHandler):
    """Watchdog handler: uploads modified files under local_path to the
    corresponding path on the web server over FTPS."""

    def __init__(self, local_path, url, username, password):
        super().__init__()
        self.local_path = local_path
        self.url = url
        self.username = username
        self.password = password
        print('WebSyncHandler: %s' % local_path)

    # def on_any_event(self, event):
    def on_created(self, event):
        # TODO: implement
        print(event)
        print('handler not implemented')

    def on_modified(self, event):
        # would be nice for syncing to work properly with create, delete, move, etc.
        # but lets just start with the simple stuff
        print(event)
        if not event.__class__.__name__ == 'FileModifiedEvent':
            return
        local_name = event.src_path
        # remote path mirrors the local path relative to local_path
        remote_name = local_name.replace(self.local_path, '')
        for ig in ignore_list:
            if ig in local_name:
                return
        # ftp rsync to remote server
        try:
            ftp_session = ftplib.FTP_TLS(self.url, self.username, self.password)
        except Exception as exc:
            print(exc)
            print('error connecting. are you online?')
            # BUG FIX: previously fell through and raised NameError on
            # ftp_session below; bail out instead
            return
        try:
            self.make_ftp_directories(remote_name)
            with open(local_name, 'rb') as f:
                # BUG FIX: the file is opened in binary mode, so use
                # storbinary; storlines would mangle line endings and
                # corrupt non-text files
                ftp_session.storbinary('STOR ' + remote_name, f)
            print('synced %s -> http://alanbernstein.net%s' % (local_name, remote_name))
        finally:
            # always close the session, even if the upload fails
            ftp_session.quit()

    def make_ftp_directories(self, remote_name):
        # TODO: implement
        # ftp_session.mkd(pathname) # make dir
        # .cwd(pathname) # set current working directory
        # .pwd get current directory
        pass
def get_web_observer():
    """Return an Observer that mirrors changes under local_path to the web server."""
    handler = WebSyncHandler(local_path, server_url, user, password)
    ob = Observer()
    ob.schedule(handler, local_path, recursive=True)
    return ob
def get_processor_observer():
    """Return an Observer that processes org tables in files under input_path."""
    handler = ProcessorHandler()
    ob = Observer()
    ob.schedule(handler, input_path, recursive=True)
    return ob
def main():
    """Start both watchers and block until interrupted with Ctrl-C."""
    observers = [get_web_observer(), get_processor_observer()]
    for ob in observers:
        ob.start()
    try:
        # the observer threads do the work; just keep the main thread alive
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        for ob in observers:
            ob.stop()
        for ob in observers:
            ob.join()
main()