Cleanup script.

Timothy Allen 2018-05-10 22:53:21 +02:00
parent 93a4c412c5
commit f2fd1a0acd
1 changed file with 6 additions and 26 deletions

@@ -35,27 +35,7 @@ def main():
        'accept': 'application/json',
        'Content-Type': 'application/json',
    }
    print(secret_hash)
    for page in ( ):
    #for page in ( '/status.json', '/entries.json', '/treatments.json' ):
    #for page in ( '/entries.json?count=500', '/treatments.json?count=100' ):
        url = args.server + page
        #pp.pprint(url)
        #pp.pprint(headers)
        try:
            req = urllib.request.Request(url, headers=headers)
            with urllib.request.urlopen(req) as response:
                status = response.read().decode('utf-8')
                status_json = json.loads(status)
                if re.match('^/entries', page) is not None:
                    status_json.sort(key=lambda e: e['date'])
                pp.pprint(status_json)
        except urllib.error.HTTPError as err:
            print('{}: URL {}'.format(err, url))
            #sys.exit(1)
    #print(secret_hash)
    with open(args.input_file, 'r', newline='') as f:
        rows = from_csv(f)
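The debug block above (removed by this cleanup) just GETs a few Nightscout endpoints and pretty-prints the JSON they return. A minimal self-contained sketch of that read pattern, assuming the 'api-secret' header carries the SHA-1 hex digest of the API secret; fetch_nightscout_page and api_secret are illustrative names, not taken from this script:

import hashlib
import json
import urllib.request

def fetch_nightscout_page(server, page, api_secret):
    # Assumption: Nightscout expects the SHA-1 hex digest of the API secret
    # in an 'api-secret' header; this mirrors the secret_hash printed above.
    headers = {
        'accept': 'application/json',
        'Content-Type': 'application/json',
        'api-secret': hashlib.sha1(api_secret.encode('utf-8')).hexdigest(),
    }
    req = urllib.request.Request(server + page, headers=headers)
    with urllib.request.urlopen(req) as response:
        return json.loads(response.read().decode('utf-8'))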
@@ -157,13 +137,13 @@ def main():
        if i < len(entries)-1:
            (entries[i], entries[i+1]) = calculate_entry_delta(entries[i], entries[i+1])
    pp.pprint(entries)
    sys.exit(1)
    #pp.pprint(entries)
    sys.exit(1) # Remove this line to upload!
    ''' Upload data (either treatment or entry) to Nightscout '''
    actions = {
        #'/treatments': treatments,
        #'/entries': entries,
        '/treatments': treatments,
        '/entries': entries,
    }
    for page, form in actions.items():
        ''' Upload data in groups of 100 entries, to avoid timeouts from large datasets '''
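The docstring above describes uploading in groups of 100 records to keep each request small. A minimal sketch of that chunking, assuming form is a list of dicts ready to be POSTed; batches is a hypothetical helper, not a name used in this script:

def batches(form, size=100):
    # Yield successive slices of at most `size` records.
    for start in range(0, len(form), size):
        yield form[start:start + size]

Each slice would then be serialised with json.dumps() and POSTed to args.server + page, as in the next hunk.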
@@ -176,7 +156,7 @@ def main():
            with urllib.request.urlopen(req) as response:
                status = response.read().decode('utf-8')
                status_json = json.loads(status)
                pp.pprint(status_json)
                print(status_json)
            print('Completed')
        except urllib.error.HTTPError as err:
            print('{}: URL {}'.format(err, url))
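For reference, the POST-and-decode step in this hunk could be wrapped roughly as below, reusing the headers dict built earlier in main(); upload_batch is an illustrative name, and the HTTPError handling mirrors the per-request reporting above rather than aborting the run:

import json
import urllib.error
import urllib.request

def upload_batch(server, page, batch, headers):
    # POST one batch of records and return the decoded JSON response,
    # or None if the server rejects the request.
    url = server + page
    data = json.dumps(batch).encode('utf-8')
    req = urllib.request.Request(url, data=data, headers=headers, method='POST')
    try:
        with urllib.request.urlopen(req) as response:
            return json.loads(response.read().decode('utf-8'))
    except urllib.error.HTTPError as err:
        print('{}: URL {}'.format(err, url))
        return None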