#!/usr/local/bin/python3
import os
from datetime import datetime, timedelta
from pathlib import Path
from threading import Lock

import requests
from flask import Flask, Response

app = Flask(__name__)
api_key = os.environ["POLYGON_API_KEY"]
log_file = "/tmp/etlas.log"

def get_tickers():
    # Parse tickers.txt: one "<name> <price>" pair per whitespace-separated line.
    # The raw file contents are also mirrored into the log file.
    tab = []
    with open("tickers.txt", "r") as f:
        raw = f.read()
    with open(log_file, "w") as log:
        log.write(raw)
    for line in raw.split('\n'):
        if line:
            parts = line.split()
            tab.append({
                "name": parts[0],
                "price": parts[1]
            })
    return tab

# Round-robin position in the ticker list, guarded by a lock so concurrent
# requests do not race on the shared index.
n = 0
mtx = Lock()

@app.route("/prices")
def get_stock_prices():
    global n
    with mtx:
        tickers = get_tickers()
        if len(tickers) == 0:
            return Response("", status=204, mimetype="text/plain")
        # Tickers may have been deleted from the config since the last request,
        # so clamp the round-robin index back to the start.
        if n >= len(tickers):
            n = 0
        ticker = tickers[n]["name"]
        price = tickers[n]["price"]
        # Request daily aggregates for the last 90 days.
        date = datetime.today()
        e_date = date.strftime("%Y-%m-%d")
        s_date = (date - timedelta(days=90)).strftime("%Y-%m-%d")
        base = "https://api.massive.com/v2/aggs/ticker"
        query = f"adjusted=true&sort=asc&apiKey={api_key}"
        url = f"{base}/{ticker}/range/1/day/{s_date}/{e_date}?{query}"
        res = requests.get(url)
        # Append the raw API response to the log.
        with open(log_file, "a") as log:
            log.write("\n\n")
            log.write(res.text)
        # Plain-text response body: ticker name, configured price, then one
        # closing price per line taken from the API results.
        result = f"{ticker}\n{price}\n"
        if res.status_code == 200:
            data = res.json()
            if "results" in data:
                for item in data["results"]:
                    result += f"{item['c']:.2f}\n"
        # Advance to the next ticker for the following request.
        n = (n + 1) % len(tickers)
        return Response(result, status=res.status_code, mimetype="text/plain")

if __name__ == '__main__':
    # Serve the app over FastCGI on a unix socket; remove any stale socket
    # left over from a previous run before binding.
    from flup.server.fcgi import WSGIServer
    sock = Path('/var/www/run/etlas.sock').resolve()
    try:
        sock.unlink()
    except FileNotFoundError:
        pass
    WSGIServer(app, bindAddress=str(sock), umask=0o007).run()
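
For local testing, a minimal sketch of exercising get_tickers() outside the FastCGI setup is shown below. It assumes the listing above is saved as etlas.py (a hypothetical filename) and uses an illustrative tickers.txt; POLYGON_API_KEY is set to a placeholder only because the module reads it at import time, and no API call is made by get_tickers().

import os

# Placeholder key: the module reads POLYGON_API_KEY at import time, but
# get_tickers() itself never contacts the API.
os.environ.setdefault("POLYGON_API_KEY", "dummy-key")

# get_tickers() expects one "<name> <price>" pair per line (illustrative values).
with open("tickers.txt", "w") as f:
    f.write("AAPL 150.00\n")
    f.write("MSFT 300.00\n")

import etlas  # assumes the listing above was saved as etlas.py

print(etlas.get_tickers())
# -> [{'name': 'AAPL', 'price': '150.00'}, {'name': 'MSFT', 'price': '300.00'}]

Note that both fields come back as strings, exactly as split from the config file; the /prices handler returns the configured price verbatim and only formats the closing prices it fetches from the API.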