-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbigquery.py
More file actions
50 lines (38 loc) · 1.3 KB
/
bigquery.py
File metadata and controls
50 lines (38 loc) · 1.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import streamlit as st
from google.oauth2 import service_account
from google.cloud import bigquery
import requests
# TODO: revisit this setup (original author's note: "need update").
# Build the BigQuery API client from the service-account info stored in
# Streamlit's secrets store under the "gcp_service_account" key.
# Create API client
credentials = service_account.Credentials.from_service_account_info(
    st.secrets["gcp_service_account"]
)
# NOTE(review): no explicit project is passed; presumably the client infers
# it from the service-account credentials — confirm against deployment config.
client = bigquery.Client(credentials=credentials)
# Sidebar: pick which dashboard to render. Only 'stockwits' is implemented
# in this chunk of the script.
st.sidebar.title("Options")
option = st.sidebar.selectbox("Which Dashboard?", ('wallstreetbets', 'chart', 'pattern', 'twitter', 'stockwits'))
if option == 'stockwits':
    symbol = st.sidebar.text_input("Symbol", max_chars=5)
    st.subheader('stockwits')
    # Only hit the API once the user has actually typed a ticker; the
    # original code requested ".../symbol/.json" on every rerun.
    if symbol:
        r = requests.get(
            f"https://api.stocktwits.com/api/2/streams/symbol/{symbol}.json",
            timeout=10,  # without a timeout a slow API hangs the whole Streamlit run
        )
        if r.status_code == 200:
            data = r.json()
            # Render each message: avatar, author, timestamp, body.
            for message in data['messages']:
                st.image(message['user']['avatar_url'])
                st.write(message['user']['username'])
                st.write(message['created_at'])
                st.write(message['body'])
        else:
            # Previously a bad response crashed with KeyError('messages');
            # surface the failure to the user instead.
            st.error(f"StockTwits request failed with status {r.status_code}")
# Sample query: first 100 rows of the ARK ETF history table.
# NOTE: the trailing space before the closing quote is required — adjacent
# string literals are concatenated, so without it the SQL would read
# "...history`LIMIT 100" (invalid).
QUERY = (
    'SELECT * FROM `tvv-airflow-tutorial-demo.ARK_ETF.history` '
    'LIMIT 100'
)
@st.cache(ttl=600)
def run_query(query):
    """Execute *query* against BigQuery and return the rows as plain dicts.

    Converting each Row to a dict is required so that st.cache can hash
    the return value.
    """
    result_iter = client.query(query).result()
    return [dict(record) for record in result_iter]
# Fetch the (cached) sample rows. Display is intentionally disabled for now;
# the old commented-out debug dump has been removed as dead code.
rows = run_query(QUERY)