import time
import sqlalchemy as saq
import config as cf
import pandas as pd
import urllib.parse
import pymssql
import os
import tda
try:
    import pyodbc
except ImportError:
    import pypyodbc as pyodbc
# os.system('apt-get install -y libodbc1')
# os.system('apt-get install -y unixodbc')
# os.system('apt-get install -y curl')
# os.system('su')
# os.system('curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -')
# os.system('curl https://packages.microsoft.com/config/ubuntu/18.04/prod.list > /etc/apt/sources.list.d/mssql-release.list')
# os.system('exit')
# os.system('apt-get -y update')
# os.system('ACCEPT_EULA=Y apt-get install -y msodbcsql17')
# print('available drivers: ', pyodbc.drivers())
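# Illustrative helper (a sketch, not used below -- cf.ENGINE_URL is assumed to already
# hold a complete SQLAlchemy URL): SQLAlchemy can take a raw ODBC connection string by
# percent-encoding it into a "mssql+pyodbc" URL; this uses the urllib.parse import from
# the top of the file. The connection-string fields shown are placeholders.
def odbc_str_to_engine_url(odbc_str):
    # e.g. odbc_str = 'DRIVER={ODBC Driver 17 for SQL Server};SERVER=...;DATABASE=...;UID=...;PWD=...'
    return 'mssql+pyodbc:///?odbc_connect=' + urllib.parse.quote_plus(odbc_str)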
try:
    client = tda.auth.client_from_token_file('token.json', cf.api_key)
except ValueError:
    pass
except FileNotFoundError:
    pass
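# If 'token.json' is missing or invalid, the except blocks above leave `client`
# undefined and the call further down will fail. One way to bootstrap a token is
# tda-api's interactive login flow -- sketched here only and left commented out:
# cf.redirect_uri is an assumed config field and a selenium webdriver is required.
# from selenium import webdriver
# with webdriver.Chrome() as driver:
#     client = tda.auth.client_from_login_flow(
#         driver, cf.api_key, cf.redirect_uri, 'token.json')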
# ==================================== #
def get_options_data(c, ticker, strikes):
    # request the option chain for the ticker, limited to `strikes` strikes around the money
    response = c.get_option_chain(str(ticker), strike_count=strikes)
    # print(json.dumps(response.json(), indent=4))
    return response.json()
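# For reference, the chain JSON that json_data_to_dataframe() expects is nested as
# map -> expiration date -> strike -> list of contract dicts, roughly like this
# (keys abridged, values illustrative):
# {
#   "symbol": "JPM",
#   "callExpDateMap": {"2024-01-19:30": {"150.0": [{"putCall": "CALL", "bid": 1.2, ...}]}},
#   "putExpDateMap":  {"2024-01-19:30": {"150.0": [{"putCall": "PUT", "bid": 0.9, ...}]}}
# }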
# STEP 3: parse json output and return a dataframe
def json_data_to_dataframe(json_response):
    # collect one frame per (expiration date, strike) contract list, then build a
    # single DataFrame housing all of the data for one ticker
    frames = []
    # parsing json output for CALL and PUT data
    for item in ('callExpDateMap', 'putExpDateMap'):
        for date in json_response.get(item, {}):
            for strike in json_response[item][date]:
                frames.append(pd.DataFrame(json_response[item][date][strike]))
    if not frames:
        return pd.DataFrame()
    # concatenate once at the end (DataFrame.append is deprecated in modern pandas)
    df = pd.concat(frames, ignore_index=True)
    # DATA CLEANING STEP TO AVOID DATA ERRORS WHEN LOADING INTO SQL
    # this converts all numeric-valued columns to numeric dtype; any value that fails
    # conversion becomes NaN, which is then cleared with a simple fillna()
    cols = df.columns.tolist()
    numeric_cols = cols[4:]
    df[numeric_cols] = df[numeric_cols].apply(pd.to_numeric, errors='coerce')
    df = df.fillna(0)
    # print(df)
    return df
# ===================================== #
engine = saq.create_engine(cf.ENGINE_URL)
raw_data = get_options_data(client, 'JPM', 5)
df = json_data_to_dataframe(raw_data)
df.to_sql('anotherTest', con=engine, index=True, if_exists='append')
print('Yay!')
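# Quick sanity check, assuming the account used above can also read the table:
# pull the row count back out of the table that was just loaded.
print(pd.read_sql('SELECT COUNT(*) AS n FROM anotherTest', engine))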