diff --git a/analysis.py b/analysis.py index bef3db1..75f524d 100644 --- a/analysis.py +++ b/analysis.py @@ -10,6 +10,9 @@ from logs import Logs from twitter import Twitter +from dotenv import load_dotenv +load_dotenv() + # The URL for a GET request to the Wikidata API. The string parameter is the # SPARQL query. WIKIDATA_QUERY_URL = "https://query.wikidata.org/sparql?query=%s&format=JSON" diff --git a/company_main.py b/company_main.py new file mode 100644 index 0000000..748ad5c --- /dev/null +++ b/company_main.py @@ -0,0 +1,173 @@ +from datetime import datetime +from http.server import BaseHTTPRequestHandler +from http.server import HTTPServer +from threading import Event +from threading import Thread +from time import sleep + +from analysis import Analysis +from logs import Logs +from logs import Logs +from logs_trade import LogsTrade +from trading import Trading +from company_twitter import Twitter + +# Whether to send all logs to the cloud instead of a local file. +LOGS_TO_CLOUD = False + +# The duration of the smallest backoff step in seconds. +BACKOFF_STEP_S = 0.1 + +# The maximum number of retry steps, equivalent to 0.1 * (2^12 - 1) = 409.5 +# seconds of total delay. This is the largest interval that one backoff +# sequence may take. +MAX_TRIES = 12 + +# The time in seconds after which to reset a backoff sequence. This is the +# smallest interval at which backoff sequences may repeat normally. +BACKOFF_RESET_S = 30 * 60 + +# The host for the monitor Web server. +MONITOR_HOST = "0.0.0.0" + +# The port for the monitor Web server. +MONITOR_PORT = 80 + +# The message returned by the monitor Web server. 
+MONITOR_MESSAGE = "OK" + + +class Monitor: + """A monitor exposing a Web server while the main loop is running.""" + + def __init__(self): + """Creates a Web server on a background thread.""" + + self.server = HTTPServer((MONITOR_HOST, MONITOR_PORT), + self.MonitorHandler) + self.thread = Thread(target=self.server.serve_forever) + self.thread.daemon = True + + def start(self): + """Starts the Web server background thread.""" + + self.thread.start() + + def stop(self): + """Stops the Web server and background thread.""" + + self.server.shutdown() + self.server.server_close() + + class MonitorHandler(BaseHTTPRequestHandler): + """An HTTP request handler that responds with "OK" while running.""" + + def _set_headers(self): + self.send_response(200) + self.send_header("Content-type", "text/plain") + self.end_headers() + + def do_GET(self): + self._set_headers() + self.wfile.write(MONITOR_MESSAGE.encode("utf-8")) + + def do_HEAD(self): + self._set_headers() + + +class Main: + """A wrapper for the main application logic and retry loop.""" + + def __init__(self): + self.logs = Logs(name="main", to_cloud=LOGS_TO_CLOUD) + self.twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD) + + def twitter_callback(self, tweet): + """Analyzes Trump tweets, trades stocks, and tweets about it.""" + + # Initialize the Analysis, Logs, Trading, and Twitter instances inside + # the callback to create separate httplib2 instances per thread. + analysis = Analysis(logs_to_cloud=LOGS_TO_CLOUD) + logs = Logs(name="main-callback", to_cloud=LOGS_TO_CLOUD) + + logs_trade = LogsTrade(name="main", to_cloud=LOGS_TO_CLOUD) + + # Analyze the tweet. + companies = analysis.find_companies(tweet) + logs.info("Using companies: %s" % companies) + if not companies: + return + + logs_trade.info("%s" % companies) + + # Trade stocks. + # trading = Trading(logs_to_cloud=LOGS_TO_CLOUD) + # trading.make_trades(companies) + + # Tweet about it. 
+ # twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD) + # twitter.tweet(companies, tweet) + + def run_session(self): + """Runs a single streaming session. Logs and cleans up after + exceptions. + """ + + self.logs.info("Starting new session.") + try: + self.twitter.start_streaming(self.twitter_callback) + except: + self.logs.catch() + finally: + self.twitter.stop_streaming() + self.logs.info("Ending session.") + + def backoff(self, tries): + """Sleeps an exponential number of seconds based on the number of + tries. + """ + + delay = BACKOFF_STEP_S * pow(2, tries) + self.logs.warn("Waiting for %.1f seconds." % delay) + sleep(delay) + + def run(self): + """Runs the main retry loop with exponential backoff.""" + + tries = 0 + while True: + + # The session blocks until an error occurs. + self.run_session() + + # Remember the first time a backoff sequence starts. + now = datetime.now() + if tries == 0: + self.logs.debug("Starting first backoff sequence.") + backoff_start = now + + # Reset the backoff sequence if the last error was long ago. + if (now - backoff_start).total_seconds() > BACKOFF_RESET_S: + self.logs.debug("Starting new backoff sequence.") + tries = 0 + backoff_start = now + + # Give up after the maximum number of tries. + if tries >= MAX_TRIES: + self.logs.warn("Exceeded maximum retry count.") + break + + # Wait according to the progression of the backoff sequence. + self.backoff(tries) + + # Increment the number of tries for the next error. 
+ tries += 1 + + +if __name__ == "__main__": + monitor = Monitor() + monitor.start() + try: + Main().run() + finally: + monitor.stop() diff --git a/company_twitter.py b/company_twitter.py new file mode 100644 index 0000000..2dddc80 --- /dev/null +++ b/company_twitter.py @@ -0,0 +1,375 @@ +from json import loads +from os import getenv +from queue import Empty +from queue import Queue +from threading import Event +from threading import Thread +from time import time +from tweepy import API +from tweepy import Cursor +from tweepy import OAuthHandler +from tweepy import Stream +from tweepy.streaming import StreamListener + +from logs import Logs + +from dotenv import load_dotenv +load_dotenv() + +# The keys for the Twitter account we're using for API requests and tweeting +# alerts (@Trump2Cash). Read from environment variables. +TWITTER_ACCESS_TOKEN = getenv("TWITTER_ACCESS_TOKEN") +TWITTER_ACCESS_TOKEN_SECRET = getenv("TWITTER_ACCESS_TOKEN_SECRET") + +# The keys for the Twitter app we're using for API requests +# (https://apps.twitter.com/app/13239588). Read from environment variables. +TWITTER_CONSUMER_KEY = getenv("TWITTER_CONSUMER_KEY") +TWITTER_CONSUMER_SECRET = getenv("TWITTER_CONSUMER_SECRET") + +# The user ID of @realDonaldTrump. +TRUMP_USER_ID = "25073877" + +# The user ID of @Trump2Cash. +TRUMP2CASH_USER_ID = "812529080998432769" + +# The URL pattern for links to tweets. +TWEET_URL = "https://twitter.com/%s/status/%s" + +# Some emoji. +EMOJI_THUMBS_UP = "\U0001f44d" +EMOJI_THUMBS_DOWN = "\U0001f44e" +EMOJI_SHRUG = "¯\\_(\u30c4)_/¯" + +# The maximum number of characters in a tweet. +MAX_TWEET_SIZE = 140 + +# The number of worker threads processing tweets. +NUM_THREADS = 100 + +# The maximum time in seconds that workers wait for a new task on the queue. +QUEUE_TIMEOUT_S = 5 * 60 + +# The number of retries to attempt when an error occurs. +API_RETRY_COUNT = 60 + +# The number of seconds to wait between retries. 
+API_RETRY_DELAY_S = 1 + +# The HTTP status codes for which to retry. +API_RETRY_ERRORS = [400, 401, 500, 502, 503, 504] + + +class Twitter: + """A helper for talking to Twitter APIs.""" + + def __init__(self, logs_to_cloud): + self.logs_to_cloud = logs_to_cloud + self.logs = Logs(name="twitter", to_cloud=self.logs_to_cloud) + self.twitter_auth = OAuthHandler(TWITTER_CONSUMER_KEY, + TWITTER_CONSUMER_SECRET) + self.twitter_auth.set_access_token(TWITTER_ACCESS_TOKEN, + TWITTER_ACCESS_TOKEN_SECRET) + self.twitter_api = API(auth_handler=self.twitter_auth, + retry_count=API_RETRY_COUNT, + retry_delay=API_RETRY_DELAY_S, + retry_errors=API_RETRY_ERRORS, + wait_on_rate_limit=True, + wait_on_rate_limit_notify=True) + self.twitter_listener = None + + def start_streaming(self, callback): + """Starts streaming tweets and returning data to the callback.""" + + self.twitter_listener = TwitterListener( + callback=callback, logs_to_cloud=self.logs_to_cloud) + twitter_stream = Stream(self.twitter_auth, self.twitter_listener) + + self.logs.debug("Starting stream.") + twitter_stream.filter(track=["stockpick", "uptrend", "stockmarket", "NASDAQ", '"Stock of the Day"', '"stock pick"', '"stocks pick"']) + + # If we got here because of an API error, raise it. + if self.twitter_listener and self.twitter_listener.get_error_status(): + raise Exception("Twitter API error: %s" % + self.twitter_listener.get_error_status()) + + def stop_streaming(self): + """Stops the current stream.""" + + if not self.twitter_listener: + self.logs.warn("No stream to stop.") + return + + self.logs.debug("Stopping stream.") + self.twitter_listener.stop_queue() + self.twitter_listener = None + + def tweet(self, companies, tweet): + """Posts a tweet listing the companies, their ticker symbols, and a + quote of the original tweet. 
+ """ + + link = self.get_tweet_link(tweet) + text = self.make_tweet_text(companies, link) + + self.logs.info("Tweeting: %s" % text) + self.twitter_api.update_status(text) + + def make_tweet_text(self, companies, link): + """Generates the text for a tweet.""" + + # Find all distinct company names. + names = [] + for company in companies: + name = company["name"] + if name not in names: + names.append(name) + + # Collect the ticker symbols and sentiment scores for each name. + tickers = {} + sentiments = {} + for name in names: + tickers[name] = [] + for company in companies: + if company["name"] == name: + ticker = company["ticker"] + tickers[name].append(ticker) + sentiment = company["sentiment"] + # Assuming the same sentiment for each ticker. + sentiments[name] = sentiment + + # Create lines for each name with sentiment emoji and ticker symbols. + lines = [] + for name in names: + sentiment_str = self.get_sentiment_emoji(sentiments[name]) + tickers_str = " ".join(["$%s" % t for t in tickers[name]]) + line = "%s %s %s" % (name, sentiment_str, tickers_str) + lines.append(line) + + # Combine the lines and ellipsize if necessary. + lines_str = "\n".join(lines) + size = len(lines_str) + 1 + len(link) + if size > MAX_TWEET_SIZE: + self.logs.warn("Ellipsizing lines: %s" % lines_str) + lines_size = MAX_TWEET_SIZE - len(link) - 2 + lines_str = "%s\u2026" % lines_str[:lines_size] + + # Combine the lines with the link. + text = "%s\n%s" % (lines_str, link) + + return text + + def get_sentiment_emoji(self, sentiment): + """Returns the emoji matching the sentiment.""" + + if not sentiment: + return EMOJI_SHRUG + + if sentiment > 0: + return EMOJI_THUMBS_UP + + if sentiment < 0: + return EMOJI_THUMBS_DOWN + + self.logs.warn("Unknown sentiment: %s" % sentiment) + return EMOJI_SHRUG + + def get_tweet(self, tweet_id): + """Looks up metadata for a single tweet.""" + + # Use tweet_mode=extended so we get the full text. 
+ status = self.twitter_api.get_status(tweet_id, tweet_mode="extended") + if not status: + self.logs.error("Bad status response: %s" % status) + return None + + # Use the raw JSON, just like the streaming API. + return status._json + + def get_all_tweets(self): + """Looks up metadata for the most recent Trump tweets.""" + + tweets = [] + + # Only the 3,200 most recent tweets are available through the API. Use + # the @Trump2Cash account to filter down to the relevant ones. + for status in Cursor(self.twitter_api.user_timeline, + user_id=TRUMP2CASH_USER_ID, + exclude_replies=True).items(): + + # Extract the quoted @realDonaldTrump tweet, if available. + try: + quoted_tweet_id = status.quoted_status_id + except AttributeError: + self.logs.warn('Skipping tweet: %s' % status) + continue + + # Get the tweet details and add it to the list. + quoted_tweet = self.get_tweet(quoted_tweet_id) + tweets.append(quoted_tweet) + + self.logs.debug("Got tweets: %s" % tweets) + + return tweets + + def get_tweet_text(self, tweet): + """Returns the full text of a tweet.""" + + # The format for getting at the full text is different depending on + # whether the tweet came through the REST API or the Streaming API: + # https://dev.twitter.com/overview/api/upcoming-changes-to-tweets + try: + if "extended_tweet" in tweet: + self.logs.debug("Decoding extended tweet from Streaming API.") + return tweet["extended_tweet"]["full_text"] + elif "full_text" in tweet: + self.logs.debug("Decoding extended tweet from REST API.") + return tweet["full_text"] + else: + self.logs.debug("Decoding short tweet.") + return tweet["text"] + except KeyError: + self.logs.error("Malformed tweet: %s" % tweet) + return None + + def get_tweet_link(self, tweet): + """Creates the link URL to a tweet.""" + + if not tweet: + self.logs.error("No tweet to get link.") + return None + + try: + screen_name = tweet["user"]["screen_name"] + id_str = tweet["id_str"] + except KeyError: + self.logs.error("Malformed tweet for link: 
%s" % tweet) + return None + + link = TWEET_URL % (screen_name, id_str) + return link + + +class TwitterListener(StreamListener): + """A listener class for handling streaming Twitter data.""" + + def __init__(self, callback, logs_to_cloud): + self.logs_to_cloud = logs_to_cloud + self.logs = Logs(name="twitter-listener", to_cloud=self.logs_to_cloud) + self.callback = callback + self.error_status = None + self.start_queue() + + def start_queue(self): + """Creates a queue and starts the worker threads.""" + + self.queue = Queue() + self.stop_event = Event() + self.logs.debug("Starting %s worker threads." % NUM_THREADS) + self.workers = [] + for worker_id in range(NUM_THREADS): + worker = Thread(target=self.process_queue, args=[worker_id]) + worker.daemon = True + worker.start() + self.workers.append(worker) + + def stop_queue(self): + """Shuts down the queue and worker threads.""" + + # First stop the queue. + if self.queue: + self.logs.debug("Stopping queue.") + self.queue.join() + else: + self.logs.warn("No queue to stop.") + + # Then stop the worker threads. + if self.workers: + self.logs.debug("Stopping %d worker threads." % len(self.workers)) + self.stop_event.set() + for worker in self.workers: + # Block until the thread terminates. + worker.join() + else: + self.logs.warn("No worker threads to stop.") + + def process_queue(self, worker_id): + """Continuously processes tasks on the queue.""" + + # Create a new logs instance (with its own httplib2 instance) so that + # there is a separate one for each thread. + logs = Logs("twitter-listener-worker-%s" % worker_id, + to_cloud=self.logs_to_cloud) + + logs.debug("Started worker thread: %s" % worker_id) + while not self.stop_event.is_set(): + try: + data = self.queue.get(block=True, timeout=QUEUE_TIMEOUT_S) + start_time = time() + self.handle_data(logs, data) + self.queue.task_done() + end_time = time() + qsize = self.queue.qsize() + logs.debug("Worker %s took %.f ms with %d tasks remaining." 
% + (worker_id, end_time - start_time, qsize)) + except Empty: + logs.debug("Worker %s timed out on an empty queue." % + worker_id) + continue + except Exception: + # The main loop doesn't catch and report exceptions from + # background threads, so do that here. + logs.catch() + logs.debug("Stopped worker thread: %s" % worker_id) + + def on_error(self, status): + """Handles any API errors.""" + + self.logs.error("Twitter error: %s" % status) + self.error_status = status + self.stop_queue() + return False + + def get_error_status(self): + """Returns the API error status, if there was one.""" + return self.error_status + + def on_data(self, data): + """Puts a task to process the new data on the queue.""" + + # Stop streaming if requested. + if self.stop_event.is_set(): + return False + + # Put the task on the queue and keep streaming. + self.queue.put(data) + return True + + def handle_data(self, logs, data): + """Sanity-checks and extracts the data before sending it to the + callback. + """ + + try: + tweet = loads(data) + except ValueError: + logs.error("Failed to decode JSON data: %s" % data) + return + + try: + user_id_str = tweet["user"]["id_str"] + screen_name = tweet["user"]["screen_name"] + except KeyError: + logs.error("Malformed tweet: %s" % tweet) + return + + # We're only interested in tweets from Mr. Trump himself, so skip the + # rest. + # if user_id_str != TRUMP_USER_ID: + # logs.debug("Skipping tweet from user: %s (%s)" % + # (screen_name, user_id_str)) + # return + + logs.info("Examining tweet: %s" % tweet) + + # Call the callback. 
+ self.callback(tweet) diff --git a/dave/__init__.py b/dave/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dave/twitter.py b/dave/twitter.py new file mode 100644 index 0000000..f3a190e --- /dev/null +++ b/dave/twitter.py @@ -0,0 +1,265 @@ +from json import loads +from os import getenv +from queue import Empty +from queue import Queue +from threading import Event +from threading import Thread +from time import time +from tweepy import API +from tweepy import Cursor +from tweepy import OAuthHandler +from tweepy import Stream +from tweepy.streaming import StreamListener + +from logs import Logs + +from dotenv import load_dotenv +load_dotenv() + +# The keys for the Twitter account we're using for API requests and tweeting +# alerts (@Trump2Cash). Read from environment variables. +TWITTER_ACCESS_TOKEN = getenv("TWITTER_ACCESS_TOKEN") +TWITTER_ACCESS_TOKEN_SECRET = getenv("TWITTER_ACCESS_TOKEN_SECRET") + +# The keys for the Twitter app we're using for API requests +# (https://apps.twitter.com/app/13239588). Read from environment variables. +TWITTER_CONSUMER_KEY = getenv("TWITTER_CONSUMER_KEY") +TWITTER_CONSUMER_SECRET = getenv("TWITTER_CONSUMER_SECRET") + +# The number of worker threads processing tweets. +NUM_THREADS = 1 + +# The maximum time in seconds that workers wait for a new task on the queue. +QUEUE_TIMEOUT_S = 5 * 60 + +# The number of retries to attempt when an error occurs. +API_RETRY_COUNT = 10 + +# The number of seconds to wait between retries. +API_RETRY_DELAY_S = 1 + +# The HTTP status codes for which to retry. 
+API_RETRY_ERRORS = [400, 401, 500, 502, 503, 504] + + +class Twitter: + """A helper for talking to Twitter APIs.""" + + def __init__(self, logs_to_cloud, twitter_user_id): + self.logs_to_cloud = logs_to_cloud + self.twitter_user_id = twitter_user_id + self.logs = Logs(name="twitter-dave", to_cloud=self.logs_to_cloud) + self.twitter_auth = OAuthHandler(TWITTER_CONSUMER_KEY, + TWITTER_CONSUMER_SECRET) + self.twitter_auth.set_access_token(TWITTER_ACCESS_TOKEN, + TWITTER_ACCESS_TOKEN_SECRET) + self.twitter_api = API(auth_handler=self.twitter_auth, + retry_count=API_RETRY_COUNT, + retry_delay=API_RETRY_DELAY_S, + retry_errors=API_RETRY_ERRORS, + wait_on_rate_limit=True, + wait_on_rate_limit_notify=True) + self.twitter_listener = None + + def start_streaming(self, callback): + """Starts streaming tweets and returning data to the callback.""" + + self.twitter_listener = TwitterListener( + callback=callback, logs_to_cloud=self.logs_to_cloud, twitter_user_id=self.twitter_user_id) + twitter_stream = Stream(self.twitter_auth, self.twitter_listener) + + self.logs.debug("Starting stream.") + twitter_stream.filter(follow=[self.twitter_user_id]) + + # If we got here because of an API error, raise it. + if self.twitter_listener and self.twitter_listener.get_error_status(): + raise Exception("Twitter API error: %s" % + self.twitter_listener.get_error_status()) + + def stop_streaming(self): + """Stops the current stream.""" + + if not self.twitter_listener: + self.logs.warn("No stream to stop.") + return + + self.logs.debug("Stopping stream.") + self.twitter_listener.stop_queue() + self.twitter_listener = None + + def get_tweet(self, tweet_id): + """Looks up metadata for a single tweet.""" + + # Use tweet_mode=extended so we get the full text. + status = self.twitter_api.get_status(tweet_id, tweet_mode="extended") + if not status: + self.logs.error("Bad status response: %s" % status) + return None + + # Use the raw JSON, just like the streaming API. 
+ return status._json + + def get_tweet_text(self, tweet): + """Returns the full text of a tweet.""" + + # The format for getting at the full text is different depending on + # whether the tweet came through the REST API or the Streaming API: + # https://dev.twitter.com/overview/api/upcoming-changes-to-tweets + try: + if "extended_tweet" in tweet: + self.logs.debug("Decoding extended tweet from Streaming API.") + return tweet["extended_tweet"]["full_text"] + elif "full_text" in tweet: + self.logs.debug("Decoding extended tweet from REST API.") + return tweet["full_text"] + else: + self.logs.debug("Decoding short tweet.") + return tweet["text"] + except KeyError: + self.logs.error("Malformed tweet: %s" % tweet) + return None + + def get_tweet_link(self, tweet): + """Creates the link URL to a tweet.""" + + if not tweet: + self.logs.error("No tweet to get link.") + return None + + try: + screen_name = tweet["user"]["screen_name"] + id_str = tweet["id_str"] + except KeyError: + self.logs.error("Malformed tweet for link: %s" % tweet) + return None + + link = TWEET_URL % (screen_name, id_str) + return link + + +class TwitterListener(StreamListener): + """A listener class for handling streaming Twitter data.""" + + def __init__(self, callback, logs_to_cloud, twitter_user_id): + self.logs_to_cloud = logs_to_cloud + self.twitter_user_id = twitter_user_id + self.logs = Logs(name="twitter-dave-listener", + to_cloud=self.logs_to_cloud) + self.callback = callback + self.error_status = None + self.start_queue() + + def start_queue(self): + """Creates a queue and starts the worker threads.""" + + self.queue = Queue() + self.stop_event = Event() + self.logs.debug("Starting %s worker threads." 
% NUM_THREADS) + self.workers = [] + for worker_id in range(NUM_THREADS): + worker = Thread(target=self.process_queue, args=[worker_id]) + worker.daemon = True + worker.start() + self.workers.append(worker) + + def stop_queue(self): + """Shuts down the queue and worker threads.""" + + # First stop the queue. + if self.queue: + self.logs.debug("Stopping queue.") + self.queue.join() + else: + self.logs.warn("No queue to stop.") + + # Then stop the worker threads. + if self.workers: + self.logs.debug("Stopping %d worker threads." % len(self.workers)) + self.stop_event.set() + for worker in self.workers: + # Block until the thread terminates. + worker.join() + else: + self.logs.warn("No worker threads to stop.") + + def process_queue(self, worker_id): + """Continuously processes tasks on the queue.""" + + # Create a new logs instance (with its own httplib2 instance) so that + # there is a separate one for each thread. + logs = Logs("twitter-dave-listener-worker-%s" % worker_id, + to_cloud=self.logs_to_cloud) + + logs.debug("Started worker thread: %s" % worker_id) + while not self.stop_event.is_set(): + try: + data = self.queue.get(block=True, timeout=QUEUE_TIMEOUT_S) + start_time = time() + self.handle_data(logs, data) + self.queue.task_done() + end_time = time() + qsize = self.queue.qsize() + logs.debug("Worker %s took %.f ms with %d tasks remaining." % + (worker_id, end_time - start_time, qsize)) + except Empty: + logs.debug("Worker %s timed out on an empty queue." % + worker_id) + continue + except Exception: + # The main loop doesn't catch and report exceptions from + # background threads, so do that here. 
+ logs.catch() + logs.debug("Stopped worker thread: %s" % worker_id) + + def on_error(self, status): + """Handles any API errors.""" + + self.logs.error("Twitter error: %s" % status) + self.error_status = status + self.stop_queue() + return False + + def get_error_status(self): + """Returns the API error status, if there was one.""" + return self.error_status + + def on_data(self, data): + """Puts a task to process the new data on the queue.""" + # Stop streaming if requested. + # print(data) + if self.stop_event.is_set(): + return False + + # Put the task on the queue and keep streaming. + self.queue.put(data) + return True + + def handle_data(self, logs, data): + """Sanity-checks and extracts the data before sending it to the + callback. + """ + + try: + tweet = loads(data) + except ValueError: + logs.error("Failed to decode JSON data: %s" % data) + return + + try: + user_id_str = tweet["user"]["id_str"] + screen_name = tweet["user"]["screen_name"] + except KeyError: + logs.error("Malformed tweet: %s" % tweet) + return + + # We're only interested in tweets from Mr. Trump himself, so skip the + # rest. + # if user_id_str != self.twitter_user_id: + # logs.debug("Skipping tweet from user: %s (%s)" % + # (screen_name, user_id_str)) + # return + + logs.info("Examining tweet: %s" % tweet) + + # Call the callback. + self.callback(tweet) diff --git a/dave_baisc.py b/dave_baisc.py new file mode 100644 index 0000000..79d6932 --- /dev/null +++ b/dave_baisc.py @@ -0,0 +1,16 @@ +from tinydb import TinyDB, Query + + + +class DaveBasic: + def make_trades(self): + ... 
+ + + + def checkAccount(self): + # db = TinyDB('db.json') + + + + diff --git a/gcloud-credentials-file.json b/gcloud-credentials-file.json new file mode 100644 index 0000000..59fbd84 --- /dev/null +++ b/gcloud-credentials-file.json @@ -0,0 +1,12 @@ +{ + "type": "service_account", + "project_id": "sincere-tape-275103", + "private_key_id": "6000dd34960302fd5d1ac302302ad57234a6857d", + "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDHBLha5FM7fzOx\nZykZQYHIlncXzR34LauJQOHUehDBmqoUrAus1CA7s+mMwrLlex5VnwmWpjRxdDKt\n/jRBkDyyoz1n7cuuo5jMGJKKGKyD7S09ZSZQJEZURNdGfDKp9KvG6vnQd88PKO3I\nzUHgkcZpoKudiYuHoysrMw+2A6VqvDiRTI7wVSW5JJcGdzXC+tUBbX8XurDN3YTN\njLUEDNIBpihJV8wRXGOyy4VmuIY0AiRkSj3oCUOaeC5PGsqH1o1R1Gdl/s34nVRF\nRw+m7ZiaWodZ7tBI1LdRjD/71beI2+hhKIhX9Ct5+PP8WOHvAu3DXDVwFiLiwQnw\nUnfWPDUBAgMBAAECggEAB+bvPlCli2MOwDhVNexXb7RQZeYI5Biayy2D1KYaHI3R\nu1YPdUfLldicE9uL3eJoacQ8TbqAsjvdDUfezExNVKx4R4mgwOjnAWeSXfWtLTnR\ncN10ytF952JVB+FDjmL+ymb8ZKN1LxK+ykPNc1IRG5LmbYVkwGMhV4BEsJiZd1Dl\noYb6fYYZR+bf3V/zEIIsicoOQpJRgWyxjkzfJJ3DwH2hUsOlZgt9rtS1buLCHPuA\nRn6MvZfHxw26b4U8JO0pBLLgodcGRozfiy6QXXrB38TQ130dRncUTA7A9HCL/ltz\n7igl5Ux3iKH48FJiaVU0ddM6UQnZiEm24KzsQY/+aQKBgQDoNpmbMVMuGcvyhdhw\nqwtp3U8WSHDjhMaGMpyZeeQjjEW7fa8xEfgFQANQuw+k8ajtgaEIS8lDpbSNKX+Q\nunahwH/hi+jkug7CCGYAopOsXqsOJjUNAYAUUVLBGCj/Zsy9JCtCQlfo4kTbO5Nr\new+POxARyH+IS65GrvV8zDR9hQKBgQDbZ6Q+Hu91VcafAF4wj5cq3rxNwlwsxZHO\nlIizKQB1C5jyKM+IBj2E9xBj7sCKUTFLztJoRzcXw46zzFOW2np4wZh4d6ACHFKV\nUKBeldvlhj0MEbT/i3Ch+IeTM700VIy0qed6P0mjRgPb7O2pHQmKmn7bPMwdiUJt\nft67zIjkTQKBgQCdzw1Q1JKMz+c0KBXRa8xT45ytrglZ9Y/c/2RBQP7z0Y5KPr3i\ngS2mk7wtDxMwqzJ8Rg4jzy02FdS/4iXZO38HqgS+SRC68F67Km33nITzLVreQMTo\n084xAJtWso6aE0iseXoUi3WmxK66tqAW+Qi39BGVpEZXX+dODfSzHSvG6QKBgCq0\nXY+Jzen0rBg+phalnDLH77bi4bYFLoAbARB+JoyhmT9pGhvLuyWqQRJyYh+J9iJf\nOyz4z4xcj+YHGy39kGD4rYilIAvJH3nYm55NZOOj4TX8LFKg/WdJTHomU+lfm5N+\njoOI1fhlpFKmJwwHjtffglmB1+jX0oola4z6HprxAoGAVHBaQjC1s/5C30uOBk1F\nNZD2HQRSHo9lpwuKvqxa4MAV+vk+kf8oxKzmjRMqdgEBArdU7+
rscR/jUBcGZqvs\nby0xabVaZOigDguXmGD7J2XWbGvrr9zPXpw5fvcbsfOTDHFO0HB2Fk6UefLnSHCo\nPTQLWMYxlO7rd4ah8z/X5Dk=\n-----END PRIVATE KEY-----\n", + "client_email": "cash-453@sincere-tape-275103.iam.gserviceaccount.com", + "client_id": "103575627835058845707", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/cash-453%40sincere-tape-275103.iam.gserviceaccount.com" +} diff --git a/logs.py b/logs.py index 6105458..b6f0028 100644 --- a/logs.py +++ b/logs.py @@ -9,6 +9,9 @@ from sys import exc_info from traceback import format_exception +from dotenv import load_dotenv +load_dotenv() + # The format for local logs. LOGS_FORMAT = ("%(asctime)s " "%(name)s " @@ -18,10 +21,10 @@ "%(message)s") # The path to the log file for local logging. -LOG_FILE = "/tmp/trump2cash.log" +LOG_FILE = "./tmp/trump2cash.log" # The path to the log file for the local fallback of cloud logging. -FALLBACK_LOG_FILE = "/tmp/trump2cash-fallback.log" +FALLBACK_LOG_FILE = "./tmp/trump2cash-fallback.log" # The maximum size in bytes for each local log file. 
MAX_LOG_BYTES = 10 * 1024 * 1024 @@ -53,7 +56,7 @@ def __init__(self, name, to_cloud=True): def get_local_logger(self, name, log_file): """Returns a local logger with a file handler.""" - handler = RotatingFileHandler(log_file, maxBytes=MAX_LOG_BYTES) + handler = RotatingFileHandler(log_file, maxBytes=MAX_LOG_BYTES, encoding="utf-8") handler.setFormatter(Formatter(LOGS_FORMAT)) handler.setLevel(DEBUG) diff --git a/logs_trade.py b/logs_trade.py new file mode 100644 index 0000000..a17a857 --- /dev/null +++ b/logs_trade.py @@ -0,0 +1,157 @@ +from backoff import expo +from backoff import on_exception +from google.cloud import error_reporting +from google.cloud import logging +from logging import Formatter +from logging import getLogger +from logging import DEBUG +from logging.handlers import RotatingFileHandler +from sys import exc_info +from traceback import format_exception + +from dotenv import load_dotenv +load_dotenv() + +# The format for local logs. +LOGS_FORMAT = ("%(asctime)s " + "%(name)s " + "%(process)d " + "%(thread)d " + "%(levelname)s " + "%(message)s") + +# The path to the log file for local logging. +LOG_FILE = "./tmp/trades.log" + +# The path to the log file for the local fallback of cloud logging. +FALLBACK_LOG_FILE = "./tmp/trump2cash-fallback.log" + +# The maximum size in bytes for each local log file. +MAX_LOG_BYTES = 10 * 1024 * 1024 + + +class LogsTrade: + """A helper for logging locally or in the cloud.""" + + def __init__(self, name, to_cloud=True): + self.to_cloud = to_cloud + + if self.to_cloud: + # Initialize the Stackdriver logging and error reporting clients. + self.cloud_logger = logging.Client().logger(name) + self.error_client = error_reporting.Client() + + # Initialize the local fallback logger. + self.fallback_logger, fallback_handler = self.get_local_logger( + name, FALLBACK_LOG_FILE) + + # Redirect the backoff logs to the local fallback handler. 
+ backoff_logger = getLogger("backoff") + backoff_logger.setLevel(DEBUG) + backoff_logger.handlers = [fallback_handler] + else: + # Initialize the local file logger. + self.local_logger, _ = self.get_local_logger(name, LOG_FILE) + + def get_local_logger(self, name, log_file): + """Returns a local logger with a file handler.""" + + handler = RotatingFileHandler(log_file, maxBytes=MAX_LOG_BYTES, encoding="utf-8") + handler.setFormatter(Formatter(LOGS_FORMAT)) + handler.setLevel(DEBUG) + + logger = getLogger(name) + logger.setLevel(DEBUG) + logger.handlers = [handler] + + return (logger, handler) + + def debug(self, text): + """Logs at the DEBUG level.""" + + if self.to_cloud: + self.safe_cloud_log_text(text, severity="DEBUG") + else: + self.local_logger.debug(text) + + def info(self, text): + """Logs at the INFO level.""" + + if self.to_cloud: + self.safe_cloud_log_text(text, severity="INFO") + else: + self.local_logger.info(text) + + def warn(self, text): + """Logs at the WARNING level.""" + + if self.to_cloud: + self.safe_cloud_log_text(text, severity="WARNING") + else: + self.local_logger.warning(text) + + def error(self, text): + """Logs at the ERROR level.""" + + if self.to_cloud: + self.safe_cloud_log_text(text, severity="ERROR") + else: + self.local_logger.error(text) + + def catch(self): + """Logs the latest exception.""" + + exception_str = self.format_exception() + + if self.to_cloud: + self.safe_report_exception(exception_str) + self.safe_cloud_log_text(exception_str, severity="CRITICAL") + else: + self.local_logger.critical(exception_str) + + def safe_cloud_log_text(self, text, severity): + """Logs to the cloud, retries if necessary, and eventually fails over + to local logs. 
+ """ + + try: + self.retry_cloud_log_text(text, severity) + except Exception: + exception_str = self.format_exception() + self.fallback_logger.error("Failed to log to cloud: %s %s\n%s" % + (severity, text, exception_str)) + + @on_exception(expo, Exception, max_tries=8) + def retry_cloud_log_text(self, text, severity): + """Logs to the cloud and retries up to 10 times with exponential + backoff (51.2 seconds max total) if the upload fails. + """ + + self.cloud_logger.log_text(text, severity=severity) + + def safe_report_exception(self, exception_str): + """Reports the exception, retries if necessary, and eventually fails + over to local logs. + """ + + try: + self.retry_report_exception(exception_str) + except Exception: + meta_exception_str = self.format_exception() + self.fallback_logger.error("Failed to report exception: %s\n%s" % + (exception_str, meta_exception_str)) + + @on_exception(expo, Exception, max_tries=8) + def retry_report_exception(self, exception_str): + """Reports the exception and retries up to 10 times with exponential + backoff (51.2 seconds max total) if the upload fails. + """ + + self.error_client.report(exception_str) + + def format_exception(self): + """Grabs the latest exception and formats it.""" + + exc_type, exc_value, exc_traceback = exc_info() + exc_format = format_exception(exc_type, exc_value, exc_traceback) + return "".join(exc_format).strip() diff --git a/main.dave.py b/main.dave.py new file mode 100644 index 0000000..197430d --- /dev/null +++ b/main.dave.py @@ -0,0 +1,201 @@ +from datetime import datetime +from http.server import BaseHTTPRequestHandler +from http.server import HTTPServer +from threading import Event +from threading import Thread +from time import sleep +import ally + +from analysis import Analysis +from logs import Logs +from trading import Trading +from dave.twitter import Twitter + + +# Whether to send all logs to the cloud instead of a local file. 
+LOGS_TO_CLOUD = False + +# The duration of the smallest backoff step in seconds. +BACKOFF_STEP_S = 0.1 + +# The maximum number of retry steps, equivalent to 0.1 * (2^12 - 1) = 409.5 +# seconds of total delay. This is the largest interval that one backoff +# sequence may take. +MAX_TRIES = 12 + +# The time in seconds after which to reset a backoff sequence. This is the +# smallest interval at which backoff sequences may repeat normally. +BACKOFF_RESET_S = 30 * 60 + +# The host for the monitor Web server. +MONITOR_HOST = "0.0.0.0" + +# The port for the monitor Web server. +MONITOR_PORT = 80 + +# The message returned by the monitor Web server. +MONITOR_MESSAGE = "OK" + + +class Monitor: + """A monitor exposing a Web server while the main loop is running.""" + + def __init__(self): + """Creates a Web server on a background thread.""" + + self.server = HTTPServer((MONITOR_HOST, MONITOR_PORT), + self.MonitorHandler) + self.thread = Thread(target=self.server.serve_forever) + self.thread.daemon = True + + def start(self): + """Starts the Web server background thread.""" + + self.thread.start() + + def stop(self): + """Stops the Web server and background thread.""" + + self.server.shutdown() + self.server.server_close() + + class MonitorHandler(BaseHTTPRequestHandler): + """An HTTP request handler that responds with "OK" while running.""" + + def _set_headers(self): + self.send_response(200) + self.send_header("Content-type", "text/plain") + self.end_headers() + + def do_GET(self): + self._set_headers() + self.wfile.write(MONITOR_MESSAGE.encode("utf-8")) + + def do_HEAD(self): + self._set_headers() + + +class Main: + """A wrapper for the main application logic and retry loop.""" + + def __init__(self): + + print('starting ally') + a = ally.Ally('pyallykeys.json') + quotes = a.quote(symbols=['spy', 'gLD', 'F', 'Ibm'], fields=['bid', 'ask', 'last']) + print(quotes.loc['SPY']) + + print('starting streaming') + quotes = a.stream(symbols=['spy', 'gLD', 'F', 'Ibm']) + for quote 
in quotes: + print(quote) + + + + self.logs = Logs(name="main", to_cloud=LOGS_TO_CLOUD) + self.twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD, twitter_user_id= "25073877") # "18196832") + + # trading = Trading(logs_to_cloud=LOGS_TO_CLOUD) +# trading.make_trades([ + +# {"name": 'Elastic', +# "ticker": 'ESTC', +# "exchange": 'NYSE', +# 'sentiment': 1} + +# ]) + + def twitter_callback(self, tweet): + """Analyzes tweets, trades stocks, and tweets about it.""" + + # Initialize the Analysis, Logs, Trading, and Twitter instances inside + # the callback to create separate httplib2 instances per thread. + analysis = Analysis(logs_to_cloud=LOGS_TO_CLOUD) + logs = Logs(name="main-callback", to_cloud=LOGS_TO_CLOUD) + + print('tweet') + print(tweet['text']) + + symbols = [] + for t in tweet.split(): + if t[0] == '$': + symbols.append(t[1:]) + # Splits at space + print(symbols) + logs.info("Using symbols: %s" % symbols) + + + # Analyze the tweet. + # companies = analysis.find_companies(tweet) + logs.info("Using symbols: %s" % symbols) + # if not companies: + # return + + # Trade stocks. + trading = Trading(logs_to_cloud=LOGS_TO_CLOUD) + trading.make_trades(companies) + + def run_session(self): + """Runs a single streaming session. Logs and cleans up after + exceptions. + """ + + # self.logs.info("Starting new session.") + # try: + # self.twitter.start_streaming(self.twitter_callback) + # except: + # self.logs.catch() + # finally: + # self.twitter.stop_streaming() + # self.logs.info("Ending session.") + + def backoff(self, tries): + """Sleeps an exponential number of seconds based on the number of + tries. + """ + + delay = BACKOFF_STEP_S * pow(2, tries) + self.logs.warn("Waiting for %.1f seconds." % delay) + sleep(delay) + + def run(self): + """Runs the main retry loop with exponential backoff.""" + + tries = 0 + while True: + + # The session blocks until an error occurs. + self.run_session() + + # Remember the first time a backoff sequence starts. 
+ now = datetime.now() + if tries == 0: + self.logs.debug("Starting first backoff sequence.") + backoff_start = now + + # Reset the backoff sequence if the last error was long ago. + if (now - backoff_start).total_seconds() > BACKOFF_RESET_S: + self.logs.debug("Starting new backoff sequence.") + tries = 0 + backoff_start = now + + # Give up after the maximum number of tries. + if tries >= MAX_TRIES: + self.logs.warn("Exceeded maximum retry count.") + break + + # Wait according to the progression of the backoff sequence. + self.backoff(tries) + + # Increment the number of tries for the next error. + tries += 1 + + +if __name__ == "__main__": + # monitor = Monitor() + # monitor.start() + try: + Main().run() + finally: + # monitor.stop() + ... diff --git a/main.py b/main.py index d64e772..e1f3b43 100644 --- a/main.py +++ b/main.py @@ -11,7 +11,7 @@ from twitter import Twitter # Whether to send all logs to the cloud instead of a local file. -LOGS_TO_CLOUD = True +LOGS_TO_CLOUD = False # The duration of the smallest backoff step in seconds. 
 BACKOFF_STEP_S = 0.1
diff --git a/pyallykeys.json b/pyallykeys.json
new file mode 100644
index 0000000..cb7f80d
--- /dev/null
+++ b/pyallykeys.json
@@ -0,0 +1,7 @@
+{
+    "ALLY_CONSUMER_SECRET":"odDwwPJxPMYqKpdsF0MO7RUpYknpoi29R4lZzTM19as4",
+    "ALLY_CONSUMER_KEY":"qtXvsI07sX6nSm6GxkAv6M3CigvPEIESfvgwZJqb5Co0",
+    "ALLY_OAUTH_SECRET":"dvBTVom7gN3BrQxGg82lqtmz03P2gVHqpAZw6Vt0yaI6",
+    "ALLY_OAUTH_TOKEN":"PLJPjD8888l854J6TVL8Q6lbox87Dpk8CqHgY8ucbrk5",
+    "ALLY_ACCOUNT_NBR":"64356007"
+}
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 78dfd27..8e157bb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,3 +11,4 @@ pytz==2020.1
 requests==2.23.0
 tqdm==4.46.0
 tweepy==3.8.0
+python-dotenv==0.13.0
diff --git a/technical.py b/technical.py
new file mode 100644
index 0000000..ed75216
--- /dev/null
+++ b/technical.py
@@ -0,0 +1,7 @@
+from alpha_vantage.techindicators import TechIndicators
+
+ti = TechIndicators(key='8HY3ZTQRP0WR4QF1')
+data, meta_data = ti.get_rsi(symbol='MSFT', interval='1min', time_period=20, series_type='close')
+
+print(data)
+print(meta_data)
\ No newline at end of file
diff --git a/trading.py b/trading.py
index 7b72cb4..ebbe3f7 100644
--- a/trading.py
+++ b/trading.py
@@ -16,6 +16,9 @@
 from logs import Logs
 
+from dotenv import load_dotenv
+load_dotenv()
+
 # Read the authentication keys for TradeKing from environment variables.
 TRADEKING_CONSUMER_KEY = getenv("TRADEKING_CONSUMER_KEY")
 TRADEKING_CONSUMER_SECRET = getenv("TRADEKING_CONSUMER_SECRET")
@@ -41,7 +44,7 @@
 FIXML_HEADERS = {"Content-Type": "text/xml"}
 
 # The amount of cash in dollars to hold from being spent.
-CASH_HOLD = 1000
+CASH_HOLD = 800
 
 # The fraction of the stock price at which to set order limits.
 LIMIT_FRACTION = 0.1
@@ -88,7 +91,7 @@ def make_trades(self, companies):
 
         # Calculate the budget per strategy.
balance = self.get_balance() budget = self.get_budget(balance, len(actionable_strategies)) - + if not budget: self.logs.warn("No budget for trading: %s %s %s" % (budget, balance, actionable_strategies)) @@ -139,16 +142,16 @@ def get_strategy(self, company, market_status): # TODO: Figure out some strategy for the markets closed case. # Don't trade unless the markets are open or are about to open. - if market_status != "open" and market_status != "pre": + if market_status != "open" and market_status != "pre" and market_status != "after": strategy["action"] = "hold" strategy["reason"] = "market closed" return strategy # Can't trade without sentiment. - if sentiment == 0: - strategy["action"] = "hold" - strategy["reason"] = "neutral sentiment" - return strategy + # if sentiment == 0: + # strategy["action"] = "hold" + # strategy["reason"] = "neutral sentiment" + # return strategy # Determine bull or bear based on sentiment direction. if sentiment > 0: @@ -356,7 +359,8 @@ def make_request(self, url, method="GET", body="", headers=None): response, content = client.request(url, method=method, body=body_bytes, headers=headers) - self.logs.debug("TradeKing response: %s %s" % (response, content)) + self.logs.debug("TradeKing response: %s" % (response)) + self.logs.debug("TradeKing content: %s" % (content)) try: return loads(content) @@ -470,12 +474,17 @@ def get_balance(self): try: balances = response["response"] money = balances["accountbalance"]["money"] - cash_str = money["cash"] + cash_str = money["cashavailable"] uncleareddeposits_str = money["uncleareddeposits"] except KeyError: self.logs.error("Malformed balances response: %s" % response) return 0 + # self.logs.error("balances: %s" % balances) + # self.logs.error("cash_str: %s" % cash_str) + # self.logs.error("uncleareddeposits_str: %s" % uncleareddeposits_str) + + try: cash = float(cash_str) uncleareddeposits = float(uncleareddeposits_str) diff --git a/twitter.py b/twitter.py index 0d389b3..a031c6b 100644 --- 
a/twitter.py +++ b/twitter.py @@ -13,6 +13,9 @@ from logs import Logs +from dotenv import load_dotenv +load_dotenv() + # The keys for the Twitter account we're using for API requests and tweeting # alerts (@Trump2Cash). Read from environment variables. TWITTER_ACCESS_TOKEN = getenv("TWITTER_ACCESS_TOKEN") @@ -332,7 +335,8 @@ def get_error_status(self): def on_data(self, data): """Puts a task to process the new data on the queue.""" - + print(data) + # Stop streaming if requested. if self.stop_event.is_set(): return False diff --git a/twitter_tests.py b/twitter_tests.py index e17bdaa..b7322c6 100644 --- a/twitter_tests.py +++ b/twitter_tests.py @@ -35,98 +35,98 @@ def test_streaming(twitter): twitter.start_streaming(callback) -def test_make_tweet_text(twitter): - assert twitter.make_tweet_text([{ - "name": "Boeing", - "sentiment": -0.1, - "ticker": "BA"}], - "https://twitter.com/realDonaldTrump/status/806134244384899072") == ( - "Boeing \U0001f44e $BA\n" - "https://twitter.com/realDonaldTrump/status/806134244384899072") - assert twitter.make_tweet_text([{ - "name": "Ford", - "sentiment": 0.3, - "ticker": "F"}, { - "name": "Fiat", - "root": "Fiat Chrysler Automobiles", - "sentiment": 0.3, - "ticker": "FCAU"}], - "https://twitter.com/realDonaldTrump/status/818461467766824961") == ( - "Ford \U0001f44d $F\n" - "Fiat \U0001f44d $FCAU\n" - "https://twitter.com/realDonaldTrump/status/818461467766824961") - assert twitter.make_tweet_text([{ - "name": "Lockheed Martin", - "sentiment": -0.1, - "ticker": "LMT"}, { - "name": "Boeing", - "sentiment": 0.1, - "ticker": "BA"}], - "https://twitter.com/realDonaldTrump/status/812061677160202240") == ( - "Lockheed Martin \U0001f44e $LMT\n" - "Boeing \U0001f44d $BA\n" - "https://twitter.com/realDonaldTrump/status/812061677160202240") - assert twitter.make_tweet_text([{ - "name": "General Motors", - "sentiment": 0, - "ticker": "GM"}], - "https://twitter.com/realDonaldTrump/status/821697182235496450") == ( - "General Motors 
¯\\_(\u30c4)_/¯ $GM\n" - "https://twitter.com/realDonaldTrump/status/821697182235496450") - assert twitter.make_tweet_text([{ - "ticker": "XOM", - "name": "ExxonMobil", - "sentiment": 0.5, - "exchange": "New York Stock Exchange"}, { - "root": "BlackRock", - "ticker": "BLK", - "name": "ExxonMobil", - "sentiment": 0.5, - "exchange": "New York Stock Exchange"}, { - "root": "PNC Financial Services", - "ticker": "PNC", - "name": "ExxonMobil", - "sentiment": 0.5, - "exchange": "New York Stock Exchange"}, { - "root": "State Street Corporation", - "ticker": "STT", - "name": "ExxonMobil", - "sentiment": 0.5, - "exchange": "New York Stock Exchange"}], - "https://twitter.com/realDonaldTrump/status/838862131852369922") == ( - "ExxonMobil \U0001f44d $XOM $BLK $PNC $STT\n" - "https://twitter.com/realDonaldTrump/status/838862131852369922") - assert twitter.make_tweet_text([{ - "ticker": "GM", - "name": "General Motors", - "sentiment": 0.4, - "exchange": "New York Stock Exchange"}, { - "ticker": "WMT", - "name": "Walmart", - "sentiment": 0.4, - "exchange": "New York Stock Exchange"}, { - "root": "State Street Corporation", - "ticker": "STT", - "name": "Walmart", - "sentiment": 0.4, - "exchange": "New York Stock Exchange"}], - "https://twitter.com/realDonaldTrump/status/821415698278875137") == ( - "General Motors \U0001f44d $GM\n" - "Walmart \U0001f44d $WMT $STT\n" - "https://twitter.com/realDonaldTrump/status/821415698278875137") - assert twitter.make_tweet_text([{ - "ticker": chr(i - 32), - "name": chr(i), - "sentiment": 0} for i in range(97, 123)], - "https://twitter.com/realDonaldTrump/status/0") == ( - "a ¯\\_(\u30c4)_/¯ $A\n" - "b ¯\\_(\u30c4)_/¯ $B\n" - "c ¯\\_(\u30c4)_/¯ $C\n" - "d ¯\\_(\u30c4)_/¯ $D\n" - "e ¯\\_(\u30c4)_/¯ $E\n" - "f ¯\\_(\u30c4)_/¯ $F\n" - "g ¯\\\u2026\n" - "https://twitter.com/realDonaldTrump/status/0") +# def test_make_tweet_text(twitter): +# assert twitter.make_tweet_text([{ +# "name": "Boeing", +# "sentiment": -0.1, +# "ticker": "BA"}], +# 
"https://twitter.com/realDonaldTrump/status/806134244384899072") == ( +# "Boeing \U0001f44e $BA\n" +# "https://twitter.com/realDonaldTrump/status/806134244384899072") +# assert twitter.make_tweet_text([{ +# "name": "Ford", +# "sentiment": 0.3, +# "ticker": "F"}, { +# "name": "Fiat", +# "root": "Fiat Chrysler Automobiles", +# "sentiment": 0.3, +# "ticker": "FCAU"}], +# "https://twitter.com/realDonaldTrump/status/818461467766824961") == ( +# "Ford \U0001f44d $F\n" +# "Fiat \U0001f44d $FCAU\n" +# "https://twitter.com/realDonaldTrump/status/818461467766824961") +# assert twitter.make_tweet_text([{ +# "name": "Lockheed Martin", +# "sentiment": -0.1, +# "ticker": "LMT"}, { +# "name": "Boeing", +# "sentiment": 0.1, +# "ticker": "BA"}], +# "https://twitter.com/realDonaldTrump/status/812061677160202240") == ( +# "Lockheed Martin \U0001f44e $LMT\n" +# "Boeing \U0001f44d $BA\n" +# "https://twitter.com/realDonaldTrump/status/812061677160202240") +# assert twitter.make_tweet_text([{ +# "name": "General Motors", +# "sentiment": 0, +# "ticker": "GM"}], +# "https://twitter.com/realDonaldTrump/status/821697182235496450") == ( +# "General Motors ¯\\_(\u30c4)_/¯ $GM\n" +# "https://twitter.com/realDonaldTrump/status/821697182235496450") +# assert twitter.make_tweet_text([{ +# "ticker": "XOM", +# "name": "ExxonMobil", +# "sentiment": 0.5, +# "exchange": "New York Stock Exchange"}, { +# "root": "BlackRock", +# "ticker": "BLK", +# "name": "ExxonMobil", +# "sentiment": 0.5, +# "exchange": "New York Stock Exchange"}, { +# "root": "PNC Financial Services", +# "ticker": "PNC", +# "name": "ExxonMobil", +# "sentiment": 0.5, +# "exchange": "New York Stock Exchange"}, { +# "root": "State Street Corporation", +# "ticker": "STT", +# "name": "ExxonMobil", +# "sentiment": 0.5, +# "exchange": "New York Stock Exchange"}], +# "https://twitter.com/realDonaldTrump/status/838862131852369922") == ( +# "ExxonMobil \U0001f44d $XOM $BLK $PNC $STT\n" +# 
"https://twitter.com/realDonaldTrump/status/838862131852369922") +# assert twitter.make_tweet_text([{ +# "ticker": "GM", +# "name": "General Motors", +# "sentiment": 0.4, +# "exchange": "New York Stock Exchange"}, { +# "ticker": "WMT", +# "name": "Walmart", +# "sentiment": 0.4, +# "exchange": "New York Stock Exchange"}, { +# "root": "State Street Corporation", +# "ticker": "STT", +# "name": "Walmart", +# "sentiment": 0.4, +# "exchange": "New York Stock Exchange"}], +# "https://twitter.com/realDonaldTrump/status/821415698278875137") == ( +# "General Motors \U0001f44d $GM\n" +# "Walmart \U0001f44d $WMT $STT\n" +# "https://twitter.com/realDonaldTrump/status/821415698278875137") +# assert twitter.make_tweet_text([{ +# "ticker": chr(i - 32), +# "name": chr(i), +# "sentiment": 0} for i in range(97, 123)], +# "https://twitter.com/realDonaldTrump/status/0") == ( +# "a ¯\\_(\u30c4)_/¯ $A\n" +# "b ¯\\_(\u30c4)_/¯ $B\n" +# "c ¯\\_(\u30c4)_/¯ $C\n" +# "d ¯\\_(\u30c4)_/¯ $D\n" +# "e ¯\\_(\u30c4)_/¯ $E\n" +# "f ¯\\_(\u30c4)_/¯ $F\n" +# "g ¯\\\u2026\n" +# "https://twitter.com/realDonaldTrump/status/0") def test_get_sentiment_emoji(twitter):