Create Account
Log In
Dark
chart
exchange
Premium
Terminal
Screener
Stocks
Crypto
Forex
Trends
Depth
Close
Check out our Level2View

NEWR
New Relic, Inc.
stock NYSE

Inactive
Nov 7, 2023
86.99USD+0.023%(+0.02)4,222,853
Pre-market
0.00USD-100.000%(-86.97)0
After-hours
0.00USD0.000%(0.00)0
OverviewHistoricalExchange VolumeDark Pool LevelsDark Pool PrintsExchangesShort VolumeShort Interest - DailyShort InterestBorrow Fee (CTB)Failure to Deliver (FTD)ShortsTrendsNewsTrends
NEWR Reddit Mentions
Subreddits
Limit Labels     

We have sentiment values and mention counts going back to 2017. The complete data set is available via the API.
Take me to the API
NEWR Specific Mentions
As of Oct 22, 2025 9:16:16 AM EDT (10 minutes ago)
Includes all comments and posts. Mentions per user per ticker capped at one per hour.
272 days ago • u/Ok_Thing7750 • r/algotrading • can_anybody_give_me_some_recommendations_with_my • Data • B
I have recently made the decision that I wanted to pour countless hours into attempting to make simple predictions using AI into the stock market. I'll paste my Python code below, and as of right now everything outputs. I would appreciate some recommendations for what else I could scrape to make the results more accurate, and a better AI to use. Thanks all!
import subprocess
import time
from bs4 import BeautifulSoup
import requests
import yfinance as yf
import ollama
import textwrap

print("innit done")

# Global variables
FindPriceRunning = True
PERunning = True
MKTCapRunning = True
StockPrice = " "
StockPrices = "Jack is cool"
FindPriceRT = 1
StockPE = " "
StockPEs = "Jack is really cool"
PERT = 1
MKTCap = " "
MKTCaps = "Jack is super super cool"
MKTCapRT = 1
Stock_Histories = 'Jack is cool, and the dataset begins here'
def find_price(stock):
    """Scrape the current price of *stock* from Yahoo Finance.

    On success, appends "<ticker> Stock Price: $<price>" to the module-level
    ``StockPrices`` accumulator and increments ``FindPriceRT``. Network or
    HTTP failures are reported and skipped (best-effort scrape).
    """
    global StockPrices
    global StockPrice
    global FindPriceRT
    print(f"Finding attributes for {stock}...")
    try:
        # Timeout so one dead ticker can't hang the whole S&P 500 loop.
        source = requests.get(f"https://finance.yahoo.com/quote/{stock}/", timeout=10)
        source.raise_for_status()
        soup = BeautifulSoup(source.text, 'html.parser')

        # Find stock price
        price_element = soup.find('span', {'data-testid': 'qsp-price'})
        if price_element:
            stock_price = price_element.text
            StockPrice = f"{stock} Stock Price: ${stock_price}"
            print(StockPrice)
            StockPrices += f", and {StockPrice}"
            FindPriceRT += 1
    except requests.exceptions.RequestException as e:
        # Best-effort: report and move on instead of silently swallowing
        # every exception (the original caught bare Exception and passed).
        print(f"Skipping price for {stock}: {e}")
def find_PE(stock):
    """Scrape the trailing P/E ratio of *stock* from Yahoo Finance.

    On success, appends "<ticker> P/E Ratio: <value>" to the module-level
    ``StockPEs`` accumulator and increments ``PERT``. Failures are reported
    and skipped (best-effort scrape).
    """
    global StockPEs
    global StockPE
    global PERT
    try:
        # Timeout so one dead ticker can't hang the whole loop.
        source = requests.get(f"https://finance.yahoo.com/quote/{stock}/", timeout=10)
        source.raise_for_status()
        soup = BeautifulSoup(source.text, 'html.parser')
        pe_element = soup.find('fin-streamer', {'data-field': 'trailingPE'})
        if pe_element:
            stock_pe = pe_element.text
            StockPE = f"{stock} P/E Ratio: {stock_pe}"
            print(StockPE)
            StockPEs = f"{StockPEs}, and {StockPE}"
            PERT += 1
    except requests.exceptions.RequestException as e:
        # Report and move on instead of silently swallowing everything.
        print(f"Skipping P/E for {stock}: {e}")
def find_MKTCap(stock):
    """Scrape the market capitalization of *stock* from Yahoo Finance.

    On success, appends "<ticker> Market Cap: <value>" to the module-level
    ``MKTCaps`` accumulator and increments ``MKTCapRT``. Failures are
    reported and skipped (best-effort scrape).
    """
    global MKTCaps
    global MKTCap
    global MKTCapRT
    try:
        # Timeout so one dead ticker can't hang the whole loop.
        source = requests.get(f"https://finance.yahoo.com/quote/{stock}/", timeout=10)
        source.raise_for_status()
        soup = BeautifulSoup(source.text, 'html.parser')

        # Find Market Cap using the correct tag
        MKTCap_element = soup.find('fin-streamer', {'data-field': 'marketCap'})
        if MKTCap_element:
            MKTCap = f"{stock} Market Cap: {MKTCap_element.text}"
            print(MKTCap)
            MKTCaps = f"{MKTCaps}, and {MKTCap}"
            MKTCapRT += 1
    except requests.exceptions.RequestException as e:
        # Report and move on instead of silently swallowing everything.
        print(f"Skipping market cap for {stock}: {e}")
def find_PHistory(stock, period="30d"):
global Stock_Histories
# Download historical data for the given stock symbol
data = yf.download(stock, period=period, interval="1d")

# Initialize the stock_history variabler
stock_history = ""
# Loop through each row to format the output and build the history string
for date, row in data.iterrows():
# Ensure the price is a float
close_price = float(row['Close'].iloc[0])
stock_history += f"{date.date()} {close_price:.6f}, "
stock_history = stock_history.rstrip(", ")
print('Stock history recorded over the past 1k days ')
Stock_Histories += (f"{stock}: {stock_history}")
print('Stock histories updated')

return stock_history


# List of S&P 500 stocks (hand-maintained snapshot).
# NOTE(review): some tickers are delisted/renamed (e.g. NEWR, SIVB, XLNX,
# NLSN, PXD) — scrapes for them will simply fail and be skipped. The
# duplicated "MMC" entry from the original has been removed.
SandP500 = ["FHI", "MMM", "AOS", "ABT", "ABBV", "ACN", "ADBE", "AMD", "AES", "AFL", "A", "APD", "ABNB", "AKAM", "ALB",
            "ARE", "ALGN", "ALLE", "LNT", "ALL", "GOOGL", "GOOG", "MO", "AMZN", "AMCR", "AEE", "AEP", "AXP", "AIG",
            "AMT", "AWK", "AMP", "AME", "AMGN", "APH", "ADI", "ANSS", "AON", "APA", "APO", "AAPL", "AMAT", "APTV",
            "ACGL", "ADM", "ANET", "AJG", "AIZ", "T", "ATO", "ADSK", "ADP", "AZO", "AVB", "AVY", "AXON", "BKR", "BALL",
            "BAC", "BAX", "BDX", "BRK.B", "BBY", "TECH", "BIIB", "BLK", "BX", "BK", "BA", "BKNG", "BWA", "BSX", "BMY",
            "AVGO", "BR", "BRO", "BF.B", "BLDR", "BG", "BXP", "CHRW", "CDNS", "CZR", "CPT", "CPB", "COF", "CAH", "KMX",
            "CCL", "CARR", "CAT", "CBOE", "CBRE", "CDW", "CE", "COR", "CNC", "CNP", "CF", "CRL", "SCHW", "CHTR", "CVX",
            "CMG", "CB", "CHD", "CI", "CINF", "CTAS", "CSCO", "C", "CFG", "CLX", "CME", "CMS", "KO", "CTSH", "CL",
            "CMCSA", "CAG", "COP", "ED", "STZ", "CEG", "COO", "CPRT", "GLW", "CPAY", "CTVA", "CSGP", "COST", "CTRA",
            "CRWD", "CCI", "CSX", "CMI", "CVS", "DHR", "DRI", "DVA", "DAY", "DECK", "DE", "DELL", "DAL", "DVN", "DXCM",
            "FANG", "DLR", "DFS", "DG", "DLTR", "D", "DPZ", "DOV", "DOW", "DHI", "DTE", "DUK", "DD", "EMN", "ETN",
            "EBAY", "ECL", "EIX", "EW", "EA", "ELV", "EMR", "ENPH", "ETR", "EOG", "EPAM", "EQT", "EFX", "EQIX", "EQR",
            "ERIE", "ESS", "EL", "EG", "EVRG", "ES", "EXC", "EXPE", "EXPD", "EXR", "XOM", "FFIV", "FDS", "FICO", "FAST",
            "FRT", "FDX", "FIS", "FITB", "FSLR", "FE", "FI", "FMC", "F", "FTNT", "FTV", "FOXA", "FOX", "BEN", "FCX",
            "GRMN", "IT", "GE", "GEHC", "GEV", "GEN", "GNRC", "GD", "GIS", "GM", "GPC", "GILD", "GPN", "GL", "GDDY",
            "GS", "HAL", "HIG", "HAS", "HCA", "DOC", "HSIC", "HSY", "HES", "HPE", "HLT", "HOLX", "HD", "HON", "HRL",
            "HST", "HWM", "HPQ", "HUBB", "HUM", "HBAN", "HII", "IBM", "IEX", "IDXX", "ITW", "INCY", "IR", "PODD",
            "INTC", "ICE", "IFF", "IP", "IPG", "INTU", "ISRG", "IVZ", "INVH", "IQV", "IRM", "JBHT", "JBL", "JKHY", "J",
            "JNJ", "JCI", "JPM", "JNPR", "K", "KVUE", "KDP", "KEY", "KEYS", "KMB", "KIM", "KMI", "KKR", "KLAC", "KHC",
            "KR", "LHX", "LH", "LRCX", "LW", "LVS", "LDOS", "LEN", "LII", "LLY", "LIN", "LYV", "LKQ", "LMT", "L", "LOW",
            "LULU", "LYB", "MTB", "MPC", "MKTX", "MAR", "MMC", "MLM", "MAS", "MA", "MTCH", "MKC", "MCD", "MS", "MSFT",
            "MAA", "MCO", "MCK", "MDT", "MRK", "MOS", "NAVI", "NEE", "NKE", "NOC", "NLSN", "NTAP", "NVR", "NFLX",
            "NVDA", "NEWR", "ORLY", "O", "ODFL", "OMC", "ON", "PGR", "PEP", "PFE", "PM", "PNC", "PSA", "PEG", "PYPL",
            "PNR", "PPL", "PXD", "QRTEA", "QCOM", "REGN", "ROST", "RMD", "RSG", "RTX", "OZK", "SPGI", "SBUX", "SLB",
            "SNA", "STT", "SYK", "SPG", "SWKS", "SIVB", "TMO", "TSLA", "TXN", "TGT", "TRV", "UNP", "UNH", "UPS", "USB",
            "VZ", "V", "VRSK", "WMT", "WBA", "WM", "WELL", "WBD", "WDC", "WEC", "WRB", "WU", "XEL", "XLNX", "XYL",
            "YUM"]


def format_full_prompt(stock_data):
    """Assemble the LLM prompt from the module-level accumulators.

    Note: *stock_data* is accepted for caller compatibility but the prompt is
    built from the global StockPrices/StockPEs/MKTCaps/Stock_Histories.
    """
    return f"""
Based on the following stock data, predict **which stock is most likely to increase the most relative to its current value in 90 days**:
1 **Stock Prices** (Current Price of each stock in the S&P 500):
{StockPrices}
2️ **P/E Ratios** (Valuation of each stock in the S&P 500):
{StockPEs}
3️ **Market Caps** (Total value of each stock in the S&P 500):
{MKTCaps}
4️ **Stock Price History (Last 1000 days)**:
{Stock_Histories}
**Using this data, analyze trends and return the top 5 stocks that are most likely to increase in price over the next 90 days.**
Rank them by expected percentage growth and explain why they were chosen.
"""


def get_prediction(full_prompt):
    """Send *full_prompt* to the local llama2 model and return the reply text.

    Returns the fallback string when the response lacks the expected
    message/content structure.
    """
    response = ollama.chat(model="llama2", messages=[{"role": "user", "content": full_prompt}])
    print(response)
    # ollama.chat responses carry the reply at ['message']['content']; the
    # original looked up a nonexistent 'text' key and always returned the
    # fallback string.
    try:
        return response['message']['content']
    except (KeyError, TypeError):
        return 'No text key in response'


def main():
    """Scrape price/P-E/market-cap/history for every ticker, then query the LLM."""
    started = time.time()
    for ticker in SandP500:
        find_price(ticker)
        find_PE(ticker)
        find_MKTCap(ticker)
        find_PHistory(ticker, "90d")

    # Format the prompt and get AI prediction
    prompt = format_full_prompt(StockPrices)
    prediction = get_prediction(prompt)

    print("Prediction:", prediction)
    elapsed = time.time() - started
    print(f"Time elapsed to complete : {elapsed:.2f} seconds")


# Run the script
if __name__ == "__main__":
    main()

sentiment 0.99
272 days ago • u/Ok_Thing7750 • r/algotrading • can_anybody_give_me_some_recommendations_with_my • Data • B
I have recently made the decision that I wanted to pour countless hours into attempting to make simple predictions using AI into the stock market. I'll paste my Python code below, and as of right now everything outputs. I would appreciate some recommendations for what else I could scrape to make the results more accurate, and a better AI to use. Thanks all!
import subprocess
import time
from bs4 import BeautifulSoup
import requests
import yfinance as yf
import ollama
import textwrap

print("innit done")

# Global variables
FindPriceRunning = True
PERunning = True
MKTCapRunning = True
StockPrice = " "
StockPrices = "Jack is cool"
FindPriceRT = 1
StockPE = " "
StockPEs = "Jack is really cool"
PERT = 1
MKTCap = " "
MKTCaps = "Jack is super super cool"
MKTCapRT = 1
Stock_Histories = 'Jack is cool, and the dataset begins here'
def find_price(stock):
    """Scrape the current price of *stock* from Yahoo Finance.

    On success, appends "<ticker> Stock Price: $<price>" to the module-level
    ``StockPrices`` accumulator and increments ``FindPriceRT``. Network or
    HTTP failures are reported and skipped (best-effort scrape).
    """
    global StockPrices
    global StockPrice
    global FindPriceRT
    print(f"Finding attributes for {stock}...")
    try:
        # Timeout so one dead ticker can't hang the whole S&P 500 loop.
        source = requests.get(f"https://finance.yahoo.com/quote/{stock}/", timeout=10)
        source.raise_for_status()
        soup = BeautifulSoup(source.text, 'html.parser')

        # Find stock price
        price_element = soup.find('span', {'data-testid': 'qsp-price'})
        if price_element:
            stock_price = price_element.text
            StockPrice = f"{stock} Stock Price: ${stock_price}"
            print(StockPrice)
            StockPrices += f", and {StockPrice}"
            FindPriceRT += 1
    except requests.exceptions.RequestException as e:
        # Best-effort: report and move on instead of silently swallowing
        # every exception (the original caught bare Exception and passed).
        print(f"Skipping price for {stock}: {e}")
def find_PE(stock):
    """Scrape the trailing P/E ratio of *stock* from Yahoo Finance.

    On success, appends "<ticker> P/E Ratio: <value>" to the module-level
    ``StockPEs`` accumulator and increments ``PERT``. Failures are reported
    and skipped (best-effort scrape).
    """
    global StockPEs
    global StockPE
    global PERT
    try:
        # Timeout so one dead ticker can't hang the whole loop.
        source = requests.get(f"https://finance.yahoo.com/quote/{stock}/", timeout=10)
        source.raise_for_status()
        soup = BeautifulSoup(source.text, 'html.parser')
        pe_element = soup.find('fin-streamer', {'data-field': 'trailingPE'})
        if pe_element:
            stock_pe = pe_element.text
            StockPE = f"{stock} P/E Ratio: {stock_pe}"
            print(StockPE)
            StockPEs = f"{StockPEs}, and {StockPE}"
            PERT += 1
    except requests.exceptions.RequestException as e:
        # Report and move on instead of silently swallowing everything.
        print(f"Skipping P/E for {stock}: {e}")
def find_MKTCap(stock):
    """Scrape the market capitalization of *stock* from Yahoo Finance.

    On success, appends "<ticker> Market Cap: <value>" to the module-level
    ``MKTCaps`` accumulator and increments ``MKTCapRT``. Failures are
    reported and skipped (best-effort scrape).
    """
    global MKTCaps
    global MKTCap
    global MKTCapRT
    try:
        # Timeout so one dead ticker can't hang the whole loop.
        source = requests.get(f"https://finance.yahoo.com/quote/{stock}/", timeout=10)
        source.raise_for_status()
        soup = BeautifulSoup(source.text, 'html.parser')

        # Find Market Cap using the correct tag
        MKTCap_element = soup.find('fin-streamer', {'data-field': 'marketCap'})
        if MKTCap_element:
            MKTCap = f"{stock} Market Cap: {MKTCap_element.text}"
            print(MKTCap)
            MKTCaps = f"{MKTCaps}, and {MKTCap}"
            MKTCapRT += 1
    except requests.exceptions.RequestException as e:
        # Report and move on instead of silently swallowing everything.
        print(f"Skipping market cap for {stock}: {e}")
def find_PHistory(stock, period="30d"):
global Stock_Histories
# Download historical data for the given stock symbol
data = yf.download(stock, period=period, interval="1d")

# Initialize the stock_history variabler
stock_history = ""
# Loop through each row to format the output and build the history string
for date, row in data.iterrows():
# Ensure the price is a float
close_price = float(row['Close'].iloc[0])
stock_history += f"{date.date()} {close_price:.6f}, "
stock_history = stock_history.rstrip(", ")
print('Stock history recorded over the past 1k days ')
Stock_Histories += (f"{stock}: {stock_history}")
print('Stock histories updated')

return stock_history


# List of S&P 500 stocks (hand-maintained snapshot).
# NOTE(review): some tickers are delisted/renamed (e.g. NEWR, SIVB, XLNX,
# NLSN, PXD) — scrapes for them will simply fail and be skipped. The
# duplicated "MMC" entry from the original has been removed.
SandP500 = ["FHI", "MMM", "AOS", "ABT", "ABBV", "ACN", "ADBE", "AMD", "AES", "AFL", "A", "APD", "ABNB", "AKAM", "ALB",
            "ARE", "ALGN", "ALLE", "LNT", "ALL", "GOOGL", "GOOG", "MO", "AMZN", "AMCR", "AEE", "AEP", "AXP", "AIG",
            "AMT", "AWK", "AMP", "AME", "AMGN", "APH", "ADI", "ANSS", "AON", "APA", "APO", "AAPL", "AMAT", "APTV",
            "ACGL", "ADM", "ANET", "AJG", "AIZ", "T", "ATO", "ADSK", "ADP", "AZO", "AVB", "AVY", "AXON", "BKR", "BALL",
            "BAC", "BAX", "BDX", "BRK.B", "BBY", "TECH", "BIIB", "BLK", "BX", "BK", "BA", "BKNG", "BWA", "BSX", "BMY",
            "AVGO", "BR", "BRO", "BF.B", "BLDR", "BG", "BXP", "CHRW", "CDNS", "CZR", "CPT", "CPB", "COF", "CAH", "KMX",
            "CCL", "CARR", "CAT", "CBOE", "CBRE", "CDW", "CE", "COR", "CNC", "CNP", "CF", "CRL", "SCHW", "CHTR", "CVX",
            "CMG", "CB", "CHD", "CI", "CINF", "CTAS", "CSCO", "C", "CFG", "CLX", "CME", "CMS", "KO", "CTSH", "CL",
            "CMCSA", "CAG", "COP", "ED", "STZ", "CEG", "COO", "CPRT", "GLW", "CPAY", "CTVA", "CSGP", "COST", "CTRA",
            "CRWD", "CCI", "CSX", "CMI", "CVS", "DHR", "DRI", "DVA", "DAY", "DECK", "DE", "DELL", "DAL", "DVN", "DXCM",
            "FANG", "DLR", "DFS", "DG", "DLTR", "D", "DPZ", "DOV", "DOW", "DHI", "DTE", "DUK", "DD", "EMN", "ETN",
            "EBAY", "ECL", "EIX", "EW", "EA", "ELV", "EMR", "ENPH", "ETR", "EOG", "EPAM", "EQT", "EFX", "EQIX", "EQR",
            "ERIE", "ESS", "EL", "EG", "EVRG", "ES", "EXC", "EXPE", "EXPD", "EXR", "XOM", "FFIV", "FDS", "FICO", "FAST",
            "FRT", "FDX", "FIS", "FITB", "FSLR", "FE", "FI", "FMC", "F", "FTNT", "FTV", "FOXA", "FOX", "BEN", "FCX",
            "GRMN", "IT", "GE", "GEHC", "GEV", "GEN", "GNRC", "GD", "GIS", "GM", "GPC", "GILD", "GPN", "GL", "GDDY",
            "GS", "HAL", "HIG", "HAS", "HCA", "DOC", "HSIC", "HSY", "HES", "HPE", "HLT", "HOLX", "HD", "HON", "HRL",
            "HST", "HWM", "HPQ", "HUBB", "HUM", "HBAN", "HII", "IBM", "IEX", "IDXX", "ITW", "INCY", "IR", "PODD",
            "INTC", "ICE", "IFF", "IP", "IPG", "INTU", "ISRG", "IVZ", "INVH", "IQV", "IRM", "JBHT", "JBL", "JKHY", "J",
            "JNJ", "JCI", "JPM", "JNPR", "K", "KVUE", "KDP", "KEY", "KEYS", "KMB", "KIM", "KMI", "KKR", "KLAC", "KHC",
            "KR", "LHX", "LH", "LRCX", "LW", "LVS", "LDOS", "LEN", "LII", "LLY", "LIN", "LYV", "LKQ", "LMT", "L", "LOW",
            "LULU", "LYB", "MTB", "MPC", "MKTX", "MAR", "MMC", "MLM", "MAS", "MA", "MTCH", "MKC", "MCD", "MS", "MSFT",
            "MAA", "MCO", "MCK", "MDT", "MRK", "MOS", "NAVI", "NEE", "NKE", "NOC", "NLSN", "NTAP", "NVR", "NFLX",
            "NVDA", "NEWR", "ORLY", "O", "ODFL", "OMC", "ON", "PGR", "PEP", "PFE", "PM", "PNC", "PSA", "PEG", "PYPL",
            "PNR", "PPL", "PXD", "QRTEA", "QCOM", "REGN", "ROST", "RMD", "RSG", "RTX", "OZK", "SPGI", "SBUX", "SLB",
            "SNA", "STT", "SYK", "SPG", "SWKS", "SIVB", "TMO", "TSLA", "TXN", "TGT", "TRV", "UNP", "UNH", "UPS", "USB",
            "VZ", "V", "VRSK", "WMT", "WBA", "WM", "WELL", "WBD", "WDC", "WEC", "WRB", "WU", "XEL", "XLNX", "XYL",
            "YUM"]


def format_full_prompt(stock_data):
    """Assemble the LLM prompt from the module-level accumulators.

    Note: *stock_data* is accepted for caller compatibility but the prompt is
    built from the global StockPrices/StockPEs/MKTCaps/Stock_Histories.
    """
    return f"""
Based on the following stock data, predict **which stock is most likely to increase the most relative to its current value in 90 days**:
1 **Stock Prices** (Current Price of each stock in the S&P 500):
{StockPrices}
2️ **P/E Ratios** (Valuation of each stock in the S&P 500):
{StockPEs}
3️ **Market Caps** (Total value of each stock in the S&P 500):
{MKTCaps}
4️ **Stock Price History (Last 1000 days)**:
{Stock_Histories}
**Using this data, analyze trends and return the top 5 stocks that are most likely to increase in price over the next 90 days.**
Rank them by expected percentage growth and explain why they were chosen.
"""


def get_prediction(full_prompt):
    """Send *full_prompt* to the local llama2 model and return the reply text.

    Returns the fallback string when the response lacks the expected
    message/content structure.
    """
    response = ollama.chat(model="llama2", messages=[{"role": "user", "content": full_prompt}])
    print(response)
    # ollama.chat responses carry the reply at ['message']['content']; the
    # original looked up a nonexistent 'text' key and always returned the
    # fallback string.
    try:
        return response['message']['content']
    except (KeyError, TypeError):
        return 'No text key in response'


def main():
    """Scrape price/P-E/market-cap/history for every ticker, then query the LLM."""
    started = time.time()
    for ticker in SandP500:
        find_price(ticker)
        find_PE(ticker)
        find_MKTCap(ticker)
        find_PHistory(ticker, "90d")

    # Format the prompt and get AI prediction
    prompt = format_full_prompt(StockPrices)
    prediction = get_prediction(prompt)

    print("Prediction:", prediction)
    elapsed = time.time() - started
    print(f"Time elapsed to complete : {elapsed:.2f} seconds")


# Run the script
if __name__ == "__main__":
    main()

sentiment 0.99


Share
About
Pricing
Policies
Markets
API
Info
tz UTC-4
Connect with us
ChartExchange Email
ChartExchange on Discord
ChartExchange on X
ChartExchange on Reddit
ChartExchange on GitHub
ChartExchange on YouTube
© 2020 - 2025 ChartExchange LLC