# Assumed imports; DATA_URL and DATE_TIME are constants defined
# elsewhere in the app.
import pandas as pd
import streamlit as st

@st.cache(persist=True)
def load_data(nrows):
    # persist=True also caches to disk, so the CSV survives app restarts.
    data = pd.read_csv(DATA_URL, nrows=nrows)
    lowercase = lambda x: str(x).lower()
    data.rename(lowercase, axis="columns", inplace=True)
    data[DATE_TIME] = pd.to_datetime(data[DATE_TIME])
    return data
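A minimal usage sketch (the 10000-row count and spinner text are illustrative, not from the original):

with st.spinner("Loading data..."):
    data = load_data(10000)
st.write(data.head())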
import requests  # assumed import

@st.cache
def read_file_from_url(url):
    # Cache the raw bytes so reruns don't re-download the file.
    return requests.get(url).content
import cv2  # assumed import (opencv-python)

# allow_output_mutation=True skips hashing the unhashable cv2 network.
@st.cache(allow_output_mutation=True)
def load_network(config_path, weights_path):
    net = cv2.dnn.readNetFromDarknet(config_path, weights_path)
    output_layer_names = net.getLayerNames()
    # The i[0] indexing assumes an older OpenCV; OpenCV >= 4.5.4 returns a
    # flat array from getUnconnectedOutLayers(), where plain `i - 1` is needed.
    output_layer_names = [output_layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()]
    return net, output_layer_names

net, output_layer_names = load_network("yolov3.cfg", "yolov3.weights")
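A minimal inference sketch with the cached network; the 416x416 blob parameters are typical YOLOv3 settings, assumed here rather than taken from the original:

import numpy as np

frame = np.zeros((416, 416, 3), dtype=np.uint8)  # placeholder image
blob = cv2.dnn.blobFromImage(frame, 1 / 255.0, (416, 416), swapRB=True, crop=False)
net.setInput(blob)
layer_outputs = net.forward(output_layer_names)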
# Assumed import; the analyzer could equally come from
# nltk.sentiment.vader in other setups.
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer

@st.cache
def get_sentiment_analyzer() -> SentimentIntensityAnalyzer:
    """An instance of the SentimentIntensityAnalyzer

    Returns:
        SentimentIntensityAnalyzer -- A SentimentIntensityAnalyzer
    """
    return SentimentIntensityAnalyzer()  # initialized once; st.cache reuses it
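A sketch of the cached analyzer in use (the sample sentence is illustrative):

analyzer = get_sentiment_analyzer()
# polarity_scores returns a dict of neg/neu/pos/compound scores.
st.write(analyzer.polarity_scores("Streamlit makes caching painless!"))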
import urllib.request  # assumed import

@st.cache
def get_file_content_as_string(url: str) -> str:
    data = urllib.request.urlopen(url).read()
    return data.decode("utf-8")
import streamlit as st
from allennlp import pretrained
import matplotlib.pyplot as plt
import numpy as np
st.header("AllenNLP Demo")
# Load the pretrained BiDAF model for question answering.
# (It's big, don't do this over dial-up.)
# Use st.cache so that it doesn't reload when you change the inputs.
# Note: ignore_hash is the old-Streamlit spelling; later releases
# renamed it allow_output_mutation.
predictor = st.cache(
    pretrained.bidirectional_attention_flow_seo_2017,
    ignore_hash=True  # the Predictor is not hashable
)()
# Create a text area to input the passage.
passage = st.text_area("passage", "The Matrix is a 1999 movie starring Keanu Reeves.")
# Create a text input to input the question.
question = st.text_input("question", "When did the Matrix come out?")
# Use the predictor to find the answer.
result = predictor.predict(question, passage)
# From the result, we want "best_span", "question_tokens", and "passage_tokens"
start, end = result["best_span"]
question_tokens = result["question_tokens"]
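The demo can then surface the predicted answer; a minimal sketch, assuming (per AllenNLP's BiDAF output format) that best_span holds inclusive token indices into passage_tokens:

passage_tokens = result["passage_tokens"]
st.write("Answer:", " ".join(passage_tokens[start : end + 1]))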
import requests  # assumed import

@st.cache(persist=True)
def read_file_from_url(url):
    try:
        return requests.get(url).content
    except requests.exceptions.RequestException:
        st.error("Unable to load file from %s. Is the internet connected?" % url)
    except Exception as e:
        st.exception(e)
    # Fall through so callers can detect the failure.
    return None
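A sketch of guarding against the None failure path (the URL is a placeholder):

content = read_file_from_url("https://example.com/data.bin")
if content is not None:
    st.write("Downloaded %d bytes" % len(content))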
import pickle  # assumed imports
import urllib.request

@st.cache
def get_pickle(file: str):
    """An instance of an object from the pickle file"""
    # GITHUB_ROOT is assumed to be defined elsewhere in the module.
    github_url = GITHUB_ROOT + file
    with urllib.request.urlopen(github_url) as open_file:  # type: ignore
        return pickle.load(open_file)
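A hypothetical call, assuming a model.pkl exists under GITHUB_ROOT:

model = get_pickle("model.pkl")  # unpickled once, then served from the cache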
@st.cache
def cached_write(value):
    # Caution: st.write is a side effect, so it only runs on a cache miss;
    # Streamlit warns when st calls happen inside cached functions.
    st.write(value)
import pathlib  # assumed import

@st.cache
def get_file_content_as_string(url: str) -> str:
    """The url content as a string

    Arguments:
        url {str} -- The url to request

    Returns:
        str -- The text of the url
    """
    # Serve from the local checkout if possible; GITHUB_RAW_URL is
    # assumed to be defined elsewhere in the module.
    if url.startswith(GITHUB_RAW_URL):
        path = pathlib.Path.cwd() / url.replace(GITHUB_RAW_URL, "")
        if path.exists():
            with open(path, encoding="utf8") as file:
                content = file.read()
            return content
    # The remote-fetch fallback for non-local URLs is not shown in this snippet.
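A sketch of rendering the fetched content in the app (the README path is hypothetical):

readme = get_file_content_as_string(GITHUB_RAW_URL + "README.md")
st.markdown(readme)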