import re

import pandas as pd
import tweepy
import gradio as gr


def search_hashtag1(hashtag_phrase):
    # Replace the placeholders with your own Twitter API credentials.
    # Real keys should never be hard-coded or committed to source control.
    consumer_key = "YOUR_CONSUMER_KEY"
    consumer_secret = "YOUR_CONSUMER_SECRET"
    access_token = "YOUR_ACCESS_TOKEN"
    access_token_secret = "YOUR_ACCESS_TOKEN_SECRET"

    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)

    timestamps = []
    tweet_texts = []
    user_names = []

    # api.search is the Tweepy 3.x name; in Tweepy 4.x the method is
    # api.search_tweets. Retweets are excluded via the search operator.
    for tweet in tweepy.Cursor(api.search,
                               q=hashtag_phrase + ' -filter:retweets',
                               lang="en",
                               tweet_mode='extended').items(200):
        timestamps.append(tweet.created_at)
        tweet_texts.append(tweet.full_text.replace('\n', ' '))
        user_names.append(tweet.user.screen_name)

    tweets_df = pd.DataFrame({
        "tweet_text": tweet_texts,
        "timestamp": timestamps,
        "user_name": user_names,
    })
    tweets_df.to_csv("tweet_data1.csv", index=False)
    return tweets_df


iface = gr.Interface(
    search_hashtag1,
    inputs=gr.inputs.Textbox(lines=2, placeholder="Enter Hashtag Here"),
    outputs="dataframe",
    examples=["#datascience", "#budget", "#valentine's day",
              "#pushpa", "#batman", "#SharkTankIndia"],
    theme="seafoam",
    title='Sakil Tweetlib6 App',
    description=("You can extract tweets based on a hashtag, e.g. #datascience. "
                 "The app extracts up to 200 recent tweets for the hashtag "
                 "and saves them to tweet_data1.csv."))

iface.launch(inline=False)
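

# --- Alternative sketch (not part of the app above) -------------------------
# A minimal sketch of the same hashtag search against the Twitter API v2 using
# Tweepy 4.x's Client, for readers on the newer API. The environment variable
# name TWITTER_BEARER_TOKEN and the function name search_hashtag_v2 are
# assumptions for illustration only; the function is defined but never called.
import os


def search_hashtag_v2(hashtag_phrase, limit=100):
    """Return up to `limit` (10-100 per request) recent English tweets for a hashtag."""
    client = tweepy.Client(bearer_token=os.environ["TWITTER_BEARER_TOKEN"])
    # v2 query operators replace the v1.1 '-filter:retweets' and lang parameters.
    response = client.search_recent_tweets(
        query=f"{hashtag_phrase} -is:retweet lang:en",
        max_results=min(limit, 100),
        tweet_fields=["created_at", "author_id"],
        expansions=["author_id"],
    )
    # Map author_id -> username from the expanded user objects.
    users = {u.id: u.username for u in (response.includes or {}).get("users", [])}
    rows = [{"tweet_text": t.text.replace("\n", " "),
             "timestamp": t.created_at,
             "user_name": users.get(t.author_id, "")}
            for t in (response.data or [])]
    return pd.DataFrame(rows, columns=["tweet_text", "timestamp", "user_name"])

# Example use (requires a v2 bearer token in the environment):
#   df = search_hashtag_v2("#datascience")
#   df.to_csv("tweet_data_v2.csv", index=False)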