"""Gradio app: fetch US/English news per keyword via NewsData.io, combine the
per-keyword JSON responses into one file, convert it to CSV, and preview it."""

import json
import os

import gradio as gr
import pandas as pd
from newsdataapi import NewsDataApiClient


def creating_data_dir(directory_path):
    """Create *directory_path* (and parents); no-op if it already exists."""
    # exist_ok=True lets this run repeatedly without raising.
    os.makedirs(directory_path, exist_ok=True)
    if os.path.exists(directory_path):
        print(f"Directory '{directory_path}' created successfully.")
    else:
        print(f"Failed to create directory '{directory_path}'.")


def retrieve_news_per_keyword(api, keywords, directory_path='./data'):
    """Fetch news for each keyword and write each response to
    ``response_<keyword>.json`` inside *directory_path*.

    Args:
        api: an initialized ``NewsDataApiClient``.
        keywords: list of search terms; successfully-fetched terms are
            removed from it (preserves the original "mark as done" behavior).
        directory_path: output directory (new parameter, defaults to the
            original global value './data' for backward compatibility).

    Fix: the original iterated ``keywords`` while calling
    ``keywords.remove(keyword)`` inside the loop, which silently skips every
    other keyword. We iterate over a snapshot so removal is safe.
    """
    for keyword in list(keywords):  # snapshot: mutating `keywords` below is safe
        response = api.news_api(
            q=keyword, country="us", language='en', full_content=True
        )
        file_path = os.path.join(directory_path, f"response_{keyword}.json")
        with open(file_path, "w") as outfile:
            json.dump(response, outfile)
        print(f"News Response for keyword {keyword} is retrieved")
        keywords.remove(keyword)


def combine_responses_into_one(directory_path):
    """Merge every per-keyword JSON file in *directory_path* into a single
    ``combined_news_response.json``, tagging each article with the keyword
    recovered from its file name (``response_<keyword>.json``)."""
    file_list = [
        f for f in os.listdir(directory_path)
        if os.path.isfile(os.path.join(directory_path, f))
    ]
    combined_json = []
    for filename in file_list:
        with open(os.path.join(directory_path, filename), 'r') as file:
            current_json = json.load(file)
        # Keyword is the text between the first '_' and the '.json' suffix.
        file_keyword = filename.split('_')[1].split('.')[0]
        # 'results' can be absent when the API returned an error payload;
        # skip such files instead of crashing with KeyError.
        results = current_json.get('results', [])
        for result in results:
            result['file_keyword'] = file_keyword
        combined_json.extend(results)
        # Fixed log message: the original f-string had no placeholder.
        print(f'{filename} is added to the combined json object')
    with open('combined_news_response.json', 'w') as combined_file:
        json.dump(combined_json, combined_file, indent=4)


def convert_json_to_csv(file_name):
    """Load the combined JSON file and write the selected columns to
    ``combined_news_response.csv``."""
    json_data_df = pd.read_json(file_name)
    columns = [
        'title', 'keywords', 'creator', 'description', 'content',
        'pubDate', 'country', 'category', 'language', 'file_keyword',
    ]
    csv_file_name = 'combined_news_response.csv'
    # index=False: the index column was only ever dropped downstream.
    json_data_df[columns].to_csv(csv_file_name, index=False)
    print(f'{csv_file_name} is created')


# SECURITY NOTE: avoid committing API keys to source control. Prefer the
# NEWSDATA_API_KEY environment variable; the literal is kept only as a
# backward-compatible fallback and should be rotated.
NEWSDATA_API_KEY = os.environ.get(
    "NEWSDATA_API_KEY", "pub_2915202f68e543f70bb9aba9611735142c1fd"
)

# Search terms covering macro data, earnings, geopolitics, commodities,
# FX, tech, crypto, and the major US indices.
keywords = [
    "GDP", "CPI", "PPI", "Unemployment Rate", "Interest Rates", "Inflation",
    "Trade Balance", "Retail Sales", "Manufacturing Index",
    "Earnings Reports", "Revenue Growth", "Profit Margins",
    "Earnings Surprises", "Geopolitical Events", "Trade Tensions",
    "Elections", "Natural Disasters", "Global Health Crises", "Oil Prices",
    "Gold Prices", "Precious Metals", "Agricultural Commodities",
    "Federal Reserve", "ECB", "Forex Market", "Exchange Rates",
    "Currency Pairs", "Tech Company Earnings", "Tech Innovations",
    "Retail Trends", "Consumer Sentiment", "Financial Regulations",
    "Government Policies", "Technical Analysis", "Fundamental Analysis",
    "Cryptocurrency News", "Bitcoin", "Altcoins",
    "Cryptocurrency Regulations", "S&P 500", "Dow Jones", "NASDAQ",
    "Market Analysis", "Stock Market Indices",
]

# Default location for the per-keyword response files.
directory_path = './data'


def call_functions(directory_path='./data'):
    """Gradio entry point: ensure the data directory exists, fetch/combine
    responses as needed, and return the first rows of the combined CSV.

    Returns a DataFrame preview on success, or an error string if the CSV
    could not be produced.
    """
    # The Gradio Textbox submits '' when left blank; fall back to the default.
    directory_path = directory_path or './data'
    creating_data_dir(directory_path)
    items = os.listdir(directory_path)
    file_name = './combined_news_response.json'
    if len(items) == 0:
        print(f"Directory '{directory_path}' is empty.")
        api = NewsDataApiClient(apikey=NEWSDATA_API_KEY)
        # Pass the actual directory (the original wrote to the global path
        # even when a different directory was requested).
        retrieve_news_per_keyword(api, keywords, directory_path)
        combine_responses_into_one(directory_path)
        convert_json_to_csv(file_name)
    elif len(items) >= 2:
        print(f"Directory '{directory_path}' contains at least two files.")
        combine_responses_into_one(directory_path)
        convert_json_to_csv(file_name)
    else:
        print(f"Directory '{directory_path}' contains only one file.")

    csv_file_name = "combined_news_response.csv"
    if os.path.exists(csv_file_name):
        df = pd.read_csv(csv_file_name)
        # Defensive: drop a stray index column from older CSVs.
        if 'Unnamed: 0' in df.columns:
            df.drop('Unnamed: 0', axis=1, inplace=True)
        return df.head(10)  # adjust the number of preview rows as needed
    return f"CSV file '{csv_file_name}' not found."


# Build the Gradio interface at import time so other modules can reuse it.
iface = gr.Interface(
    fn=call_functions,
    inputs=gr.components.Textbox(label="Directory Path"),
    outputs=gr.components.Dataframe(type="pandas"),
)

# Only launch the app when run as a script (the original launched on import).
if __name__ == "__main__":
    iface.launch(debug=True)