2. Install the OpenAI Python library by running `pip install openai` in your terminal or command prompt.
3. Create a new file called `gpt_example.py` and add the following code:
# Standard-library imports.
import os  # Read configuration from environment variables

# Third-party imports.
from dotenv import load_dotenv  # Load environment variables from a .env file
# Fixed: the previous `from openai import ChatCompletion, CurieTemperature, System`
# imported names that do not exist in the OpenAI SDK and raised ImportError.
from openai import OpenAI  # Official OpenAI client (v1.x API)

# Load environment variables from a .env file in the working directory.
load_dotenv()

# Credentials/configuration read from the environment; either may be None
# when the variable is unset. The OpenAI client also reads OPENAI_API_KEY
# from the environment itself, so the explicit lookup is kept only for
# backward compatibility with code that may reference these names.
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
PROJECT_ID = os.getenv('PROJECT_ID')
# Define a function to generate response based on user input and project data
def generate_response(user_input):
    """Generate a GPT-4 chat completion for *user_input*.

    Args:
        user_input: The user's question or prompt as a string.

    Returns:
        The model's reply text as a string.

    Raises:
        openai.OpenAIError: On authentication or API failures.
    """
    # Imported locally so this function is self-contained even if the
    # module-level imports are stale.
    from openai import OpenAI

    # The client reads OPENAI_API_KEY from the environment by default.
    client = OpenAI()

    # The chat completions API takes plain dicts with "role"/"content"
    # keys -- there are no `System`/`CurieTemperature` classes in the SDK.
    # The system message sets assistant behavior; the user message
    # carries the actual question.
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": user_input},
    ]

    # temperature=0 makes the output as deterministic as the API allows.
    completion = client.chat.completions.create(
        model="gpt-4",  # Use GPT-4 for best results
        messages=messages,
        temperature=0,
    )

    # The reply lives on the first choice's message content. The old
    # completion['choices'][0]['text'] shape belonged to the legacy
    # completions endpoint, not chat, and would raise KeyError here.
    return completion.choices[0].message.content
# Demonstrate the function only when the file is run as a script, so that
# importing this module does not trigger a billable API call.
if __name__ == "__main__":
    print(generate_response("What is the best way to learn Python?"))
4. Run `python gpt_example.py` in your terminal or command prompt and inspect the generated response.
5. Continue refining your code as needed, using OpenAI's documentation for guidance and support.
To scrape Google results with Python:
1. First, install the necessary libraries by running `pip install beautifulsoup4 requests` in your terminal or command prompt.
2. Create a new file called `google_scraper.py` and add the following code:
# Standard-library imports.
import os  # Read configuration from environment variables

# Third-party imports.
import requests  # HTTP client used to fetch the search page
from bs4 import BeautifulSoup  # HTML parser for extracting results
from dotenv import load_dotenv  # Pull variables out of a local .env file

# Make any values defined in a .env file visible through os.getenv().
load_dotenv()

# Configuration pulled from the environment (None when unset). This
# scraper fetches Google's public HTML directly rather than calling an
# API, so these values are kept only for code elsewhere that may read them.
GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
PROJECT_ID = os.getenv('PROJECT_ID')
def scrape_google(query):
    """Scrape the first page of Google web-search results for *query*.

    NOTE(review): scraping Google's result pages is against Google's
    Terms of Service and the HTML class names change frequently; prefer
    the official Custom Search JSON API for anything beyond a demo.

    Args:
        query: Search terms as a plain string.

    Returns:
        A list of up to 10 dicts with 'title', 'url', and 'snippet'
        keys. Result cards missing any of those pieces are skipped
        instead of raising AttributeError.

    Raises:
        requests.HTTPError: If Google answers with a non-2xx status
            (e.g. 429 when it detects automated traffic).
        requests.Timeout: If the request takes longer than 10 seconds.
    """
    # Local import keeps this fix self-contained (stdlib only).
    from urllib.parse import quote_plus

    # Browser-like headers reduce the chance of being served the
    # no-JavaScript/blocked page.
    headers = {
        'User-Agent': (
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
            'AppleWebKit/537.36 (KHTML, like Gecko) '
            'Chrome/89.0.4389.122 Safari/537.36'
        ),
        # Fixed typo: 'application/xhl+xml' is not a real media type.
        'Accept': 'text/html,application/xhtml+xml,application/json',
    }

    # quote_plus() makes the query URL-safe (spaces, '&', unicode). The
    # original URL also carried 'tbs=cdr:true_color,lr:...,pdar:ir_oa,...',
    # which is not a valid combination of Google search operators, so it
    # has been dropped.
    search_url = f"https://www.google.com/search?q={quote_plus(query)}&num=10"

    # Timeout prevents the call from hanging forever; raise_for_status()
    # surfaces 429/503 blocks instead of silently parsing an error page.
    response = requests.get(search_url, headers=headers, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')

    results = []
    # 'div.g' is Google's container for organic results -- an undocumented
    # detail that breaks whenever Google reworks its markup.
    for card in soup.find_all('div', {'class': 'g'})[:10]:
        title_tag = card.find('h3')
        link_tag = card.find('a')
        # NOTE(review): the original looked for span.vwet0, which does not
        # match Google's markup; 'VwiC3b' is the snippet class observed at
        # the time of writing -- confirm against a live page.
        snippet_tag = card.find('span', {'class': 'VwiC3b'})
        if not (title_tag and link_tag and snippet_tag):
            continue  # Skip ads/partial cards rather than crashing on None
        results.append({
            'title': title_tag.get_text(strip=True),
            'url': link_tag.get('href'),
            'snippet': snippet_tag.get_text(strip=True),
        })
    return results
# Run a demonstration search only when executed as a script, so that
# importing this module does not fire off an HTTP request to Google.
if __name__ == "__main__":
    print(scrape_google("Python tutorial"))
3. Run `python google_scraper.py` in your terminal or command prompt and inspect the scraped results.
4. Continue refining your code as needed. Be aware that scraping Google's result pages violates Google's Terms of Service and the page markup changes often; for reliable production use, prefer the official Custom Search JSON API.