gpt_enterprise.gpt_utils
🧠 GPT utils 🧠
"""
🧠 GPT utils 🧠
"""

import os
from typing import Generator, List, Optional, Tuple

import openai
import requests


EMPLOYEE_PROMPTS_PATH = os.path.join(os.path.dirname(__file__), "prompts", "employees")


def generate_text(
    system_prompt: str,
    user_prompt: str,
    temperature: float,
    model: Optional[str] = None,
) -> Generator:
    """
    Ask the chat model to answer ``user_prompt`` while acting per ``system_prompt``.

    Args:
        system_prompt (str): Initialize the system with the given system prompt
        user_prompt (str): Assistant will try to give the best answer for the given user prompt
        temperature (float): Sampling temperature
        model (str, optional): OpenAI model to be used; defaults to the
            ``MODEL_NAME`` environment variable (read at call time),
            falling back to ``gpt-3.5-turbo-16k``.

    Returns:
        Generator: GPT response object
    """
    # Resolve the model at call time so a MODEL_NAME set after import is
    # honoured (a default of os.getenv(...) would be frozen at import time).
    if model is None:
        model = os.getenv("MODEL_NAME", "gpt-3.5-turbo-16k")

    response = openai.chat.completions.create(
        model=model,
        messages=[
            # Initialize GPT with system prompt
            {
                "role": "system",
                "content": system_prompt,
            },
            # Generate text relating to the user's prompt
            {"role": "user", "content": user_prompt},
        ],
        temperature=temperature,
    )

    return response


def generate_image(
    base_name: str,
    user_prompt: str,
    output_directory: str,
    system_prompt: str = "",
    nb_image: int = 1,
) -> Tuple[str, List[str]]:
    """
    Generate a prompt based on user_prompt and inject it into DALL-E
    to generate images.

    Args:
        base_name (str): Images' base name
        user_prompt (str): Assistant will try to give the best answer for the given user prompt
        output_directory (str): Images' output directory
        system_prompt (str): Optional persona appended to the DALL-E prompter system prompt
        nb_image (int): Number of images to generate

    Returns:
        Tuple[str, List[str]]: The generated DALL-E prompt and the generated image names
    """
    # Read the prompter file ONCE: a second file.read() on the same handle
    # returns "" because the file pointer is already at EOF.
    with open(os.path.join(EMPLOYEE_PROMPTS_PATH, "dall_e_prompter.txt"), "r") as file:
        prompter = file.read()

    # Parenthesize the conditional part: without parentheses,
    # ``a + b if cond else ""`` is ``(a + b) if cond else ""`` and the base
    # prompter text is silently dropped whenever system_prompt is empty.
    system_content = prompter + (
        f" You are also {system_prompt} But keep in mind that {prompter}"
        if system_prompt
        else ""
    )

    # Ask ChatGPT a prompt to generate image with DALL-E
    response = openai.chat.completions.create(
        model=os.getenv("MODEL_NAME", "gpt-3.5-turbo-16k"),
        messages=[
            # Initialize ChatGPT to be a helpful assistant but that it remains the employee
            {"role": "system", "content": system_content},
            # Generate a subject
            {"role": "user", "content": f"SUBJECT {user_prompt}"},
        ],
    )

    generated_image_names = []

    try:
        # Create images, truncate prompt to 70 characters
        # to be sure it will be accepted by DALL-E.
        # openai.Image.create is the removed pre-v1 API; images.generate is
        # the v1 counterpart, consistent with chat.completions.create above.
        image_response = openai.images.generate(
            prompt=response.choices[0].message.content[:70],
            n=nb_image,
            size="1024x1024",
        )

        # Download images
        for index, image in enumerate(image_response.data):
            img_data = requests.get(image.url).content
            img_name = f"{base_name}_{index}.jpg"
            img_path = os.path.join(output_directory, img_name)
            with open(img_path, "wb") as handler:
                handler.write(img_data)
            generated_image_names.append(f"./{img_name}")
    except Exception as error:
        # Best-effort: an image-generation failure must not break the caller;
        # return whatever names were produced so far.
        print(error)

    return response.choices[0].message.content, generated_image_names
EMPLOYEE_PROMPTS_PATH =
'/opt/hostedtoolcache/Python/3.11.8/x64/lib/python3.11/site-packages/gpt_enterprise/prompts/employees'
def
generate_text( system_prompt: str, user_prompt: str, temperature: float, model: str = 'gpt-3.5-turbo-16k') -> Generator:
def generate_text(
    system_prompt: str,
    user_prompt: str,
    temperature: float,
    model: Optional[str] = None,
) -> Generator:
    """
    Ask the chat model to answer ``user_prompt`` while acting per ``system_prompt``.

    Args:
        system_prompt (str): Initialize the system with the given system prompt
        user_prompt (str): Assistant will try to give the best answer for the given user prompt
        temperature (float): Sampling temperature
        model (str, optional): OpenAI model to be used; defaults to the
            ``MODEL_NAME`` environment variable (read at call time),
            falling back to ``gpt-3.5-turbo-16k``.

    Returns:
        Generator: GPT response object
    """
    # Resolve the model at call time so a MODEL_NAME set after import is
    # honoured (a default of os.getenv(...) would be frozen at import time).
    if model is None:
        model = os.getenv("MODEL_NAME", "gpt-3.5-turbo-16k")

    response = openai.chat.completions.create(
        model=model,
        messages=[
            # Initialize GPT with system prompt
            {
                "role": "system",
                "content": system_prompt,
            },
            # Generate text relating to the user's prompt
            {"role": "user", "content": user_prompt},
        ],
        temperature=temperature,
    )

    return response
Arguments:
- system_prompt (str): Initialize the system with the given system prompt
- user_prompt (str): Assistant will try to give the best answer for the given user prompt
- model (str): OpenAI model to be used
- temperature (float): Temperature
Returns:
Generator: GPT response object
def
generate_image( base_name: str, user_prompt: str, output_directory: str, system_prompt: str = '', nb_image: int = 1) -> Tuple[str, List[str]]:
def generate_image(
    base_name: str,
    user_prompt: str,
    output_directory: str,
    system_prompt: str = "",
    nb_image: int = 1,
) -> Tuple[str, List[str]]:
    """
    Generate a prompt based on user_prompt and inject it into DALL-E
    to generate images.

    Args:
        base_name (str): Images' base name
        user_prompt (str): Assistant will try to give the best answer for the given user prompt
        output_directory (str): Images' output directory
        system_prompt (str): Optional persona appended to the DALL-E prompter system prompt
        nb_image (int): Number of images to generate

    Returns:
        Tuple[str, List[str]]: The generated DALL-E prompt and the generated image names
    """
    # Read the prompter file ONCE: a second file.read() on the same handle
    # returns "" because the file pointer is already at EOF.
    with open(os.path.join(EMPLOYEE_PROMPTS_PATH, "dall_e_prompter.txt"), "r") as file:
        prompter = file.read()

    # Parenthesize the conditional part: without parentheses,
    # ``a + b if cond else ""`` is ``(a + b) if cond else ""`` and the base
    # prompter text is silently dropped whenever system_prompt is empty.
    system_content = prompter + (
        f" You are also {system_prompt} But keep in mind that {prompter}"
        if system_prompt
        else ""
    )

    # Ask ChatGPT a prompt to generate image with DALL-E
    response = openai.chat.completions.create(
        model=os.getenv("MODEL_NAME", "gpt-3.5-turbo-16k"),
        messages=[
            # Initialize ChatGPT to be a helpful assistant but that it remains the employee
            {"role": "system", "content": system_content},
            # Generate a subject
            {"role": "user", "content": f"SUBJECT {user_prompt}"},
        ],
    )

    generated_image_names = []

    try:
        # Create images, truncate prompt to 70 characters
        # to be sure it will be accepted by DALL-E.
        # openai.Image.create is the removed pre-v1 API; images.generate is
        # the v1 counterpart, consistent with chat.completions.create above.
        image_response = openai.images.generate(
            prompt=response.choices[0].message.content[:70],
            n=nb_image,
            size="1024x1024",
        )

        # Download images
        for index, image in enumerate(image_response.data):
            img_data = requests.get(image.url).content
            img_name = f"{base_name}_{index}.jpg"
            img_path = os.path.join(output_directory, img_name)
            with open(img_path, "wb") as handler:
                handler.write(img_data)
            generated_image_names.append(f"./{img_name}")
    except Exception as error:
        # Best-effort: an image-generation failure must not break the caller;
        # return whatever names were produced so far.
        print(error)

    return response.choices[0].message.content, generated_image_names
Generate a prompt based on user_prompt and inject it into DALL-E to generate images.
Arguments:
- system_prompt (str): Initialize the system with the given system prompt
- user_prompt (str): Assistant will try to give the best answer for the given user prompt
- base_name (str): Images' base name
- output_directory (str): Images' output directory
- nb_image (int): Number of images to generate
Returns:
Tuple[str, List[str]]: The generated DALL-E prompt and the generated image names