Commit

unused import
nlueem committed Aug 3, 2024
1 parent 9fbd51e commit 17c1d35
Showing 1 changed file with 8 additions and 8 deletions.
16 changes: 8 additions & 8 deletions app.py
@@ -12,12 +12,12 @@
 from pydantic import BaseModel
 
 # FastAPI imports
-from fastapi import Request, FastAPI
+from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 
 import torch
 
-# Set up logging
+# Set up logging
 logging.set_verbosity_info()
 logger = logging.get_logger("transformers")
 
@@ -73,30 +73,30 @@
 
 # Data model for making POST requests to /chat
 class ChatRequest(BaseModel):
-    """Class representing a chat-request"""
+    """Class representing a data-model"""
     messages: list
     temperature: Union[float, None] = None
    top_p: Union[float, None] = None
     max_new_tokens: Union[int, None] = None
 
 
-def generate(messages: list,
+def generate(messages: list,
              temperature: float = None,
              top_p: float = None,
              max_new_tokens: int = None) -> str:
     """Generates a response given a list of messages (conversation history)
     and the generation configuration."""
 
     temperature = (
-        temperature if temperature is not None
+        temperature if temperature is not None
         else default_generation_config["temperature"]
     )
     top_p = (
-        top_p if top_p is not None
+        top_p if top_p is not None
         else default_generation_config["top_p"]
     )
     max_new_tokens = (
-        max_new_tokens if max_new_tokens is not None
+        max_new_tokens if max_new_tokens is not None
         else default_generation_config["max_new_tokens"]
     )
     prompt = pipe.tokenizer.apply_chat_template(
@@ -140,7 +140,7 @@ def chat(chat_request: ChatRequest):
     if not is_system_prompt(messages[0]):
         messages.insert(0, {"role": "system", "content": DEFAULT_SYSTEM_PROMPT})
 
-    logger.info("Generating response...")
+    logger.info("Generating response...")
     response = generate(messages, temperature, top_p, max_new_tokens)
     logger.info(f"/chat Response: {response}")
     return response
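For context, a minimal sketch of how a client might call the /chat endpoint touched by this diff, assuming the app is served locally on port 8000 and the requests library is installed (the host, port, and example values are assumptions, not part of this commit):

import requests

# Payload mirrors the ChatRequest model; any omitted generation
# parameter falls back to default_generation_config on the server.
payload = {
    "messages": [{"role": "user", "content": "Hello!"}],
    "temperature": 0.7,
    "top_p": 0.9,
    "max_new_tokens": 256,
}

response = requests.post("http://localhost:8000/chat", json=payload)
print(response.json())  # the endpoint returns the generated string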
