Skip to content

Commit

Permalink
added --changeDefaultModel to persistently change default model
Browse files Browse the repository at this point in the history
  • Loading branch information
xssdoctor committed Mar 6, 2024
1 parent b844511 commit 2f29597
Show file tree
Hide file tree
Showing 3 changed files with 63 additions and 22 deletions.
22 changes: 8 additions & 14 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -197,10 +197,8 @@ Once you have it all set up, here's how to use it.
`fabric -h`

```bash
fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}]
[--output [OUTPUT]] [--stream] [--list] [--update]
[--pattern PATTERN] [--setup] [--local] [--claude]
[--model MODEL] [--listmodels] [--context]
fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}] [--output [OUTPUT]] [--stream] [--list] [--update] [--pattern PATTERN] [--setup] [--changeDefaultModel CHANGEDEFAULTMODEL] [--local]
[--claude] [--model MODEL] [--listmodels] [--context]

An open source framework for augmenting humans using AI.

Expand All @@ -209,27 +207,23 @@ options:
--text TEXT, -t TEXT Text to extract summary from
--copy, -C Copy the response to the clipboard
--agents {trip_planner,ApiKeys}, -a {trip_planner,ApiKeys}
Use an AI agent to help you with a task. Acceptable
values are 'trip_planner' or 'ApiKeys'. This option
cannot be used with any other flag.
Use an AI agent to help you with a task. Acceptable values are 'trip_planner' or 'ApiKeys'. This option cannot be used with any other flag.
--output [OUTPUT], -o [OUTPUT]
Save the response to a file
--stream, -s Use this option if you want to see the results in
realtime. NOTE: You will not be able to pipe the
output into another command.
--stream, -s Use this option if you want to see the results in realtime. NOTE: You will not be able to pipe the output into another command.
--list, -l List available patterns
--update, -u Update patterns
--pattern PATTERN, -p PATTERN
The pattern (prompt) to use
--setup Set up your fabric instance
--changeDefaultModel CHANGEDEFAULTMODEL
Change the default model. Your choice will be saved in ~/.config/fabric/.env). For a list of available models, use the --listmodels flag.
--local, -L Use local LLM. Default is llama2
--claude Use Claude AI
--model MODEL, -m MODEL
Select the model to use (GPT-4 by default for chatGPT
and llama2 for Ollama)
Select the model to use (GPT-4 by default for chatGPT and llama2 for Ollama)
--listmodels List all available models
--context, -c Use Context file (context.md) to add context to your
pattern
--context, -c Use Context file (context.md) to add context to your pattern
```
#### Example commands
Expand Down
6 changes: 6 additions & 0 deletions installer/client/cli/fabric.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@ def main():
parser.add_argument(
"--setup", help="Set up your fabric instance", action="store_true"
)
parser.add_argument('--changeDefaultModel',
help="Change the default model. Your choice will be saved in ~/.config/fabric/.env). For a list of available models, use the --listmodels flag.")
parser.add_argument(
'--local', '-L', help="Use local LLM. Default is llama2", action="store_true")

Expand Down Expand Up @@ -77,6 +79,10 @@ def main():
Update()
Alias()
sys.exit()
if args.changeDefaultModel:
Setup().default_model(args.changeDefaultModel)
print(f"Default model changed to {args.changeDefaultModel}")
sys.exit()
if args.agents:
# Handle the agents logic
if args.agents == 'trip_planner':
Expand Down
57 changes: 49 additions & 8 deletions installer/client/cli/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,21 +51,24 @@ def __init__(self, args, pattern="", env_file="~/.config/fabric/.env", local=Fal
self.args = args
self.model = args.model
self.claude = claude
if self.local:
if self.args.model == 'gpt-4-turbo-preview':
self.args.model = 'llama2'
if self.claude:
if self.args.model == 'gpt-4-turbo-preview':
self.model = 'claude-3-opus-20240229'
try:
self.model = os.environ["DEFAULT_MODEL"]
except:
if self.local:
if self.args.model == 'gpt-4-turbo-preview':
self.model = 'llama2'
if self.claude:
if self.args.model == 'gpt-4-turbo-preview':
self.model = 'claude-3-opus-20240229'

async def localChat(self, messages):
    """Send the chat *messages* to the local Ollama model and print the reply.

    Args:
        messages (list): Chat messages in Ollama's message-dict format.
    """
    from ollama import AsyncClient
    client = AsyncClient()
    reply = await client.chat(model=self.model, messages=messages)
    print(reply['message']['content'])

async def localStream(self, messages):
    """Stream a chat response from the local Ollama model to stdout.

    Each partial chunk is printed as it arrives, without a trailing newline,
    so the output appears incrementally.

    Args:
        messages (list): Chat messages in Ollama's message-dict format.
    """
    from ollama import AsyncClient
    stream = await AsyncClient().chat(
        model=self.model, messages=messages, stream=True)
    async for chunk in stream:
        print(chunk['message']['content'], end='', flush=True)

async def claudeStream(self, system, user):
Expand Down Expand Up @@ -243,6 +246,8 @@ def sendMessage(self, input_data: str, context=""):
if "overloaded_error" in str(e):
print(
"Error: Fabric is working fine, but claude is overloaded. Please try again later.")
if "Attempted to call a sync iterator on an async stream" in str(e):
print("Error: There is a problem connecting fabric with your local ollama installation. Please visit https://ollama.com for installation instructions. It is possible that you have chosen the wrong model. Please run fabric --listmodels to see the available models and choose the right one with fabric --model <model> or fabric --changeDefaultModel. If this does not work. Restart your computer (always a good idea) and try again. If you are still having problems, please visit https://ollama.com for installation instructions.")
else:
print(f"Error: {e}")
print(e)
Expand All @@ -261,6 +266,7 @@ def fetch_available_models(self):
"https://api.openai.com/v1/models", headers=headers)

if response.status_code == 200:
print("OpenAI GPT models:\n")
models = response.json().get("data", [])
# Filter only gpt models
gpt_models = [model for model in models if model.get(
Expand All @@ -270,6 +276,13 @@ def fetch_available_models(self):

for model in sorted_gpt_models:
print(model.get("id"))
print("\nLocal Ollama models:")
import ollama
ollamaList = ollama.list()['models']
for model in ollamaList:
print(model['name'].rstrip(":latest"))
print("\nClaude models:")
print("claude-3-opus-20240229")
else:
print(f"Failed to fetch models: HTTP {response.status_code}")

Expand Down Expand Up @@ -461,6 +474,33 @@ def claude_key(self, claude_key):
with open(self.env_file, "w") as f:
f.write(f"CLAUDE_API_KEY={claude_key}")

def default_model(self, model):
    """Persist the default model choice in the environment file.

    Rewrites ``self.env_file`` so it contains exactly one
    ``DEFAULT_MODEL=<model>`` entry while preserving every other line.
    Passing an empty (or whitespace-only) model removes any existing
    ``DEFAULT_MODEL`` entry instead of setting a new one.

    Args:
        model (str): The model name to save (surrounding whitespace is
            stripped), e.g. "gpt-4-turbo-preview".
    """
    model = model.strip()
    env_exists = os.path.exists(self.env_file)
    if env_exists and model:
        # Drop any previous DEFAULT_MODEL line, keep everything else,
        # then append the new setting.  The trailing newline keeps the
        # file well-formed for any later appends.
        with open(self.env_file, "r") as f:
            lines = f.readlines()
        with open(self.env_file, "w") as f:
            for line in lines:
                if "DEFAULT_MODEL" not in line:
                    f.write(line)
            f.write(f"DEFAULT_MODEL={model}\n")
    elif model:
        # No env file yet: create it with just the default-model entry.
        with open(self.env_file, "w") as f:
            f.write(f"DEFAULT_MODEL={model}\n")
    elif env_exists:
        # Empty model: strip any existing DEFAULT_MODEL entry.
        # (Guarding on env_exists fixes a crash in the original, which
        # tried to read a file that might not exist.)
        with open(self.env_file, "r") as f:
            lines = f.readlines()
        with open(self.env_file, "w") as f:
            for line in lines:
                if "DEFAULT_MODEL" not in line:
                    f.write(line)

def patterns(self):
""" Method to update patterns and exit the system.
Expand All @@ -486,6 +526,7 @@ def run(self):
print("Please enter your claude API key. If you do not have one, or if you have already entered it, press enter.\n")
claudekey = input()
self.claude_key(claudekey.strip())
print("Please enter your default model. Press enter to choose the default gpt-4-turbo-preview\n")
self.patterns()


Expand Down

0 comments on commit 2f29597

Please sign in to comment.