Merge branch 'main' of github.com:abetlen/llama_cpp_python into main
commit a928893d03
2 changed files with 8 additions and 8 deletions
@@ -45,7 +45,7 @@ Note: If you are using Apple Silicon (M1) Mac, make sure you have installed a ve
 wget https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-MacOSX-arm64.sh
 bash Miniforge3-MacOSX-arm64.sh
 ```
-Otherwise, while installing it will build the llama.ccp x86 version which will be 10x slower on Apple Silicon (M1) Mac.
+Otherwise, while installing it will build the llama.cpp x86 version which will be 10x slower on Apple Silicon (M1) Mac.

 ### Installation with Hardware Acceleration

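The README change itself is just the llama.ccp → llama.cpp typo fix, but the surrounding context carries the real advice: on an Apple Silicon (M1) Mac the Python interpreter must be a native arm64 build (e.g. installed via the Miniforge script shown), or the package will compile an x86 llama.cpp that runs roughly 10x slower. As a minimal sketch, not part of this commit, a pre-install sanity check using only the standard library might look like:

```python
import platform

# Hypothetical pre-install check (not from this commit): warn when the
# interpreter on macOS is not a native arm64 build (e.g. running under
# Rosetta), since llama.cpp would then be compiled for x86.
if platform.system() == "Darwin" and platform.machine() != "arm64":
    print(
        f"Python reports machine={platform.machine()!r}; install an arm64 "
        "Python (e.g. via Miniforge3-MacOSX-arm64.sh) before running "
        "`pip install llama-cpp-python`."
    )
else:
    print(f"OK: native build detected (machine={platform.machine()!r}).")
```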
@@ -587,7 +587,6 @@ def format_open_orca(
     system_template = "{system_message}"
     system_message = (
         "You are a helpful assistant. Please answer truthfully and write out your "
-    )
         "thinking step by step to be sure you get the right answer. If you make a mistake or encounter "
         "an error in your thinking, say so out loud and attempt to correct it. If you don't know or "
         "aren't sure about something, say so clearly. You will act as a professional logician, mathematician, "
@@ -595,6 +594,7 @@ def format_open_orca(
         "question or solve the relevant problem; state which expert type your are, if so. Also think of "
         "any particular named expert that would be ideal to answer the relevant question or solve the "
         "relevant problem; name and act as them, if appropriate."
+    )
     roles = ("User", "Assistant")
     sep = "<|end_of_turn|>\n"
     # stop_token_ids=[32000, 32001], # "<|end_of_turn|>"
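Read together, the two hunks above move the closing parenthesis of the `system_message = (...)` expression from right after the first string literal down to after the last one, so Python's implicit concatenation of adjacent string literals now builds the full Open Orca system prompt instead of stopping after the first sentence. A minimal sketch of the pattern, with the prompt text abbreviated rather than quoted in full:

```python
# Before the fix: the parenthesis closes after the first literal, so the later
# literals are bare expression statements that never reach the variable.
broken = (
    "You are a helpful assistant. Please answer truthfully and write out your "
)
# "thinking step by step to be sure you get the right answer. ..."  # dead no-op string

# After the fix: all adjacent literals sit inside one parenthesized expression
# and are concatenated into a single system message at compile time.
fixed = (
    "You are a helpful assistant. Please answer truthfully and write out your "
    "thinking step by step to be sure you get the right answer."
)

assert fixed.startswith(broken) and len(fixed) > len(broken)
```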