-
Notifications
You must be signed in to change notification settings - Fork 7
Expand file tree
/
Copy path: gpt3
More file actions
executable file
·127 lines (113 loc) · 3.32 KB
/
gpt3
File metadata and controls
executable file
·127 lines (113 loc) · 3.32 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
#!/bin/bash
set -ef -o pipefail

# Inherit defaults from env variables for easy scripting.
# Available engines (models) are listed here: https://platform.openai.com/docs/models
# Apparently text-davinci-003 costs a lot more, and is less accurate, but I found it much faster than gpt-3.5-turbo!
ENGINE="${ENGINE:-text-davinci-003}"
#ENGINE="${ENGINE:-gpt-3.5-turbo}"
# NOTE(review): the old fallback `ENGINE=davinci` was dead code — the line
# above always sets ENGINE when it is empty — so it has been removed.
TEMPERATURE="${TEMPERATURE:-0.5}"
FREQ_PENALTY="${FREQ_PENALTY:-0}"
PRES_PENALTY="${PRES_PENALTY:-0}"
# Parse command-line flags; anything unrecognised is collected and restored
# as a positional parameter afterwards.
positional_args=()
while (( $# > 0 )); do
  opt="$1"
  case "$opt" in
    -e|--engine)
      ENGINE="$2"
      shift # consume flag
      shift # consume value
      ;;
    -t|--temperature)
      TEMPERATURE="$2"
      shift # consume flag
      shift # consume value
      ;;
    -f|--freq-penalty)
      FREQ_PENALTY="$2"
      shift # consume flag
      shift # consume value
      ;;
    -p|--pres-penalty)
      PRES_PENALTY="$2"
      shift # consume flag
      shift # consume value
      ;;
    *)
      # Not a recognised flag: keep it for later.
      positional_args+=("$opt")
      shift
      ;;
  esac
done
set -- "${positional_args[@]}" # restore positional parameters
# $1 is the prompt; $2 (optional) is the max number of tokens to generate.
[ -z "$2" ] && MAX_TOKENS=256 || MAX_TOKENS="$2"
# Accept the key from OPENAI_KEY, falling back to the more conventional OPENAI_API_KEY.
[ -z "$OPENAI_KEY" ] && KEY="$OPENAI_API_KEY" || KEY="$OPENAI_KEY"
PROMPT="$1"
if [ -z "$KEY" ]; then
  # Diagnostics go to stderr so they never pollute piped completion output.
  echo "You must export OPENAI_KEY or OPENAI_API_KEY. Get one here: https://platform.openai.com/account/api-keys" >&2
  exit 1
fi
if ! command -v jq >/dev/null 2>&1; then
  echo "Please install jq (Command-line JSON processor) - instructions in README" >&2
  exit 1
fi
# FIXME: Improve error handling
#######################################
# Send PROMPT to the chat completions endpoint and print the reply.
# Globals:   ENGINE, PROMPT, TEMPERATURE, KEY (all read)
# Outputs:   the model's reply text to stdout, or the raw API response
#            when no reply could be extracted (e.g. an error object).
#######################################
call_chat_api() {
  # Build the JSON body with jq so PROMPT is fully escaped (quotes,
  # backslashes, newlines, control characters). The previous heredoc
  # only escaped double quotes, producing invalid JSON for many prompts.
  request_data=$(
    jq -n \
      --arg model "$ENGINE" \
      --arg content "$PROMPT" \
      --argjson temperature "$TEMPERATURE" \
      '{model: $model, messages: [{role: "user", content: $content}], temperature: $temperature}'
  )
  response="$(
    # Not a GET request.
    # -A sets the User-Agent; the old `-H "gpt3-cli-…"` had no colon and
    # was therefore not a valid header line.
    curl -sSL -N \
      https://api.openai.com/v1/chat/completions \
      -A "gpt3-cli-joeytwiddle/0.2.0 (https://github.com/CrazyPython/gpt3-cli)" \
      -H "Authorization: Bearer $KEY" \
      -H "Content-Type: application/json" \
      -d "$request_data"
  )"
  result="$(
    printf "%s\n" "$response" |
      jq -j --unbuffered '.choices[0].message.content'
  )"
  # Print the extracted reply if present; otherwise dump the raw response
  # so the user can see the API error.
  if [ -n "$result" ] && ! [ "$result" = "null" ]
  then
    printf "%s\n" "$result"
  else #[[ "$result" =~ '"error": {' ]]
    printf "%s\n" "$response"
  fi
  # Example error response:
  # {
  #   "error": {
  #     "message": "you must provide a model parameter",
  #     "type": "invalid_request_error",
  #     "param": null,
  #     "code": null
  #   }
  # }
}
#######################################
# Stream a completion from the legacy (pre-chat) completions endpoint.
# The response is server-sent events: strip the "data: " prefix, drop the
# "[DONE]" terminator, and emit the text of each chunk as it arrives.
# Globals:   ENGINE, PROMPT, TEMPERATURE, MAX_TOKENS, FREQ_PENALTY,
#            PRES_PENALTY, KEY (all read)
# Outputs:   streamed completion text to stdout, plus a trailing newline.
#######################################
call_old_api() {
  # -A sets the User-Agent; the old `-H "gpt3-cli-…"` had no colon and was
  # therefore not a valid header line. `--line-buffered` is the full GNU
  # grep option name (the `--line-buffer` abbreviation is GNU-only).
  curl -sSL -N \
    -G "https://api.openai.com/v1/engines/${ENGINE}/completions/browser_stream" \
    -A "gpt3-cli-joeytwiddle/0.2.0 (https://github.com/CrazyPython/gpt3-cli)" \
    -H "Authorization: Bearer $KEY" \
    --data-urlencode model="$ENGINE" \
    --data-urlencode prompt="$PROMPT" \
    --data-urlencode temperature="$TEMPERATURE" \
    --data-urlencode max_tokens="$MAX_TOKENS" \
    --data-urlencode frequency_penalty="$FREQ_PENALTY" \
    --data-urlencode presence_penalty="$PRES_PENALTY" |
    sed -u 's/^data: //' | grep --line-buffered -v '^\[DONE\]$' | jq -j --unbuffered '.choices[0].text'
  # Add trailing newline (the streamed chunks contain none at the end)
  echo
}
# Route to the right API. A quoted RHS of =~ is a substring match, so the
# original condition was: ENGINE contains "gpt-3.5-turbo" anywhere, OR
# ENGINE starts with "gpt-". Expressed here as equivalent glob patterns.
case "$ENGINE" in
  *gpt-3.5-turbo*|gpt-*)
    call_chat_api
    ;;
  *)
    call_old_api
    ;;
esac