Custom inference parameters for cope (#7)
Browse files
- Custom inference parameters for cope (574225a2a7a0a2630a5b99e1dde904ca11119282)
Co-authored-by: Samidh <[email protected]>
- utils/model_interface.py +1 -2
utils/model_interface.py
CHANGED
|
@@ -126,11 +126,10 @@ def run_test(
|
|
| 126 |
client = OpenAI(base_url='https://q0c7gn9b2s2wkb6v.us-east-1.aws.endpoints.huggingface.cloud/v1', api_key=cope_token)
|
| 127 |
cope_prompt = COPE_PROMPT_TEMPLATE.format(policy=policy,content=test_input)
|
| 128 |
response = client.completions.create(
|
| 129 |
-
model=
|
| 130 |
prompt=cope_prompt,
|
| 131 |
max_tokens=1,
|
| 132 |
temperature=0,
|
| 133 |
-
extra_headers={"X-HF-Bill-To": "roosttools"},
|
| 134 |
)
|
| 135 |
result = {"content": response.choices[0].text}
|
| 136 |
return result
|
|
|
|
| 126 |
client = OpenAI(base_url='https://q0c7gn9b2s2wkb6v.us-east-1.aws.endpoints.huggingface.cloud/v1', api_key=cope_token)
|
| 127 |
cope_prompt = COPE_PROMPT_TEMPLATE.format(policy=policy,content=test_input)
|
| 128 |
response = client.completions.create(
|
| 129 |
+
model='cope-a-9b',
|
| 130 |
prompt=cope_prompt,
|
| 131 |
max_tokens=1,
|
| 132 |
temperature=0,
|
|
|
|
| 133 |
)
|
| 134 |
result = {"content": response.choices[0].text}
|
| 135 |
return result
|