diff --git a/.gitignore b/.gitignore
index aedf101..53ee6d8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,8 +2,8 @@
 # burpference logs and local config
 logs/
 .burpference/
-configs/*
-!configs/*.example.json
+# Only ignore config files with the _local suffix
+configs/*_local.json
 prompt.txt
 
 ### Python ###
diff --git a/burpference/api_adapters.py b/burpference/api_adapters.py
index 806fc11..6588071 100644
--- a/burpference/api_adapters.py
+++ b/burpference/api_adapters.py
@@ -275,6 +275,63 @@ def process_response(self, response_data):
         return str(response)
 
 
+# Cohere /v2/chat API adapter class
+class CohereAPIAdapter(BaseAPIAdapter):
+    def prepare_request(self, user_content, system_content=None):
+        messages = []
+        if system_content:
+            messages.append(
+                {
+                    "role": "SYSTEM",
+                    "content": system_content,
+                }
+            )
+        messages.append(
+            {
+                "role": "USER",
+                "content": user_content,
+            }
+        )
+
+        return {
+            "model": self.config.get("model", "command-r-plus-08-2024"),
+            "messages": messages,
+            "stream": self.config.get("stream", False),
+        }
+
+    def process_response(self, response_data):
+        response = json.loads(response_data)
+        if "text" in response:
+            return response["text"]
+        elif "response" in response and "text" in response["response"]:
+            return response["response"]["text"]
+        else:
+            raise ValueError("Unexpected response format: %s" % str(response))
+
+    def send_request(self, request_payload):
+        headers = self.config.get("headers", {})
+        if not headers:
+            headers = {
+                "accept": "application/json",
+                "content-type": "application/json",
+                "Authorization": "Bearer %s" % self.config.get("api_key", ""),
+            }
+
+        encoded_data = json.dumps(request_payload).encode("utf-8")
+        req = urllib2.Request(
+            self.config.get("host"), data=encoded_data, headers=headers
+        )
+
+        try:
+            response = urllib2.urlopen(req)
+            return response.read()
+        except urllib2.HTTPError as e:
+            error_message = e.read().decode("utf-8")
+            raise ValueError("HTTP Error %d: %s" % (e.code, error_message))
+        except Exception as e:
+            raise ValueError("Error sending request: %s" % str(e))
+
+
 # Generic other API base adapter
@@ -295,7 +352,9 @@ def get_api_adapter(config):
     api_type = config.get("api_type", "").lower()
     endpoint = config.get("host", "").lower()
 
-    if api_type == "ollama":
+    if api_type == "cohere":
+        return CohereAPIAdapter(config)
+    elif api_type == "ollama":
         if "/generate" in endpoint:
             return OllamaGenerateAPIAdapter(config)
         elif "/chat" in endpoint:
diff --git a/configs/README.md b/configs/README.md
index de44b9a..772d4e4 100644
--- a/configs/README.md
+++ b/configs/README.md
@@ -17,6 +17,8 @@ If you intend to fork or contribute to burpference, ensure that you have exclude
     - [Example OpenAI `/completions` inference with `gpt-4o-mini`:](#example-openai-completions-inference-with-gpt-4o-mini)
   - [HuggingFace Serveless Inference](#huggingface-serveless-inference)
     - [Example HuggingFace `/text-generation` inference](#example-huggingface-text-generation-inference)
+  - [Cohere `/v2/chat` Inference](#cohere-v2chat-inference)
+    - [Example Cohere `/v2/chat` inference](#example-cohere-v2chat-inference)
 - [Model System Prompts](#model-system-prompts)
 
 ---
@@ -122,9 +124,26 @@ In order to serve inference as part of burpference, the model must be running on
 }
 ```
 
-## Model System Prompts
+### Cohere `/v2/chat` Inference
+
+#### Example Cohere `/v2/chat` inference
+
+```json
+{
+  "api_type": "cohere",
+  "headers": {
+    "Authorization": "bearer CO_API_KEY",
+    "accept": "application/json",
+    "content-type": "application/json"
+  },
+  "host": "https://api.cohere.com/v2/chat",
+  "model": "command-r-plus-08-2024",
+  "stream": false
+}
+```
 
-By default, the system prompt sent as pretext to the model is defined [here](../prompts/proxy_prompt.txt), feel free to edit, tune and tweak as you see fit.
+## Model System Prompts
+By default, the system prompt sent as pretext to the model is defined [here](../prompts/proxy_prompt.txt); feel free to edit, tune, and tweak it as you see fit. This also applies to the scanner extension tab.
 
 
 ---
\ No newline at end of file
diff --git a/configs/cohere_command_r_plus_08_2024.json b/configs/cohere_command_r_plus_08_2024.json
new file mode 100644
index 0000000..817d439
--- /dev/null
+++ b/configs/cohere_command_r_plus_08_2024.json
@@ -0,0 +1,11 @@
+{
+  "api_type": "cohere",
+  "headers": {
+    "Authorization": "bearer CO_API_KEY",
+    "accept": "application/json",
+    "content-type": "application/json"
+  },
+  "host": "https://api.cohere.com/v2/chat",
+  "model": "command-r-plus-08-2024",
+  "stream": false
+}
\ No newline at end of file