|
import asyncio
import os
import sys

# Make the in-repo "src" package importable when running this script directly
# from the examples directory, without requiring an installed typesense
# distribution. Insert at index 1 so it takes precedence over site-packages
# while leaving the script's own directory (index 0) first.
curr_dir = os.path.dirname(os.path.realpath(__file__))
repo_root = os.path.abspath(os.path.join(curr_dir, os.pardir))
sys.path.insert(1, os.path.join(repo_root, "src"))

import typesense

from typesense.types.document import MessageChunk, StreamConfigBuilder
| 13 | + |
def require_env(name: str) -> str:
    """Return the value of environment variable *name*.

    Raises:
        RuntimeError: if the variable is unset or empty.
    """
    if value := os.environ.get(name):
        return value
    raise RuntimeError(f"Missing required environment variable: {name}")
| 19 | + |
| 20 | + |
async def _delete_quietly(resource) -> None:
    """Best-effort delete of a Typesense resource.

    Deliberately swallows all errors: on a fresh server the resource does not
    exist yet and the delete call fails — that is fine for cleanup.
    """
    try:
        await resource.delete()
    except Exception:
        pass


async def _reset_server_state(client) -> None:
    """Remove leftovers from a previous run so the creation calls below succeed."""
    # Delete the conversation model first: it references the history collection.
    await _delete_quietly(client.conversations_models["conv-model-1"])
    await _delete_quietly(client.collections["streaming_docs"])
    await _delete_quietly(client.collections["conversation_store"])


async def _create_collections(client, openai_api_key: str) -> None:
    """Create the conversation-history store and the auto-embedding docs collection."""
    await client.collections.create(
        {
            "name": "conversation_store",
            "fields": [
                {"name": "conversation_id", "type": "string"},
                {"name": "model_id", "type": "string"},
                {"name": "timestamp", "type": "int32"},
                {"name": "role", "type": "string", "index": False},
                {"name": "message", "type": "string", "index": False},
            ],
        }
    )
    await client.collections.create(
        {
            "name": "streaming_docs",
            "fields": [
                {"name": "title", "type": "string"},
                {
                    "name": "embedding",
                    "type": "float[]",
                    # Server-side embedding: Typesense calls the OpenAI
                    # embedding API for every indexed document.
                    "embed": {
                        "from": ["title"],
                        "model_config": {
                            "model_name": "openai/text-embedding-3-small",
                            "api_key": openai_api_key,
                        },
                    },
                },
            ],
        }
    )


async def _seed_documents(client) -> None:
    """Index a couple of documents for the conversational search to draw on."""
    await client.collections["streaming_docs"].documents.create(
        {"id": "stream-1", "title": "Company profile: a developer tools firm."}
    )
    await client.collections["streaming_docs"].documents.create(
        {"id": "stream-2", "title": "Internal memo about quarterly planning."}
    )


async def main() -> None:
    """End-to-end demo of Typesense conversational search with streamed replies.

    Reads TYPESENSE_API_KEY and OPENAI_API_KEY from the environment and talks
    to a Typesense server expected at http://localhost:8108. Prints the LLM
    answer chunk-by-chunk as it streams in.

    Raises:
        RuntimeError: if a required environment variable is missing.
    """
    typesense_api_key = require_env("TYPESENSE_API_KEY")
    openai_api_key = require_env("OPENAI_API_KEY")

    client = typesense.AsyncClient(
        {
            "api_key": typesense_api_key,
            "nodes": [
                {
                    "host": "localhost",
                    "port": "8108",
                    "protocol": "http",
                }
            ],
            "connection_timeout_seconds": 10,
        }
    )

    try:
        await _reset_server_state(client)
        await _create_collections(client, openai_api_key)
        await _seed_documents(client)

        conversation_model = await client.conversations_models.create(
            {
                "id": "conv-model-1",
                "model_name": "openai/gpt-3.5-turbo",
                "history_collection": "conversation_store",
                "api_key": openai_api_key,
                "system_prompt": (
                    "You are an assistant for question-answering. "
                    "Only use the provided context. Add some fluff about you Being an assistant built for Typesense Conversational Search and a brief overview of how it works"
                ),
                "max_bytes": 16384,
            }
        )

        stream = StreamConfigBuilder()

        @stream.on_chunk
        def on_chunk(chunk: MessageChunk) -> None:
            # Emit each token immediately, unbuffered, so the answer appears live.
            print(chunk["message"], end="", flush=True)

        @stream.on_complete
        def on_complete(response: dict) -> None:
            print("\n---\nComplete response keys:", response.keys())

        await client.collections["streaming_docs"].documents.search(
            {
                "q": "What is this document about?",
                "query_by": "embedding",
                "exclude_fields": "embedding",
                "conversation": True,
                "prefix": False,
                "conversation_stream": True,
                "conversation_model_id": conversation_model["id"],
                "stream_config": stream,
            }
        )
    finally:
        # Always release the underlying HTTP session, even when setup fails.
        await client.api_call.aclose()
| 133 | + |
| 134 | + |
if __name__ == "__main__":
    # Script entry point: run the async demo to completion on a fresh event loop.
    asyncio.run(main())
0 commit comments