|
| 1 | +# Copyright (C) 2024 Intel Corporation |
| 2 | +# SPDX-License-Identifier: Apache-2.0 |
| 3 | + |
| 4 | +import json |
| 5 | +import unittest |
| 6 | +from typing import Union |
| 7 | + |
| 8 | +import requests |
| 9 | +from fastapi import Request |
| 10 | + |
| 11 | +from comps import ( |
| 12 | + EmbedDoc, |
| 13 | + EmbedMultimodalDoc, |
| 14 | + LVMDoc, |
| 15 | + LVMSearchedMultimodalDoc, |
| 16 | + MultimodalDoc, |
| 17 | + MultimodalRAGWithVideosGateway, |
| 18 | + SearchedMultimodalDoc, |
| 19 | + ServiceOrchestrator, |
| 20 | + TextDoc, |
| 21 | + opea_microservices, |
| 22 | + register_microservice, |
| 23 | +) |
| 24 | + |
| 25 | + |
@register_microservice(name="mm_embedding", host="0.0.0.0", port=8083, endpoint="/v1/mm_embedding")
async def mm_embedding_add(request: MultimodalDoc) -> EmbedDoc:
    """Mock multimodal embedding service: echo the input text with a fixed embedding."""
    payload = json.loads(request.model_dump_json())
    return {"text": payload["text"], "embedding": [0.12, 0.45]}
| 35 | + |
| 36 | + |
@register_microservice(name="mm_retriever", host="0.0.0.0", port=8084, endpoint="/v1/mm_retriever")
async def mm_retriever_add(request: EmbedMultimodalDoc) -> SearchedMultimodalDoc:
    """Mock multimodal retriever: return a canned single-image result for the query text."""
    payload = json.loads(request.model_dump_json())
    # One fabricated retrieval hit: a tiny base64 PNG plus its transcript.
    hit = {
        "b64_img_str": "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8/5+hnoEIwDiqkL4KAcT9GO0U4BxoAAAAAElFTkSuQmCC",
        "transcript_for_inference": "yellow image",
    }
    return {
        "retrieved_docs": [],
        "initial_query": payload["text"],
        "top_n": 1,
        "metadata": [hit],
        "chat_template": "The caption of the image is: '{context}'. {question}",
    }
| 54 | + |
| 55 | + |
@register_microservice(name="lvm", host="0.0.0.0", port=8085, endpoint="/v1/lvm")
async def lvm_add(request: Union[LVMDoc, LVMSearchedMultimodalDoc]) -> TextDoc:
    """Mock LVM service.

    Accepts either the retriever's output (appends a canned completion to the
    initial query) or a direct user prompt (wraps it in the LLaVA-style template).
    """
    payload = json.loads(request.model_dump_json())
    if isinstance(request, LVMSearchedMultimodalDoc):
        print("request is the output of multimodal retriever")
        answer = payload["initial_query"] + "opea project!"
    else:
        print("request is from user.")
        answer = f"<image>\nUSER: {payload['prompt']}\nASSISTANT:"
    return {"text": answer}
| 73 | + |
| 74 | + |
class TestServiceOrchestrator(unittest.IsolatedAsyncioTestCase):
    """End-to-end tests for MultimodalRAGWithVideosGateway.

    Wires the three mock microservices defined above (embedding -> retriever -> lvm)
    into a ServiceOrchestrator pipeline, plus a second single-node orchestrator
    (lvm only) used for follow-up queries, and exposes both through the gateway
    on port 9898.
    """

    @classmethod
    def setUpClass(cls):
        # Start the three mock services registered at module import time.
        cls.mm_embedding = opea_microservices["mm_embedding"]
        cls.mm_retriever = opea_microservices["mm_retriever"]
        cls.lvm = opea_microservices["lvm"]
        cls.mm_embedding.start()
        cls.mm_retriever.start()
        cls.lvm.start()

        # Main RAG pipeline: embedding -> retriever -> lvm.
        cls.service_builder = ServiceOrchestrator()

        cls.service_builder.add(opea_microservices["mm_embedding"]).add(opea_microservices["mm_retriever"]).add(
            opea_microservices["lvm"]
        )
        cls.service_builder.flow_to(cls.mm_embedding, cls.mm_retriever)
        cls.service_builder.flow_to(cls.mm_retriever, cls.lvm)

        # Follow-up-query pipeline bypasses retrieval and talks to the LVM directly.
        cls.follow_up_query_service_builder = ServiceOrchestrator()
        cls.follow_up_query_service_builder.add(cls.lvm)

        cls.gateway = MultimodalRAGWithVideosGateway(
            cls.service_builder, cls.follow_up_query_service_builder, port=9898
        )

    @classmethod
    def tearDownClass(cls):
        # Shut everything down so ports are freed for other test modules.
        cls.mm_embedding.stop()
        cls.mm_retriever.stop()
        cls.lvm.stop()
        cls.gateway.stop()

    async def test_service_builder_schedule(self):
        """Full pipeline: the LVM appends 'opea project!' to the initial query."""
        result_dict, _ = await self.service_builder.schedule(initial_inputs={"text": "hello, "})
        self.assertEqual(result_dict[self.lvm.name]["text"], "hello, opea project!")

    async def test_follow_up_query_service_builder_schedule(self):
        """Follow-up pipeline: a direct prompt is wrapped in the LLaVA template."""
        result_dict, _ = await self.follow_up_query_service_builder.schedule(
            initial_inputs={"prompt": "chao, ", "image": "some image"}
        )
        # print(result_dict)
        self.assertEqual(result_dict[self.lvm.name]["text"], "<image>\nUSER: chao, \nASSISTANT:")

    def test_multimodal_rag_with_videos_gateway(self):
        """A plain-string message goes through the full RAG pipeline over HTTP."""
        json_data = {"messages": "hello, "}
        response = requests.post("http://0.0.0.0:9898/v1/mmragvideoqna", json=json_data)
        response = response.json()
        self.assertEqual(response["choices"][-1]["message"]["content"], "hello, opea project!")

    def test_follow_up_mm_rag_with_videos_gateway(self):
        """A multi-turn conversation routes to the follow-up (LVM-only) pipeline."""
        json_data = {
            "messages": [
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": "hello, "},
                        {
                            "type": "image_url",
                            "image_url": {"url": "https://www.ilankelman.org/stopsigns/australia.jpg"},
                        },
                    ],
                },
                {"role": "assistant", "content": "opea project! "},
                {"role": "user", "content": "chao, "},
            ],
            "max_tokens": 300,
        }
        response = requests.post("http://0.0.0.0:9898/v1/mmragvideoqna", json=json_data)
        response = response.json()
        self.assertEqual(
            response["choices"][-1]["message"]["content"],
            "<image>\nUSER: hello, \nASSISTANT: opea project! \nUSER: chao, \n\nASSISTANT:",
        )

    def test_handle_message(self):
        """_handle_message flattens a multi-turn conversation into one prompt string."""
        messages = [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": "hello, "},
                    {
                        "type": "image_url",
                        "image_url": {"url": "https://www.ilankelman.org/stopsigns/australia.jpg"},
                    },
                ],
            },
            {"role": "assistant", "content": "opea project! "},
            {"role": "user", "content": "chao, "},
        ]
        # NOTE(review): images is returned but not asserted on here.
        prompt, images = self.gateway._handle_message(messages)
        self.assertEqual(prompt, "hello, \nASSISTANT: opea project! \nUSER: chao, \n")

    def test_handle_message_with_system_prompt(self):
        """A leading system message is prepended to the flattened prompt."""
        messages = [
            {"role": "system", "content": "System Prompt"},
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": "hello, "},
                    {
                        "type": "image_url",
                        "image_url": {"url": "https://www.ilankelman.org/stopsigns/australia.jpg"},
                    },
                ],
            },
            {"role": "assistant", "content": "opea project! "},
            {"role": "user", "content": "chao, "},
        ]
        prompt, images = self.gateway._handle_message(messages)
        self.assertEqual(prompt, "System Prompt\nhello, \nASSISTANT: opea project! \nUSER: chao, \n")

    async def test_handle_request(self):
        """handle_request called directly (no HTTP) with a mocked FastAPI Request."""
        json_data = {
            "messages": [
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": "hello, "},
                        {
                            "type": "image_url",
                            "image_url": {"url": "https://www.ilankelman.org/stopsigns/australia.jpg"},
                        },
                    ],
                },
                {"role": "assistant", "content": "opea project! "},
                {"role": "user", "content": "chao, "},
            ],
            "max_tokens": 300,
        }
        # Build a bare Request and pre-seed its cached JSON body so no ASGI
        # receive channel is needed.
        mock_request = Request(scope={"type": "http"})
        mock_request._json = json_data
        res = await self.gateway.handle_request(mock_request)
        res = json.loads(res.json())
        self.assertEqual(
            res["choices"][-1]["message"]["content"],
            "<image>\nUSER: hello, \nASSISTANT: opea project! \nUSER: chao, \n\nASSISTANT:",
        )
| 212 | + |
| 213 | + |
# Run the test suite when executed as a script.
if __name__ == "__main__":
    unittest.main()
0 commit comments