13 changes: 6 additions & 7 deletions plugins/wasm-go/extensions/ai-proxy/provider/dify.go
@@ -4,15 +4,13 @@ import (
"encoding/json"
"errors"
"fmt"
"net/http"
"strings"
"time"

"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
"github.com/alibaba/higress/plugins/wasm-go/pkg/log"
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
"net/http"
"strings"
)

const (
@@ -138,7 +136,7 @@ func (d *difyProvider) responseDify2OpenAI(ctx wrapper.HttpContext, response *Di
}
return &chatCompletionResponse{
Id: id,
Created: time.Now().UnixMilli() / 1000,
Created: response.CreatedAt,
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
SystemFingerprint: "",
Object: objectChatCompletion,
@@ -222,7 +220,7 @@ func (d *difyProvider) streamResponseDify2OpenAI(ctx wrapper.HttpContext, respon
}
return &chatCompletionResponse{
Id: id,
Created: time.Now().UnixMilli() / 1000,
Created: response.CreatedAt,
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
SystemFingerprint: "",
Object: objectChatCompletionChunk,
@@ -309,7 +307,7 @@ type DifyChatResponse struct {
ConversationId string `json:"conversation_id"`
MessageId string `json:"message_id"`
Answer string `json:"answer"`
CreateAt int64 `json:"create_at"`
CreatedAt int64 `json:"created_at"`
Data DifyData `json:"data"`
MetaData DifyMetaData `json:"metadata"`
}
@@ -319,6 +317,7 @@ type DifyChunkChatResponse struct {
ConversationId string `json:"conversation_id"`
MessageId string `json:"message_id"`
Answer string `json:"answer"`
CreatedAt int64 `json:"created_at"`
Data DifyData `json:"data"`
MetaData DifyMetaData `json:"metadata"`
}
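
For orientation, here is a minimal, self-contained sketch of the mapping these hunks describe: the Dify response's `created_at` (Unix seconds) is carried into the OpenAI-style `created` field instead of stamping `time.Now()`, and the struct tag is `created_at` rather than the earlier `create_at`. The types below are trimmed stand-ins for the plugin's actual structs, and the id handling is simplified for illustration.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-in for the plugin's DifyChatResponse; only the fields
// relevant to the timestamp change are kept.
type DifyChatResponse struct {
	ConversationId string `json:"conversation_id"`
	MessageId      string `json:"message_id"`
	Answer         string `json:"answer"`
	CreatedAt      int64  `json:"created_at"` // Unix seconds reported by Dify
}

// Simplified stand-in for the OpenAI-compatible response the provider emits.
type chatCompletionResponse struct {
	Id      string `json:"id"`
	Created int64  `json:"created"`
	Model   string `json:"model"`
	Object  string `json:"object"`
}

// difyToOpenAI mirrors the change in responseDify2OpenAI: "created" is taken
// from the upstream response instead of the current wall-clock time.
func difyToOpenAI(resp *DifyChatResponse, model string) *chatCompletionResponse {
	return &chatCompletionResponse{
		Id:      resp.MessageId,
		Created: resp.CreatedAt,
		Model:   model,
		Object:  "chat.completion",
	}
}

func main() {
	raw := []byte(`{"conversation_id":"c1","message_id":"m1","answer":"hi","created_at":10}`)
	var difyResp DifyChatResponse
	if err := json.Unmarshal(raw, &difyResp); err != nil {
		panic(err)
	}
	out, _ := json.Marshal(difyToOpenAI(&difyResp, "dify"))
	fmt.Println(string(out)) // {"id":"m1","created":10,"model":"dify","object":"chat.completion"}
}
```

This is also why the expected bodies in the new e2e cases assert `"created":10`: the value comes from the mock's `created_at`, not from the time the test runs.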
2 changes: 1 addition & 1 deletion test/e2e/conformance/base/llm-mock.yaml
@@ -29,7 +29,7 @@ metadata:
spec:
containers:
- name: llm-mock
image: higress-registry.cn-hangzhou.cr.aliyuncs.com/higress/llm-mock:latest
image: higress-registry.cn-hangzhou.cr.aliyuncs.com/higress/llm-mock-server:latest
ports:
- containerPort: 3000
---
66 changes: 66 additions & 0 deletions test/e2e/conformance/tests/go-wasm-ai-proxy.go
@@ -957,6 +957,72 @@ data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"?"}}

data: [DONE]

`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "dify case 1: non-streaming completion request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.dify.ai",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好"}],"stream":false}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"id":"chatcmpl-llm-mock","choices":[{"index":0,"message":{"role":"assistant","content":"USER: \n你好\n"},"finish_reason":"stop"}],"created":10,"model":"dify","object":"chat.completion","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}}`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "dify case 2: streaming completion request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.dify.ai",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好"}],"stream":true}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeTextEventStream,
Body: []byte(`data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"U"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"S"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"E"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"R"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":":"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":" "}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"\n"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"你"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"好"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"\n"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}}

data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"USER: \n你好\n"},"finish_reason":"stop"}],"model":"dify","object":"chat.completion.chunk","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}}

`),
},
},
29 changes: 29 additions & 0 deletions test/e2e/conformance/tests/go-wasm-ai-proxy.yaml
@@ -315,6 +315,25 @@ spec:
port:
number: 3000
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: wasmplugin-ai-proxy-dify
namespace: higress-conformance-ai-backend
spec:
ingressClassName: higress
rules:
- host: "api.dify.ai"
http:
paths:
- pathType: Prefix
path: "/"
backend:
service:
name: llm-mock-service
port:
number: 3000
---
apiVersion: extensions.higress.io/v1alpha1
kind: WasmPlugin
metadata:
@@ -493,4 +512,14 @@ spec:
type: zhipuai
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-zhipuai
- config:
provider:
apiTokens:
- fake_token
modelMapping:
'*': dify
type: dify
botType: Completion
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-dify
url: file:///opt/plugins/wasm-go/extensions/ai-proxy/plugin.wasm
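
As a usage note, a minimal client sketch that sends the same non-streaming request the new dify test case uses, assuming a locally reachable gateway; the `127.0.0.1` address is a placeholder, and in the conformance suite the request goes through the Higress gateway rather than directly to the mock.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {
	// Placeholder gateway address; adjust to wherever Higress is exposed.
	req, err := http.NewRequest(http.MethodPost,
		"http://127.0.0.1/v1/chat/completions",
		strings.NewReader(`{"model":"gpt-3","messages":[{"role":"user","content":"你好"}],"stream":false}`))
	if err != nil {
		panic(err)
	}
	req.Host = "api.dify.ai" // matched by the wasmplugin-ai-proxy-dify Ingress above
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body))
}
```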