Skip to content

Commit a090178

Browse files
authored
[Serving] Support multiple languages. (#312)
1 parent c7ba50c commit a090178

File tree

7 files changed

+468
-0
lines changed

7 files changed

+468
-0
lines changed

serving/sdk/go/README.md

Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
# DeepRec Processor Go Example
2+
3+
## 要求
4+
5+
- Go
6+
- Protobuf v3.6.1
7+
8+
9+
## 使用
10+
11+
1. 根据proto文件生成对应go文件
12+
13+
```sh
14+
mkdir tensorflow_eas
15+
go mod init demo
16+
```
17+
18+
根据 [Protobuf Docs](https://github.com/protocolbuffers/protobuf/tree/48cb18e5c419ddd23d9badcfe4e9df7bde1979b2#protocol-compiler-installation) 安装 protocol compiler
19+
20+
安装 protocol compiler plugins for Go
21+
```sh
22+
go install github.com/golang/protobuf/protoc-gen-go@v1.2.0
23+
```
24+
25+
添加$GOPATH/bin到PATH环境变量中
26+
27+
根据proto文件生成go文件
28+
```sh
29+
protoc --go_out=../go/tensorflow_eas -I../../processor/serving/ predict.proto
30+
```
31+
32+
注意:predict.proto文件位于DeepRec/serving/processor/serving
33+
34+
2. 安装protobuf go module
35+
36+
```sh
37+
go get github.com/golang/protobuf/proto@v1.2.0
38+
```
39+
40+
3. 生成 DeepRec Serving Processor
41+
42+
需要```libserving_processor.so```
43+
编译详见[https://github.com/alibaba/DeepRec](https://github.com/alibaba/DeepRec)项目首页“How to Build serving library”部分。
44+
45+
4. 生成 demo checkpoint 和 savedmodel
46+
47+
```sh
48+
python simple_model.py --saved_model_dir=xxx --checkpoint_dir=xxx
49+
```
50+
如果没有设置saved_model_dir,默认路径为 '/tmp/saved_model'
51+
如果没有设置checkpoint_dir,默认路径为 '/tmp/checkpoint/1'
52+
```sh
53+
python simple_model.py
54+
```
55+
56+
注意:simple_model.py文件位于DeepRec/serving/processor/tests/end2end/
57+
58+
5. 设置demo.go中的`saved_model_dir` 和 `checkpoint_dir`
59+
60+
```go
61+
var modelConfig = []byte(`{
62+
...
63+
"checkpoint_dir": "/tmp/checkpoint/",
64+
"savedmodel_dir": "/tmp/saved_model/"
65+
} `)
66+
```
67+
注意:这里的 checkpoint_dir 应该是 checkpoint dir 的父目录,
68+
比如 '/tmp/checkpoint/1',设置 checkpoint_dir 为 '/tmp/checkpoint'
69+
70+
6. 编译和运行
71+
72+
修改demo.go中LDFLAGS为processor的路径
73+
```sh
74+
go build .
75+
LD_PRELOAD=/path/to/libserving_processor.so ./demo
76+
```

serving/sdk/go/demo.go

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
package main
2+
3+
/*
4+
#cgo CFLAGS: -I${SRCDIR}
5+
#cgo LDFLAGS: -L${SRCDIR} -lserving_processor
6+
#include <stdlib.h>
7+
#include "processor.h"
8+
*/
9+
import "C"
10+
import (
11+
tensorflow_eas "demo/tensorflow_eas"
12+
fmt "fmt"
13+
proto "github.com/golang/protobuf/proto"
14+
unsafe "unsafe"
15+
)
16+
17+
var modelConfig = []byte(`{
18+
"omp_num_threads": 4,
19+
"kmp_blocktime": 0,
20+
"feature_store_type": "memory",
21+
"serialize_protocol": "protobuf",
22+
"inter_op_parallelism_threads": 10,
23+
"intra_op_parallelism_threads": 10,
24+
"init_timeout_minutes": 1,
25+
"signature_name": "serving_default",
26+
"read_thread_num": 3,
27+
"update_thread_num": 2,
28+
"model_store_type": "local",
29+
"checkpoint_dir": "/tmp/checkpoint/",
30+
"savedmodel_dir": "/tmp/saved_model/"
31+
} `)
32+
33+
func main() {
34+
// Load shared library
35+
modelEntry := []byte(".")
36+
state := C.int(0)
37+
model := C.initialize((*C.char)(unsafe.Pointer(&modelEntry[0])), (*C.char)(unsafe.Pointer(&modelConfig[0])), &state)
38+
defer C.free(unsafe.Pointer(model))
39+
if int(state) == -1 {
40+
println("initialize error")
41+
}
42+
43+
// input type: float
44+
dtype := tensorflow_eas.ArrayDataType_DT_FLOAT
45+
// input shape: [1, 1]
46+
var arrayShape tensorflow_eas.ArrayShape
47+
arrayShape.Dim = append(arrayShape.Dim, 1)
48+
arrayShape.Dim = append(arrayShape.Dim, 1)
49+
// input array
50+
var input tensorflow_eas.ArrayProto
51+
input.FloatVal = append(input.FloatVal, 1.0)
52+
input.Dtype = dtype
53+
input.ArrayShape = &arrayShape
54+
55+
// Predictrequest
56+
var req tensorflow_eas.PredictRequest
57+
req.SignatureName = "serving_default"
58+
req.OutputFilter = append(req.OutputFilter, "y:0")
59+
req.Inputs = make(map[string]*tensorflow_eas.ArrayProto)
60+
req.Inputs["x:0"] = &input
61+
buffer, err := proto.Marshal(&req)
62+
if err != nil {
63+
println(err.Error())
64+
}
65+
size := C.int(proto.Size(&req))
66+
67+
// do process
68+
output := unsafe.Pointer(nil)
69+
defer C.free(output)
70+
outputSize := C.int(0)
71+
state = C.process(model, unsafe.Pointer(&buffer[0]), size, &output, &outputSize)
72+
73+
// parse response
74+
outputString := C.GoBytes(output, outputSize)
75+
var resp tensorflow_eas.PredictResponse
76+
err = proto.Unmarshal(outputString, &resp)
77+
if err != nil {
78+
println(err.Error())
79+
}
80+
fmt.Printf("process returned state: %d, response: %s", int(state), resp.Outputs)
81+
82+
}

serving/sdk/go/processor.h

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
#ifndef SERVING_PROCESSOR_SERVING_TF_PROCESSOR_H
#define SERVING_PROCESSOR_SERVING_TF_PROCESSOR_H

/* C API of the DeepRec serving processor (libserving_processor.so).
 * Callers exchange serialized protobuf messages (see predict.proto) as raw
 * byte buffers. */

#ifdef __cplusplus
extern "C" {
#endif
/* Loads a model. model_entry and model_config are NUL-terminated strings;
 * model_config is a JSON configuration. Returns an opaque model handle.
 * Callers in this commit treat *state == -1 as initialization failure. */
void* initialize(const char* model_entry, const char* model_config, int* state);
/* Runs one inference. input_data/input_size hold a serialized PredictRequest;
 * on return *output_data/*output_size hold a serialized PredictResponse
 * allocated by the processor (caller-owned; the Go demo frees it). Returns a
 * status code. */
int process(void* model_buf, const void* input_data, int input_size,
            void** output_data, int* output_size);
/* Batched variant of process(): arrays of request buffers in, arrays of
 * response buffers out. Not exercised by the demos in this commit. */
int batch_process(void* model_buf, const void* input_data[], int* input_size,
                  void* output_data[], int* output_size);
/* Writes serving-model metadata into *output_data/*output_size. Not
 * exercised by the demos in this commit. */
int get_serving_model_info(void* model_buf, void** output_data, int* output_size);
#ifdef __cplusplus
}
#endif
#endif

serving/sdk/java/README.md

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
# DeepRec Processor Java Example
2+
3+
## 要求
4+
5+
- Java Environment
6+
- Protobuf v3.6.1
7+
8+
9+
## 使用
10+
11+
1. 根据proto文件生成对应java文件
12+
13+
```sh
14+
mkdir bin lib
15+
```
16+
17+
lib目录下需要```protobuf-java.jar```,根据 [Protobuf Docs](https://github.com/protocolbuffers/protobuf/tree/48cb18e5c419ddd23d9badcfe4e9df7bde1979b2/java#build-from-source) 生成
18+
19+
根据proto文件生成java文件
20+
```sh
21+
protoc --java_out=../java -I../../processor/serving/ predict.proto
22+
```
23+
24+
注意:predict.proto文件位于DeepRec/serving/processor/serving
25+
26+
2. 下载JNA
27+
28+
lib目录下需要```jna.jar```,访问 [JNA GitHub](https://github.com/java-native-access/jna) 下载
29+
30+
3. 生成 DeepRec Serving Processor
31+
32+
需要```libserving_processor.so```
33+
编译详见[https://github.com/alibaba/DeepRec](https://github.com/alibaba/DeepRec)项目首页“How to Build serving library”部分。
34+
35+
4. 生成 demo checkpoint 和 savedmodel
36+
37+
```sh
38+
python simple_model.py --saved_model_dir=xxx --checkpoint_dir=xxx
39+
```
40+
如果没有设置saved_model_dir,默认路径为 '/tmp/saved_model'
41+
如果没有设置checkpoint_dir,默认路径为 '/tmp/checkpoint/1'
42+
```sh
43+
python simple_model.py
44+
```
45+
46+
注意:simple_model.py文件位于DeepRec/serving/processor/tests/end2end/
47+
48+
5. 设置Demo.java中的`saved_model_dir` 和 `checkpoint_dir`
49+
50+
```java
51+
public static String modelConfig =
52+
... +
53+
"\"checkpoint_dir\": \"/tmp/checkpoint/\"," +
54+
"\"savedmodel_dir\": \"/tmp/saved_model/\"}";
55+
```
56+
注意:这里的 checkpoint_dir 应该是 checkpoint dir 的父目录,
57+
比如 '/tmp/checkpoint/1',设置 checkpoint_dir 为 '/tmp/checkpoint'
58+
59+
6. 编译和运行
60+
61+
```sh
62+
javac -d bin/ src/main/java/Demo.java tensorflow/eas/Predict.java -cp lib/\*
63+
```
64+
65+
```sh
66+
LD_PRELOAD=/path/to/libserving_processor.so java -cp bin/:lib/\* src.main.java.Demo tensorflow.eas.Predict
67+
```
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
package src.main.java;
2+
3+
import com.sun.jna.Library;
4+
import com.sun.jna.Native;
5+
import com.sun.jna.Pointer;
6+
import com.sun.jna.ptr.PointerByReference;
7+
8+
import tensorflow.eas.Predict;
9+
10+
class Demo {
11+
12+
public static String modelConfig =
13+
"{\"omp_num_threads\": 4," +
14+
"\"kmp_blocktime\": 0," +
15+
"\"feature_store_type\": \"memory\"," +
16+
"\"serialize_protocol\": \"protobuf\"," +
17+
"\"inter_op_parallelism_threads\": 10," +
18+
"\"intra_op_parallelism_threads\": 10," +
19+
"\"init_timeout_minutes\": 1," +
20+
"\"signature_name\": \"serving_default\"," +
21+
"\"read_thread_num\": 3," +
22+
"\"update_thread_num\": 2," +
23+
"\"model_store_type\": \"local\"," +
24+
"\"checkpoint_dir\": \"/tmp/checkpoint/\"," +
25+
"\"savedmodel_dir\": \"/tmp/saved_model/\"}";
26+
27+
// Load shared library via JNA
28+
public interface Processor extends Library
29+
{
30+
Processor INSTANCE = (Processor) Native.load("serving_processor", Processor.class);
31+
32+
// Define shared library function prototype
33+
public Pointer initialize(String modelEntry, String modelConfig, int[] state);
34+
35+
public int process(Pointer model, byte[] buffer, int size, PointerByReference outputData, int[] outputSize);
36+
}
37+
38+
public static void main(String[] args) {
39+
Demo demo = new Demo();
40+
String modelEntry = "";
41+
int[] state = {0};
42+
Pointer model = Processor.INSTANCE.initialize(modelEntry, modelConfig, state);
43+
if (state[0] == -1) {
44+
System.err.println("initialize error");
45+
}
46+
47+
// input type: float
48+
Predict.ArrayDataType dtype = Predict.ArrayDataType.DT_FLOAT;
49+
// input shape: [1, 1]
50+
Predict.ArrayShape arrayShape =
51+
Predict.ArrayShape.newBuilder()
52+
.addDim(1)
53+
.addDim(1)
54+
.build();
55+
// input array
56+
Predict.ArrayProto input =
57+
Predict.ArrayProto.newBuilder()
58+
.addFloatVal((float) 1.0)
59+
.setDtype(dtype)
60+
.setArrayShape(arrayShape)
61+
.build();
62+
// PredictRequest
63+
Predict.PredictRequest req =
64+
Predict.PredictRequest.newBuilder()
65+
.setSignatureName("serving_default")
66+
.addOutputFilter("y:0")
67+
.putInputs("x:0", input)
68+
.build();
69+
byte[] buffer = req.toByteArray();
70+
int size = req.getSerializedSize();
71+
72+
// do process
73+
PointerByReference output = new PointerByReference();
74+
int[] outputSize = {0};
75+
state[0] = Processor.INSTANCE.process(model, buffer, size, output, outputSize);
76+
77+
// parse response
78+
byte[] outputString = output.getValue().getByteArray(0, outputSize[0]);
79+
String s = new String(outputString);
80+
try {
81+
Predict.PredictResponse resp =
82+
Predict.PredictResponse.newBuilder()
83+
.mergeFrom(outputString)
84+
.build();
85+
System.out.println(resp.toString());
86+
} catch (Exception e) {
87+
System.err.println("parse response error");
88+
}
89+
90+
}
91+
}

serving/sdk/python/README.md

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
# DeepRec Processor Python Example
2+
3+
## 要求
4+
5+
- Python 3
6+
- Protobuf v3.6.1
7+
8+
9+
## 使用
10+
11+
1. 根据proto文件生成对应py文件
12+
13+
根据 [Protobuf Docs](https://github.com/protocolbuffers/protobuf/tree/48cb18e5c419ddd23d9badcfe4e9df7bde1979b2/python#installation) 安装 Python Protocol Buffers runtime library
14+
```sh
15+
protoc --python_out=../python -I../../processor/serving/ predict.proto
16+
```
17+
18+
注意:predict.proto文件位于DeepRec/serving/processor/serving
19+
20+
2. 生成 DeepRec Serving Processor
21+
22+
需要```libserving_processor.so```
23+
编译详见[https://github.com/alibaba/DeepRec](https://github.com/alibaba/DeepRec)项目首页“How to Build serving library”部分。
24+
25+
3. 生成 demo checkpoint 和 savedmodel
26+
27+
```sh
28+
python simple_model.py --saved_model_dir=xxx --checkpoint_dir=xxx
29+
```
30+
如果没有设置saved_model_dir,默认路径为 '/tmp/saved_model'
31+
如果没有设置checkpoint_dir,默认路径为 '/tmp/checkpoint/1'
32+
```sh
33+
python simple_model.py
34+
```
35+
36+
注意:simple_model.py文件位于DeepRec/serving/processor/tests/end2end/
37+
38+
4. 设置demo.py中的`saved_model_dir` 和 `checkpoint_dir`
39+
40+
```python
41+
model_config = '{ \
42+
... \
43+
"checkpoint_dir": "/tmp/checkpoint/", \
44+
"savedmodel_dir": "/tmp/saved_model/" \
45+
}'
46+
```
47+
注意:这里的 checkpoint_dir 应该是 checkpoint dir 的父目录,
48+
比如 '/tmp/checkpoint/1',设置 checkpoint_dir 为 '/tmp/checkpoint'
49+
50+
5. 运行
51+
52+
```sh
53+
LD_PRELOAD=/path/to/libserving_processor.so python demo.py
54+
```

0 commit comments

Comments
 (0)