mirror of https://github.com/vimagick/dockerfiles.git

add tensorflow/serving

commit 53c61b65f2 (parent a4a15a0973)
@@ -428,6 +428,8 @@ A collection of delicious docker recipes.
 - [x] jira
 - [x] strapi/strapi
 - [x] amancevice/superset
+- [x] tensorflow
+  - [x] serving
 - [x] tile38/tile38
 - [x] traefik
 - [x] trinodb/trino
@@ -6,7 +6,6 @@ and distribute data.
 ![](https://nifi.apache.org/assets/images/flow-th.png)

 ## up and running

 ```bash
@@ -18,4 +17,6 @@ $ curl http://127.0.0.1:8080/nifi/
 $ curl http://127.0.0.1:18080/nifi-registry/
 ```

+> :warning: upgrade seems very hard
+
 [1]: https://nifi.apache.org
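The warning above is easier to act on if you know exactly which release is running; the NiFi REST API exposes this. A minimal check, assuming the container is published on port 8080 as in the example above:

```bash
# /nifi-api/flow/about reports the running release,
# e.g. {"about":{"title":"NiFi","version":"1.15.0", ...}}
$ curl -s http://127.0.0.1:8080/nifi-api/flow/about
```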
@@ -1,25 +1,28 @@
-nifi:
-  image: apache/nifi:1.11.4
-  ports:
-    - "8080:8080"
-    - "8082:8082"
-    - "10000:10000"
-  environment:
-    - NIFI_WEB_HTTP_PORT=8080
-    - NIFI_CLUSTER_IS_NODE=true
-    - NIFI_CLUSTER_NODE_PROTOCOL_PORT=8082
-    - NIFI_REMOTE_INPUT_SOCKET_PORT=10000
-    - NIFI_ZK_CONNECT_STRING=zookeeper1:2181,zookeeper2:2181,zookeeper3:2181
-    - NIFI_ELECTION_MAX_WAIT=1 min
-    - NIFI_ELECTION_MAX_CANDIDATES=2
-    - NIFI_JVM_HEAP_INIT=512m
-    - NIFI_JVM_HEAP_MAX=1g
-  extra_hosts:
-    - zookeeper1:10.0.0.21
-    - zookeeper2:10.0.0.22
-    - zookeeper3:10.0.0.23
-    # nifi1:10.0.0.11
-    - nifi2:10.0.0.12
-    - nifi3:10.0.0.13
-  hostname: nifi1
-  restart: unless-stopped
+version: "3.8"
+services:
+  nifi:
+    image: apache/nifi:1.15.0
+    ports:
+      - "8080:8080"
+      - "8082:8082"
+      - "10000:10000"
+    environment:
+      - NIFI_WEB_HTTP_PORT=8080
+      - NIFI_WEB_PROXY_CONTEXT_PATH=// # FIX BUG
+      - NIFI_CLUSTER_IS_NODE=true
+      - NIFI_CLUSTER_NODE_PROTOCOL_PORT=8082
+      - NIFI_REMOTE_INPUT_SOCKET_PORT=10000
+      - NIFI_ZK_CONNECT_STRING=zookeeper1:2181,zookeeper2:2181,zookeeper3:2181
+      - NIFI_ELECTION_MAX_WAIT=1 min
+      - NIFI_ELECTION_MAX_CANDIDATES=2
+      - NIFI_JVM_HEAP_INIT=512m
+      - NIFI_JVM_HEAP_MAX=1g
+    extra_hosts:
+      - zookeeper1:10.0.0.21
+      - zookeeper2:10.0.0.22
+      - zookeeper3:10.0.0.23
+      # nifi1:10.0.0.11
+      - nifi2:10.0.0.12
+      - nifi3:10.0.0.13
+    hostname: nifi1
+    restart: unless-stopped
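Once the three cluster nodes described by this file (nifi1, nifi2, nifi3) are up, one way to check that they actually joined the cluster is the cluster endpoint of the NiFi REST API; a rough sketch, run against any node:

```bash
# Lists every node known to the cluster together with its state;
# healthy nodes report "CONNECTED".
$ curl -s http://127.0.0.1:8080/nifi-api/controller/cluster
```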
@@ -3,7 +3,7 @@ version: "3.8"
 services:

   nifi:
-    image: apache/nifi:1.12.1
+    image: apache/nifi:1.15.0
     ports:
       - "8080:8080"
     volumes:
@@ -16,17 +16,19 @@ services:
       - ./data/nifi/logs:/opt/nifi/nifi-current/logs
     environment:
       - NIFI_WEB_HTTP_PORT=8080
+      - NIFI_WEB_PROXY_CONTEXT_PATH=// # FIX BUG
       - NIFI_JVM_HEAP_INIT=8g
       - NIFI_JVM_HEAP_MAX=16g
     hostname: nifi
     restart: unless-stopped

   registry:
-    image: apache/nifi-registry:0.8.0
+    image: apache/nifi-registry:1.15.0
     ports:
       - "18080:18080"
     volumes:
       - ./data/registry/database:/opt/nifi-registry/nifi-registry-current/database
       - ./data/registry/flow_storage:/opt/nifi-registry/nifi-registry-current/flow_storage
       - ./data/registry/logs:/opt/nifi-registry/nifi-registry-current/logs
     hostname: registry
     restart: unless-stopped
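With both services up, the registry can be smoke-tested through its REST API, for example by creating a bucket for versioned flows and listing it back (an illustrative sketch; the bucket name is arbitrary):

```bash
# Create a bucket named "flows" in NiFi Registry, then list all buckets
$ curl -s -X POST -H 'Content-Type: application/json' \
       -d '{"name": "flows"}' \
       http://127.0.0.1:18080/nifi-registry-api/buckets
$ curl -s http://127.0.0.1:18080/nifi-registry-api/buckets
```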
tensorflow/serving/README.md (new file)

tensorflow/serving
==================

[TensorFlow Serving][1] is a flexible, high-performance serving system for machine learning models, designed for production environments.

## up and running

```bash
$ mkdir -p data
$ curl -sSL https://github.com/tensorflow/serving/archive/refs/heads/master.tar.gz | tar xz -C data serving-master/tensorflow_serving/servables/tensorflow/testdata --strip 5
$ docker-compose up -d
$ curl -X POST http://localhost:8501/v1/models/half_plus_two:predict -d '{"instances": [1.0, 2.0, 5.0]}'
{ "predictions": [2.5, 3.0, 4.5] }
```
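If the prediction call fails, the status and metadata endpoints of the same REST port are a quick way to see whether the SavedModel was picked up at all:

```bash
# Model status: the state should be "AVAILABLE" once loading has finished
$ curl -s http://localhost:8501/v1/models/half_plus_two

# Signature metadata: shows the model's expected input/output tensors
$ curl -s http://localhost:8501/v1/models/half_plus_two/metadata
```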
[1]: https://github.com/tensorflow/serving
tensorflow/serving/docker-compose.yml (new file)

version: "3.8"
services:
  serving:
    image: tensorflow/serving
    ports:
      - "8501:8501"
    volumes:
      - ./data/saved_model_half_plus_two_cpu:/models/half_plus_two
    environment:
      - MODEL_BASE_PATH=/models
      - MODEL_NAME=half_plus_two
    restart: unless-stopped
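For a quick one-off test without Compose, roughly the same container can be started with plain docker run (a sketch mirroring the settings above; MODEL_BASE_PATH can usually be omitted since the image defaults to /models):

```bash
$ docker run -d --name serving \
    -p 8501:8501 \
    -v "$PWD/data/saved_model_half_plus_two_cpu:/models/half_plus_two" \
    -e MODEL_BASE_PATH=/models \
    -e MODEL_NAME=half_plus_two \
    tensorflow/serving
```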