mirror of https://github.com/vimagick/dockerfiles.git
synced 2024-11-28 09:08:50 +02:00

commit 9873448a9e (parent 571d74d6d4): update

@@ -1,5 +0,0 @@
scrapyd-onbuild
===============

Dockerfile for building an image that runs [scrapyd][1].

@@ -1 +0,0 @@
psycopg2

@@ -1,10 +1,13 @@
#
# Dockerfile for scrapyd-onbuild
# Dockerfile for scrapyd
#

FROM debian:jessie
MAINTAINER kev <noreply@datageek.info>

ADD ./scrapyd.conf /etc/scrapyd/
ADD ./requirements.txt /etc/scrapyd/

RUN apt-get update \
    && apt-get install -y autoconf \
                          build-essential \
@@ -36,6 +39,7 @@ RUN apt-get update \
    && rm -rf libxslt \
    && curl -sSL https://bootstrap.pypa.io/get-pip.py | python \
    && pip install scrapy==0.24.6 scrapyd==1.0.1 \
    && pip install -r /etc/scrapyd/requirements.txt \
    && curl -sSL https://github.com/scrapy/scrapy/raw/master/extras/scrapy_bash_completion -o /etc/bash_completion.d/scrapy_bash_completion \
    && echo 'source /etc/bash_completion.d/scrapy_bash_completion' >> /root/.bashrc \
    && apt-get remove -y autoconf \
@@ -44,15 +48,11 @@ RUN apt-get update \
                          libssl-dev \
                          libtool \
                          python-dev \
    && apt-get autoremove -y
    && apt-get autoremove -y \
    && rm -rf /var/lib/apt/lists/*

ADD ./scrapyd.conf /etc/scrapyd/
VOLUME /etc/scrapyd/ /var/lib/scrapyd/

EXPOSE 6800

ONBUILD ADD ./packages.txt /etc/scrapyd/
ONBUILD RUN cat /etc/scrapyd/packages.txt | xargs apt-get install -y
ONBUILD ADD ./requirements.txt /etc/scrapyd/
ONBUILD RUN pip install -r /etc/scrapyd/requirements.txt

CMD ["scrapyd"]

scrapyd.old/README.md (new file, 15 lines)
@@ -0,0 +1,15 @@
docker-scrapyd
==============

Dockerfile for building an image that runs [scrapyd][1].

## Building

    $ docker build -t scrapyd .

## Running

    $ docker run -p 6800:6800 scrapyd

[1]: https://github.com/scrapy/scrapyd
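
Note: once the container is up, scrapyd serves both a small web console and its JSON API on port 6800. Assuming the default configuration and the port mapping above, a quick smoke test from the host could be:

    $ curl http://localhost:6800/listprojects.json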

scrapyd.old/requirements.txt (new file, 21 lines)
@@ -0,0 +1,21 @@
--allow-all-external
--allow-unverified jsonpath

# parser
git+https://github.com/scrapy/scrapely
jsonpath
jsonschema
pyparsing
pyquery
pyyaml

# database
pymongo
redis

# others
chardet
requests
service_identity
toolz

@@ -1,11 +1,11 @@
#
# Dockerfile for scrapyd
# Dockerfile for scrapyd-onbuild
#

FROM debian:jessie
MAINTAINER kev <noreply@datageek.info>

ADD ./scrapyd.conf /etc/scrapyd/
ADD ./packages.txt /etc/scrapyd/
ADD ./requirements.txt /etc/scrapyd/

RUN apt-get update \
@@ -39,7 +39,9 @@ RUN apt-get update \
    && rm -rf libxslt \
    && curl -sSL https://bootstrap.pypa.io/get-pip.py | python \
    && pip install scrapy==0.24.6 scrapyd==1.0.1 \
    && xargs apt-get install -y < /etc/scrapyd/packages.txt \
    && pip install -r /etc/scrapyd/requirements.txt \
    && xargs apt-get remove -y < /etc/scrapyd/packages.txt \
    && curl -sSL https://github.com/scrapy/scrapy/raw/master/extras/scrapy_bash_completion -o /etc/bash_completion.d/scrapy_bash_completion \
    && echo 'source /etc/bash_completion.d/scrapy_bash_completion' >> /root/.bashrc \
    && apt-get remove -y autoconf \
@@ -51,8 +53,8 @@ RUN apt-get update \
    && apt-get autoremove -y \
    && rm -rf /var/lib/apt/lists/*

ADD ./scrapyd.conf /etc/scrapyd/
VOLUME /etc/scrapyd/ /var/lib/scrapyd/

EXPOSE 6800

CMD ["scrapyd"]
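
Note: in the RUN step above, the new lines treat packages.txt as a list of temporary build dependencies: they are apt-installed immediately before pip builds the python requirements and apt-removed immediately after, so they do not bloat the final image. Stripped of the shell continuation syntax, the sequence is (comments are editorial, not part of the commit):

    xargs apt-get install -y < /etc/scrapyd/packages.txt   # e.g. compiler headers needed to build wheels
    pip install -r /etc/scrapyd/requirements.txt           # build and install the python requirements
    xargs apt-get remove -y < /etc/scrapyd/packages.txt    # drop the build-only packages again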

@@ -1,15 +1,13 @@
docker-scrapyd
==============
scrapyd
=======

Dockerfile for building an image that runs [scrapyd][1].

## Building
Please use this image as a base for your own project.

    $ docker build -t scrapyd .
You may edit these files:

## Running

    $ docker run -p 6800:6800 scrapyd
- `packages.txt` - apt packages required by the python requirements
- `requirements.txt` - additional python packages to be installed

[1]: https://github.com/scrapy/scrapyd
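
For context, a downstream project following this README only needs to edit those two files and rebuild with the usual docker build command. A minimal hypothetical pair (libpq-dev is an illustrative guess; psycopg2 is borrowed from the requirements.txt change below):

    # packages.txt - apt packages required by the python requirements
    libpq-dev

    # requirements.txt - additional python packages to install
    psycopg2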

@@ -1,21 +1 @@
--allow-all-external
--allow-unverified jsonpath

# parser
git+https://github.com/scrapy/scrapely
jsonpath
jsonschema
pyparsing
pyquery
pyyaml

# database
pymongo
redis

# others
chardet
requests
service_identity
toolz

psycopg2