diff --git a/builder/Dockerfile b/builder/Dockerfile new file mode 100644 index 00000000..ae3af75b --- /dev/null +++ b/builder/Dockerfile @@ -0,0 +1,56 @@ +FROM python:3.5.7-slim-buster +ENV PYTHONUNBUFFERED 1 +ENV DEBIAN_FRONTEND noninteractive +ENV MESSAGELEVEL QUIET + +ARG ENABLE_REMOTEBUILD=true +ARG ENABLE_PAM=true + +################################################################################ +# CORE +# Do not modify this section + +RUN apt-get update && apt-get install -y \ + net-tools \ + build-essential \ + libssl-dev \ + uuid-dev \ + libgpgme11-dev \ + squashfs-tools \ + libseccomp-dev \ + wget \ + pkg-config \ + git \ + cryptsetup \ + debootstrap \ + yum + +# Install websocket requisite for remote build +RUN export VERSION=1.13.4 OS=linux ARCH=amd64 && \ + wget https://dl.google.com/go/go$VERSION.$OS-$ARCH.tar.gz && \ + tar -C /usr/local -xzvf go$VERSION.$OS-$ARCH.tar.gz && \ + rm go$VERSION.$OS-$ARCH.tar.gz; \ +export VERSION=3.5.0 && \ + wget https://github.com/sylabs/singularity/releases/download/v${VERSION}/singularity-${VERSION}.tar.gz && \ + tar -xzf singularity-${VERSION}.tar.gz -C /tmp && \ + rm singularity-${VERSION}.tar.gz; \ +cd /tmp/singularity && export PATH=/usr/local/go/bin:$PATH && ./mconfig && \ + make -C builddir && \ + make -C builddir install + +# Install Python requirements out of /tmp so not triggered if other contents of /code change +ADD builder/requirements.txt /tmp/requirements.txt +RUN pip install --upgrade pip +RUN pip install -r /tmp/requirements.txt +# Install PAM Authentication (uncomment if wanted) +RUN if $ENABLE_PAM; then pip install django-pam ; fi; + +ADD . 
/code/ + +WORKDIR /code + +RUN apt-get autoremove -y +RUN apt-get clean +RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +EXPOSE 3032 diff --git a/builder/docker-compose.yml b/builder/docker-compose.yml new file mode 100644 index 00000000..a06adbca --- /dev/null +++ b/builder/docker-compose.yml @@ -0,0 +1,72 @@ +db: + image: postgres + +uwsgi: + restart: always + image: quay.io/vanessa/sregistry + volumes: + - .:/code + - ./static:/var/www/static + - ./images:/var/www/images + # uncomment for PAM auth + #- /etc/passwd:/etc/passwd + #- /etc/shadow:/etc/shadow + links: + - redis + - db + +nginx: + restart: always + image: quay.io/vanessa/sregistry_nginx + ports: + - "80:80" + - "443:443" + volumes: + - ./nginx.conf:/etc/nginx/conf.d/default.conf:ro + - ./uwsgi_params.par:/etc/nginx/uwsgi_params.par:ro + - /etc/ssl/certs:/etc/ssl/certs:ro + - /etc/ssl/private:/etc/ssl/private:ro + volumes_from: + - uwsgi + links: + - builder + - uwsgi + - db + +redis: + restart: always + image: redis:latest + +scheduler: + image: quay.io/vanessa/sregistry + command: python /code/manage.py rqscheduler + volumes: + - .:/code + volumes_from: + - uwsgi + links: + - redis + - db + +worker: + image: quay.io/vanessa/sregistry + command: python /code/manage.py rqworker default + volumes: + - .:/code + volumes_from: + - uwsgi + links: + - redis + - db + +# uncomment for remote build +builder: + image: quay.io/vanessa/sregistry_builder + command: daphne --root-path "/v1/build-ws" -b 0.0.0.0 -p 3032 --proxy-headers shub.plugins.remote_build.asgi:application + volumes: + - .:/code + volumes_from: + - uwsgi + links: + - redis + - db diff --git a/builder/nginx.conf.ws b/builder/nginx.conf.ws new file mode 100644 index 00000000..fed79e49 --- /dev/null +++ b/builder/nginx.conf.ws @@ -0,0 +1,81 @@ +upstream websocket { + ip_hash; + server builder:3032 fail_timeout=0; +} + +server { + listen *:80; + server_name localhost; + + client_max_body_size 10024M; + client_body_buffer_size 10024M; + 
client_body_timeout 120; + + add_header X-Clacks-Overhead "GNU Terry Pratchett"; + add_header X-Clacks-Overhead "GNU Terry Pratchet"; + add_header Access-Control-Allow-Origin *; + add_header 'Access-Control-Allow-Credentials' 'true'; + add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; + add_header 'Access-Control-Allow-Headers' 'Authorization,DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type'; + + location /images { + alias /var/www/images; + } + + location ~* \.(php|aspx|myadmin|asp)$ { + deny all; + } + + location / { + include /etc/nginx/uwsgi_params.par; + uwsgi_pass uwsgi:3031; + uwsgi_max_temp_file_size 10024m; + } + + location /static { + alias /var/www/static; + } + + # Upload form should be submitted to this location + location /upload { + + # Pass altered request body to this location + upload_pass /api/uploads/complete/; + + # Store files to this directory + # The directory is hashed, subdirectories 0 1 2 3 4 5 6 7 8 9 should exist + upload_store /var/www/images/_upload 1; + upload_store_access user:rw group:rw all:rw; + + # Set specified fields in request body + upload_set_form_field $upload_field_name.name "$upload_file_name"; + upload_set_form_field $upload_field_name.content_type "$upload_content_type"; + upload_set_form_field $upload_field_name.path "$upload_tmp_path"; + + # Inform backend about hash and size of a file + upload_aggregate_form_field "$upload_field_name.md5" "$upload_file_md5"; + upload_aggregate_form_field "$upload_field_name.size" "$upload_file_size"; + + upload_pass_form_field "^submit$|^description$"; + upload_pass_form_field "^SREGISTRY_EVENT$"; + upload_pass_form_field "^collection$"; + upload_pass_form_field "^name$"; + upload_pass_form_field "^tag$"; + upload_cleanup 400-599; + + } + + location /v1/build-ws/ { + proxy_pass http://websocket; # daphne (ASGI) listening on port 3032 + proxy_http_version 1.1; + proxy_read_timeout 86400; + proxy_redirect off; + + 
proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $server_name; + } +} diff --git a/builder/nginx.conf.wss b/builder/nginx.conf.wss new file mode 100644 index 00000000..0dbabb0a --- /dev/null +++ b/builder/nginx.conf.wss @@ -0,0 +1,163 @@ +upstream websocket { + ip_hash; + server builder:3032 fail_timeout=0; +} + +server { + listen *:80; + server_name localhost; + + client_max_body_size 10024M; + client_body_buffer_size 10024M; + client_body_timeout 120; + + add_header X-Clacks-Overhead "GNU Terry Pratchett"; + add_header X-Clacks-Overhead "GNU Terry Pratchet"; + add_header Access-Control-Allow-Origin *; + add_header 'Access-Control-Allow-Credentials' 'true'; + add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; + add_header 'Access-Control-Allow-Headers' 'Authorization,DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type'; + + location /images { + alias /var/www/images; + } + + location ~* \.(php|aspx|myadmin|asp)$ { + deny all; + } + + location / { + include /etc/nginx/uwsgi_params.par; + uwsgi_pass uwsgi:3031; + uwsgi_max_temp_file_size 10024m; + } + + location /static { + alias /var/www/static; + } + + # Upload form should be submitted to this location + location /upload { + + # Pass altered request body to this location + upload_pass /api/uploads/complete/; + + # Store files to this directory + # The directory is hashed, subdirectories 0 1 2 3 4 5 6 7 8 9 should exist + upload_store /var/www/images/_upload 1; + upload_store_access user:rw group:rw all:rw; + + # Set specified fields in request body + upload_set_form_field $upload_field_name.name "$upload_file_name"; + upload_set_form_field $upload_field_name.content_type "$upload_content_type"; + upload_set_form_field $upload_field_name.path 
"$upload_tmp_path"; + + # Inform backend about hash and size of a file + upload_aggregate_form_field "$upload_field_name.md5" "$upload_file_md5"; + upload_aggregate_form_field "$upload_field_name.size" "$upload_file_size"; + + upload_pass_form_field "^submit$|^description$"; + upload_pass_form_field "^SREGISTRY_EVENT$"; + upload_pass_form_field "^collection$"; + upload_pass_form_field "^name$"; + upload_pass_form_field "^tag$"; + upload_cleanup 400-599; + + } + + location /v1/build-ws/ { + proxy_pass http://websocket; # daphne (ASGI) listening on port 3032 + proxy_http_version 1.1; + proxy_read_timeout 86400; + proxy_redirect off; + + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $server_name; + } +} + +server { + + listen 443; + server_name localhost; + + root html; + client_max_body_size 10024M; + client_body_buffer_size 10024M; + + ssl on; + ssl_certificate /etc/ssl/certs/chained.pem; + ssl_certificate_key /etc/ssl/private/domain.key; + ssl_session_timeout 5m; + ssl_protocols TLSv1 TLSv1.1 TLSv1.2; + ssl_ciphers ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA; + ssl_session_cache shared:SSL:50m; + ssl_dhparam /etc/ssl/certs/dhparam.pem; + ssl_prefer_server_ciphers on; + + location /images { + alias /var/www/images; + } + + location /static { + alias /var/www/static; + } + + location ~* \.(php|aspx|myadmin|asp)$ { + deny all; + } + + # Upload form should be submitted to this location + location /upload { + + # Pass altered request body to this location + upload_pass /api/uploads/complete/; + + # Store files to this directory + # The directory is hashed, subdirectories 0 1 2 3 4 5 6 7 8 9 should exist 
+ upload_store /var/www/images/_upload 1; + upload_store_access user:rw group:rw all:rw; + + # Set specified fields in request body + upload_set_form_field $upload_field_name.name "$upload_file_name"; + upload_set_form_field $upload_field_name.content_type "$upload_content_type"; + upload_set_form_field $upload_field_name.path "$upload_tmp_path"; + + # Inform backend about hash and size of a file + upload_aggregate_form_field "$upload_field_name.md5" "$upload_file_md5"; + upload_aggregate_form_field "$upload_field_name.size" "$upload_file_size"; + + upload_pass_form_field "^submit$|^description$"; + upload_pass_form_field "^SREGISTRY_EVENT$"; + upload_pass_form_field "^collection$"; + upload_pass_form_field "^name$"; + upload_pass_form_field "^tag$"; + upload_cleanup 400-599; + + } + + location / { + include /etc/nginx/uwsgi_params.par; + uwsgi_pass uwsgi:3031; + uwsgi_max_temp_file_size 10024m; + } + + location /v1/build-ws/ { + proxy_pass http://websocket; # daphne (ASGI) listening on port 3032 + proxy_http_version 1.1; + proxy_read_timeout 86400; + proxy_redirect off; + + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $server_name; + } +} + diff --git a/builder/requirements.txt b/builder/requirements.txt new file mode 100644 index 00000000..c8ea97d8 --- /dev/null +++ b/builder/requirements.txt @@ -0,0 +1,15 @@ +django==2.2.8 +djangorestframework==3.10.3 +channels +channels_redis +django_user_agents +social-auth-app-django +django-crispy-forms +django-rq +rq-scheduler +django-gravatar2 +django-extensions +django-rest-swagger +django-taggit +django-taggit-templatetags +psycopg2-binary diff --git a/docs/_docs/plugins/README.md b/docs/_docs/plugins/README.md index abae45c7..905208ad 100644 --- a/docs/_docs/plugins/README.md +++ b/docs/_docs/plugins/README.md @@ 
-25,6 +25,7 @@ your registries' local `shub/settings/secrets.py` file. - [SAML](saml): Authentication with SAML - [Google Build](google-build) provides build and storage on Google Cloud. - [Keystore](pgp) provides a standard keystore for signing containers + - [Remote Build](remote-build) provides a library endpoint to remotely build containers The Dockerfile has some build arguments to build the Docker image according to the plugins software requirements. These variables are set to false by default: @@ -34,6 +35,7 @@ ARG ENABLE_PAM=false ARG ENABLE_GOOGLEBUILD=false ARG ENABLE_GLOBUS=false ARG ENABLE_SAML=false +ARG ENABLE_REMOTEBUILD=false ``` Therefore, if you want to install the requirements of all current supported plugins, you can build the image as follows: @@ -67,6 +69,6 @@ RUN if $ENABLE_{PLUGIN_NAME}; then {INSTALLATION_COMMAND}; fi; ``` ## Writing Documentation Documentation for your plugin is just as important as the plugin itself! You should create a subfolder under -`docs/pages/plugins/` with an appropriate README.md that is linked to in this file. +`docs/_docs/plugins/` with an appropriate README.md that is linked to in this file. Use the others as examples to guide you. diff --git a/docs/_docs/plugins/remote_build/README.md b/docs/_docs/plugins/remote_build/README.md new file mode 100644 index 00000000..c00968bb --- /dev/null +++ b/docs/_docs/plugins/remote_build/README.md @@ -0,0 +1,138 @@ +--- +title: "Plugin: Remote Builder and REST API endpoints" +pdf: false +toc: false +permalink: docs/plugins/remote-build +--- + +# Plugin: Remote Build mimicking the Sylabs API + +## Configure sregistry + +By default, remote build is disabled.
To configure Singularity Registry Server +to build container images remotely, in settings/config.py, you must enable this +plugin by uncommenting it from the list here: + +```bash +PLUGINS_ENABLED = [ +# 'ldap_auth', +# 'saml_auth', +# 'pam_auth', +# 'globus', +# 'google_build', + 'remote_build' +] +``` +This plugin needs a dedicated docker build image. So you will need to build +this image locally: + +```bash +$ docker build -f builder/Dockerfile -t quay.io/vanessa/sregistry_builder . +$ cp builder/docker-compose.yml . +$ cp builder/nginx.conf.wss nginx.conf # if using https +# cp builder/nginx.conf.ws nginx.conf # or if using http + +``` + +## Secrets + +Next, set the following variables in `shub/settings/secrets.py`, +that you can create from `dummy_secrets.py` in the shub/settings folder. +The first two speak for themselves, your project name and path to your +Google Application Credentials. + +## Singularity Remote Build + +This is a first effort to provide support for `remote build`. +The freshly built image on the application builder is then pushed to the library... +So we need the [singularity client](https://sylabs.io) installed. + +### Motivation + +Remote build allows users without local compute resources (for instance) +to build remotely and retrieve container images locally on their desktop. + +Remote build activity can also be triggered through REST API endpoint calls. +Two endpoints have been added: `/v1/push` and `/v1/build`.
+Indeed, the build process can be split in two phases: + +```bash +singularity build # Local resources consumption +singularity push library:// # Push to remote library +``` + +Both phases will be handled right now with only one command invocation: + +```bash +singularity build --remote # No consumption of local resources +``` + +Or soon (work still in progress): + +```bash +http POST https:///v1/build Authorization:"BEARER " \ +Content-Disposition:"inline;filename=" @ +``` + +`http` stands for the popular [HTTP client](https://httpie.org/) + +Working right now, the `PUSH` endpoint: + +```bash +curl -XPOST https:///v1/push -H 'Authorization:BEARER ' \ +--upload-file '' -H 'Content-Disposition:inline; filename=' +``` + +Using curl this time, but `httpie` works too... + +Of course, you can proceed through the existing plugin: [google-build](https://singularityhub.github.io/sregistry/docs/plugins/google-build), +but not everyone + +has the opportunity to access Google Cloud, for security reasons for instance... + +### In a nutshell + +This basic implementation of the Sylabs Library API uses Django [channels](https://channels.readthedocs.io/en/latest/), +the Websocket Server [Daphne](https://github.com/django/daphne/) and [ASGI](https://channels.readthedocs.io/en/latest/asgi.html) + +### Prerequisites + +Apply the same prerequisites as are used for [Pushing a Singularity image](https://singularityhub.github.io/sregistry/docs/client#singularity-push) + +### Install + +You need to build the new image locally, with the new argument ENABLE_REMOTEBUILD set to true: + +```bash +$ docker build -f builder/Dockerfile -t quay.io/vanessa/sregistry_builder . +$ cp builder/docker-compose.yml .
+$ cp builder/nginx.conf.wss nginx.conf # if using https +# cp builder/nginx.conf.ws nginx.conf # or if using http +``` + +### Usage + +To build an image remotely on [sregistry](https://singularityhub.github.io/sregistry): + +```bash +singularity build --builder https:// --remote +``` + +Container image `` will then be generated locally and on the remote library. + +To generate the image only remotely, use: + +```bash +singularity build --builder https:// --detached +``` + +### Features + +- [X] build on remote library +- [X] retrieve the build locally +- [ ] REST API endpoints to manipulate singularity images [WIP] + +### TODO :boom: + +- [X] Optimize channels consumer `BuildConsumer` +- [ ] Extend collection namespace to username diff --git a/shub/plugins/remote_build/__init__.py b/shub/plugins/remote_build/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/shub/plugins/remote_build/asgi.py b/shub/plugins/remote_build/asgi.py new file mode 100755 index 00000000..054647ed --- /dev/null +++ b/shub/plugins/remote_build/asgi.py @@ -0,0 +1,12 @@ +""" +ASGI entrypoint. Configures Django and then runs the application +defined in the ASGI_APPLICATION setting.
+""" + +import os +import django +from channels.routing import get_default_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shub.settings") +django.setup() +application = get_default_application() diff --git a/shub/plugins/remote_build/consumers.py b/shub/plugins/remote_build/consumers.py new file mode 100644 index 00000000..c4966142 --- /dev/null +++ b/shub/plugins/remote_build/consumers.py @@ -0,0 +1,76 @@ +from django.conf import settings +from channels.generic.websocket import AsyncWebsocketConsumer +from channels.consumer import AsyncConsumer +from channels.exceptions import StopConsumer + +import asyncio +import os + +class BuildConsumer(AsyncConsumer): + async def websocket_connect(self, message): + await self.accept() + await self.receive() + await self.close(code=1000) + + + async def close(self, code=None): + """ + Closes the WebSocket from the server end + """ + if code is not None and code is not True: + await super().send({"type": "websocket.close", "code": code}) + else: + await super().send({"type": "websocket.CloseNormalClosure"}) + + async def accept(self): + """ + Accepts an incoming socket + """ + await self.send({"type": "websocket.accept", "text": "data"}) + + async def websocket_disconnect(self, message): + """ + Called when a WebSocket connection is closed. Base level so you don't + need to call super() all the time. + """ + # TODO: group leaving + await self.disconnect(message["code"]) + raise StopConsumer() + + async def disconnect(self, code): + """ + Called when a WebSocket connection is closed. + """ + pass + + async def websocket_receive(self, message): + await self.receive(text_data=message["text"]) + + async def receive(self, text_data=None, bytes_data=None): + """ + Called with a decoded WebSocket frame. 
+ """ + self.buildid = self.scope["url_route"]["kwargs"]["buildid"] + self.specfile = '/tmp/.{}.spec'.format(self.buildid) + self.specfile = os.path.join(settings.UPLOAD_PATH, self.buildid + ".spec") + self.filename = os.path.join(settings.UPLOAD_PATH, self.buildid + ".sif") + + cmd = 'singularity build -F {} {}'.format(self.filename, self.specfile) +# cmd = 'dd if=/dev/zero of={} bs=1k count=10'.format(self.filename) + text_data, err = await self.run(cmd) + await self.send({ + "type": "websocket.send", + "text": text_data + }) + await self.close() + + async def run(self, cmd): + proc = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE) + + stdout, stderr = await proc.communicate() + return stdout.rstrip().decode(), stderr + + diff --git a/shub/plugins/remote_build/routing.py b/shub/plugins/remote_build/routing.py new file mode 100644 index 00000000..03029463 --- /dev/null +++ b/shub/plugins/remote_build/routing.py @@ -0,0 +1,30 @@ +from django.urls import re_path + +from channels.http import AsgiHandler +from channels.routing import ProtocolTypeRouter, URLRouter +from channels.auth import AuthMiddlewareStack + +from shub.plugins.remote_build.consumers import BuildConsumer + + +# The channel routing defines what connections get handled by what consumers, +# selecting on either the connection type (ProtocolTypeRouter) or properties +# of the connection's scope (like URLRouter, which looks at scope["path"]) +# For more, see http://channels.readthedocs.io/en/latest/topics/routing.html +application = ProtocolTypeRouter({ + + # Channels will do this for you automatically. It's included here as an example. + # "http": AsgiHandler, + + # Route all WebSocket requests to our custom chat handler. + # We actually don't need the URLRouter here, but we've put it in for + # illustration. 
Also note the inclusion of the AuthMiddlewareStack to + # add users and sessions - see http://channels.readthedocs.io/en/latest/topics/authentication.html + "websocket": AuthMiddlewareStack( + URLRouter([ + # URLRouter just takes standard Django path() or url() entries. + re_path(r'^v1/build-ws/(?P.+?)$', BuildConsumer), + ]), + ), + +}) diff --git a/shub/plugins/remote_build/urls.py b/shub/plugins/remote_build/urls.py new file mode 100644 index 00000000..2df7c2d1 --- /dev/null +++ b/shub/plugins/remote_build/urls.py @@ -0,0 +1,10 @@ +from django.conf.urls import url, include + +from shub.plugins.remote_build import views + +urlpatterns = [ + url(r"v1/build$", views.BuildContainersView.as_view()), + url(r"v1/build/(?P.+?)?$", views.PushContainersView.as_view()), + url(r"v1/push$", views.PushContainersView.as_view()), + url(r"v1/push/(?P.+?)?$", views.PushContainersView.as_view()), +] diff --git a/shub/plugins/remote_build/views.py b/shub/plugins/remote_build/views.py new file mode 100644 index 00000000..f1b90cc3 --- /dev/null +++ b/shub/plugins/remote_build/views.py @@ -0,0 +1,305 @@ +""" + +Copyright (C) 2019-2020 Vanessa Sochat. + +This Source Code Form is subject to the terms of the +Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed +with this file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +""" + +from django.conf import settings +from django.core.files import File +from django.core.files.storage import default_storage +from django.shortcuts import redirect, reverse +from sregistry.utils import parse_image_name + +from shub.apps.logs.utils import generate_log +from shub.apps.main.models import Collection, Container +from shub.apps.main.utils import format_collection_name + + +from ratelimit.mixins import RatelimitMixin + +from rest_framework.response import Response +from rest_framework.views import APIView +from rest_framework.renderers import JSONRenderer + +from rest_framework.exceptions import ParseError +from rest_framework.parsers import FileUploadParser, MultiPartParser + +from shub.apps.library.views.helpers import ( + generate_container_metadata, + get_token, + validate_token, +) + +from shub.apps.library.views.images import ( + PushImageView, + RequestPushImageFileView, + PushImageFileView, +) + +import django_rq +import shutil +import tempfile +import json +import uuid +import os +import random +import string +import base64 +import requests + +class BuildContainersView(RatelimitMixin, APIView): + """Build Containers + POST /v1/build + GET /v1/build/ + PUT /v1/build/.+/_cancel + """ + ratelimit_key = 'ip' + ratelimit_rate = settings.VIEW_RATE_LIMIT + ratelimit_block = settings.VIEW_RATE_LIMIT_BLOCK + ratelimit_method = ('GET', 'POST',) + renderer_classes = (JSONRenderer,) +# parser_classes = (FileUploadParser,) + + def post(self, request, format=None): + + print("POST BuildContainersView") + + definitionRaw = request.data.get('definitionRaw') + + if definitionRaw: + # deserialized recipe raw data + raw=base64.b64decode(definitionRaw).decode() + elif 'file' in request.data: + file_obj = request.data['file'] + else: + msg = "singularity recipe must be provide as raw data" + return Response({'error':msg}, status=404) + + print(request.query_params) + + if not validate_token(request): + print("Token not valid") + return Response(status=404) + 
+ token = get_token(request) + user = token.user + # Define randomly container image name... + + buildid = ''.join([random.choice(string.ascii_lowercase + + string.digits) for n in range(24)]) + + filename = os.path.join(settings.UPLOAD_PATH, buildid + ".spec") + + try: + print("Writing spec file in {}...".format(filename)) + if 'file' in request.data: + file_obj = request.data['file'] + with open(filename, "wb+") as destname: + for chunk in file_obj.chunks(): + destname.write(chunk) + else: + destname = open(filename, 'w') + destname.write(raw) + destname.close() + except: + print("Failed to write spec to file {}".format(filename)) + return Response(status=404) + + libraryRef = "{0}/remote-builds/rb-{1}".format(user,buildid) + +# To be implemented : websocket part... +# if 'file' in request.data: +# import websocket +# ws = websocket.WebSocket() +# ws.connect("wss://{}/v1/build-ws/{}".format(settings.DOMAIN_NAME, buildid)) + + data = {"id": buildid, "libraryRef": libraryRef} + return Response(data={"data": data}, status=200) + +class PushContainersView(RatelimitMixin, APIView): + """Push Container image + POST /v1/push + GET /v1/push/ + """ + ratelimit_key = 'ip' + ratelimit_rate = settings.VIEW_RATE_LIMIT + ratelimit_block = settings.VIEW_RATE_LIMIT_BLOCK + ratelimit_method = ('GET', 'POST',) + renderer_classes = (JSONRenderer,) + parser_classes = (FileUploadParser,) + + def get(self, request, buildid): + + print("GET PushContainersView") + print(request.query_params) + if not validate_token(request): + print("Token not valid") + return Response(status=404) + + token = get_token(request) + user = token.user + + name = "remote-builds" +# + # Look up the collection + + data = {"name": name} + url = "{}/collections/new/".format(settings.DOMAIN_NAME) + _token = request.META.get("HTTP_AUTHORIZATION").replace("BEARER", "").strip() + headers = {'Authorization': _token, 'X-CSRFToken': _token, 'Referer': url} +# r = requests.put(url, data=data, headers=headers) +# 
print(r.text) +# +# Dont work fine! Use temporaly bellow replacement... + try: + collection = Collection.objects.get(name=name) + except Collection.DoesNotExist: + # No special characters allowed + name = format_collection_name(name) + collection = Collection(name=name, secret=str(uuid.uuid4())) + collection.save() + collection.owners.add(user) + collection.save() + + try: + collection = Collection.objects.get(name=name) + except Collection.DoesNotExist: + data="Collection %s don't exist!" % name + print(data) + return Response(data={"data": data}, status=404) + + collection = "remote-builds" + name = "rb-{}".format(buildid) + libraryURL = settings.DOMAIN_NAME + filename = os.path.join(settings.UPLOAD_PATH, buildid + ".sif") + + if os.path.exists(filename): + from sregistry.utils import get_file_hash + print("Retrieve sha256 hash of {} ...".format(filename)) + version = get_file_hash(filename, "sha256") + + print("Retrieve file {} size...".format(filename)) + imageSize = os.path.getsize(filename) + else: + print("File {} don't exist!".format(filename)) + return Response(status=404) + + libraryRef = "{}/remote-builds/rb-{}:sha256.{}".format(user,buildid,version) +# + try: + data = PushImageView.as_view()(request._request, + username=user, + collection=collection, + name=name, + version=version + ).data['data'] + container_id = data['id'] + print("PushNamedContainerView data {}".format(data)) + except: + print("Failed to GET PushNamedContainerView!") + return Response(status=404) + + try: + request._request.method = 'POST' + data = RequestPushImageFileView.as_view()(request._request, + container_id=container_id + ).data['data'] + url = data['uploadURL'] + secret = url.split('/')[-1] + except: + print("Failed to POST RequestPushImageFileView!") + return Response(status=404) + + data = open(filename, 'rb') + headers = {'Content-type': 'application/octet-stream','Authorization': request.META.get("HTTP_AUTHORIZATION")} + r = requests.put(url, data=data, headers=headers) 
+# + try: + request._request.method = 'PUT' + data = CompleteBuildImageFileView.as_view()(request._request, + container_id=container_id + ) + except: + print("Failed to PUT CompleteBuildImageFileView!") + return Response(status=404) + + try: + print("Cleanup spec and images files...") + specfile = os.path.join(settings.UPLOAD_PATH, buildid + ".spec") + os.remove(filename) + os.remove(specfile) + except: + print("Failed to cleanup spec and images files") + return Response(status=404) + +## To be modify accordingly to real complete status + isComplete = True + data = {'imageSize': imageSize, 'isComplete': isComplete, 'libraryRef': libraryRef, 'libraryURL': libraryURL} + request._request.method = 'GET' + return Response(data={"data": data}, status=200) + + def post(self, request, format=None): + + print("POST PushContainersView") + + if 'file' in request.data: + file_obj = request.data['file'] + else: + msg = "singularity image must be updated" + return Response({'error':msg}, status=404) + + print(request.query_params) + + if not validate_token(request): + print("Token not valid") + return Response(status=404) + + token = get_token(request) + user = token.user + # Define randomly container image name... + buildid = ''.join([random.choice(string.ascii_lowercase + + string.digits) for n in range(24)]) + + filename = os.path.join(settings.UPLOAD_PATH, buildid + ".sif") + + try: + print("Writing spec file in {}...".format(filename)) + file_obj = request.data['file'] + with open(filename, "wb+") as destname: + for chunk in file_obj.chunks(): + destname.write(chunk) + except: + print("Failed to write spec to file {}".format(filename)) + return Response(status=404) + + request._request.method = 'GET' + return self.get(request, buildid) + +class CompleteBuildImageFileView(RatelimitMixin, APIView): + """This view (UploadImageCompleteRequest) isn't currently useful, + but should exist as it is used for Singularity. 
+ """ + + ratelimit_key = "ip" + ratelimit_rate = settings.VIEW_RATE_LIMIT + ratelimit_block = settings.VIEW_RATE_LIMIT_BLOCK + ratelimit_method = "PUT" + renderer_classes = (JSONRenderer,) + + def put(self, request, container_id, format=None): + + print("PUT CompleteBuildImageFileView") + + try: + container = Container.objects.get(id=container_id) + tag = container.tag + name = container.name + Container.objects.filter(tag__startswith="DUMMY-", name=name, tag=tag).update(tag="latest") + print("Suppress DUMMY tag {} on container {}...".format(tag,name)) + return Response(status=200) + except Container.DoesNotExist: + return Response(status=404) diff --git a/shub/settings/__init__.py b/shub/settings/__init__.py index 697b62ed..2ca54461 100644 --- a/shub/settings/__init__.py +++ b/shub/settings/__init__.py @@ -14,6 +14,26 @@ if "pam_auth" in PLUGINS_ENABLED: INSTALLED_APPS += ["django_pam"] +if "remote_build" in PLUGINS_ENABLED: + INSTALLED_APPS += ["channels"] +# ASGI_APPLICATION should be set to your outermost router + ASGI_APPLICATION = 'shub.plugins.remote_build.routing.application' + +# Channel layer definitions +# http://channels.readthedocs.io/en/latest/topics/channel_layers.html + +# redis_host = os.environ.get('REDIS_HOST', '172.16.0.8') +# +# CHANNEL_LAYERS = { +# "default": { +# # This example app uses the Redis channel layer implementation channels_redis +# "BACKEND": "channels_redis.core.RedisChannelLayer", +# "CONFIG": { +# "hosts": [(redis_host, 6379)], +# }, +# }, +# } + # If google_build in use, we are required to include GitHub if "google_build" in PLUGINS_ENABLED: diff --git a/shub/urls.py b/shub/urls.py index 595f3d5c..353c4e66 100644 --- a/shub/urls.py +++ b/shub/urls.py @@ -60,4 +60,6 @@ # per protocol, keystore must be /pks if plugin == "pgp": url_regex = "^pks/" + elif plugin == "remote_build": + url_regex = "^" urlpatterns.append(url(url_regex, include(plugin_urls)))