author     Sawyer Bergeron <sbergeron@iol.unh.edu>  2021-10-29 15:11:29 -0400
committer  Sawyer Bergeron <sbergeron@iol.unh.edu>  2021-11-01 18:07:49 -0400
commit     23d35dc2c56b8c2b5496b6f0a5fc62066b22bbc7 (patch)
tree       c8eca16091ce1646d088bff54345c728f3726041 /src/api
parent     35b9f39178cc502a5283a1b37a65f7dd0838ae05 (diff)
Add Cloud Init Support
Squashed commit of the following:

commit afcee3cad5c091e78e909b83f8df49accf1af5b6
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Mon Oct 11 22:02:16 2021 +0000
    Prod cobbler hotfixes
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: I092bc6d85a3b2c77bfbe24f3af0d2b7a5f75a8c3

commit 5ce0a52b17e530436c298e1b581d37bac853f5a7
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Thu Oct 7 17:14:01 2021 -0400
    Manually merge CI files
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: Ic63d5da699578007ef2f2cc373350ded06c66971

commit 5b70b8f1b8bbbe6aeec43b8d8dfdc6b7cc68bc9c
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Thu Sep 30 16:33:01 2021 -0400
    Fixes for collaborator field
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: I3dbdedf26fa84617ea7680a0f99e032d88f1ea98
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>

commit 529b2521627b17142284c55c744812129edc71e8
Merge: d555513 e9d72ce
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Thu Sep 30 14:03:55 2021 +0000
    Merge "Push cloud config content for generated files into userdata_raw" into cobbler

commit d55551394df73645e49ae2ae3e730a9f1c6af81d
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Thu Sep 30 10:02:32 2021 -0400
    Better error handling for quick deploy
    Change-Id: I03a725dfee9ce2f119d72ef940cd08df5aee3dcc
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>

commit e9d72ce78a85c6ff2f3f8591bcbf4115f97318d5
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Tue Sep 28 19:11:49 2021 -0400
    Push cloud config content for generated files into userdata_raw
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: Ieb8bd9b8b172b6bf11062f67f41fc78154cc7c89

commit 95d39c60f7e8062cabc8c1665080a2d2c8904234
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Sat Sep 25 16:18:12 2021 -0400
    Allow for "pod specific" vlan allocation for LFEDGE allocation case
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: I8b75410145027f43eaf6de7bd5f1813af38d3e7f
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>

commit 2ebb82b5f344de1e17abd70c51c4cce765761dd1
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Thu Sep 23 16:37:43 2021 -0400
    Fix collaborator field with recent changes
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: Id305de9b1567adf103c47d5180b0b28ebfdf1b5e

commit a819fc1df86721eda36eee89d0235c89b3159d6b
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Tue Sep 7 11:28:35 2021 -0400
    Add user specified CI file entry
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: Ia920130612da8fcde9d1a0d5dde7861904857162
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>

commit d93346a716bde5237b7cfef5c10ea56e4922b59a
Author: Adam Hassick <ahassick@iol.unh.edu>
Date:   Tue Jul 27 13:05:16 2021 +0000
    Make C-I serialization work with current netconf rules
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: If967e5e1f268c5bee3ad4496847662cf4de1187c
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>

commit 6ffb1fdf6ce7825770148bada5a4c54899e4ed36
Author: Adam Hassick <ahassick@iol.unh.edu>
Date:   Tue Jun 29 16:49:27 2021 -0400
    Cobbler model changes, new endpoints
    Signed-off-by: Adam Hassick <ahassick@iol.unh.edu>
    Change-Id: If0a94730e92747127cef121ec4930a4c8bae6c92
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Signed-off-by: Adam Hassick <ahassick@iol.unh.edu>

commit 49e2b407003b69551ddafa851639e83ec42a5b09
Author: Jacob Hodgdon <jhodgdon@iol.unh.edu>
Date:   Fri May 14 15:42:56 2021 -0400
    Color fixes for rebrand
    Signed-off-by: Jacob Hodgdon <jhodgdon@iol.unh.edu>
    Change-Id: I5cf4ede598afa377db7ecec17d8dfef085e130ac

commit a908da441bf6efcdb289a46d0c2761840138b1a5
Author: Sawyer Bergeron <sbergeron@iol.unh.edu>
Date:   Tue Jun 8 11:15:56 2021 -0400
    Draft for cloud-init file generation
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Change-Id: I07f3a4a1ab67531cba2cc7e3de22e9bb860706e1
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
    Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>

Change-Id: I392505174cbc07214c31c42aab2474a748e47913
Signed-off-by: Sawyer Bergeron <sbergeron@iol.unh.edu>
Diffstat (limited to 'src/api')
-rw-r--r--  src/api/migrations/0017_auto_20210630_1629.py |  18
-rw-r--r--  src/api/migrations/0018_cloudinitfile.py      |  25
-rw-r--r--  src/api/migrations/0019_auto_20210907_1448.py |  29
-rw-r--r--  src/api/models.py                             | 188
-rw-r--r--  src/api/urls.py                               |  22
-rw-r--r--  src/api/views.py                              | 177
6 files changed, 448 insertions(+), 11 deletions(-)
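As an orientation aid before the per-file diffs: the change lets a lab agent sync Cobbler image and operating-system records through new lab-scoped endpoints. The sketch below is illustrative only; the paths come from the urls.py hunk further down, while the dashboard URL, lab name, token header spelling (Django maps an `Auth-Token` header to `HTTP_AUTH_TOKEN` in `request.META`) and the image payload fields other than `lab_id`/`from_lab_id` are assumptions, not a schema.

import json
import requests

BASE = "https://dashboard.example.org/api"    # hypothetical dashboard URL
LAB = "unh_iol"                               # hypothetical lab name
HEADERS = {"Auth-Token": "secret-lab-token"}  # token checked by LabManagerTracker

# List the image records the dashboard currently has for this lab.
images = requests.get(f"{BASE}/labs/{LAB}/image", headers=HEADERS).json()

# Create or update one image record, keyed by its lab/Cobbler id; the view
# fills in lab_id and from_lab_id from the URL before calling new_from_data().
payload = {"name": "ubuntu2004-server"}       # hypothetical field
resp = requests.post(
    f"{BASE}/labs/{LAB}/image/ubuntu2004",
    headers=HEADERS,
    data=json.dumps(payload),
)
assert resp.status_code == 200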
diff --git a/src/api/migrations/0017_auto_20210630_1629.py b/src/api/migrations/0017_auto_20210630_1629.py
new file mode 100644
index 0000000..643ff5f
--- /dev/null
+++ b/src/api/migrations/0017_auto_20210630_1629.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2 on 2021-06-30 16:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('api', '0016_auto_20201109_2149'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='snapshotconfig',
+ name='image',
+ field=models.CharField(max_length=200, null=True),
+ ),
+ ]
diff --git a/src/api/migrations/0018_cloudinitfile.py b/src/api/migrations/0018_cloudinitfile.py
new file mode 100644
index 0000000..4e41b39
--- /dev/null
+++ b/src/api/migrations/0018_cloudinitfile.py
@@ -0,0 +1,25 @@
+# Generated by Django 2.2 on 2021-07-01 20:45
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('resource_inventory', '0019_auto_20210701_1947'),
+ ('booking', '0008_auto_20201109_1947'),
+ ('api', '0017_auto_20210630_1629'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='CloudInitFile',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('resource_id', models.CharField(max_length=200)),
+ ('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='booking.Booking')),
+ ('rconfig', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='resource_inventory.ResourceConfiguration')),
+ ],
+ ),
+ ]
diff --git a/src/api/migrations/0019_auto_20210907_1448.py b/src/api/migrations/0019_auto_20210907_1448.py
new file mode 100644
index 0000000..92140fb
--- /dev/null
+++ b/src/api/migrations/0019_auto_20210907_1448.py
@@ -0,0 +1,29 @@
+# Generated by Django 2.2 on 2021-09-07 14:48
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('booking', '0008_auto_20201109_1947'),
+ ('resource_inventory', '0020_cloudinitfile'),
+ ('api', '0018_cloudinitfile'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='GeneratedCloudConfig',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('resource_id', models.CharField(max_length=200)),
+ ('text', models.TextField(blank=True, null=True)),
+ ('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='booking.Booking')),
+ ('rconfig', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='resource_inventory.ResourceConfiguration')),
+ ],
+ ),
+ migrations.DeleteModel(
+ name='CloudInitFile',
+ ),
+ ]
diff --git a/src/api/models.py b/src/api/models.py
index d85f3e9..5928ea9 100644
--- a/src/api/models.py
+++ b/src/api/models.py
@@ -19,18 +19,22 @@ from django.utils import timezone
import json
import uuid
+import yaml
from booking.models import Booking
from resource_inventory.models import (
Lab,
ResourceProfile,
Image,
+ Opsys,
Interface,
ResourceOPNFVConfig,
RemoteInfo,
OPNFVConfig,
ConfigState,
- ResourceQuery
+ ResourceQuery,
+ ResourceConfiguration,
+ CloudInitFile
)
from resource_inventory.idf_templater import IDFTemplater
from resource_inventory.pdf_templater import PDFTemplater
@@ -84,6 +88,18 @@ class LabManager:
def __init__(self, lab):
self.lab = lab
+ def get_opsyss(self):
+ return Opsys.objects.filter(from_lab=self.lab)
+
+ def get_images(self):
+ return Image.objects.filter(from_lab=self.lab)
+
+ def get_image(self, image_id):
+ return Image.objects.filter(from_lab=self.lab, lab_id=image_id)
+
+ def get_opsys(self, opsys_id):
+ return Opsys.objects.filter(from_lab=self.lab, lab_id=opsys_id)
+
def get_downtime(self):
return Downtime.objects.filter(start__lt=timezone.now(), end__gt=timezone.now(), lab=self.lab)
@@ -338,6 +354,157 @@ class LabManager:
return profile_ser
+class GeneratedCloudConfig(models.Model):
+ resource_id = models.CharField(max_length=200)
+ booking = models.ForeignKey(Booking, on_delete=models.CASCADE)
+ rconfig = models.ForeignKey(ResourceConfiguration, on_delete=models.CASCADE)
+ text = models.TextField(null=True, blank=True)
+
+ def _normalize_username(self, username: str) -> str:
+ # TODO: make usernames posix compliant
+ return username
+
+ def _get_ssh_string(self, username: str) -> str:
+ user = User.objects.get(username=username)
+ uprofile = user.userprofile
+
+ ssh_file = uprofile.ssh_public_key
+
+ escaped_file = ssh_file.open().read().decode(encoding="UTF-8").replace("\n", " ")
+
+ return escaped_file
+
+ def _serialize_users(self):
+ """
+ returns the dictionary to be placed behind the `users` field of the toplevel c-i dict
+ """
+ # conserves distro default user
+ user_array = ["default"]
+
+ users = list(self.booking.collaborators.all())
+ users.append(self.booking.owner)
+ for collaborator in users:
+ userdict = {}
+
+ # TODO: validate if usernames are valid as linux usernames (and provide an override potentially)
+ userdict['name'] = self._normalize_username(collaborator.username)
+
+ userdict['groups'] = "sudo"
+ userdict['sudo'] = "ALL=(ALL) NOPASSWD:ALL"
+
+ userdict['ssh_authorized_keys'] = [self._get_ssh_string(collaborator.username)]
+
+ user_array.append(userdict)
+
+ # user_array.append({
+ # "name": "opnfv",
+ # "passwd": "$6$k54L.vim1cLaEc4$5AyUIrufGlbtVBzuCWOlA1yV6QdD7Gr2MzwIs/WhuYR9ebSfh3Qlb7djkqzjwjxpnSAonK1YOabPP6NxUDccu.",
+ # "ssh_redirect_user": True,
+ # "sudo": "ALL=(ALL) NOPASSWD:ALL",
+ # "groups": "sudo",
+ # })
+
+ return user_array
+
+ # TODO: make this configurable
+ def _serialize_sysinfo(self):
+ defuser = {}
+ defuser['name'] = 'opnfv'
+ defuser['plain_text_passwd'] = 'OPNFV_HOST'
+ defuser['home'] = '/home/opnfv'
+ defuser['shell'] = '/bin/bash'
+ defuser['lock_passwd'] = True
+ defuser['gecos'] = 'Lab Manager User'
+ defuser['groups'] = 'sudo'
+
+ return {'default_user': defuser}
+
+ # TODO: make this configurable
+ def _serialize_runcmds(self):
+ cmdlist = []
+
+ # have hosts run dhcp on boot
+ cmdlist.append(['sudo', 'dhclient', '-r'])
+ cmdlist.append(['sudo', 'dhclient'])
+
+ return cmdlist
+
+ def _serialize_netconf_v1(self):
+ # interfaces = {} # map from iface_name => dhcp_config
+ # vlans = {} # map from vlan_id => dhcp_config
+
+ config_arr = []
+
+ for interface in self._resource().interfaces.all():
+ interface_name = interface.profile.name
+ interface_mac = interface.mac_address
+
+ iface_dict_entry = {
+ "type": "physical",
+ "name": interface_name,
+ "mac_address": interface_mac,
+ }
+
+ for vlan in interface.config.all():
+ if vlan.tagged:
+ vlan_dict_entry = {'type': 'vlan'}
+ vlan_dict_entry['name'] = str(interface_name) + "." + str(vlan.vlan_id)
+ vlan_dict_entry['vlan_link'] = str(interface_name)
+ vlan_dict_entry['vlan_id'] = int(vlan.vlan_id)
+ vlan_dict_entry['mac_address'] = str(interface_mac)
+ if vlan.public:
+ vlan_dict_entry["subnets"] = [{"type": "dhcp"}]
+ config_arr.append(vlan_dict_entry)
+ if (not vlan.tagged) and vlan.public:
+ iface_dict_entry["subnets"] = [{"type": "dhcp"}]
+
+ # vlan_dict_entry['mtu'] = # TODO, determine override MTU if needed
+
+ config_arr.append(iface_dict_entry)
+
+ ns_dict = {
+ 'type': 'nameserver',
+ 'address': ['10.64.0.1', '8.8.8.8']
+ }
+
+ config_arr.append(ns_dict)
+
+ full_dict = {'version': 1, 'config': config_arr}
+
+ return full_dict
+
+ @classmethod
+ def get(cls, booking_id: int, resource_lab_id: str, file_id: int):
+ return GeneratedCloudConfig.objects.get(resource_id=resource_lab_id, booking__id=booking_id, file_id=file_id)
+
+ def _resource(self):
+ return ResourceQuery.get(labid=self.resource_id, lab=self.booking.lab)
+
+ # def _get_facts(self):
+ # resource = self._resource()
+
+ # hostname = self.rconfig.name
+ # iface_configs = for_config.interface_configs.all()
+
+ def _to_dict(self):
+ main_dict = {}
+
+ main_dict['users'] = self._serialize_users()
+ main_dict['network'] = self._serialize_netconf_v1()
+ main_dict['hostname'] = self.rconfig.name
+
+ # add first startup commands
+ main_dict['runcmd'] = self._serialize_runcmds()
+
+ # configure distro default user
+ main_dict['system_info'] = self._serialize_sysinfo()
+
+ return main_dict
+
+ def serialize(self) -> str:
+ return yaml.dump(self._to_dict())
+
+
class APILog(models.Model):
user = models.ForeignKey(User, on_delete=models.PROTECT)
call_time = models.DateTimeField(auto_now=True)
@@ -761,6 +928,7 @@ class HardwareConfig(TaskConfig):
return self.get_delta()
def get_delta(self):
+ # TODO: grab the GeneratedCloudConfig urls from self.hosthardwarerelation.get_resource()
return self.format_delta(
self.hosthardwarerelation.get_resource().get_configuration(self.state),
self.hosthardwarerelation.lab_token)
@@ -813,7 +981,7 @@ class NetworkConfig(TaskConfig):
class SnapshotConfig(TaskConfig):
resource_id = models.CharField(max_length=200, default="default_id")
- image = models.IntegerField(null=True)
+ image = models.CharField(max_length=200, null=True) # cobbler ID
dashboard_id = models.IntegerField()
delta = models.TextField(default="{}")
@@ -1104,6 +1272,10 @@ class JobFactory(object):
booking=booking,
job=job
)
+ cls.makeGeneratedCloudConfigs(
+ resources=resources,
+ job=job
+ )
all_users = list(booking.collaborators.all())
all_users.append(booking.owner)
cls.makeAccessConfig(
@@ -1128,6 +1300,18 @@ class JobFactory(object):
continue
@classmethod
+ def makeGeneratedCloudConfigs(cls, resources=[], job=Job()):
+ for res in resources:
+ cif = GeneratedCloudConfig.objects.create(resource_id=res.labid, booking=job.booking, rconfig=res.config)
+ cif.save()
+
+ cif = CloudInitFile.create(priority=0, text=cif.serialize())
+ cif.save()
+
+ res.config.cloud_init_files.add(cif)
+ res.config.save()
+
+ @classmethod
def makeHardwareConfigs(cls, resources=[], job=Job()):
"""
Create and save HardwareConfig.
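For reference, a minimal sketch of the document that GeneratedCloudConfig._to_dict()/serialize() builds, assuming one untagged public interface; the hostname, interface name, MAC and user entry are made up here, whereas the real model pulls them from the booking, its collaborators and the resource configuration. The "#cloud-config" header is prepended by the user-data view rather than by serialize() itself.

import yaml

user_data = {
    "hostname": "node-1",
    "users": [
        "default",  # keep the distro default user
        {
            "name": "jdoe",
            "groups": "sudo",
            "sudo": "ALL=(ALL) NOPASSWD:ALL",
            "ssh_authorized_keys": ["ssh-rsa AAAA... jdoe@example"],
        },
    ],
    "network": {
        "version": 1,
        "config": [
            {"type": "physical", "name": "eno1",
             "mac_address": "aa:bb:cc:dd:ee:ff",
             "subnets": [{"type": "dhcp"}]},
            {"type": "nameserver", "address": ["10.64.0.1", "8.8.8.8"]},
        ],
    },
    "runcmd": [["sudo", "dhclient", "-r"], ["sudo", "dhclient"]],
    "system_info": {"default_user": {"name": "opnfv", "groups": "sudo"}},
}

print("#cloud-config\n" + yaml.dump(user_data))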
diff --git a/src/api/urls.py b/src/api/urls.py
index 52a6fc7..3693979 100644
--- a/src/api/urls.py
+++ b/src/api/urls.py
@@ -46,17 +46,28 @@ from api.views import (
lab_user,
GenerateTokenView,
analytics_job,
+ resource_ci_metadata,
+ resource_ci_userdata,
+ resource_ci_userdata_directory,
+ all_images,
+ all_opsyss,
+ single_image,
+ single_opsys,
user_bookings,
- make_booking,
- available_templates,
- images_for_template,
specific_booking,
extend_booking,
+ make_booking,
+ list_labs,
all_users,
- list_labs
+ images_for_template,
+ available_templates,
)
urlpatterns = [
+ path('labs/<slug:lab_name>/opsys/<slug:opsys_id>', single_opsys),
+ path('labs/<slug:lab_name>/image/<slug:image_id>', single_image),
+ path('labs/<slug:lab_name>/opsys', all_opsyss),
+ path('labs/<slug:lab_name>/image', all_images),
path('labs/<slug:lab_name>/profile', lab_profile),
path('labs/<slug:lab_name>/status', lab_status),
path('labs/<slug:lab_name>/inventory', lab_inventory),
@@ -67,6 +78,9 @@ urlpatterns = [
path('labs/<slug:lab_name>/booking/<int:booking_id>/idf', get_idf, name="get-idf"),
path('labs/<slug:lab_name>/jobs/<int:job_id>', specific_job),
path('labs/<slug:lab_name>/jobs/<int:job_id>/<slug:task_id>', specific_task),
+ path('labs/<slug:lab_name>/jobs/<int:job_id>/cidata/<slug:resource_id>/user-data', resource_ci_userdata_directory, name="specific-user-data"),
+ path('labs/<slug:lab_name>/jobs/<int:job_id>/cidata/<slug:resource_id>/meta-data', resource_ci_metadata, name="specific-meta-data"),
+ path('labs/<slug:lab_name>/jobs/<int:job_id>/cidata/<slug:resource_id>/<int:file_id>/user-data', resource_ci_userdata, name="user-data-dir"),
path('labs/<slug:lab_name>/jobs/new', new_jobs),
path('labs/<slug:lab_name>/jobs/current', current_jobs),
path('labs/<slug:lab_name>/jobs/done', done_jobs),
diff --git a/src/api/views.py b/src/api/views.py
index c0da1bc..84d19cc 100644
--- a/src/api/views.py
+++ b/src/api/views.py
@@ -19,24 +19,34 @@ from django.shortcuts import redirect, get_object_or_404
from django.utils.decorators import method_decorator
from django.utils import timezone
from django.views import View
+from django.http import HttpResponseNotFound
from django.http.response import JsonResponse, HttpResponse
from rest_framework import viewsets
from rest_framework.authtoken.models import Token
from django.views.decorators.csrf import csrf_exempt
from django.core.exceptions import ObjectDoesNotExist
+from django.db.models import Q
from api.serializers.booking_serializer import BookingSerializer
from api.serializers.old_serializers import UserSerializer
from api.forms import DowntimeForm
from account.models import UserProfile, Lab
from booking.models import Booking
-from api.models import LabManagerTracker, AutomationAPIManager, get_task, APILog
+from booking.quick_deployer import create_from_API
+from api.models import LabManagerTracker, get_task, Job, AutomationAPIManager, APILog
from notifier.manager import NotificationHandler
from analytics.models import ActiveVPNUser
-from booking.quick_deployer import create_from_API
-from resource_inventory.models import ResourceTemplate
-from django.db.models import Q
-
+from resource_inventory.models import (
+ Image,
+ Opsys,
+ CloudInitFile,
+ ResourceQuery,
+ ResourceTemplate,
+)
+
+import yaml
+import uuid
+from deepmerge import Merger
"""
API views.
@@ -88,6 +98,83 @@ def lab_host(request, lab_name="", host_id=""):
if request.method == "POST":
return JsonResponse(lab_manager.update_host(host_id, request.POST), safe=False)
+# API extension for Cobbler integration
+
+
+def all_images(request, lab_name=""):
+ a = []
+ for i in Image.objects.all():
+ a.append(i.serialize())
+ return JsonResponse(a, safe=False)
+
+
+def all_opsyss(request, lab_name=""):
+ a = []
+ for opsys in Opsys.objects.all():
+ a.append(opsys.serialize())
+
+ return JsonResponse(a, safe=False)
+
+
+@csrf_exempt
+def single_image(request, lab_name="", image_id=""):
+ lab_token = request.META.get('HTTP_AUTH_TOKEN')
+ lab_manager = LabManagerTracker.get(lab_name, lab_token)
+ img = lab_manager.get_image(image_id).first()
+
+ if request.method == "GET":
+ if not img:
+ return HttpResponse(status=404)
+ return JsonResponse(img.serialize(), safe=False)
+
+ if request.method == "POST":
+ # get POST data
+ data = json.loads(request.body.decode('utf-8'))
+ if img:
+ img.update(data)
+ else:
+ # append lab name and the ID from the URL
+ data['from_lab_id'] = lab_name
+ data['lab_id'] = image_id
+
+ # create and save a new Image object
+ img = Image.new_from_data(data)
+
+ img.save()
+
+ # indicate success in response
+ return HttpResponse(status=200)
+ return HttpResponse(status=405)
+
+
+@csrf_exempt
+def single_opsys(request, lab_name="", opsys_id=""):
+ lab_token = request.META.get('HTTP_AUTH_TOKEN')
+ lab_manager = LabManagerTracker.get(lab_name, lab_token)
+ opsys = lab_manager.get_opsys(opsys_id).first()
+
+ if request.method == "GET":
+ if not opsys:
+ return HttpResponse(status=404)
+ return JsonResponse(opsys.serialize(), safe=False)
+
+ if request.method == "POST":
+ data = json.loads(request.body.decode('utf-8'))
+ if opsys:
+ opsys.update(data)
+ else:
+ # only name, available, and obsolete are needed to create an Opsys
+ # other fields are derived from the URL parameters
+ data['from_lab_id'] = lab_name
+ data['lab_id'] = opsys_id
+ opsys = Opsys.new_from_data(data)
+
+ opsys.save()
+ return HttpResponse(status=200)
+ return HttpResponse(status=405)
+
+# end API extension
+
def get_pdf(request, lab_name="", booking_id=""):
lab_token = request.META.get('HTTP_AUTH_TOKEN')
@@ -175,6 +262,86 @@ def specific_job(request, lab_name="", job_id=""):
return JsonResponse(lab_manager.get_job(job_id), safe=False)
+@csrf_exempt
+def resource_ci_userdata(request, lab_name="", job_id="", resource_id="", file_id=0):
+ # lab_token = request.META.get('HTTP_AUTH_TOKEN')
+ # lab_manager = LabManagerTracker.get(lab_name, lab_token)
+
+ # job = lab_manager.get_job(job_id)
+ Job.objects.get(id=job_id) # verify a valid job was given, even if we don't use it
+
+ cifile = None
+ try:
+ cifile = CloudInitFile.objects.get(id=file_id)
+ except ObjectDoesNotExist:
+ return HttpResponseNotFound("Could not find a cloud-init file with id " + str(file_id))
+
+ text = cifile.text
+
+ prepended_text = "#cloud-config\n"
+ # mstrat = CloudInitFile.merge_strategy()
+ # prepended_text = prepended_text + yaml.dump({"merge_strategy": mstrat}) + "\n"
+ # print("in cloudinitfile create")
+ text = prepended_text + text
+ cloud_dict = {
+ "datasource": {
+ "None": {
+ "metadata": {
+ "instance-id": str(uuid.uuid4())
+ },
+ "userdata_raw": text,
+ },
+ },
+ "datasource_list": ["None"],
+ }
+
+ return HttpResponse(yaml.dump(cloud_dict), status=200)
+
+
+@csrf_exempt
+def resource_ci_metadata(request, lab_name="", job_id="", resource_id="", file_id=0):
+ return HttpResponse("#cloud-config", status=200)
+
+
+@csrf_exempt
+def resource_ci_userdata_directory(request, lab_name="", job_id="", resource_id=""):
+ # files = [{"id": file.file_id, "priority": file.priority} for file in CloudInitFile.objects.filter(job__id=job_id, resource_id=resource_id).order_by("priority").all()]
+ resource = ResourceQuery.get(labid=resource_id, lab=Lab.objects.get(name=lab_name))
+ files = resource.config.cloud_init_files
+ files = [{"id": file.id, "priority": file.priority} for file in files.order_by("priority").all()]
+
+ d = {
+ 'merge_failures': []
+ }
+
+ merger = Merger(
+ [
+ (list, ["append"]),
+ (dict, ["merge"]),
+ ],
+ ["override"], # fallback
+ ["override"], # if types conflict (shouldn't happen in CI, but handle case)
+ )
+
+ for f in resource.config.cloud_init_files.order_by("priority").all():
+ try:
+ other_dict = yaml.safe_load(f.text)
+ if not isinstance(other_dict, dict):
+ raise Exception("CI file was valid yaml but was not a dict")
+
+ merger.merge(d, other_dict)
+ except Exception as e:
+ # if a file fails to parse or merge, skip it and record the failure
+ print("Failed to merge file in, as it had invalid content:", f.id)
+ print("File text was:")
+ print(f.text)
+ d['merge_failures'].append({f.id: str(e)})
+
+ file = CloudInitFile.create(text=yaml.dump(d), priority=0)
+
+ return HttpResponse(json.dumps([{"id": file.id, "priority": file.priority}]), status=200)
+
+
def new_jobs(request, lab_name=""):
lab_token = request.META.get('HTTP_AUTH_TOKEN')
lab_manager = LabManagerTracker.get(lab_name, lab_token)
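
To illustrate the merge semantics used in resource_ci_userdata_directory: deepmerge's Merger is configured to append lists and recursively merge dicts, with "override" as both the fallback and the type-conflict strategy, so later (higher-priority) cloud-init fragments win on scalar keys. A small self-contained sketch, with made-up fragment contents:

import yaml
from deepmerge import Merger

merger = Merger(
    [(list, ["append"]), (dict, ["merge"])],  # per-type strategies
    ["override"],                             # fallback for other types
    ["override"],                             # strategy when types conflict
)

base = {"users": ["default"], "hostname": "node-1"}
extra = yaml.safe_load("""
users:
  - name: jdoe
    groups: sudo
hostname: node-1-lfedge
runcmd:
  - [sudo, dhclient]
""")

merger.merge(base, extra)  # merges into base in place (and returns it)
print(yaml.dump(base))
# users now holds "default" plus the jdoe entry, runcmd was added,
# and hostname was overridden by the later fragment.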