Diffstat (limited to 'src/api')
-rw-r--r--  src/api/migrations/0017_auto_20210630_1629.py  |  18
-rw-r--r--  src/api/migrations/0018_cloudinitfile.py       |  25
-rw-r--r--  src/api/migrations/0019_auto_20210907_1448.py  |  29
-rw-r--r--  src/api/models.py                              | 188
-rw-r--r--  src/api/urls.py                                |  22
-rw-r--r--  src/api/views.py                               | 177
6 files changed, 448 insertions, 11 deletions
diff --git a/src/api/migrations/0017_auto_20210630_1629.py b/src/api/migrations/0017_auto_20210630_1629.py
new file mode 100644
index 0000000..643ff5f
--- /dev/null
+++ b/src/api/migrations/0017_auto_20210630_1629.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2 on 2021-06-30 16:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('api', '0016_auto_20201109_2149'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='snapshotconfig',
+ name='image',
+ field=models.CharField(max_length=200, null=True),
+ ),
+ ]
diff --git a/src/api/migrations/0018_cloudinitfile.py b/src/api/migrations/0018_cloudinitfile.py
new file mode 100644
index 0000000..4e41b39
--- /dev/null
+++ b/src/api/migrations/0018_cloudinitfile.py
@@ -0,0 +1,25 @@
+# Generated by Django 2.2 on 2021-07-01 20:45
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('resource_inventory', '0019_auto_20210701_1947'),
+ ('booking', '0008_auto_20201109_1947'),
+ ('api', '0017_auto_20210630_1629'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='CloudInitFile',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('resource_id', models.CharField(max_length=200)),
+ ('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='booking.Booking')),
+ ('rconfig', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='resource_inventory.ResourceConfiguration')),
+ ],
+ ),
+ ]
diff --git a/src/api/migrations/0019_auto_20210907_1448.py b/src/api/migrations/0019_auto_20210907_1448.py
new file mode 100644
index 0000000..92140fb
--- /dev/null
+++ b/src/api/migrations/0019_auto_20210907_1448.py
@@ -0,0 +1,29 @@
+# Generated by Django 2.2 on 2021-09-07 14:48
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('booking', '0008_auto_20201109_1947'),
+ ('resource_inventory', '0020_cloudinitfile'),
+ ('api', '0018_cloudinitfile'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='GeneratedCloudConfig',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('resource_id', models.CharField(max_length=200)),
+ ('text', models.TextField(blank=True, null=True)),
+ ('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='booking.Booking')),
+ ('rconfig', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='resource_inventory.ResourceConfiguration')),
+ ],
+ ),
+ migrations.DeleteModel(
+ name='CloudInitFile',
+ ),
+ ]
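
The three migrations above depend on `booking` and `resource_inventory` migrations shipped with the same change, and Django's migration graph orders them automatically. A minimal sketch of applying them non-interactively from a management shell (only the app label is taken from the files above):

# Sketch: apply the new api migrations; the cross-app dependencies
# (booking, resource_inventory) declared above are resolved by the migration graph.
from django.core.management import call_command

call_command("migrate", "api", interactive=False)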
diff --git a/src/api/models.py b/src/api/models.py
index d85f3e9..5928ea9 100644
--- a/src/api/models.py
+++ b/src/api/models.py
@@ -19,18 +19,22 @@ from django.utils import timezone
import json
import uuid
+import yaml
from booking.models import Booking
from resource_inventory.models import (
Lab,
ResourceProfile,
Image,
+ Opsys,
Interface,
ResourceOPNFVConfig,
RemoteInfo,
OPNFVConfig,
ConfigState,
- ResourceQuery
+ ResourceQuery,
+ ResourceConfiguration,
+ CloudInitFile
)
from resource_inventory.idf_templater import IDFTemplater
from resource_inventory.pdf_templater import PDFTemplater
@@ -84,6 +88,18 @@ class LabManager:
def __init__(self, lab):
self.lab = lab
+ def get_opsyss(self):
+ return Opsys.objects.filter(from_lab=self.lab)
+
+ def get_images(self):
+ return Image.objects.filter(from_lab=self.lab)
+
+ def get_image(self, image_id):
+ return Image.objects.filter(from_lab=self.lab, lab_id=image_id)
+
+ def get_opsys(self, opsys_id):
+ return Opsys.objects.filter(from_lab=self.lab, lab_id=opsys_id)
+
def get_downtime(self):
return Downtime.objects.filter(start__lt=timezone.now(), end__gt=timezone.now(), lab=self.lab)
@@ -338,6 +354,157 @@ class LabManager:
return profile_ser
+class GeneratedCloudConfig(models.Model):
+ resource_id = models.CharField(max_length=200)
+ booking = models.ForeignKey(Booking, on_delete=models.CASCADE)
+ rconfig = models.ForeignKey(ResourceConfiguration, on_delete=models.CASCADE)
+ text = models.TextField(null=True, blank=True)
+
+ def _normalize_username(self, username: str) -> str:
+ # TODO: make usernames posix compliant
+ return username
+
+ def _get_ssh_string(self, username: str) -> str:
+ user = User.objects.get(username=username)
+ uprofile = user.userprofile
+
+ ssh_file = uprofile.ssh_public_key
+
+ escaped_file = ssh_file.open().read().decode(encoding="UTF-8").replace("\n", " ")
+
+ return escaped_file
+
+ def _serialize_users(self):
+ """
+ returns the dictionary to be placed behind the `users` field of the toplevel c-i dict
+ """
+ # conserves distro default user
+ user_array = ["default"]
+
+ users = list(self.booking.collaborators.all())
+ users.append(self.booking.owner)
+ for collaborator in users:
+ userdict = {}
+
+ # TODO: validate if usernames are valid as linux usernames (and provide an override potentially)
+ userdict['name'] = self._normalize_username(collaborator.username)
+
+ userdict['groups'] = "sudo"
+ userdict['sudo'] = "ALL=(ALL) NOPASSWD:ALL"
+
+ userdict['ssh_authorized_keys'] = [self._get_ssh_string(collaborator.username)]
+
+ user_array.append(userdict)
+
+ # user_array.append({
+ # "name": "opnfv",
+ # "passwd": "$6$k54L.vim1cLaEc4$5AyUIrufGlbtVBzuCWOlA1yV6QdD7Gr2MzwIs/WhuYR9ebSfh3Qlb7djkqzjwjxpnSAonK1YOabPP6NxUDccu.",
+ # "ssh_redirect_user": True,
+ # "sudo": "ALL=(ALL) NOPASSWD:ALL",
+ # "groups": "sudo",
+ # })
+
+ return user_array
+
+ # TODO: make this configurable
+ def _serialize_sysinfo(self):
+ defuser = {}
+ defuser['name'] = 'opnfv'
+ defuser['plain_text_passwd'] = 'OPNFV_HOST'
+ defuser['home'] = '/home/opnfv'
+ defuser['shell'] = '/bin/bash'
+ defuser['lock_passwd'] = True
+ defuser['gecos'] = 'Lab Manager User'
+ defuser['groups'] = 'sudo'
+
+ return {'default_user': defuser}
+
+ # TODO: make this configurable
+ def _serialize_runcmds(self):
+ cmdlist = []
+
+ # have hosts run dhcp on boot
+ cmdlist.append(['sudo', 'dhclient', '-r'])
+ cmdlist.append(['sudo', 'dhclient'])
+
+ return cmdlist
+
+ def _serialize_netconf_v1(self):
+ # interfaces = {} # map from iface_name => dhcp_config
+ # vlans = {} # map from vlan_id => dhcp_config
+
+ config_arr = []
+
+ for interface in self._resource().interfaces.all():
+ interface_name = interface.profile.name
+ interface_mac = interface.mac_address
+
+ iface_dict_entry = {
+ "type": "physical",
+ "name": interface_name,
+ "mac_address": interface_mac,
+ }
+
+ for vlan in interface.config.all():
+ if vlan.tagged:
+ vlan_dict_entry = {'type': 'vlan'}
+ vlan_dict_entry['name'] = str(interface_name) + "." + str(vlan.vlan_id)
+ vlan_dict_entry['vlan_link'] = str(interface_name)
+ vlan_dict_entry['vlan_id'] = int(vlan.vlan_id)
+ vlan_dict_entry['mac_address'] = str(interface_mac)
+ if vlan.public:
+ vlan_dict_entry["subnets"] = [{"type": "dhcp"}]
+ config_arr.append(vlan_dict_entry)
+ if (not vlan.tagged) and vlan.public:
+ iface_dict_entry["subnets"] = [{"type": "dhcp"}]
+
+ # vlan_dict_entry['mtu'] = # TODO, determine override MTU if needed
+
+ config_arr.append(iface_dict_entry)
+
+ ns_dict = {
+ 'type': 'nameserver',
+ 'address': ['10.64.0.1', '8.8.8.8']
+ }
+
+ config_arr.append(ns_dict)
+
+ full_dict = {'version': 1, 'config': config_arr}
+
+ return full_dict
+
+ @classmethod
+ def get(cls, booking_id: int, resource_lab_id: str, file_id: int):
+ return GeneratedCloudConfig.objects.get(resource_id=resource_lab_id, booking__id=booking_id, file_id=file_id)
+
+ def _resource(self):
+ return ResourceQuery.get(labid=self.resource_id, lab=self.booking.lab)
+
+ # def _get_facts(self):
+ # resource = self._resource()
+
+ # hostname = self.rconfig.name
+ # iface_configs = for_config.interface_configs.all()
+
+ def _to_dict(self):
+ main_dict = {}
+
+ main_dict['users'] = self._serialize_users()
+ main_dict['network'] = self._serialize_netconf_v1()
+ main_dict['hostname'] = self.rconfig.name
+
+ # add first startup commands
+ main_dict['runcmd'] = self._serialize_runcmds()
+
+ # configure distro default user
+ main_dict['system_info'] = self._serialize_sysinfo()
+
+ return main_dict
+
+ def serialize(self) -> str:
+ return yaml.dump(self._to_dict())
+
+
class APILog(models.Model):
user = models.ForeignKey(User, on_delete=models.PROTECT)
call_time = models.DateTimeField(auto_now=True)
@@ -761,6 +928,7 @@ class HardwareConfig(TaskConfig):
return self.get_delta()
def get_delta(self):
+ # TODO: grab the GeneratedCloudConfig urls from self.hosthardwarerelation.get_resource()
return self.format_delta(
self.hosthardwarerelation.get_resource().get_configuration(self.state),
self.hosthardwarerelation.lab_token)
@@ -813,7 +981,7 @@ class NetworkConfig(TaskConfig):
class SnapshotConfig(TaskConfig):
resource_id = models.CharField(max_length=200, default="default_id")
- image = models.IntegerField(null=True)
+ image = models.CharField(max_length=200, null=True) # cobbler ID
dashboard_id = models.IntegerField()
delta = models.TextField(default="{}")
@@ -1104,6 +1272,10 @@ class JobFactory(object):
booking=booking,
job=job
)
+ cls.makeGeneratedCloudConfigs(
+ resources=resources,
+ job=job
+ )
all_users = list(booking.collaborators.all())
all_users.append(booking.owner)
cls.makeAccessConfig(
@@ -1128,6 +1300,18 @@ class JobFactory(object):
continue
@classmethod
+ def makeGeneratedCloudConfigs(cls, resources=[], job=Job()):
+ for res in resources:
+ cif = GeneratedCloudConfig.objects.create(resource_id=res.labid, booking=job.booking, rconfig=res.config)
+ cif.save()
+
+ cif = CloudInitFile.create(priority=0, text=cif.serialize())
+ cif.save()
+
+ res.config.cloud_init_files.add(cif)
+ res.config.save()
+
+ @classmethod
def makeHardwareConfigs(cls, resources=[], job=Job()):
"""
Create and save HardwareConfig.
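
For context, `GeneratedCloudConfig.serialize()` returns plain YAML built from `_to_dict()`. A hand-written sketch of creating one for a single resource and the rough shape of its output; the booking, resource, usernames and hardware values are invented for illustration:

# Illustrative only: serialize the cloud-config for one resource of a booking.
# `booking` and `resource` are assumed to already exist.
cfg = GeneratedCloudConfig.objects.create(
    resource_id=resource.labid,
    booking=booking,
    rconfig=resource.config,
)
print(cfg.serialize())
# Rough shape of the YAML (values invented):
#   hostname: node-1
#   users:
#     - default
#     - {name: alice, groups: sudo, sudo: "ALL=(ALL) NOPASSWD:ALL",
#        ssh_authorized_keys: ["ssh-rsa AAAA... alice@example.com"]}
#   network:
#     version: 1
#     config:
#       - {type: physical, name: eno1, mac_address: "aa:bb:cc:dd:ee:ff",
#          subnets: [{type: dhcp}]}
#       - {type: nameserver, address: [10.64.0.1, 8.8.8.8]}
#   runcmd: [[sudo, dhclient, -r], [sudo, dhclient]]
#   system_info:
#     default_user: {name: opnfv, groups: sudo, shell: /bin/bash, ...}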
diff --git a/src/api/urls.py b/src/api/urls.py
index 52a6fc7..3693979 100644
--- a/src/api/urls.py
+++ b/src/api/urls.py
@@ -46,17 +46,28 @@ from api.views import (
lab_user,
GenerateTokenView,
analytics_job,
+ resource_ci_metadata,
+ resource_ci_userdata,
+ resource_ci_userdata_directory,
+ all_images,
+ all_opsyss,
+ single_image,
+ single_opsys,
user_bookings,
- make_booking,
- available_templates,
- images_for_template,
specific_booking,
extend_booking,
+ make_booking,
+ list_labs,
all_users,
- list_labs
+ images_for_template,
+ available_templates,
)
urlpatterns = [
+ path('labs/<slug:lab_name>/opsys/<slug:opsys_id>', single_opsys),
+ path('labs/<slug:lab_name>/image/<slug:image_id>', single_image),
+ path('labs/<slug:lab_name>/opsys', all_opsyss),
+ path('labs/<slug:lab_name>/image', all_images),
path('labs/<slug:lab_name>/profile', lab_profile),
path('labs/<slug:lab_name>/status', lab_status),
path('labs/<slug:lab_name>/inventory', lab_inventory),
@@ -67,6 +78,9 @@ urlpatterns = [
path('labs/<slug:lab_name>/booking/<int:booking_id>/idf', get_idf, name="get-idf"),
path('labs/<slug:lab_name>/jobs/<int:job_id>', specific_job),
path('labs/<slug:lab_name>/jobs/<int:job_id>/<slug:task_id>', specific_task),
+ path('labs/<slug:lab_name>/jobs/<int:job_id>/cidata/<slug:resource_id>/user-data', resource_ci_userdata_directory, name="specific-user-data"),
+ path('labs/<slug:lab_name>/jobs/<int:job_id>/cidata/<slug:resource_id>/meta-data', resource_ci_metadata, name="specific-meta-data"),
+ path('labs/<slug:lab_name>/jobs/<int:job_id>/cidata/<slug:resource_id>/<int:file_id>/user-data', resource_ci_userdata, name="user-data-dir"),
path('labs/<slug:lab_name>/jobs/new', new_jobs),
path('labs/<slug:lab_name>/jobs/current', current_jobs),
path('labs/<slug:lab_name>/jobs/done', done_jobs),
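
The three `cidata` routes give each resource a cloud-init style directory of meta-data plus user-data. A sketch of fetching them from the lab side; the base URL, lab name, job id, resource id and token are placeholders, not values from this change:

# Sketch of a lab client hitting the new cidata endpoints (placeholder values).
import requests

BASE = "https://dashboard.example.org/api"    # assumed mount point of api/urls.py
HEADERS = {"Auth-Token": "<lab-token>"}       # surfaces as HTTP_AUTH_TOKEN in the views

# merged per-resource user-data; returns a JSON list such as [{"id": 7, "priority": 0}]
listing = requests.get(f"{BASE}/labs/unh_iol/jobs/42/cidata/hpe13/user-data", headers=HEADERS).json()

# meta-data is currently a constant "#cloud-config" stub
meta = requests.get(f"{BASE}/labs/unh_iol/jobs/42/cidata/hpe13/meta-data", headers=HEADERS).text

# an individual file, wrapped in a "None" datasource dict by resource_ci_userdata
userdata = requests.get(
    f"{BASE}/labs/unh_iol/jobs/42/cidata/hpe13/{listing[0]['id']}/user-data",
    headers=HEADERS,
).text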
diff --git a/src/api/views.py b/src/api/views.py
index c0da1bc..84d19cc 100644
--- a/src/api/views.py
+++ b/src/api/views.py
@@ -19,24 +19,34 @@ from django.shortcuts import redirect, get_object_or_404
from django.utils.decorators import method_decorator
from django.utils import timezone
from django.views import View
+from django.http import HttpResponseNotFound
from django.http.response import JsonResponse, HttpResponse
from rest_framework import viewsets
from rest_framework.authtoken.models import Token
from django.views.decorators.csrf import csrf_exempt
from django.core.exceptions import ObjectDoesNotExist
+from django.db.models import Q
from api.serializers.booking_serializer import BookingSerializer
from api.serializers.old_serializers import UserSerializer
from api.forms import DowntimeForm
from account.models import UserProfile, Lab
from booking.models import Booking
-from api.models import LabManagerTracker, AutomationAPIManager, get_task, APILog
+from booking.quick_deployer import create_from_API
+from api.models import LabManagerTracker, get_task, Job, AutomationAPIManager, APILog
from notifier.manager import NotificationHandler
from analytics.models import ActiveVPNUser
-from booking.quick_deployer import create_from_API
-from resource_inventory.models import ResourceTemplate
-from django.db.models import Q
-
+from resource_inventory.models import (
+ Image,
+ Opsys,
+ CloudInitFile,
+ ResourceQuery,
+ ResourceTemplate,
+)
+
+import yaml
+import uuid
+from deepmerge import Merger
"""
API views.
@@ -88,6 +98,83 @@ def lab_host(request, lab_name="", host_id=""):
if request.method == "POST":
return JsonResponse(lab_manager.update_host(host_id, request.POST), safe=False)
+# API extension for Cobbler integration
+
+
+def all_images(request, lab_name=""):
+ a = []
+ for i in Image.objects.all():
+ a.append(i.serialize())
+ return JsonResponse(a, safe=False)
+
+
+def all_opsyss(request, lab_name=""):
+ a = []
+ for opsys in Opsys.objects.all():
+ a.append(opsys.serialize())
+
+ return JsonResponse(a, safe=False)
+
+
+@csrf_exempt
+def single_image(request, lab_name="", image_id=""):
+ lab_token = request.META.get('HTTP_AUTH_TOKEN')
+ lab_manager = LabManagerTracker.get(lab_name, lab_token)
+ img = lab_manager.get_image(image_id).first()
+
+ if request.method == "GET":
+ if not img:
+ return HttpResponse(status=404)
+ return JsonResponse(img.serialize(), safe=False)
+
+ if request.method == "POST":
+ # get POST data
+ data = json.loads(request.body.decode('utf-8'))
+ if img:
+ img.update(data)
+ else:
+ # fill in the lab name and image ID taken from the URL
+ data['from_lab_id'] = lab_name
+ data['lab_id'] = image_id
+
+ # create and save a new Image object
+ img = Image.new_from_data(data)
+
+ img.save()
+
+ # indicate success in response
+ return HttpResponse(status=200)
+ return HttpResponse(status=405)
+
+
+@csrf_exempt
+def single_opsys(request, lab_name="", opsys_id=""):
+ lab_token = request.META.get('HTTP_AUTH_TOKEN')
+ lab_manager = LabManagerTracker.get(lab_name, lab_token)
+ opsys = lab_manager.get_opsys(opsys_id).first()
+
+ if request.method == "GET":
+ if not opsys:
+ return HttpResponse(status=404)
+ return JsonResponse(opsys.serialize(), safe=False)
+
+ if request.method == "POST":
+ data = json.loads(request.body.decode('utf-8'))
+ if opsys:
+ opsys.update(data)
+ else:
+ # only name, available, and obsolete are needed to create an Opsys
+ # other fields are derived from the URL parameters
+ data['from_lab_id'] = lab_name
+ data['lab_id'] = opsys_id
+ opsys = Opsys.new_from_data(data)
+
+ opsys.save()
+ return HttpResponse(status=200)
+ return HttpResponse(status=405)
+
+# end API extension
+
def get_pdf(request, lab_name="", booking_id=""):
lab_token = request.META.get('HTTP_AUTH_TOKEN')
@@ -175,6 +262,86 @@ def specific_job(request, lab_name="", job_id=""):
return JsonResponse(lab_manager.get_job(job_id), safe=False)
+@csrf_exempt
+def resource_ci_userdata(request, lab_name="", job_id="", resource_id="", file_id=0):
+ # lab_token = request.META.get('HTTP_AUTH_TOKEN')
+ # lab_manager = LabManagerTracker.get(lab_name, lab_token)
+
+ # job = lab_manager.get_job(job_id)
+ Job.objects.get(id=job_id) # verify a valid job was given, even if we don't use it
+
+ cifile = None
+ try:
+ cifile = CloudInitFile.objects.get(id=file_id)
+ except ObjectDoesNotExist:
+ return HttpResponseNotFound("Could not find a cloud-init file with id " + str(file_id))
+
+ text = cifile.text
+
+ prepended_text = "#cloud-config\n"
+ # mstrat = CloudInitFile.merge_strategy()
+ # prepended_text = prepended_text + yaml.dump({"merge_strategy": mstrat}) + "\n"
+ # print("in cloudinitfile create")
+ text = prepended_text + text
+ cloud_dict = {
+ "datasource": {
+ "None": {
+ "metadata": {
+ "instance-id": str(uuid.uuid4())
+ },
+ "userdata_raw": text,
+ },
+ },
+ "datasource_list": ["None"],
+ }
+
+ return HttpResponse(yaml.dump(cloud_dict), status=200)
+
+
+@csrf_exempt
+def resource_ci_metadata(request, lab_name="", job_id="", resource_id="", file_id=0):
+ return HttpResponse("#cloud-config", status=200)
+
+
+@csrf_exempt
+def resource_ci_userdata_directory(request, lab_name="", job_id="", resource_id=""):
+ # files = [{"id": file.file_id, "priority": file.priority} for file in CloudInitFile.objects.filter(job__id=job_id, resource_id=resource_id).order_by("priority").all()]
+ resource = ResourceQuery.get(labid=resource_id, lab=Lab.objects.get(name=lab_name))
+ files = resource.config.cloud_init_files
+ files = [{"id": file.id, "priority": file.priority} for file in files.order_by("priority").all()]
+
+ d = {
+ 'merge_failures': []
+ }
+
+ merger = Merger(
+ [
+ (list, ["append"]),
+ (dict, ["merge"]),
+ ],
+ ["override"], # fallback
+ ["override"], # if types conflict (shouldn't happen in CI, but handle case)
+ )
+
+ for f in resource.config.cloud_init_files.order_by("priority").all():
+ try:
+ other_dict = yaml.safe_load(f.text)
+ if not isinstance(other_dict, dict):
+ raise Exception("CI file was valid yaml but was not a dict")
+
+ merger.merge(d, other_dict)
+ except Exception as e:
+ # if the file fails to parse or merge, skip it and record the failure
+ print("Failed to merge file in, as it had invalid content:", f.id)
+ print("File text was:")
+ print(f.text)
+ d['merge_failures'].append({f.id: str(e)})
+
+ file = CloudInitFile.create(text=yaml.dump(d), priority=0)
+
+ return HttpResponse(json.dumps([{"id": file.id, "priority": file.priority}]), status=200)
+
+
def new_jobs(request, lab_name=""):
lab_token = request.META.get('HTTP_AUTH_TOKEN')
lab_manager = LabManagerTracker.get(lab_name, lab_token)
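
The directory view merges every `CloudInitFile` attached to the resource config with deepmerge: nested dicts merge, lists append, and anything else falls back to override. A standalone illustration of that policy with the same `Merger` configuration; the two fragments are invented:

# Standalone illustration of the merge policy used in resource_ci_userdata_directory.
import yaml
from deepmerge import Merger

merger = Merger([(list, ["append"]), (dict, ["merge"])], ["override"], ["override"])

base = {"merge_failures": [], "users": ["default"]}
fragment = yaml.safe_load("""
users:
  - name: alice
    groups: sudo
runcmd:
  - [sudo, dhclient]
""")

merger.merge(base, fragment)   # mutates and returns base
print(yaml.dump(base))
# users now holds both "default" and the alice entry, runcmd is added,
# and merge_failures is left untouched because the fragment does not mention it.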