author    Jack Morgan <jack.morgan@intel.com>  2017-09-26 00:10:21 +0000
committer Gerrit Code Review <gerrit@opnfv.org>  2017-09-26 00:10:21 +0000
commit    f33a03d8f07c2b239322e33a271ef2f1d665313a (patch)
tree      7664324153de2ef41fa9cc9f07a26ce6aff6b38d
parent    25b2adb887b5e1cab767023476dd0ec2f02b4b59 (diff)
parent    aeb2791f4d3a5e23090e56c330436205a61629d7 (diff)
Merge "Remove 'tools' Directory"
-rw-r--r--  tools/laas-fog/LaaS_Diagram.jpg  bin 888779 -> 0 bytes
-rw-r--r--  tools/laas-fog/README  167
-rw-r--r--  tools/laas-fog/conf/domain.yaml  108
-rw-r--r--  tools/laas-fog/conf/fuel.yaml  17
-rw-r--r--  tools/laas-fog/conf/inventory.yaml  6
-rw-r--r--  tools/laas-fog/conf/joid.yaml  17
-rw-r--r--  tools/laas-fog/conf/laas.yaml  17
-rw-r--r--  tools/laas-fog/conf/network.yaml  52
-rw-r--r--  tools/laas-fog/conf/pharos.yaml  11
-rw-r--r--  tools/laas-fog/conf/vpn.yaml  15
-rwxr-xr-x  tools/laas-fog/hostScripts/fuelInstall.sh  40
-rwxr-xr-x  tools/laas-fog/hostScripts/horizonNat.sh  31
-rwxr-xr-x  tools/laas-fog/hostScripts/ipnat.sh  34
-rwxr-xr-x  tools/laas-fog/hostScripts/joidInstall.sh  33
-rwxr-xr-x  tools/laas-fog/hostScripts/mkDisks.sh  20
-rwxr-xr-x  tools/laas-fog/hostScripts/vncAllow.sh  23
-rw-r--r--  tools/laas-fog/source/__init__.py  17
-rw-r--r--  tools/laas-fog/source/api/__init__.py  17
-rw-r--r--  tools/laas-fog/source/api/fog.py  288
-rw-r--r--  tools/laas-fog/source/api/fuel_api.py  306
-rw-r--r--  tools/laas-fog/source/api/libvirt_api.py  331
-rw-r--r--  tools/laas-fog/source/api/vpn.py  235
-rw-r--r--  tools/laas-fog/source/database.py  296
-rwxr-xr-x  tools/laas-fog/source/deploy.py  82
-rw-r--r--  tools/laas-fog/source/deployment_manager.py  108
-rw-r--r--  tools/laas-fog/source/domain.py  244
-rw-r--r--  tools/laas-fog/source/installers/__init__.py  17
-rw-r--r--  tools/laas-fog/source/installers/fuel.py  268
-rw-r--r--  tools/laas-fog/source/installers/installer.py  35
-rw-r--r--  tools/laas-fog/source/installers/joid.py  40
-rwxr-xr-x  tools/laas-fog/source/listen.py  59
-rw-r--r--  tools/laas-fog/source/network.py  103
-rwxr-xr-x  tools/laas-fog/source/pharos.py  217
-rwxr-xr-x  tools/laas-fog/source/pod_manager.py  144
-rwxr-xr-x  tools/laas-fog/source/resetDataBase.py  110
-rwxr-xr-x  tools/laas-fog/source/stop.sh  24
-rw-r--r--  tools/laas-fog/source/utilities.py  346
-rw-r--r--  tools/pharos-dashboard/.gitignore  46
-rw-r--r--  tools/pharos-dashboard/Makefile  38
-rw-r--r--  tools/pharos-dashboard/__init__.py  8
-rw-r--r--  tools/pharos-dashboard/booking_communication_agent.py  42
-rw-r--r--  tools/pharos-dashboard/config.env.sample  24
-rw-r--r--  tools/pharos-dashboard/config/nginx/pharos_dashboard.conf  24
-rwxr-xr-x  tools/pharos-dashboard/config/postgres/docker-entrypoint-initdb.d/pharos_dashboard.sh  14
-rw-r--r--  tools/pharos-dashboard/dashboard_api/__init__.py  8
-rw-r--r--  tools/pharos-dashboard/dashboard_api/api.py  91
-rw-r--r--  tools/pharos-dashboard/dashboard_notification/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/dashboard_notification/notification.py  120
-rw-r--r--  tools/pharos-dashboard/docker-compose.yml  77
-rw-r--r--  tools/pharos-dashboard/rabbitmq/Dockerfile  4
-rwxr-xr-x  tools/pharos-dashboard/rabbitmq/init.sh  10
-rw-r--r--  tools/pharos-dashboard/readme.txt  36
-rw-r--r--  tools/pharos-dashboard/src/__init__.py  8
-rw-r--r--  tools/pharos-dashboard/src/account/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/account/admin.py  15
-rw-r--r--  tools/pharos-dashboard/src/account/apps.py  15
-rw-r--r--  tools/pharos-dashboard/src/account/forms.py  22
-rw-r--r--  tools/pharos-dashboard/src/account/jira_util.py  65
-rw-r--r--  tools/pharos-dashboard/src/account/middleware.py  32
-rw-r--r--  tools/pharos-dashboard/src/account/migrations/0001_initial.py  38
-rw-r--r--  tools/pharos-dashboard/src/account/migrations/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/account/models.py  35
-rw-r--r--  tools/pharos-dashboard/src/account/rsa.pem  17
-rw-r--r--  tools/pharos-dashboard/src/account/rsa.pub  6
-rw-r--r--  tools/pharos-dashboard/src/account/tasks.py  34
-rw-r--r--  tools/pharos-dashboard/src/account/tests/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/account/tests/test_general.py  60
-rw-r--r--  tools/pharos-dashboard/src/account/urls.py  36
-rw-r--r--  tools/pharos-dashboard/src/account/views.py  153
-rw-r--r--  tools/pharos-dashboard/src/api/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/api/migrations/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/api/serializers.py  39
-rw-r--r--  tools/pharos-dashboard/src/api/urls.py  40
-rw-r--r--  tools/pharos-dashboard/src/api/views.py  53
-rw-r--r--  tools/pharos-dashboard/src/booking/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/booking/admin.py  17
-rw-r--r--  tools/pharos-dashboard/src/booking/apps.py  15
-rw-r--r--  tools/pharos-dashboard/src/booking/forms.py  23
-rw-r--r--  tools/pharos-dashboard/src/booking/migrations/0001_initial.py  68
-rw-r--r--  tools/pharos-dashboard/src/booking/migrations/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/booking/models.py  77
-rw-r--r--  tools/pharos-dashboard/src/booking/tests/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/booking/tests/test_models.py  94
-rw-r--r--  tools/pharos-dashboard/src/booking/tests/test_views.py  106
-rw-r--r--  tools/pharos-dashboard/src/booking/urls.py  39
-rw-r--r--  tools/pharos-dashboard/src/booking/views.py  122
-rw-r--r--  tools/pharos-dashboard/src/dashboard/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/dashboard/admin.py  20
-rw-r--r--  tools/pharos-dashboard/src/dashboard/apps.py  15
-rw-r--r--  tools/pharos-dashboard/src/dashboard/fixtures/dashboard.json  164
-rw-r--r--  tools/pharos-dashboard/src/dashboard/migrations/0001_initial.py  64
-rw-r--r--  tools/pharos-dashboard/src/dashboard/migrations/0002_auto_20170505_0815.py  42
-rw-r--r--  tools/pharos-dashboard/src/dashboard/migrations/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/dashboard/models.py  95
-rw-r--r--  tools/pharos-dashboard/src/dashboard/tasks.py  24
-rw-r--r--  tools/pharos-dashboard/src/dashboard/templatetags/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/dashboard/templatetags/jenkins_filters.py  38
-rw-r--r--  tools/pharos-dashboard/src/dashboard/templatetags/jira_filters.py  17
-rw-r--r--  tools/pharos-dashboard/src/dashboard/tests/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/dashboard/tests/test_models.py  69
-rw-r--r--  tools/pharos-dashboard/src/dashboard/tests/test_views.py  75
-rw-r--r--  tools/pharos-dashboard/src/dashboard/urls.py  41
-rw-r--r--  tools/pharos-dashboard/src/dashboard/views.py  141
-rw-r--r--  tools/pharos-dashboard/src/jenkins/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/jenkins/adapter.py  134
-rw-r--r--  tools/pharos-dashboard/src/jenkins/admin.py  17
-rw-r--r--  tools/pharos-dashboard/src/jenkins/apps.py  15
-rw-r--r--  tools/pharos-dashboard/src/jenkins/migrations/0001_initial.py  53
-rw-r--r--  tools/pharos-dashboard/src/jenkins/migrations/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/jenkins/models.py  62
-rw-r--r--  tools/pharos-dashboard/src/jenkins/tasks.py  64
-rw-r--r--  tools/pharos-dashboard/src/jenkins/tests.py  129
-rw-r--r--  tools/pharos-dashboard/src/manage.py  32
-rw-r--r--  tools/pharos-dashboard/src/notification/__init__.py  11
-rw-r--r--  tools/pharos-dashboard/src/notification/admin.py  17
-rw-r--r--  tools/pharos-dashboard/src/notification/apps.py  18
-rw-r--r--  tools/pharos-dashboard/src/notification/migrations/0001_initial.py  28
-rw-r--r--  tools/pharos-dashboard/src/notification/migrations/__init__.py  10
-rw-r--r--  tools/pharos-dashboard/src/notification/models.py  33
-rw-r--r--  tools/pharos-dashboard/src/notification/signals.py  25
-rw-r--r--  tools/pharos-dashboard/src/notification/tasks.py  49
-rw-r--r--  tools/pharos-dashboard/src/notification/tests.py  41
-rw-r--r--  tools/pharos-dashboard/src/pharos_dashboard/__init__.py  13
-rw-r--r--  tools/pharos-dashboard/src/pharos_dashboard/celery.py  30
-rw-r--r--  tools/pharos-dashboard/src/pharos_dashboard/settings.py  184
-rw-r--r--  tools/pharos-dashboard/src/pharos_dashboard/urls.py  44
-rw-r--r--  tools/pharos-dashboard/src/pharos_dashboard/wsgi.py  26
-rw-r--r--  tools/pharos-dashboard/src/static/bower.json  24
-rw-r--r--  tools/pharos-dashboard/src/static/css/theme.css  13
-rw-r--r--  tools/pharos-dashboard/src/static/js/booking-calendar.js  58
-rw-r--r--  tools/pharos-dashboard/src/static/js/dataTables-sort.js  36
-rw-r--r--  tools/pharos-dashboard/src/static/js/datetimepicker-options.js  13
-rw-r--r--  tools/pharos-dashboard/src/static/js/flot-pie-chart.js  30
-rw-r--r--  tools/pharos-dashboard/src/static/js/fullcalendar-options.js  101
-rw-r--r--  tools/pharos-dashboard/src/templates/account/user_list.html  55
-rw-r--r--  tools/pharos-dashboard/src/templates/account/userprofile_update_form.html  38
-rw-r--r--  tools/pharos-dashboard/src/templates/base.html  111
-rw-r--r--  tools/pharos-dashboard/src/templates/booking/booking_calendar.html  103
-rw-r--r--  tools/pharos-dashboard/src/templates/booking/booking_detail.html  26
-rw-r--r--  tools/pharos-dashboard/src/templates/booking/booking_list.html  48
-rw-r--r--  tools/pharos-dashboard/src/templates/booking/booking_table.html  37
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/ci_pods.html  61
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/dev_pods.html  70
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/jenkins_slaves.html  46
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/resource.html  58
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/resource_all.html  73
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/resource_detail.html  205
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/server_table.html  30
-rw-r--r--  tools/pharos-dashboard/src/templates/dashboard/table.html  43
-rw-r--r--  tools/pharos-dashboard/src/templates/layout.html  73
-rw-r--r--  tools/pharos-dashboard/src/templates/rest_framework/api.html  9
-rw-r--r--  tools/pharos-dashboard/web/Dockerfile  7
-rw-r--r--  tools/pharos-dashboard/web/requirements.txt  17
-rw-r--r--  tools/pharos-dashboard/worker/Dockerfile  8
-rw-r--r--  tools/pharos-dashboard/worker/requirements.txt  17
-rw-r--r--  tools/pharos-validator/.gitignore  63
-rw-r--r--  tools/pharos-validator/LICENSE  202
-rw-r--r--  tools/pharos-validator/Makefile  56
-rw-r--r--  tools/pharos-validator/config.mk  5
-rw-r--r--  tools/pharos-validator/docs/howto/virt-manager/HOWTO  50
-rwxr-xr-x  tools/pharos-validator/docs/howto/virt-manager/bridgevm.sh  1
-rwxr-xr-x  tools/pharos-validator/docs/howto/virt-manager/genmac.sh  3
-rwxr-xr-x  tools/pharos-validator/docs/howto/virt-manager/jump-server.sh  1
-rwxr-xr-x  tools/pharos-validator/docs/howto/virt-manager/node-cycle.sh  5
-rw-r--r--  tools/pharos-validator/docs/howto/virt-manager/virsh-commands.txt  14
-rw-r--r--  tools/pharos-validator/docs/initial_proposal.txt  49
-rwxr-xr-x  tools/pharos-validator/rpm/buildasroot.sh  32
-rwxr-xr-x  tools/pharos-validator/rpm/installpython3.sh  5
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/.gitignore  5
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/LICENSE-3RD-PARTY.txt  2
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/MIRRORS.txt  22
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/config.mk  22
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/etc/boot.msg  2
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/etc/ldlinux.c32  bin 116448 -> 0 bytes
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/etc/pxelinux.0  bin 46909 -> 0 bytes
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/etc/pxelinux.cfg/default  15
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/etc/vmlinuz  bin 11948096 -> 0 bytes
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/pxe_initrd.mk  79
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/src/bin/enable_services.sh  21
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/src/bin/initial_network.py  64
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/src/bin/install_validation_tool.sh  3
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/src/bin/update_pkgs.sh  13
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/src/etc/init.d/initialnetwork  20
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/src/etc/init.d/tmpfs  20
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/src/etc/profile  5
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/src/etc/resolv.conf  3
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/src/etc/ssh/sshd_config  132
l---------  tools/pharos-validator/src/pxe_initrd/src/init  1
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/src/root/.profile  5
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/src/root/.ssh/authorized_keys  1
-rw-r--r--  tools/pharos-validator/src/pxe_initrd/vmlinux/config  7521
-rwxr-xr-x  tools/pharos-validator/src/pxe_initrd/vmlinux/vmlinuz_build.sh  0
-rw-r--r--  tools/pharos-validator/src/validation_tool/.gitignore  2
-rw-r--r--  tools/pharos-validator/src/validation_tool/LICENSE  202
-rwxr-xr-x  tools/pharos-validator/src/validation_tool/bin/pharos-validator-node  92
-rwxr-xr-x  tools/pharos-validator/src/validation_tool/bin/pharos-validator-server  183
-rw-r--r--  tools/pharos-validator/src/validation_tool/doc/config.yaml  37
-rw-r--r--  tools/pharos-validator/src/validation_tool/doc/example/config.yaml  37
-rw-r--r--  tools/pharos-validator/src/validation_tool/doc/example/inventory.yaml  38
-rw-r--r--  tools/pharos-validator/src/validation_tool/doc/example/network.yaml  207
-rw-r--r--  tools/pharos-validator/src/validation_tool/doc/inventory.yaml  38
-rw-r--r--  tools/pharos-validator/src/validation_tool/doc/network.yaml  221
-rw-r--r--  tools/pharos-validator/src/validation_tool/doc/pharos-validator.1  54
-rw-r--r--  tools/pharos-validator/src/validation_tool/requirements.txt  3
-rwxr-xr-x  tools/pharos-validator/src/validation_tool/setup.py  31
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/__init__.py  0
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/config.py  176
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/const.py  48
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/dhcp.py  102
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/ipmi.py  63
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/jenkins.py  8
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/node.py  85
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/receiver.py  46
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/server.py  111
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/test/__init__.py  0
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/test/evaluate.py  159
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/test/probe.py  137
-rw-r--r--  tools/pharos-validator/src/validation_tool/src/util.py  107
-rw-r--r--  tools/pharos-validator/src/validation_tool/tests/test_node.py  0
-rw-r--r--  tools/pharos-validator/src/validation_tool/tests/test_probe.py  0
-rw-r--r--  tools/pharos-validator/src/validation_tool/tests/test_server.py  9
-rw-r--r--  tools/pharos-validator/src/validation_tool/tests/test_util.py  12
222 files changed, 0 insertions, 19699 deletions
diff --git a/tools/laas-fog/LaaS_Diagram.jpg b/tools/laas-fog/LaaS_Diagram.jpg
deleted file mode 100644
index 521236d1..00000000
--- a/tools/laas-fog/LaaS_Diagram.jpg
+++ /dev/null
Binary files differ
diff --git a/tools/laas-fog/README b/tools/laas-fog/README
deleted file mode 100644
index 84317eb3..00000000
--- a/tools/laas-fog/README
+++ /dev/null
@@ -1,167 +0,0 @@
-This Lab as a Service project aims to provide on-demand OPNFV resources to developers.
-It automates, to the requested extent, the process of running an OPNFV
-installer and creating an OpenStack environment automatically and on demand.
-
-To run, execute (from the project root):
- source/deploy.py
-
-To run the Pharos dashboard listener, which will continually poll the dashboard and run deployments in the background:
- source/listen.py --config <conf/pharos.conf>
-
-
-For convenience, there is a bash script source/stop.sh which will stop the dashboard listener and all related scripts.
-
-BEFORE YOU CAN RUN:
-you must first:
-- Integrate FOG into your infrastructure
-- Fill out the needed configuration files
-- Populate the database with your available hosts
-
-
-FOG:
-Our OPNFV infrastructure uses a FOG server to pxe boot, read and write disk images, and otherwise control the hosts we have available for developers.
-FOG is an open source project, and you can view it here: https://fogproject.org/
-FOG provides an easy and scriptable way to completely wipe and write the disks of our hosts.
- This makes it quick and simple for us to restore our hosts to a known, clean state after a developer has released control of them.
-
-To run the deploy script, you need to:
- Have a FOG master running
- Have your hosts registered to the FOG master
- Have a 'clean' disk image for each installer / configuration you wish to support.
- - Fuel, Compass, and JOID all need different distros / versions to run properly
- - There is a mapping between images and their installers in the installer's config file
-The FOG server must be reachable by whatever machine is running this LaaS software,
-and have network access to PXE boot all of your hosted dev pods.
-
-
-CONFIGURATION:
-INSTALLERS#############################################################################################
--database Path to the SQLite database for storing host information.
- Should be the same for all installers in most cases.
--dhcp_log Path to log file containing DHCP information for dev pods.
--dhcp_server IP address or hostname of the DHCP server which contains the above log file
- set to `null` if the same machine will be running dhcp and this project
--fog
---api_key The FOG api key. You may instead give the path to a file containing the api key.
---server The URL of the fog server.
- ex: http://myServer.com/fog/
---user_key The FOG api key specific to your user.
- You may instead give the path to a secrets file containing the key.
---image_id The id of the image FOG will use when this installer is requested.
--installer The name of the installer, as seen from the dashboard.
- `null` will match when no installer is selected, or when the `None` installer is chosen.
--logging_dir The directory to create log files in.
- Will create the dir if it does not already exist.
--scenario The default scenario if one is not specified by the user.
- NOTE: automation of different scenarios is not currently supported.
- These values are silently ignored.
--hypervisor_config
---networks Path to the config file used to define the virtual networks for this installer.
---vms Path to the config file used to define the virtual machines for this installer.
--inventory Path to inventory file mapping dashboard host id's to FOG hostnames.
--vpn_config Path to the vpn config file
-
-
-#########################################################################################################
-
-DOMAINS##################################################################################################
--jinja-template Path to the jinja xml template used to create libvirt domain xml documents.
--domains A list of domains. List as many as you want, but be cognizant of hardware limitations
---disk Path to the qcow2 disk image for this VM
---interfaces List of interfaces for the vm
----name The name of the network or bridge that provides this interface
----type The source of the interface. Either 'bridge' or 'network' is valid, but the bridge
- must already exist on the host.
---iso
----URL Where to fetch the ISO from
----location Where to save the ISO to
----used Whether this host will use an iso as a boot drive
- if `false`, the ISO will not be downloaded
---memory Memory to allocate to the VM in KiB
---name libvirt name of VM
---vcpus How many vcpus to allocate to this host.
-#########################################################################################################
-
-NETWORKS#################################################################################################
--jinja-template Path to jinja template used to create libvirt XML network documents
--networks List of networks that will be created
---brAddr ip address of the bridge on the host
---brName name of the bridge on the host
---cidr cidr of the virtual network
---dhcp dhcp settings
----rangeEnd end of DHCP address range
----rangeStart start of DHCP address range
----used Whether to enable dhcp for this network. Should probably be false.
---forward Libvirt network forwarding settings
----type forwarding type. See libvirt documentation for possible types.
----used if `false`, the network is isolated.
---name Name of this network in Libvirt
---netmask Netmask for this network.
-########################################################################################################
-
-PHAROS##################################################################################################
--dashboard url of the dashboard. https://labs.opnfv.org is the public OPNFV dashboard
--database path to database to store booking information.
- Should be the same db as the host database in most cases
-default_configs a mapping of installers and their configuration files.
--inventory path to the inventory file
--logging_dir Where the pharos dashboard listener should put log files.
-polling How many times a second the listener will poll the dashboard
-token Your Pharos API token. May also be a path to a file containing the token
-#######################################################################################################
-
-VPN####################################################################################################
-NOTE: this all assumes you use LDAP authentication
--server Domain name of your vpn server
-authentication
---pass password for your 'admin' user. May also be a path to a secrets file
---user full dn of your 'admin' user
--directory
---root The lowest directory that this program will need to access
---user The directory where users are stored, relative to the given root dir
--user
---objects A list of object classes that vpn users will belong to.
- Most general class should be on top, and get more specific from there.
- ex: -top, -inetOrgPerson because `top` is more general
--database The booking database
--permanent_users Users that you want to be persistent, even if they have no bookings active
- ie: your admin users
- All other users will be deleted when they have no more bookings
-#######################################################################################################
-
-INVENTORY##############################################################################################
-This file is used to map the resource IDs known by Pharos to the hostnames known by FOG.
-For example:
-50: fog-machine-4
-51: fog-machine-5
-52: fog-virtualPod-5.1
-#######################################################################################################
-
-HOW IT WORKS:
-
-0) lab resources are prepared and information is stored in the database
-1) source/listen.py launches a background instance of pharos.py
- -pharos.py continually polls the dashboard for booking info, and stores it in the database
-2) A known booking begins and pharos.py launches pod_manager.py
- - pod_manager is launched in a new process, so that the listener continues to poll the dashboard
- and multiple hosts can be provisioned at once
-3) pod_manager uses FOG to image the host
-4) if requested, pod_manager hands control to deployment_manager to install and deploy OPNFV
- - deployment_manager instantiates and calls the go() function of the given source/installers/installer subclass
-5) a vpn user is created and a random root password is given to the dev pod
-##########The dashboard does not yet support the following actions#############
-6) public ssh key of the user is fetched from the dashboard
-7) user is automatically notified their pod is ready, and given all needed info
-
-
-GENERAL NOTES:
-
-resetDataBase.py relies on FOG to retrieve a list of all hosts available to developers
-
-running:
- source/resetDatabase.py --both --config <CONFIG_FILE>
-will create a database and populate it.
-WARNING: This will delete existing information if run on a previously initialized database
-
-To aid in visualization and understanding of the resulting topology after fully deploying OPNFV and OpenStack in
-a development pod, you may review the LaaS_Diagram in this directory.
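The flow described under HOW IT WORKS in the README above can be summarized in a short sketch. This is an illustrative outline only, not code from the removed tree; the function names used here (poll_bookings, provision, listen) and the polling interval are assumptions.

    # Illustrative sketch of the laas-fog listener flow (assumed names, not the removed code).
    import multiprocessing
    import time

    def poll_bookings(dashboard_url, token):
        # Stand-in for pharos.py: fetch active bookings from the Pharos
        # dashboard and persist them in the booking database.
        return []

    def provision(booking):
        # Stand-in for pod_manager.py / deployment_manager.py:
        # 1) FOG re-images the booked host with the installer's disk image
        # 2) the matching installers/<name>.py subclass's go() deploys OPNFV
        # 3) a vpn user and a random root password are set up for the developer
        pass

    def listen(dashboard_url, token, interval=20):
        while True:
            for booking in poll_bookings(dashboard_url, token):
                # each booking runs in its own process so polling continues
                multiprocessing.Process(target=provision, args=(booking,)).start()
            time.sleep(interval)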
diff --git a/tools/laas-fog/conf/domain.yaml b/tools/laas-fog/conf/domain.yaml
deleted file mode 100644
index 04914e06..00000000
--- a/tools/laas-fog/conf/domain.yaml
+++ /dev/null
@@ -1,108 +0,0 @@
----
-- disk: /vm/master.qcow2
- interfaces:
- - name: admin
- type: network
- - name: public
- type: network
- - name: storage
- type: network
- - name: management
- type: network
- iso:
- URL: http://artifacts.opnfv.org/fuel/danube/opnfv-danube.2.0.iso
- location: /vm/fuel.iso
- used: true
- memory: 8240000
- name: master
- vcpus: 4
-
-- disk: /vm/slave1.qcow2
- interfaces:
- - name: admin
- type: network
- - name: public
- type: network
- - name: storage
- type: network
- - name: management
- type: network
- iso:
- URL: http://artifacts.opnfv.org/fuel/danube/opnfv-danube.2.0.iso
- location: /vm/fuel.iso
- used: false
- memory: 8240000
- name: slave1
- vcpus: 4
-
-- disk: /vm/slave2.qcow2
- interfaces:
- - name: admin
- type: network
- - name: public
- type: network
- - name: storage
- type: network
- - name: management
- type: network
- iso:
- URL: http://artifacts.opnfv.org/fuel/danube/opnfv-danube.2.0.iso
- location: /vm/fuel.iso
- used: false
- memory: 8240000
- name: slave2
- vcpus: 4
-
-- disk: /vm/slave3.qcow2
- interfaces:
- - name: admin
- type: network
- - name: public
- type: network
- - name: storage
- type: network
- - name: management
- type: network
- iso:
- URL: http://artifacts.opnfv.org/fuel/danube/opnfv-danube.2.0.iso
- location: /vm/fuel.iso
- used: false
- memory: 8240000
- name: slave3
- vcpus: 4
-
-- disk: /vm/slave4.qcow2
- interfaces:
- - name: admin
- type: network
- - name: public
- type: network
- - name: storage
- type: network
- - name: management
- type: network
- iso:
- URL: http://artifacts.opnfv.org/fuel/danube/opnfv-danube.2.0.iso
- location: /vm/fuel.iso
- used: false
- memory: 8240000
- name: slave4
- vcpus: 4
-
-- disk: /vm/slave5.qcow2
- interfaces:
- - name: admin
- type: network
- - name: public
- type: network
- - name: storage
- type: network
- - name: management
- type: network
- iso:
- URL: http://artifacts.opnfv.org/fuel/danube/opnfv-danube.2.0.iso
- location: /vm/fuel.iso
- used: false
- memory: 8240000
- name: slave5
- vcpus: 4
diff --git a/tools/laas-fog/conf/fuel.yaml b/tools/laas-fog/conf/fuel.yaml
deleted file mode 100644
index 0994d862..00000000
--- a/tools/laas-fog/conf/fuel.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-database: /var/OPNFV/hosts.db
-dhcp_log: /var/log/messages
-dhcp_server: null
-fog:
- api_key: /path/to/fog.key # may also put the key directly here
- server: http://fogserver.com/fog/
- user_key: /path/to/fog_user.key
- image_id: 5
-installer: Fuel
-logging_dir: /var/log/OPNFV/
-scenario: os-nosdn-nofeature-noha
-hypervisor_config:
- networks: /root/laas/conf/network.yaml
- vms: /root/laas/conf/domain.yaml
-inventory: /root/laas/conf/inventory.yaml
-vpn_config: /root/laas/conf/vpn.yaml
diff --git a/tools/laas-fog/conf/inventory.yaml b/tools/laas-fog/conf/inventory.yaml
deleted file mode 100644
index 9d3d61b0..00000000
--- a/tools/laas-fog/conf/inventory.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-# pharos id : fog name
-# for example:
-1: fog-host-1
-2: fog-host-2
-3: fog-host-3
diff --git a/tools/laas-fog/conf/joid.yaml b/tools/laas-fog/conf/joid.yaml
deleted file mode 100644
index b38dedce..00000000
--- a/tools/laas-fog/conf/joid.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-database: /var/OPNFV/hosts.db
-dhcp_log: /var/log/messages
-dhcp_server: null
-fog:
- api_key: /path/to/fog.key # may also put the key directly here
- server: http://fogserver.com/fog/
- user_key: /path/to/fog_user.key
- image_id: 12
-installer: Joid
-logging_dir: /var/log/OPNFV/
-scenario: os-nosdn-nofeature-noha
-hypervisor_config:
- networks: /root/laas/conf/network.yaml
- vms: /root/laas/conf/domain.yaml
-inventory: /root/laas/conf/inventory.yaml
-vpn_config: /root/laas/conf/vpn.yaml
diff --git a/tools/laas-fog/conf/laas.yaml b/tools/laas-fog/conf/laas.yaml
deleted file mode 100644
index da11a56b..00000000
--- a/tools/laas-fog/conf/laas.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-database: /var/OPNFV/hosts.db
-dhcp_log: /var/log/messages
-dhcp_server: null
-fog:
- api_key: /path/to/fog.key # may also put the key directly here
- server: http://fogserver.com/fog/
- user_key: /path/to/fog_user.key
- image_id: 5
-installer: null
-logging_dir: /var/log/OPNFV/
-scenario: os-nosdn-nofeature-noha
-hypervisor_config:
- networks: /root/laas/conf/network.yaml
- vms: /root/laas/conf/domain.yaml
-inventory: /root/laas/conf/inventory.yaml
-vpn_config: /root/laas/conf/vpn.yaml
diff --git a/tools/laas-fog/conf/network.yaml b/tools/laas-fog/conf/network.yaml
deleted file mode 100644
index 61860d56..00000000
--- a/tools/laas-fog/conf/network.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-- brAddr: 10.20.0.1
- brName: admin-br
- cidr: 10.20.0.0/24
- dhcp:
- rangeEnd: 10.20.0.250
- rangeStart: 10.20.0.15
- used: false
- forward:
- type: nat
- used: true
- name: admin
- netmask: 255.255.255.0
-
-- brAddr: 10.20.1.1
- brName: public-br
- cidr: 10.20.1.0/24
- dhcp:
- rangeEnd: 10.20.1.250
- rangeStart: 10.20.1.15
- used: false
- forward:
- type: nat
- used: true
- name: public
- netmask: 255.255.255.0
-
-- brAddr: 10.20.2.1
- brName: management-br
- cidr: 10.20.2.0/24
- dhcp:
- rangeEnd: 10.20.2.250
- rangeStart: 10.20.2.15
- used: false
- forward:
- type: nat
- used: false
- name: management
- netmask: 255.255.255.0
-
-- brAddr: 10.20.3.1
- brName: storage-br
- cidr: 10.20.3.0/24
- dhcp:
- rangeEnd: 10.20.3.250
- rangeStart: 10.20.3.15
- used: false
- forward:
- type: nat
- used: false
- name: storage
- netmask: 255.255.255.0
diff --git a/tools/laas-fog/conf/pharos.yaml b/tools/laas-fog/conf/pharos.yaml
deleted file mode 100644
index 9fedde12..00000000
--- a/tools/laas-fog/conf/pharos.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
----
-dashboard: https://labs.opnfv.org
-database: /var/OPNFV/laas.db
-default_configs:
- Fuel: /root/laas/conf/fuel.yaml
- None: /root/laas/conf/laas.yaml
- Joid: /root/laas/conf/joid.yaml
-inventory: /root/laas/conf/inventory.yaml
-logging_dir: /var/log/OPNFV
-polling: 3
-token: /root/laas/conf/pharos.key
diff --git a/tools/laas-fog/conf/vpn.yaml b/tools/laas-fog/conf/vpn.yaml
deleted file mode 100644
index 6f399275..00000000
--- a/tools/laas-fog/conf/vpn.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-server: vpn.domain.com
-authentication:
- pass: /path/to/keyfile # you may also put the password directly here
- user: cn=root,o=opnfv,dc=domain,dc=com
-directory:
- root: o=opnfv,dc=domain,dc=com
- user: ou=People # relative to the root dir
-user:
- objects: # listed in ascending order of specificity
- - top
- - inetOrgPerson # last object should be a class that only vpn users have
-database: /var/OPNFV/laas.db # same as the pharos api booking db
-permanent_users: # any users you want to be persistent
- - pberberian
diff --git a/tools/laas-fog/hostScripts/fuelInstall.sh b/tools/laas-fog/hostScripts/fuelInstall.sh
deleted file mode 100755
index c68907d0..00000000
--- a/tools/laas-fog/hostScripts/fuelInstall.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-
-virsh start master
-
-ret=''
-while [ -z "$ret" ]; do
- echo "Master node is not accepting ssh. Sleeping 15 seconds..."
- sleep 15
- ret=$(nmap 10.20.0.2 -PN -p ssh | grep open)
-done
-
-ssh-keygen -f ~/.ssh/id_rsa -t rsa -N ''
-sshpass -p r00tme ssh-copy-id -o stricthostkeychecking=no root@10.20.0.2
-
-ssh root@10.20.0.2 killall fuelmenu
-
-echo "killed fuel menu. Waiting for installation to complete"
-
-ans=''
-while [ -z "$ans" ]; do
- echo "fuel api unavailable. Sleeping 15 seconds..."
- sleep 15
- ans=$(curl http://10.20.0.2:8000 2>/dev/null )
-done
diff --git a/tools/laas-fog/hostScripts/horizonNat.sh b/tools/laas-fog/hostScripts/horizonNat.sh
deleted file mode 100755
index dd6396c6..00000000
--- a/tools/laas-fog/hostScripts/horizonNat.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-
-MYIP=$1
-DESTINATION=$2
-MYBRIDGE=10.20.1.1
-DESTNETWORK=10.20.1.0/24
-PORT=80
-
-iptables -I INPUT 2 -d "$MYIP" -p tcp --dport "$PORT" -j ACCEPT
-iptables -t nat -I INPUT 1 -d "$MYIP" -p tcp --dport "$PORT" -j ACCEPT
-iptables -I FORWARD -p tcp --dport "$PORT" -j ACCEPT
-
-iptables -t nat -I PREROUTING -p tcp -d "$MYIP" --dport "$PORT" -j DNAT --to-destination "$DESTINATION:$PORT"
-iptables -t nat -I POSTROUTING -p tcp -s "$DESTINATION" ! -d "$DESTNETWORK" -j SNAT --to-source "$MYIP"
-
-iptables -t nat -I POSTROUTING 2 -d "$DESTINATION" -j SNAT --to-source "$MYBRIDGE"
diff --git a/tools/laas-fog/hostScripts/ipnat.sh b/tools/laas-fog/hostScripts/ipnat.sh
deleted file mode 100755
index b8d97f0d..00000000
--- a/tools/laas-fog/hostScripts/ipnat.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-
-MYIP=$1
-DESTINATION=10.20.0.2
-MYBRIDGE=10.20.0.1
-DESTNETWORK=10.20.0.0/24
-PORTS=(8000 8443)
-
-for PORT in "${PORTS[@]}"; do
-
- iptables -I INPUT 2 -d "$MYIP" -p tcp --dport "$PORT" -j ACCEPT
- iptables -t nat -I INPUT 1 -d "$MYIP" -p tcp --dport "$PORT" -j ACCEPT
- iptables -I FORWARD -p tcp --dport "$PORT" -j ACCEPT
-
- iptables -t nat -I PREROUTING -p tcp -d "$MYIP" --dport "$PORT" -j DNAT --to-destination "$DESTINATION:$PORT"
- iptables -t nat -I POSTROUTING -p tcp -s "$DESTINATION" ! -d "$DESTNETWORK" -j SNAT --to-source "$MYIP"
-
- iptables -t nat -I POSTROUTING 2 -d "$DESTINATION" -j SNAT --to-source "$MYBRIDGE"
-done
diff --git a/tools/laas-fog/hostScripts/joidInstall.sh b/tools/laas-fog/hostScripts/joidInstall.sh
deleted file mode 100755
index df419c79..00000000
--- a/tools/laas-fog/hostScripts/joidInstall.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-
-# parses the passed scenario
-args=($(echo "$1" | tr "-" "\n"))
-# args is array: [os, nosdn, nofeature, noha]
-
-# the deploy script expects 'none' rather than 'nofeature'
-if [ "nofeature" == "${args[2]}" ]; then
- args[2]="none"
-fi
-# grabs the joid repo
-git clone "https://gerrit.opnfv.org/gerrit/joid.git"
-# working directory has to be where 03-maasdeploy is
-cd joid/ci
-# virtually deploy MAAS
-./03-maasdeploy.sh virtual
-# deploys OPNFV with the given scenario
-./deploy.sh -o newton -s "${args[1]}" -t "${args[3]}" -l default -d xenial -m openstack -f "${args[2]}"
diff --git a/tools/laas-fog/hostScripts/mkDisks.sh b/tools/laas-fog/hostScripts/mkDisks.sh
deleted file mode 100755
index 0cbba899..00000000
--- a/tools/laas-fog/hostScripts/mkDisks.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-
-for disk in "$@"; do
- qemu-img create -f qcow2 "$disk" 100G
-done
diff --git a/tools/laas-fog/hostScripts/vncAllow.sh b/tools/laas-fog/hostScripts/vncAllow.sh
deleted file mode 100755
index 98013814..00000000
--- a/tools/laas-fog/hostScripts/vncAllow.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-
-MYIP=X.X.X.X
-PORT="5900:5905"
-iptables -I INPUT 2 -d "$MYIP" -p tcp --dport "$PORT" -j ACCEPT
-iptables -t nat -I INPUT 1 -d "$MYIP" -p tcp --dport "$PORT" -j ACCEPT
-iptables -I FORWARD -p tcp --dport "$PORT" -j ACCEPT
-iptables -I OUTPUT -p tcp --dport "$PORT" -j ACCEPT
diff --git a/tools/laas-fog/source/__init__.py b/tools/laas-fog/source/__init__.py
deleted file mode 100644
index 7bb515b7..00000000
--- a/tools/laas-fog/source/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
diff --git a/tools/laas-fog/source/api/__init__.py b/tools/laas-fog/source/api/__init__.py
deleted file mode 100644
index 7bb515b7..00000000
--- a/tools/laas-fog/source/api/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
diff --git a/tools/laas-fog/source/api/fog.py b/tools/laas-fog/source/api/fog.py
deleted file mode 100644
index 62874039..00000000
--- a/tools/laas-fog/source/api/fog.py
+++ /dev/null
@@ -1,288 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import requests
-import sys
-import time
-
-
-class FOG_Handler:
- """
- This class talks with the REST web api for the FOG server.
-
- TODO: convert prints to logs and remove unneeded pass statements
- """
-
- def __init__(self, baseURL, fogKey=None, userKey=None):
- """
- init function
- baseURL should be http://fog.ip.or.hostname/fog/
- fogKey and userKey can optionally be supplied here or later
- They can be found in fog and provide authentication.
- """
- self.baseURL = baseURL
- self.fogKey = fogKey
- self.userKey = userKey
- self.header = {}
- self.updateHeader()
-
- def setLogger(self, logger):
- """
- saves the reference to the log object as
- self.log
- """
- self.log = logger
-
- def getUserKeyFromFile(self, path):
- """
- reads the user api key from a file
- """
- self.userKey = open(path).read()
- self.updateHeader()
-
- def getFogKeyFromFile(self, path):
- """
- reads the api key from a file
- """
- self.fogKey = open(path).read()
- self.updateHeader()
-
- def setUserKey(self, key):
- """
- sets the user key
- """
- self.userKey = key
- self.updateHeader()
-
- def setFogKey(self, key):
- """
- sets the fog key
- """
- self.fogKey = key
- self.updateHeader()
-
- def updateHeader(self):
- """
- recreates the http header used to talk to the fog api
- """
- self.header = {}
- self.header['fog-api-token'] = self.fogKey
- self.header['fog-user-token'] = self.userKey
-
- def setImage(self, host, imgNum):
- """
- Sets the image to be used during ghosting to the image
- with id imgNum. host can either be a hostname or number.
- """
- try:
- host = int(host)
- except (ValueError, TypeError):
- host = self.getHostNumber(host)
- url = self.baseURL+"host/"+str(host)
- host_conf = requests.get(url, headers=self.header).json()
- host_conf['imageID'] = str(imgNum)
- requests.put(url+"/edit", headers=self.header, json=host_conf)
-
- def delTask(self, hostNum):
- """
- Tries to delete an existing task for the host
- with hostNum as a host number
- """
- try:
- url = self.baseURL+'fog/host/'+str(hostNum)+'/cancel'
- req = requests.delete(url, headers=self.header)
- if req.status_code == 200:
- self.log.info("%s", "successfully deleted image task")
- except Exception:
- self.log.exception("Failed to delete the imaging task!")
-
- def getHostMac(self, hostname):
- """
- returns the primary mac address of the given host.
- """
- try:
- hostNum = int(self.getHostNumber(hostname))
- url = self.baseURL + "host/"+str(hostNum)
- req = requests.get(url, headers=self.header)
- macAddr = req.json()['primac']
- return macAddr
- except Exception:
- self.log.exception('%s', "Failed to connect to the FOG server")
-
- def getHostNumber(self, hostname):
- """
- returns the host number of given host
- """
- try:
- req = requests.get(self.baseURL+"host", headers=self.header)
- hostData = req.json()
- if hostData is not None:
- for hostDict in hostData['hosts']:
- if hostname == hostDict['name']:
- return hostDict['id']
- return -1
- except Exception:
- self.log.exception('%s', "Failed to connect to the FOG server")
-
- def imageHost(self, hostName, recurse=False):
- """
- Schedules an imaging task for the given host.
- This automatically uses the "associated" disk image.
- To support extra installers, I will need to create
- a way to change what that image is before calling
- this method.
- """
- num = str(self.getHostNumber(hostName))
- url = self.baseURL+'host/'+num+'/task'
-
- try:
- req = requests.post(
- url,
- headers=self.header,
- json={"taskTypeID": 1}
- )
- if req.status_code == 200:
- self.log.info("%s", "Scheduled image task for host")
- except Exception:
- if recurse: # prevents infinite loop
- self.log.exception("%s", "Failed to schedule task. Exiting")
- sys.exit(1)
- self.log.warning("%s", "Failed to schedule host imaging")
- self.log.warning("%s", "Trying to delete existing image task")
- self.delTask(num)
- self.imageHost(num, recurse=True)
-
- def waitForHost(self, host):
- """
- tracks the imaging task to completion.
- """
- while True:
- imageTask = self.getImagingTask(host)
- if imageTask is None:
- self.log.info("%s", "Imaging complete")
- return
- state = int(imageTask['stateID'])
- if state == 1:
- self.log.info("%s", "Waiting for host to check in")
- self.waitForTaskToActive(host)
- continue
- if state == 3:
- self.waitForTaskToStart(host)
- self.waitForImaging(host)
- continue
- time.sleep(8)
-
- def waitForImaging(self, host):
- """
- Once the host begins being imaged, this tracks progress.
- """
- # print "Host has begun the imaging process\n"
- while True:
- task = self.getImagingTask(host)
- if task is None:
- return
- per = str(task['percent'])
- self.log.info("%s percent done imaging", per)
- time.sleep(15)
-
- def waitForTaskToActive(self, host):
- """
- Waits for the host to reboot and pxe boot
- into FOG
- """
- while True:
- try:
- task = self.getImagingTask(host)
- except Exception:
- continue
- state = int(task['stateID']) if task else 0
- if state == 1:
- time.sleep(4)
- else:
- return
-
- def waitForTaskToStart(self, host):
- """
- waits for the task to start and imaging to begin.
- """
- while True:
- try:
- per = str(self.getImagingTask(host)['percent'])
- except Exception:
- per = ''
- if per.strip() == '':
- time.sleep(1)
- else:
- return
-
- def getImagingTask(self, host):
- """
- Sorts through all current tasks to find the image task
- associated with the given host.
- """
- try:
- taskList = requests.get(
- self.baseURL+'task/current',
- headers=self.header)
- taskList = taskList.json()['tasks']
- imageTask = None
- for task in taskList:
- hostname = str(task['host']['name'])
- if hostname == host and int(task['typeID']) == 1:
- imageTask = task
- return imageTask
- except Exception:
- self.log.exception("%s", "Failed to talk to FOG server")
- sys.exit(1)
-
- def getHosts(self):
- """
- returns a list of all hosts
- """
- req = requests.get(self.baseURL+"host", headers=self.header)
- return req.json()['hosts']
-
- def getHostsinGroup(self, groupName):
- """
- returns a list of all hosts in groupName
- """
- groupID = None
- groups = requests.get(self.baseURL+"group", headers=self.header)
- groups = groups.json()['groups']
- for group in groups:
- if groupName.lower() in group['name'].lower():
- groupID = group['id']
- if groupID is None:
- return
- hostIDs = []
- associations = requests.get(
- self.baseURL+"groupassociation",
- headers=self.header
- )
- associations = associations.json()['groupassociations']
- for association in associations:
- if association['groupID'] == groupID:
- hostIDs.append(association['hostID'])
-
- hosts = []
- for hostID in hostIDs:
- hosts.append(requests.get(
- self.baseURL+"host/"+str(hostID),
- headers=self.header
- ).json())
- return hosts
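As a point of reference, the FOG_Handler class above can be driven roughly as follows. The server URL, key paths, host name and image id are placeholders, and this usage sketch is not part of the removed sources.

    # Hypothetical usage of FOG_Handler (placeholder URL, keys, host name and image id).
    import logging

    fog = FOG_Handler("http://fogserver.example.com/fog/")
    fog.getFogKeyFromFile("/path/to/fog.key")        # or fog.setFogKey("...")
    fog.getUserKeyFromFile("/path/to/fog_user.key")  # or fog.setUserKey("...")
    fog.setLogger(logging.getLogger("fog"))

    fog.setImage("fog-host-1", 5)   # point the host at image id 5
    fog.imageHost("fog-host-1")     # schedule the imaging task
    fog.waitForHost("fog-host-1")   # block until imaging completes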
diff --git a/tools/laas-fog/source/api/fuel_api.py b/tools/laas-fog/source/api/fuel_api.py
deleted file mode 100644
index 01278000..00000000
--- a/tools/laas-fog/source/api/fuel_api.py
+++ /dev/null
@@ -1,306 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import requests
-import time
-import sys
-
-
-class Fuel_api:
-
- def __init__(self, url, logger, user="admin", password="admin"):
- """
- url is the url of the Fuel API in the form
- http://ip.or.host.name:8000/
- logger is a reference to the logger
- the default creds for Fuel are admin/admin
- """
- self.logger = logger
- self.base = url
- self.user = user
- self.password = password
- self.header = {"Content-Type": "application/json"}
-
- def getKey(self):
- """
- authenticates with the user and password
- to get a keystone key, used in the headers
- from here on to talk to fuel.
- """
- url = self.base + 'keystone/v2.0/tokens/'
- reqData = {"auth": {
- "tenantName": self.user,
- "passwordCredentials": {
- "username": self.user,
- "password": self.password
- }
- }}
- self.logger.info("Retreiving keystone token from %s", url)
- token = requests.post(url, headers=self.header, json=reqData)
- self.logger.info("Received response code %d", token.status_code)
- self.token = token.json()['access']['token']['id']
- self.header['X-Auth-Token'] = self.token
-
- def getNotifications(self):
- """
- returns the fuel notifications
- """
- url = self.base+'/api/notifications'
- try:
- req = requests.get(url, headers=self.header)
- return req.json()
-
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def waitForBootstrap(self):
- """
- Waits for the bootstrap image to build.
- """
- while True:
- time.sleep(30)
- notes = self.getNotifications()
- for note in notes:
- if "bootstrap image building done" in note['message']:
- return
-
- def getNodes(self):
- """
- returns a list of all nodes booted into fuel
- """
- url = self.base+'api/nodes'
- try:
- req = requests.get(url, headers=self.header)
- return req.json()
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def getID(self, mac):
- """
- gets the fuel id of node with given mac
- """
- for node in self.getNodes():
- if node['mac'] == mac:
- return node['id']
-
- def getNetID(self, name, osid):
- """
- gets the id of the network with name
- """
- url = self.base+'api/clusters/'
- url += str(osid)+'/network_configuration/neutron'
- try:
- req = requests.get(url, headers=self.header)
- nets = req.json()['networks']
- for net in nets:
- if net['name'] == name:
- return net['id']
- return -1
-
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def createOpenstack(self):
- """
- defines a new openstack environment in fuel.
- """
- url = self.base+'api/clusters'
- data = {
- "nodes": [],
- "tasks": [],
- "name": "OpenStack",
- "release_id": 2,
- "net_segment_type": "vlan"
- }
- try:
- req = requests.post(url, json=data, headers=self.header)
- return req.json()['id']
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def simpleNetDict(self, osID):
- """
- returns a simple dict of network names and id numbers
- """
- nets = self.getNetworks(osID)
- netDict = {}
- targetNets = ['admin', 'public', 'storage', 'management']
- for net in nets['networks']:
- for tarNet in targetNets:
- if tarNet in net['name']:
- netDict[tarNet] = net['id']
- return netDict
-
- def getNetworks(self, osID):
- """
- Returns the pythonized JSON of the OpenStack networks
- """
- url = self.base + 'api/clusters/'
- url += str(osID)+'/network_configuration/neutron/'
- try:
- req = requests.get(url, headers=self.header)
- return req.json()
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def uploadNetworks(self, netJson, osID):
- """
- configures the networks of the openstack
- environment with id osID based on netJson
- """
- url = self.base+'api/clusters/'
- url += str(osID)+'/network_configuration/neutron'
- try:
- req = requests.put(url, headers=self.header, json=netJson)
- return req.json()
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def addNodes(self, clusterID, nodes):
- """
- Adds the nodes into this openstack environment.
- nodes is valid json
- """
- url = self.base + 'api/clusters/'+str(clusterID)+'/assignment'
- try:
- req = requests.post(url, headers=self.header, json=nodes)
- return req.json()
-
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def getIfaces(self, nodeID):
- """
- returns the pythonized json describing the
- interfaces of given node
- """
- url = self.base + 'api/nodes/'+str(nodeID)+'/interfaces'
- try:
- req = requests.get(url, headers=self.header)
- return req.json()
-
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def setIfaces(self, nodeID, ifaceJson):
- """
- configures the interfaces of node with id nodeID
- with ifaceJson
- ifaceJson is valid json that fits fuel's schema for ifaces
- """
- url = self.base+'/api/nodes/'+str(nodeID)+'/interfaces'
- try:
- req = requests.put(url, headers=self.header, json=ifaceJson)
- return req.json()
-
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def getTasks(self):
- """
- returns a list of all tasks
- """
- url = self.base+"/api/tasks/"
- try:
- req = requests.get(url, headers=self.header)
- return req.json()
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def waitForTask(self, uuid):
- """
- Tracks the progress of task with uuid and
- returns once the task finishes
- """
- progress = 0
- while progress < 100:
- for task in self.getTasks():
- if task['uuid'] == uuid:
- progress = task['progress']
- self.logger.info("Task is %s percent done", str(progress))
- time.sleep(20)
- # Task may hang a minute at 100% without finishing
- while True:
- for task in self.getTasks():
- if task['uuid'] == uuid and not task['status'] == "ready":
- time.sleep(10)
- elif task['uuid'] == uuid and task['status'] == "ready":
- return
-
- def getHorizonIP(self, osid):
- """
- returns the ip address of the horizon dashboard.
- Horizon always takes the first ip after the public router's
- """
- url = self.base+'api/clusters/'
- url += str(osid)+'/network_configuration/neutron/'
- try:
- req = requests.get(url, headers=self.header)
- routerIP = req.json()['vips']['vrouter_pub']['ipaddr'].split('.')
- routerIP[-1] = str(int(routerIP[-1])+1)
- return '.'.join(routerIP)
- except Exception:
- self.logger.exception('%s', "Failed to talk to the Fuel api!")
- sys.exit(1)
-
- def deployOpenstack(self, clusterID):
- """
- Once openstack and the nodes are configured,
- this method actually deploys openstack.
- It takes a while.
- """
- # First, we need to provision the cluster
- url = self.base+'/api/clusters/'+str(clusterID)+'/provision'
- req = requests.put(url, headers=self.header)
- if req.status_code < 300:
- self.logger.info('%s', "Sent provisioning task")
- else:
- err = "failed to provision Openstack Environment"
- self.logger.error('%s', err)
- sys.exit(1)
-
- taskUID = ''
- tasks = self.getTasks()
- for task in tasks:
- if task['name'] == "provision" and task['cluster'] == clusterID:
- taskUID = task['uuid']
-
- self.waitForTask(taskUID)
-
- # Then, we deploy cluster
- url = self.base + '/api/clusters/'+str(clusterID)+'/deploy'
- req = requests.put(url, headers=self.header)
- if req.status_code < 300:
- self.logger.info('%s', "Sent deployment task")
- taskUID = ''
- tasks = self.getTasks()
- for task in tasks:
- if 'deploy' in task['name'] and task['cluster'] == clusterID:
- taskUID = task['uuid']
- if len(taskUID) > 0:
- self.waitForTask(taskUID)
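
The provision/deploy flow above reduces to two PUT requests plus polling /api/tasks/ until the matching task reports status "ready". A minimal standalone sketch of that pattern with requests follows; the master address and auth header are assumptions for illustration, not values taken from this repository.

import time

import requests

BASE = "http://10.20.0.2:8000/"        # assumed Fuel master endpoint
HEADERS = {"X-Auth-Token": "<token>"}  # assumed auth header


def wait_for_task(uuid, poll=20):
    """Poll /api/tasks/ until the task with this uuid reports 'ready'."""
    while True:
        tasks = requests.get(BASE + "api/tasks/", headers=HEADERS).json()
        task = next((t for t in tasks if t["uuid"] == uuid), None)
        if task is None:
            raise RuntimeError("task %s not found" % uuid)
        if task["status"] == "ready":
            return
        time.sleep(poll)


def provision_and_deploy(cluster_id):
    """Kick off provisioning, wait, then kick off deployment and wait."""
    for action in ("provision", "deploy"):
        url = "%sapi/clusters/%d/%s" % (BASE, cluster_id, action)
        requests.put(url, headers=HEADERS).raise_for_status()
        tasks = requests.get(BASE + "api/tasks/", headers=HEADERS).json()
        uuids = [t["uuid"] for t in tasks
                 if action in t["name"] and t["cluster"] == cluster_id]
        wait_for_task(uuids[-1])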
diff --git a/tools/laas-fog/source/api/libvirt_api.py b/tools/laas-fog/source/api/libvirt_api.py
deleted file mode 100644
index 4e19736f..00000000
--- a/tools/laas-fog/source/api/libvirt_api.py
+++ /dev/null
@@ -1,331 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import libvirt
-import time
-import xml.dom
-import xml.dom.minidom
-from domain import Domain
-from network import Network
-from utilities import Utilities
-
-
-class Libvirt:
- """
- This class talks to the Libvirt api.
- Given a config file, this class should create all networks and
- domains.
-
- TODO: convert prints to logging and remove unneeded pass statements
- """
-
- def __init__(self, hostAddr, net_conf=None, dom_conf=None):
- """
- init function
- hostAddr is the ip address of the host
- net_conf and dom_conf are the paths
- to the config files
- """
- self.host = hostAddr
- self.URI = "qemu+ssh://root@"+str(hostAddr)+"/system"
- self.hypervisor = None
- self.domains = []
- self.networks = []
- self.net_conf = net_conf
- self.dom_conf = dom_conf
-
- def setLogger(self, log):
- """
- Saves the logger in self.log
- """
- self.log = log
-
- def bootMaster(self):
- """
- starts the previously defined master node
- """
- for dom in self.domains:
- if 'master' in dom.name():
- try:
- dom.create()
- except Exception:
- pass
-
- def bootSlaves(self):
- """
- boots every defined vm with 'slave' in its name
- """
- for dom in self.domains:
- if 'slave' in dom.name():
- try:
- dom.create()
- self.log.info("Booting %s", dom.name())
- except Exception:
- self.log.exception("%s", "failed to boot domain")
- time.sleep(5)
-
- def getMacs(self, domName):
- """
- returns a dictionary with a network name
- mapped to the mac address of the domain on that net
- """
- try:
- dom = self.hypervisor.lookupByName(domName)
- xmlDesc = dom.XMLDesc(0)
- parsedXML = xml.dom.minidom.parseString(xmlDesc)
- interfacesXML = parsedXML.getElementsByTagName('interface')
- netDict = {}
- for iface in interfacesXML:
- src = iface.getElementsByTagName('source')[0]
- mac = iface.getElementsByTagName('mac')[0]
- # map the source network/bridge name to the mac address string,
- # as the docstring above promises, not the raw DOM elements
- net_name = src.getAttribute('network') or src.getAttribute('bridge')
- netDict[net_name] = mac.getAttribute('address')
- return netDict
- except Exception:
- self.log.exception("%s", "Domain not found")
-
- def defineVM(self, xmlConfig):
- """
- Generic method to define a persistent vm with the
- given config.
- Assumes that self.hypervisor is already connected.
- """
- if self.checkForVM(xmlConfig):
- vm = self.hypervisor.defineXML(xmlConfig)
- if vm is None:
- name = self.getName(xmlConfig)
- self.log.error("Failed to define vm %s. exiting", name)
- exit(1)
- else:
- self.log.info("Successfully created vm %s", vm.name())
- self.domains.append(vm)
-
- def checkForVM(self, xmlConfig):
- """
- Checks if another vm with the same name exists
- on the remote host already. If it does, it will
- delete that vm
- """
- allGood = False
- vms = self.hypervisor.listAllDomains(0)
- names = []
- for dom in vms:
- names.append(dom.name())
- vmName = Utilities.getName(xmlConfig)
- if vmName in names:
- self.log.warning("domain %s already exists", vmName)
- self.log.warning("%s", "Attempting to delete it")
- self.deleteVM(vmName)
- allGood = True
- else:
- allGood = True
- return allGood
-
- def deleteVM(self, name):
- """
- removes the given vm from the remote host
- """
- try:
- vm = self.hypervisor.lookupByName(name)
- except:
- return
- active = vm.isActive()
- persistent = vm.isPersistent()
- if active:
- try:
- vm.destroy()
- except:
- self.log.exception("%s", "Failed to destroy vm")
-
- if persistent:
- try:
- vm.undefine()
- except:
- self.log.exception("%s", "Failed to undefine domain")
-
- def openConnection(self):
- """
- opens a connection to the remote host
- and stores it in self.hypervisor
- """
- self.log.info("Attempting to connect to libvirt at %s", self.host)
- try:
- hostHypervisor = libvirt.open(self.URI)
- except:
- self.log.warning(
- "Failed to connect to %s. Trying again", self.host
- )
- time.sleep(5)
- try:
- hostHypervisor = libvirt.open(self.URI)
- except:
- self.log.exception("Cannot connect to %s. Exiting", self.host)
- exit(1)
-
- if hostHypervisor is None:
- self.log.error("Failed to connect to %s. Exiting", self.host)
- exit(1)
- self.hypervisor = hostHypervisor
-
- def restartVM(self, vm):
- """
- causes the given vm to reboot
- """
- dom = self.hypervisor.lookupByName(vm)
- dom.destroy()
- time.sleep(15)
- dom.create()
-
- def close(self):
- """
- Closes connection to remote hypervisor
- """
- self.log.info("Closing connection to the hypervisor %s", self.host)
- self.hypervisor.close()
-
- def defineAllDomains(self, path):
- """
- Defines a domain from all the xml files in a directory
- """
- files = Utilities.getXMLFiles(path)
- definitions = []
- for xml_desc in files:
- definitions.append(xml_desc.read())
-
- for definition in definitions:
- self.defineVM(definition)
-
- def createAllNetworks(self, path):
- """
- Creates a network from all xml files in a directory
- """
- files = Utilities.getXMLFiles(path)
- definitions = []
- for xml_desc in files:
- definitions.append(Utilities.fileToString(xml_desc))
-
- for definition in definitions:
- self.createNet(definition)
-
- def createNet(self, config):
- """
- creates the network on the remote host
- config is the xml in string representation
- that defines the network
- """
- if self.checkNet(config):
- network = self.hypervisor.networkDefineXML(config)
-
- if network is None:
- name = self.getName(config)
- self.log.warning("Failed to define network %s", name)
- return
- network.create()
- if network.isActive() == 1:
- net = network.name()
- self.log.info("Successfully defined network %s", net)
- self.networks.append(network)
-
- def checkNet(self, config):
- """
- Checks if another net with the same name exists, and
- deletes that network if one is found
- """
- allGood = False
- netName = Utilities.getName(config)
- if netName not in self.hypervisor.listNetworks():
- return True
- else: # net name is already used
- self.log.warning(
- "Network %s already exists. Trying to delete it", netName
- )
- network = self.hypervisor.networkLookupByName(netName)
- self.deleteNet(network)
- allGood = True
- return allGood
-
- def deleteNet(self, net):
- """
- removes the given network from the host
- """
- active = net.isActive()
- persistent = net.isPersistent()
- if active:
- try:
- net.destroy()
- except:
- self.log.warning("%s", "Failed to destroy network")
-
- if persistent:
- try:
- net.undefine()
- except:
- self.log.warning("%s", "Failed to undefine network")
-
- def go(self):
- """
- This method does all the work of this class,
- Parsing the net and vm config files and creating
- all the requested nets/domains
- returns a list of all networks and a list of all domains
- as Network and Domain objects
- """
- nets = self.makeNetworks(self.net_conf)
- doms = self.makeDomains(self.dom_conf)
- return doms, nets
-
- def makeNetworks(self, conf):
- """
- Given a path to a config file, this method
- parses the config and creates all requested networks,
- and returns them in a list of Network objects
- """
- networks = []
- definitions = Network.parseConfigFile(conf)
- for definition in definitions:
- network = Network(definition)
- networks.append(network)
- self.createNet(network.toXML())
- return networks
-
- def makeDomains(self, conf):
- """
- Given a path to a config file, this method
- parses the config and creates all requested vm's,
- and returns them in a list of Domain objects
- """
- domains = []
- definitions = Domain.parseConfigFile(conf)
- for definition in definitions:
- domain = Domain(definition)
- domains.append(domain)
- self.defineVM(domain.toXML())
- return domains
-
- @staticmethod
- def getName(xmlString):
- """
- given xml with a name tag, this returns the value of name
- eg:
- <name>Parker</name>
- returns 'Parker'
- """
- xmlDoc = xml.dom.minidom.parseString(xmlString)
- nameNode = xmlDoc.documentElement.getElementsByTagName('name')
- name = str(nameNode[0].firstChild.nodeValue)
- return name
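
The wrapper above is built on a handful of libvirt-python calls: open a qemu+ssh connection, define domains and networks from XML strings, and look up, destroy and undefine anything left over. A bare-bones sketch of those calls, with a placeholder host address and XML file:

import libvirt

HOST = "10.10.100.5"   # placeholder hypervisor address
conn = libvirt.open("qemu+ssh://root@%s/system" % HOST)

# define and boot a persistent domain from an XML string
# (domain.py below shows how such XML can be generated)
dom = conn.defineXML(open("master.xml").read())   # placeholder XML file
dom.create()

# clean up leftovers from a previous booking, as deleteVM() does
for old in conn.listAllDomains(0):
    if old.name().startswith("slave"):
        if old.isActive():
            old.destroy()
        if old.isPersistent():
            old.undefine()

conn.close()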
diff --git a/tools/laas-fog/source/api/vpn.py b/tools/laas-fog/source/api/vpn.py
deleted file mode 100644
index 336a681d..00000000
--- a/tools/laas-fog/source/api/vpn.py
+++ /dev/null
@@ -1,235 +0,0 @@
-from abc import ABCMeta, abstractmethod
-import ldap
-import os
-import random
-from base64 import b64encode
-from database import BookingDataBase
-
-
-class VPN_BaseClass:
- """
- the vpn handler abstract class / interface
-
- """
- __metaclass__ = ABCMeta
-
- @abstractmethod
- def __init__(self, config):
- """
- config is the parsed vpn.yaml file
- """
- pass
-
- @abstractmethod
- def makeNewUser(self, user=None):
- """
- This method is called when a vpn user is needed.
- This method should create a vpn user in whatever
- runs the vpn in our infrastructure. returns the
- credentials for the vpn user and some uid
- that will be associated with the booking in the
- database. This uid is used to track the vpn user and
- to delete the user when there are no bookings associated
- with that uid.
- """
- user = "username"
- passwd = "password"
- uid = "some way for you to identify this user in the database"
- return user, passwd, uid
-
- @abstractmethod
- def removeOldUsers(self):
- """
- checks the list of all vpn users against a list of
- vpn users associated with active bookings and removes
- users who don't have an active booking
-
- If you want your vpn accounts to be persistent,
- you can just ignore this
- """
- pass
-
-
-names = [
- 'frodo baggins', 'samwise gamgee', 'peregrin took', 'meriadoc brandybuck',
- 'bilbo baggins', 'gandalf grey', 'aragorn dunadan', 'arwen evenstar',
- 'saruman white', 'pippin took', 'merry brandybuck', 'legolas greenleaf',
- 'gimli gloin', 'anakin skywalker', 'padme amidala', 'han solo',
- 'jabba hutt', 'mace windu', 'count dooku', 'qui-gon jinn',
- 'admiral ackbar', 'emperor palpatine'
-]
-
-
-class VPN:
- """
- This class communicates with the ldap server to manage vpn users.
- This class extends the above ABC, and implements the makeNewUser,
- removeOldUsers, and __init__ abstract functions you must override to
- extend the VPN_BaseClass
- """
-
- def __init__(self, config):
- """
- init takes the parsed vpn config file as an argument.
- automatically connects and authenticates on the ldap server
- based on the configuration file
- """
- self.config = config
- server = config['server']
- self.uri = "ldap://"+server
-
- self.conn = None
- user = config['authentication']['user']
- pswd = config['authentication']['pass']
- if os.path.isfile(pswd):
- pswd = open(pswd).read()
- self.connect(user, pswd)
-
- def connect(self, root_dn, root_pass):
- """
- Opens a connection to the server in the config file
- and authenticates as the given user
- """
- self.conn = ldap.initialize(self.uri)
- self.conn.simple_bind_s(root_dn, root_pass)
-
- def addUser(self, full_name, passwd):
- """
- Adds a user to the ldap server. Creates the new user with the classes
- and in the directory given in the config file.
- full_name should be two tokens separated by a space. The first token
- will become the username.
- Private helper for makeNewUser().
- """
- first = full_name.split(' ')[0]
- last = full_name.split(' ')[1]
- user_dir = self.config['directory']['user']
- user_dir += ','+self.config['directory']['root']
- dn = "uid=" + first + ',' + user_dir
- record = [
- ('objectclass', ['top', 'inetOrgPerson']),
- ('uid', first),
- ('cn', full_name),
- ('sn', last),
- ('userpassword', passwd),
- ('ou', self.config['directory']['user'].split('=')[1])
- ]
- self.conn.add_s(dn, record)
- return dn
-
- def makeNewUser(self, name=None):
- """
- creates a new user in the ldap database, with the given name
- if supplied. If no name is given, we will try to select from the
- pre-written list above, and will resort to generating a random string
- as a username if the preconfigured names are all taken.
- Returns the username and password the user needs to authenticate, and
- the dn that we can use to manage the user.
- """
- if name is None:
- i = 0
- while not self.checkName(name):
- i += 1
- if i == 20:
- name = self.randoString(8)
- name += ' '+self.randoString(8)
- break # generates a random name to prevent infinite loop
- name = self.genUserName()
- passwd = self.randoString(15)
- dn = self.addUser(name, passwd)
- return name, passwd, dn
-
- def checkName(self, name):
- """
- returns true if the name is available
- """
- if name is None:
- return False
- uid = name.split(' ')[0]
- base = self.config['directory']['user'] + ','
- base += self.config['directory']['root']
- filtr = '(uid=' + uid + ')'
- timeout = 5
- ans = self.conn.search_st(
- base,
- ldap.SCOPE_SUBTREE,
- filtr,
- timeout=timeout
- )
- return len(ans) < 1
-
- @staticmethod
- def randoString(n):
- """
- uses /dev/urandom to generate a random string of length n
- """
- n = int(n)
- # defines valid characters
- alpha = 'abcdefghijklmnopqrstuvwxyz'
- alpha_num = alpha
- alpha_num += alpha.upper()
- alpha_num += "0123456789"
-
- # generates random string from /dev/urandom
- rnd = b64encode(os.urandom(3*n)).decode('utf-8')
- random_string = ''
- for char in rnd:
- if char in alpha_num:
- random_string += char
- return str(random_string[:n])
-
- def genUserName(self):
- """
- grabs a random name from the list above
- """
- i = random.randint(0, len(names) - 1)
- return names[i]
-
- def deleteUser(self, dn):
- self.conn.delete(dn)
-
- def getAllUsers(self):
- """
- returns all the user dn's in the ldap database in a list
- """
- base = self.config['directory']['user'] + ','
- base += self.config['directory']['root']
- filtr = '(objectclass='+self.config['user']['objects'][-1]+')'
- timeout = 10
- ans = self.conn.search_st(
- base,
- ldap.SCOPE_SUBTREE,
- filtr,
- timeout=timeout
- )
- users = []
- for user in ans:
- users.append(user[0]) # adds the dn of each user
- return users
-
- def removeOldUsers(self):
- """
- removes users from the ldap server who don't have any active bookings.
- will not delete a user if their uids are named in the config
- file as permanent users.
- """
- db = self.config['database']
- # the dn of all users who have an active booking
- active_users = BookingDataBase(db).getVPN()
- all_users = self.getAllUsers()
- for user in all_users:
- # checks if they are a permanent user
- if self.is_permanent_user(user):
- continue
- # deletes the user if they dont have an active booking
- if user not in active_users:
- self.deleteUser(user)
-
- def is_permanent_user(self, dn):
- for user in self.config['permanent_users']:
- if (user in dn) or (dn in user):
- return True
- return False
-
-
-VPN_BaseClass.register(VPN)
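
The class above is a thin layer over python-ldap: bind as an administrative DN, add an inetOrgPerson entry under the user OU, and search by uid to see whether a name is free. A standalone sketch of those three calls; the server URI, DNs and password are invented for illustration.

import ldap

conn = ldap.initialize("ldap://ldap.example.com")           # assumed server
conn.simple_bind_s("cn=admin,dc=example,dc=com", "secret")  # assumed admin DN

# add a user the same way addUser() does
dn = "uid=frodo,ou=users,dc=example,dc=com"
record = [
    ("objectclass", ["top", "inetOrgPerson"]),
    ("uid", "frodo"),
    ("cn", "frodo baggins"),
    ("sn", "baggins"),
    ("userpassword", "a-long-random-password"),
    ("ou", "users"),
]
conn.add_s(dn, record)

# check whether a uid is already taken, as checkName() does
hits = conn.search_st("ou=users,dc=example,dc=com",
                      ldap.SCOPE_SUBTREE, "(uid=frodo)", timeout=5)
print len(hits)   # 1 once the entry above exists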
diff --git a/tools/laas-fog/source/database.py b/tools/laas-fog/source/database.py
deleted file mode 100644
index ca7e5c89..00000000
--- a/tools/laas-fog/source/database.py
+++ /dev/null
@@ -1,296 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-import sqlite3
-import sys
-import time
-
-
-class HostDataBase:
- """
- This class talks with a simple sqlite database and can select a free host
- when one is needed.
- The layout of the database is:
- TABLE hosts:
- name <hostname>, status <status_code>
- status_codes:
- 0 - idle
- 1 - deploying
- 2 - deployed, in use
- 3 - expired, ready to be reset
- """
-
- def __init__(self, path):
- """
- init function. Will create the file at the end of path
- if it doesn't already exist
- """
- self.database = sqlite3.connect(path)
- self.cursor = self.database.cursor()
-
- def resetHosts(self, hosts):
- """
- Recreates the host table in the database.
- WILL ERASE ALL DATA. USE WITH CAUTION.
- """
- try:
- self.cursor.execute("DROP TABLE hosts")
- except:
- pass # the table may not exist yet
- self.createTable()
-
- for host in hosts:
- self.addHost(host)
-
- def createTable(self):
- """
- This method creates the table hosts with
- a name and status field
- """
- self.cursor.execute("CREATE TABLE hosts (name text, status integer)")
- self.database.commit()
-
- def addHost(self, name):
- """
- Adds a host with name to the available hosts.
- When first added, the host is assumed to be idle.
- """
- host = (name, )
- self.cursor.execute("INSERT INTO hosts VALUES (?, 0) ", host)
- self.database.commit()
-
- def getHost(self, requested=None):
- """
- Returns the name of an available host.
- If a host is specifically requested,
- that host is returned.
- If the requested host is not available,
- this method will throw an error.
- If no host is specifically requested,
- the next available host is returned.
- """
- self.cursor.execute("SELECT name FROM hosts WHERE status = 0")
- hostList = self.cursor.fetchall()
- if len(hostList) < 1:
- # no idle hosts are available; exit
- sys.exit(1)
- host = None
- if requested is not None:
- if (requested, ) in hostList and self.hostIsIdle(requested):
- host = requested # If requested, exists, and idle, return it
- else:
- sys.exit(1)
- else:
- host = hostList[0][0]
- self.makeHostBusy(host)
- return host
-
- def makeHostBusy(self, name):
- """
- makes the status of host 'name' equal 1,
- making it 'busy'
- """
- host = (name, )
- self.cursor.execute("UPDATE hosts SET status = 1 WHERE name=?", host)
- self.database.commit()
-
- def makeHostDeployed(self, name):
- """
- makes the status of host 'name' equal 2,
- making it 'deployed' and/or in use
- """
- host = (name, )
- self.cursor.execute("UPDATE hosts SET status = 2 WHERE name=?", host)
- self.database.commit()
-
- def makeHostExpired(self, name):
- """
- makes the status of host 'name' equal 3,
- meaning its booking has ended and needs to be cleaned.
- """
- host = (name, )
- self.cursor.execute("UPDATE hosts SET status = 3 WHERE name=?", host)
- self.database.commit()
-
- def getExpiredHosts(self):
- """
- returns a list of all hosts with an expired booking that
- need to be cleaned.
- """
- self.cursor.execute("SELECT name FROM hosts where status = 3")
- host_tuples = self.cursor.fetchall()
- hosts = []
- for host in host_tuples:
- hosts.append(host[0])
- return hosts # returns list of strings, not tuples
-
- def hostIsBusy(self, name):
- """
- returns True if the host is not idle
- """
- host = (name, )
- self.cursor.execute("SELECT status FROM hosts WHERE name=?", host)
- stat = self.cursor.fetchone()[0]
- if stat < 1:
- return False
- return True
-
- def hostIsIdle(self, name):
- """
- returns True if the host is idle.
- """
- return not self.hostIsBusy(name)
-
- def getAllHosts(self):
- """
- returns the whole host database.
- """
- self.cursor.execute("SELECT * FROM hosts")
- return self.cursor.fetchall()
-
- def close(self):
- """
- commits and closes connection to the database file.
- """
- self.database.commit()
- self.database.close()
-
-
-class BookingDataBase:
- """
- Database to hold all active bookings for our servers.
- Database contains the table 'bookings', which can live in the same or a
- different db file than the host database.
- bookings contains a field for every json key from the pharos dashboard,
- plus a "status" integer which is either
- 0 - waiting to start
- 1 - started
- 2 - booking over
-
- As written, the pharos listener will immediately store all bookings that
- are both for your dev pods and not yet over, regardless of when the
- booking starts. Once the booking ends and the dev pod is cleaned, the
- booking is deleted to save space and cpu.
- """
-
- def __init__(self, path):
- """
- creates a BookingDataBase object with the database located
- at path. if path does not yet exist, it will be created.
- """
- self.database = sqlite3.connect(path)
- self.cursor = self.database.cursor()
-
- def createTable(self):
- """
- Creates table in the database to store booking information
- """
- try:
- self.cursor.execute("DROP TABLE bookings")
- except:
- pass
- self.cursor.execute("""CREATE TABLE bookings
- (id integer, resource_id integer, start double, end double,
- installer_name text, scenario_name text,
- purpose text, status integer, vpn text)""")
- self.database.commit()
-
- def checkAddBooking(self, booking):
- """
- This method accepts a JSON booking definition from the dashboard
- api and adds it to the database if it does not already exist.
- """
- # first, check if booking is already expired
- if time.time() > booking['end']:
- return
- # check if booking is in database already
- b_id = (booking['id'], )
- self.cursor.execute("SELECT * FROM bookings WHERE id=?", b_id)
- if len(self.cursor.fetchall()) > 0: # booking already in the db
- return
- tup = (
- booking['id'],
- booking['resource_id'],
- booking['start'],
- booking['end'],
- booking['installer_name'],
- booking['scenario_name'],
- booking['purpose'],
- 0,
- ''
- )
- self.cursor.execute(
- "INSERT INTO bookings VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", tup)
- self.database.commit()
-
- def removeBooking(self, idNum):
- """
- deletes booking with given id from the database.
- """
- booking_id = (idNum, )
- self.cursor.execute("DELETE FROM bookings WHERE id=?", booking_id)
- self.database.commit()
-
- def getBookings(self):
- """
- returns a list of all bookings.
- """
- self.cursor.execute("SELECT * FROM bookings")
- return self.cursor.fetchall()
-
- def setStatus(self, booking_id, status):
- """
- sets the status of the booking with booking id booking_id.
- as noted above, the status codes are:
- 0 - not yet started
- 1 - started, but not yet over
- 2 - over, expired
- """
- data = (status, booking_id)
- self.cursor.execute("UPDATE bookings SET status=? WHERE id=?", data)
- self.database.commit()
-
- def setVPN(self, resource, uid):
- data = (uid, resource, 1)
- self.cursor.execute(
- "UPDATE bookings SET vpn=? WHERE resource_id=? AND status=?",
- data
- )
- self.database.commit()
-
- def getVPN(self):
- """
- returns a list of all vpn users associated with current
- bookings.
- """
- self.cursor.execute("SELECT vpn FROM bookings WHERE status=1")
- users_messy = self.cursor.fetchall()
- users = []
- for user in users_messy:
- user = user[0] # get string rather than tuple
- user = user.strip()
- if len(user) < 1:
- continue
- users.append(user) # a list of non-empty strings
- return users
-
- def close(self):
- """
- commits changes and closes connection to db file.
- """
- self.database.commit()
- self.database.close()
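
Both classes use plain sqlite3 with positional '?' parameters. The hosts status lifecycle (0 idle, 1 deploying, 2 deployed, 3 expired) reduces to a few statements; a quick sketch against an in-memory database rather than the real db file:

import sqlite3

db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute("CREATE TABLE hosts (name text, status integer)")
cur.execute("INSERT INTO hosts VALUES (?, 0)", ("pod1",))   # 0: idle

# claim the next idle host, as getHost()/makeHostBusy() do
cur.execute("SELECT name FROM hosts WHERE status = 0")
host = cur.fetchone()[0]
cur.execute("UPDATE hosts SET status = 1 WHERE name=?", (host,))  # 1: deploying
db.commit()

# later transitions: 2 once OPNFV is up, 3 when the booking expires
cur.execute("UPDATE hosts SET status = 2 WHERE name=?", (host,))
cur.execute("UPDATE hosts SET status = 3 WHERE name=?", (host,))
db.commit()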
diff --git a/tools/laas-fog/source/deploy.py b/tools/laas-fog/source/deploy.py
deleted file mode 100755
index a9c5e04f..00000000
--- a/tools/laas-fog/source/deploy.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/python
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import sys
-import yaml
-from pod_manager import Pod_Manager
-
-"""
-This is the first file executed when a booking begins.
-"""
-
-usage = """
-./deploy.py [--config CONFIG_FILE] [--host HOSTNAME] [--reset]
-"""
-
-
-def main(config_path, host):
- """
- starts the deployment with the given configuration.
- """
- config = yaml.safe_load(open(config_path))
-
- manager = Pod_Manager(config, requested_host=host)
- manager.start_deploy()
-
-
-def reset(config_path, host):
- """
- Tells the Pod Manager to clean and reset the given host.
- """
- config = yaml.safe_load(open(config_path))
- Pod_Manager(config, requested_host=host, reset=True)
-
-
-if __name__ == "__main__":
- # parse command line
- host = None
-
- if "--help" in sys.argv:
- print usage
- sys.exit(0)
-
- if "--config" not in sys.argv:
- # without --config, 'conf' would be undefined below
- print usage
- sys.exit(1)
- try:
- conf = sys.argv[1+sys.argv.index("--config")]
- open(conf)
- except Exception:
- print "bad config file"
- sys.exit(1)
- if "--host" in sys.argv:
- try:
- host = sys.argv[1+sys.argv.index("--host")]
- except:
- print "host not provided. Exiting"
- sys.exit(1)
-
- try:
- config_file = yaml.safe_load(open(conf))
- except:
- print "Failed to read from config file"
- sys.exit(1)
- # reset or deploy host
- if "--reset" in sys.argv:
- reset(conf, host)
- else:
- main(conf, host)
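
The command line above has three flags (--config, --host, --reset). Purely as an illustration of the same interface, an argparse version would look like the sketch below; it is a suggestion, not what deploy.py does.

import argparse

import yaml

parser = argparse.ArgumentParser(description="start or reset a POD deployment")
parser.add_argument("--config", required=True, help="path to the laas config")
parser.add_argument("--host", default=None, help="specific host to deploy on")
parser.add_argument("--reset", action="store_true", help="clean the host")
args = parser.parse_args()

yaml.safe_load(open(args.config))   # fail early on an unreadable config
# then dispatch the same way __main__ above does:
# reset(args.config, args.host) if args.reset else main(args.config, args.host)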
diff --git a/tools/laas-fog/source/deployment_manager.py b/tools/laas-fog/source/deployment_manager.py
deleted file mode 100644
index f680fa52..00000000
--- a/tools/laas-fog/source/deployment_manager.py
+++ /dev/null
@@ -1,108 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import logging
-from api.libvirt_api import Libvirt
-
-
-class Deployment_Manager:
- """
- This class manages the deployment of OPNFV on a booked host
- if it was requested. If no OPNFV installer was requested, this class will
- create the virtual machines and networks in the config files and exit.
- """
- def __init__(self, installerType, scenario, utility):
- """
- init function
- """
- # installerType will either be the constructor for an installer or None
- self.installer = installerType
- self.virt = Libvirt(
- utility.host,
- net_conf=utility.conf['hypervisor_config']['networks'],
- dom_conf=utility.conf['hypervisor_config']['vms']
- )
- self.host = utility.host
- self.util = utility
-
- def getIso(self):
- """
- checks if any of the domains expect an ISO file to exist
- and retrieves it.
- """
- isoDom = None
- for dom in self.doms:
- if dom.iso['used']:
- isoDom = dom
- break
- if isoDom:
- path = isoDom.iso['location']
- url = isoDom.iso['URL']
- self.util.sshExec(['wget', '-q', '-O', path, url])
-
- def getDomMacs(self):
- """
- assigns the 'macs' instance variable to the domains
- so that they know the mac addresses of their interfaces.
- """
- for dom in self.doms:
- dom.macs = self.virt.getMacs(dom.name)
-
- def makeDisks(self):
- """
- Creates the qcow2 disk files the domains expect on the remote host.
- """
- disks = []
- for dom in self.doms:
- disks.append(dom.disk)
- self.util.execRemoteScript("mkDisks.sh", disks)
-
- def go(self):
- """
- 'main' function.
- creates virtual machines/networks and either passes control to the
- OPNFV installer, or finishes up if an installer was not requested.
- """
- log = logging.getLogger(self.util.hostname)
- self.virt.setLogger(log)
- log.info("%s", "Connecting to the host hypervisor")
- self.virt.openConnection()
- domains, networks = self.virt.go()
- log.info("%s", "Created all networks and VM's on host")
- self.doms = domains
- self.nets = networks
- if self.installer is None:
- log.warning("%s", "No installer requested. Finishing deployment")
- self.util.finishDeployment()
- return
- log.info("%s", "retrieving ISO")
- self.getIso()
- self.getDomMacs()
- self.util.copyScripts()
- self.makeDisks()
- log.info("%s", "Beginning installation of OPNFV")
- try:
- installer = self.installer(
- self.doms,
- self.nets,
- self.virt,
- self.util
- )
- installer.go()
- except Exception:
- log.exception('%s', "failed to install OPNFV")
diff --git a/tools/laas-fog/source/domain.py b/tools/laas-fog/source/domain.py
deleted file mode 100644
index 6f00239a..00000000
--- a/tools/laas-fog/source/domain.py
+++ /dev/null
@@ -1,244 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import xml.dom
-import xml.dom.minidom
-import yaml
-
-
-class Domain:
- """
- This class defines a libvirt vm abstraction that can parse our simple
- config file and add all necessary boilerplate and info to write a full xml
- definition of itself for libvirt.
- """
-
- def __init__(self, propertiesDict):
- """
- init function.
- propertiesDict should be one of the dictionaries returned by the static
- method parseConfigFile
- """
- self.name = propertiesDict['name']
- self.memory = propertiesDict['memory']
- self.vcpus = propertiesDict['vcpus']
- self.disk = propertiesDict['disk']
- self.iso = propertiesDict['iso']
- # the vm will either boot from an iso or pxe
- self.netBoot = not self.iso['used']
- self.interfaces = propertiesDict['interfaces']
-
- def toXML(self):
- """
- combines the given configuration with a lot of
- boilerplate to create a valid libvirt xml
- definition of a domain.
- returns a string
- """
- definition = xml.dom.minidom.parseString("<domain>\n</domain>")
- definition.documentElement.setAttribute('type', 'kvm')
-
- nameElem = definition.createElement('name')
- nameElem.appendChild(definition.createTextNode(self.name))
- definition.documentElement.appendChild(nameElem)
-
- memElem = definition.createElement('memory')
- memElem.appendChild(definition.createTextNode(str(self.memory)))
- definition.documentElement.appendChild(memElem)
-
- curMemElem = definition.createElement('currentMemory')
- curMemElem.appendChild(definition.createTextNode(str(self.memory)))
- definition.documentElement.appendChild(curMemElem)
-
- vcpuElem = definition.createElement('vcpu')
- vcpuElem.appendChild(definition.createTextNode(str(self.vcpus)))
- definition.documentElement.appendChild(vcpuElem)
-
- osElem = definition.createElement('os')
-
- typeElem = definition.createElement('type')
- typeElem.setAttribute('arch', 'x86_64')
- typeElem.appendChild(definition.createTextNode('hvm'))
- osElem.appendChild(typeElem)
-
- if self.netBoot:
- bootElem = definition.createElement('boot')
- bootElem.setAttribute('dev', 'network')
- osElem.appendChild(bootElem)
-
- bootElem = definition.createElement('boot')
- bootElem.setAttribute('dev', 'hd')
- osElem.appendChild(bootElem)
-
- if self.iso['used']:
- bootElem = definition.createElement('boot')
- bootElem.setAttribute('dev', 'cdrom')
- osElem.appendChild(bootElem)
-
- definition.documentElement.appendChild(osElem)
-
- # libvirt expects a <features> element wrapping <acpi/> and <apic/>
- featureElem = definition.createElement('features')
- featureElem.appendChild(definition.createElement('acpi'))
- featureElem.appendChild(definition.createElement('apic'))
-
- definition.documentElement.appendChild(featureElem)
-
- cpuElem = definition.createElement('cpu')
- cpuElem.setAttribute('mode', 'custom')
- cpuElem.setAttribute('match', 'exact')
- modelElem = definition.createElement('model')
- modelElem.appendChild(definition.createTextNode('Broadwell'))
- cpuElem.appendChild(modelElem)
-
- definition.documentElement.appendChild(cpuElem)
-
- clockElem = definition.createElement('clock')
- clockElem.setAttribute('offset', 'utc')
-
- timeElem = definition.createElement('timer')
- timeElem.setAttribute('name', 'rtc')
- timeElem.setAttribute('tickpolicy', 'catchup')
- clockElem.appendChild(timeElem)
-
- timeElem = definition.createElement('timer')
- timeElem.setAttribute('name', 'pit')
- timeElem.setAttribute('tickpolicy', 'delay')
- clockElem.appendChild(timeElem)
-
- timeElem = definition.createElement('timer')
- timeElem.setAttribute('name', 'hpet')
- timeElem.setAttribute('present', 'no')
- clockElem.appendChild(timeElem)
-
- definition.documentElement.appendChild(clockElem)
-
- poweroffElem = definition.createElement('on_poweroff')
- poweroffElem.appendChild(definition.createTextNode('destroy'))
-
- definition.documentElement.appendChild(poweroffElem)
-
- rebootElem = definition.createElement('on_reboot')
- rebootElem.appendChild(definition.createTextNode('restart'))
-
- definition.documentElement.appendChild(rebootElem)
-
- crashElem = definition.createElement('on_crash')
- crashElem.appendChild(definition.createTextNode('restart'))
-
- definition.documentElement.appendChild(crashElem)
-
- pmElem = definition.createElement('pm')
- memElem = definition.createElement('suspend-to-mem')
- memElem.setAttribute('enabled', 'no')
- pmElem.appendChild(memElem)
- diskElem = definition.createElement('suspend-to-disk')
- diskElem.setAttribute('enabled', 'no')
- pmElem.appendChild(diskElem)
-
- definition.documentElement.appendChild(pmElem)
-
- deviceElem = definition.createElement('devices')
-
- emuElem = definition.createElement('emulator')
- emuElem.appendChild(definition.createTextNode('/usr/libexec/qemu-kvm'))
- deviceElem.appendChild(emuElem)
-
- diskElem = definition.createElement('disk')
- diskElem.setAttribute('type', 'file')
- diskElem.setAttribute('device', 'disk')
-
- driverElem = definition.createElement('driver')
- driverElem.setAttribute('name', 'qemu')
- driverElem.setAttribute('type', 'qcow2')
- diskElem.appendChild(driverElem)
-
- sourceElem = definition.createElement('source')
- sourceElem.setAttribute('file', self.disk)
- diskElem.appendChild(sourceElem)
-
- targetElem = definition.createElement('target')
- targetElem.setAttribute('dev', 'hda')
- targetElem.setAttribute('bus', 'ide')
- diskElem.appendChild(targetElem)
-
- deviceElem.appendChild(diskElem)
-
- if self.iso['used']:
- diskElem = definition.createElement('disk')
- diskElem.setAttribute('type', 'file')
- diskElem.setAttribute('device', 'cdrom')
-
- driverElem = definition.createElement('driver')
- driverElem.setAttribute('name', 'qemu')
- driverElem.setAttribute('type', 'raw')
- diskElem.appendChild(driverElem)
-
- sourceElem = definition.createElement('source')
- sourceElem.setAttribute('file', self.iso['location'])
- diskElem.appendChild(sourceElem)
-
- targetElem = definition.createElement('target')
- targetElem.setAttribute('dev', 'hdb')
- targetElem.setAttribute('bus', 'ide')
- diskElem.appendChild(targetElem)
-
- diskElem.appendChild(definition.createElement('readonly'))
- deviceElem.appendChild(diskElem)
-
- for iface in self.interfaces:
- ifaceElem = definition.createElement('interface')
- ifaceElem.setAttribute('type', iface['type'])
- sourceElem = definition.createElement('source')
- sourceElem.setAttribute(iface['type'], iface['name'])
- modelElem = definition.createElement('model')
- modelElem.setAttribute('type', 'e1000')
- ifaceElem.appendChild(sourceElem)
- ifaceElem.appendChild(modelElem)
- deviceElem.appendChild(ifaceElem)
-
- graphicElem = definition.createElement('graphics')
- graphicElem.setAttribute('type', 'vnc')
- graphicElem.setAttribute('port', '-1')
- deviceElem.appendChild(graphicElem)
-
- consoleElem = definition.createElement('console')
- consoleElem.setAttribute('type', 'pty')
- deviceElem.appendChild(consoleElem)
-
- definition.documentElement.appendChild(deviceElem)
- return definition.toprettyxml()
-
- def writeXML(self, filePath):
- """
- writes this domain's xml definition to the given file.
- """
- f = open(filePath, 'w')
- f.write(self.toXML())
- f.close()
-
- @staticmethod
- def parseConfigFile(path):
- """
- parses the domains config file
- """
- configFile = open(path, 'r')
- try:
- config = yaml.safe_load(configFile)
- except Exception:
- print "Invalid domain configuration. exiting"
- raise # don't fall through and return an undefined config
- return config
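
toXML() above is a long chain of createElement/appendChild calls; the shape of the output is easier to see from a stripped-down sketch that emits only the name/memory/vcpu skeleton (the values are placeholders):

import xml.dom.minidom

doc = xml.dom.minidom.parseString("<domain>\n</domain>")
doc.documentElement.setAttribute("type", "kvm")

for tag, text in (("name", "master"), ("memory", "8388608"), ("vcpu", "4")):
    elem = doc.createElement(tag)
    elem.appendChild(doc.createTextNode(text))
    doc.documentElement.appendChild(elem)

# prints the XML declaration plus a <domain type="kvm"> element with
# <name>, <memory> and <vcpu> children, one per line
print doc.toprettyxml()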
diff --git a/tools/laas-fog/source/installers/__init__.py b/tools/laas-fog/source/installers/__init__.py
deleted file mode 100644
index 7bb515b7..00000000
--- a/tools/laas-fog/source/installers/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
diff --git a/tools/laas-fog/source/installers/fuel.py b/tools/laas-fog/source/installers/fuel.py
deleted file mode 100644
index c5b647cf..00000000
--- a/tools/laas-fog/source/installers/fuel.py
+++ /dev/null
@@ -1,268 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import time
-import sys
-from installer import Installer
-from api.fuel_api import Fuel_api
-
-
-class Fuel_Installer(Installer):
- """
- This class is the installer for any OPNFV scenarios which use Fuel as the
- installer. This class uses the libvirt api handler
- to create all the virtual hosts,
- then installs fuel and uses the fuel api handler
- to create and deploy an openstack environment
-
- This class will get much smarter and have less configuration hardcoded
- as we grow support for more OPNFV scenarios
- """
-
- def __init__(self, doms, nets, libvirt_handler, util):
- """
- init function
- Calls the super constructor
- """
- super(Fuel_Installer, self).__init__(doms, nets, libvirt_handler, util)
- url = 'http://'+self.libvirt.host+':8000/'
- self.handler = Fuel_api(url, self.log, 'admin', 'admin')
- self.fuelNets = None
-
- def bootMaster(self):
- """
- Boots the fuel master node and waits
- for it to come up
- """
- self.libvirt.bootMaster()
- time.sleep(100)
-
- def bootNodes(self):
- """
- Boots all the slave nodes
- """
- self.libvirt.bootSlaves()
-
- def waitForNodes(self, numNodes):
- """
- Waits for the nodes to pxe boot and be recognized by Fuel
- """
- done = False
- self.log.info("Waiting for %i nodes to boot into Fuel", numNodes)
- discoveredNodes = 0
- while not done:
- discoveredNodes = len(self.handler.getNodes())
- self.log.info("found %d nodes", discoveredNodes)
-
- done = discoveredNodes == numNodes
- if not done:
- time.sleep(30) # poll gently while the nodes pxe boot
-
- def installMaster(self):
- """
- runs the fuelInstall script, which uses the fuel iso to
- install fuel onto the master node
- """
- self.util.execRemoteScript("ipnat.sh", [self.libvirt.host])
- self.util.execRemoteScript("fuelInstall.sh", [self.util.remoteDir])
-
- def makeOpenstack(self):
- """
- creates an openstack environment and saves
- the openstack id
- """
- self.osid = self.handler.createOpenstack()
-
- def addNodesToOpenstack(self):
- """
- Adds the nodes to the openstack environment with
- compute / controller + cinder roles
- """
- nodesList = [
- {"id": 1, "roles": ["controller", "cinder"]},
- {"id": 2, "roles": ["controller", "cinder"]},
- {"id": 3, "roles": ["controller", "cinder"]},
- {"id": 4, "roles": ["compute"]},
- {"id": 5, "roles": ["compute"]}
- ]
-
- self.handler.addNodes(self.osid, nodesList)
-
- def configNetworks(self):
- """
- configures the openstack networks by calling the 3 helper
- methods
- """
- self.configPublicNet()
- self.configStorageNet()
- self.configManagementNet()
-
- def configPublicNet(self):
- """
- configures the default public network:
- changes the cidr, gateway, and floating ranges
- """
- networks = self.handler.getNetworks(self.osid)
- for net in networks['networks']:
- if net['name'] == "public":
- net["ip_ranges"] = [["10.20.1.10", "10.20.1.126"]]
- net['cidr'] = "10.20.1.0/24"
- net['gateway'] = "10.20.1.1"
-
- # updates the floating ranges
- rng = [["10.20.1.130", "10.20.1.254"]]
- networks['networking_parameters']['floating_ranges'] = rng
- self.handler.uploadNetworks(networks, self.osid)
-
- def configStorageNet(self):
- """
- sets the default storage network to have the right
- cidr and gateway, and no vlan
- """
- networks = self.handler.getNetworks(self.osid)
- for net in networks['networks']:
- if net['name'] == "storage":
- net["ip_ranges"] = [["10.20.3.5", "10.20.3.254"]]
- net["cidr"] = "10.20.3.0/24"
- net["meta"]["notation"] = "ip_ranges"
- net["meta"]["use_gateway"] = True
- net["gateway"] = "10.20.3.1"
- net["vlan_start"] = None
- self.handler.uploadNetworks(networks, self.osid)
-
- def configManagementNet(self):
- """
- sets the default management net to have the right
- cidr and gateway and no vlan
- """
- networks = self.handler.getNetworks(self.osid)
- for net in networks['networks']:
- if net['name'] == "management":
- net["ip_ranges"] = [["10.20.2.5", "10.20.2.254"]]
- net["cidr"] = "10.20.2.0/24"
- net["meta"]["notation"] = "ip_ranges"
- net["meta"]["use_gateway"] = True
- net["gateway"] = "10.20.2.1"
- net["vlan_start"] = None
- self.handler.uploadNetworks(networks, self.osid)
-
- # TODO: make this method smarter. I am making too many assumptions about
- # the order of interfaces and networks
- def configIfaces(self):
- """
- assigns the proper networks to each interface of the nodes
- """
- for x in range(1, 6):
- idNum = x
- ifaceJson = self.handler.getIfaces(idNum)
-
- ifaceJson[0]['assigned_networks'] = [
- {"id": 1, "name": "fuelweb_admin"},
- {"id": 5, "name": "private"}
- ]
- ifaceJson[2]['assigned_networks'] = [
- {"id": 4, "name": "storage"}
- ]
- ifaceJson[3]['assigned_networks'] = [
- {"id": 3, "name": "management"}
- ]
- if idNum < 4:
- ifaceJson[1]['assigned_networks'] = [{
- "id": 2,
- "name": "public"
- }]
-
- self.handler.setIfaces(idNum, ifaceJson)
-
- def clearAdminIface(self, ifaceJson, node):
- """
- makes the admin interface have *only* the admin network
- assigned to it
- """
- for iface in ifaceJson:
- if iface['mac'] == node.macs['admin']:
- iface['assigned_networks'] = [{
- "id": 1,
- "name": "fuelweb_admin"
- }]
-
- def deployOpenstack(self):
- """
- Once openstack is properly configured, this method
- deploys OpenStack and returns when it is running
- """
- self.log.info("%s", "Deploying Openstack environment.")
- self.log.info("%s", "This may take a while")
- self.handler.deployOpenstack(self.osid)
-
- def getKey(self):
- """
- Retrieves authentication tokens for the api handler,
- while allowing the first few attempts to fail to
- allow Fuel time to "wake up"
- """
- i = 0
- while i < 20:
- i += 1
- try:
- self.handler.getKey()
- return
- except Exception:
- self.log.warning("%s", "Failed to talk to Fuel api")
- self.log.warning("Exec try %d/20", i)
- try:
- self.handler.getKey()
- except Exception:
- self.log.exception("%s", "Fuel api is unavailable")
- sys.exit(1)
-
- def go(self):
- """
- This method does all the work of this class.
- It installs the master node, boots the slaves
- into Fuel, creates and configures OS, and then
- deploys it and uses NAT to make the horizon dashboard
- reachable
- """
- self.libvirt.openConnection()
- self.log.info('%s', 'installing the Fuel master node.')
- self.log.info('%s', 'This will take some time.')
- self.installMaster()
- time.sleep(60)
- self.getKey()
- self.log.info('%s', 'The master node is installed.')
- self.log.info('%s', 'Waiting for bootstrap image to build')
- self.handler.waitForBootstrap()
- self.bootNodes()
- self.waitForNodes(5)
- self.log.info('%s', "Defining an openstack environment")
- self.makeOpenstack()
- self.addNodesToOpenstack()
- self.log.info('%s', "configuring interfaces...")
- self.configIfaces()
- self.log.info('%s', "configuring networks...")
- self.configNetworks()
- self.deployOpenstack()
-
- horizon = self.handler.getHorizonIP(self.osid)
- self.util.execRemoteScript(
- '/horizonNat.sh', [self.libvirt.host, horizon])
- notice = "You may access the Openstack dashboard at %s/horizon"
- self.log.info(notice, self.libvirt.host)
-
- self.libvirt.close()
- self.util.finishDeployment()
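
configPublicNet, configStorageNet and configManagementNet above all follow the same fetch-edit-upload pattern against the cluster's neutron network configuration. One possible refactor, sketched here only as a suggestion, collapses them into a single helper (handler is assumed to be the Fuel_api wrapper from fuel_api.py; note the public network additionally updates floating_ranges, which this helper leaves alone):

def configure_net(handler, osid, name, cidr, gateway, ip_ranges):
    """Fetch the neutron network config, edit one network, upload it back."""
    networks = handler.getNetworks(osid)
    for net in networks["networks"]:
        if net["name"] == name:
            net["ip_ranges"] = ip_ranges
            net["cidr"] = cidr
            net["gateway"] = gateway
            net["meta"]["notation"] = "ip_ranges"
            net["meta"]["use_gateway"] = True
            net["vlan_start"] = None
    handler.uploadNetworks(networks, osid)

# e.g. the storage network from configStorageNet above:
# configure_net(handler, osid, "storage", "10.20.3.0/24", "10.20.3.1",
#               [["10.20.3.5", "10.20.3.254"]])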
diff --git a/tools/laas-fog/source/installers/installer.py b/tools/laas-fog/source/installers/installer.py
deleted file mode 100644
index d4c4889f..00000000
--- a/tools/laas-fog/source/installers/installer.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-
-class Installer(object):
- """
- This is a simple base class to define a single constructor
- for all the different installer types.
- I may move more functionality to this class as we add support for more
- installers and there are common functions that would be nice to share
- between installers.
- """
-
- def __init__(self, domList, netList, libvirt_handler, util):
- self.doms = domList
- self.nets = netList
- self.libvirt = libvirt_handler
- self.osid = 0
- self.util = util
- self.log = util.createLogger(util.hostname)
diff --git a/tools/laas-fog/source/installers/joid.py b/tools/laas-fog/source/installers/joid.py
deleted file mode 100644
index a3f3bcf1..00000000
--- a/tools/laas-fog/source/installers/joid.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-"""
-This class will install Joid onto the remote host.
-Currently only supports joid's "default" configuration
-"""
-
-from installer import Installer
-
-
-class Joid_Installer(Installer):
-
- def __init__(self, doms, nets, libvirt_handler, util):
- """
- init function calls the super constructor
- """
- super(Joid_Installer, self).__init__(doms, nets, libvirt_handler, util)
-
- def go(self):
- """
- does all the work of this class.
- Currently just runs the joidInstall script, which installs joid
- onto the remote host
- """
- self.log.info("%s", "Executing joid virtual installation")
- self.util.execRemoteScript("joidInstall.sh")
diff --git a/tools/laas-fog/source/listen.py b/tools/laas-fog/source/listen.py
deleted file mode 100755
index ed714c9a..00000000
--- a/tools/laas-fog/source/listen.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/python
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-import subprocess
-import sys
-import os
-import yaml
-
-"""
-This is the file that the user will execute to start the whole process.
-This file will start the pharos api listener in a new process and then exit.
-"""
-
-
-def checkArgs():
- """
- error checks the cmd line args and gets the path
- of the config file
- """
- usage = "./listen.py --config <path_to_pharos_config>"
- if "--help" in sys.argv:
- print usage
- sys.exit(0)
-
- if "--config" not in sys.argv:
- print usage
- sys.exit(1)
-
- try:
- i = sys.argv.index("--config")
- config_file = sys.argv[i+1]
- # verifies that the file exists, is readable, and formatted correctly
- yaml.safe_load(open(config_file))
- return config_file
- except Exception:
- print "Bad config file"
- sys.exit(1)
-
-
-# reads args and starts the pharos listener in the background
-config = checkArgs()
-source_dir = os.path.dirname(os.path.realpath(__file__))
-pharos_path = os.path.join(source_dir, "pharos.py")
-subprocess.Popen(['/usr/bin/python', pharos_path, '--config', config])
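
listen.py's only job is to fork pharos.py into the background and exit. A slightly more defensive sketch of the same launch, using sys.executable instead of a hard-coded interpreter path and capturing output in a log file (both choices are illustrative, not what the script above does):

import os
import subprocess
import sys

config_path = sys.argv[sys.argv.index("--config") + 1]   # as checkArgs() does
source_dir = os.path.dirname(os.path.realpath(__file__))
pharos_path = os.path.join(source_dir, "pharos.py")
logfile = open("/var/log/laas/pharos_api.log", "a")      # placeholder log path
subprocess.Popen([sys.executable, pharos_path, "--config", config_path],
                 stdout=logfile, stderr=subprocess.STDOUT)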
diff --git a/tools/laas-fog/source/network.py b/tools/laas-fog/source/network.py
deleted file mode 100644
index 234ba22e..00000000
--- a/tools/laas-fog/source/network.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import sys
-import xml.dom
-import xml.dom.minidom
-import yaml
-
-
-class Network:
- """
- This class has a similar role as the Domain class.
- This class will parse a config file and
- write the xml definitions of those networks for libvirt.
- """
-
- def __init__(self, propertiesDict):
- """
- init. propertiesDict should be
- one of the dictionaries returned by parseConfigFile
- """
- self.name = propertiesDict['name']
- self.brName = propertiesDict['brName']
- self.brAddr = propertiesDict['brAddr']
- self.netmask = propertiesDict['netmask']
- self.forward = propertiesDict['forward']
- self.dhcp = propertiesDict['dhcp']
- self.cidr = propertiesDict['cidr']
-
- def toXML(self):
- """
- Takes the config of this network and writes a valid xml definition
- for libvirt.
- returns a string
- """
- definition = xml.dom.minidom.parseString("<network>\n</network>")
- nameElem = definition.createElement('name')
- nameElem.appendChild(definition.createTextNode(self.name))
- definition.documentElement.appendChild(nameElem)
-
- if self.forward['used']:
- forwardElem = definition.createElement('forward')
- forwardElem.setAttribute('mode', self.forward['type'])
- definition.documentElement.appendChild(forwardElem)
-
- bridgeElem = definition.createElement('bridge')
- bridgeElem.setAttribute('name', self.brName)
- bridgeElem.setAttribute('stp', 'on')
- bridgeElem.setAttribute('delay', '5')
- definition.documentElement.appendChild(bridgeElem)
-
- ipElem = definition.createElement('ip')
- ipElem.setAttribute('address', self.brAddr)
- ipElem.setAttribute('netmask', self.netmask)
- if self.dhcp['used']:
- dhcpElem = definition.createElement('dhcp')
- rangeElem = definition.createElement('range')
- rangeElem.setAttribute('start', self.dhcp['rangeStart'])
- rangeElem.setAttribute('end', self.dhcp['rangeEnd'])
- dhcpElem.appendChild(rangeElem)
- ipElem.appendChild(dhcpElem)
-
- definition.documentElement.appendChild(ipElem)
-
- self.xml = definition.toprettyxml()
- return self.xml
-
- def writeXML(self, filePath):
- """
- writes xml definition to given file
- """
- f = open(filePath, 'w')
- f.write(self.toXML())
- f.close()
-
- @staticmethod
- def parseConfigFile(path):
- """
- parses given config file
- """
- configFile = open(path, 'r')
- try:
- config = yaml.safe_load(configFile)
- except Exception:
- print "Bad network configuration file. exiting"
- sys.exit(1)
-
- return config
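
Network.__init__ above expects each entry in the parsed config to provide name, brName, brAddr, netmask, forward, dhcp and cidr keys. A hypothetical single-network config and the resulting call, purely to illustrate the expected shape (the real network config in this repository may differ):

import yaml

from network import Network   # the class defined above

definitions = yaml.safe_load("""
- name: admin
  brName: br-admin
  brAddr: 10.20.0.1
  netmask: 255.255.255.0
  cidr: 10.20.0.0/24
  forward: {used: true, type: nat}
  dhcp: {used: false, rangeStart: null, rangeEnd: null}
""")

net = Network(definitions[0])
# prints a <network> definition with <forward mode="nat"/>, bridge br-admin
# and the ip/netmask above
print net.toXML()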
diff --git a/tools/laas-fog/source/pharos.py b/tools/laas-fog/source/pharos.py
deleted file mode 100755
index d5a6e8a8..00000000
--- a/tools/laas-fog/source/pharos.py
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/usr/bin/python
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import requests
-import time
-import calendar
-import subprocess
-import sys
-import yaml
-import os
-import logging
-from utilities import Utilities
-from database import BookingDataBase
-
-
-class Pharos_api:
- """
- This class listens to the dashboard and starts/stops bookings accordingly.
- This class should run in the background indefinitely.
- Do not execute this file directly - run ./listen.py instead
- """
- def __init__(self, config):
- """
- init function.
- config is the already-parsed config file
- """
- self.conf = config
- self.servers = yaml.safe_load(open(config['inventory']))
- self.log = self.createLogger("pharos_api")
- self.polling = 60 / int(config['polling'])
- self.log.info(
- "polling the dashboard once every %d seconds", self.polling)
- self.dashboard = config['dashboard']
- self.log.info("connecting to dashboard at %s", self.dashboard)
- if os.path.isfile(config['token']):
- self.token = open(config['token']).read()
- else:
- self.token = config['token']
- self.updateHeader()
- self.database = BookingDataBase(config['database'])
- self.log.info("using database at %s", self.conf['database'])
- self.deploy_path = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), "deploy.py")
- if not os.path.isfile(self.deploy_path):
- self.log.error(
- "Cannot find the deployment script at %s", self.deploy_path)
-
- def setToken(self, token):
- """
- Sets authentication token. Not yet needed.
- """
- self.token = token
- self.updateHeader()
-
- def setTokenFromFile(self, path):
- """
- reads auth token from a file. Not yet needed.
- """
- self.setToken(open(path).read())
-
- def updateHeader(self):
- """
- updates the http header used when talking to the dashboard
- """
- self.header = {"Authorization": "Token " + self.token}
-
- def listen(self):
- """
-        This method continuously polls the Pharos dashboard.
-        If a booking is found for one of our servers,
-        we start a deployment in the background with the
-        proper config file for the requested
-        installer and scenario.
- """
- self.log.info("%s", "Beginning polling of dashboard")
- try:
- while True:
- time.sleep(self.polling)
- url = self.dashboard+"/api/bookings/"
- bookings = requests.get(url, headers=self.header).json()
- for booking in bookings:
- if booking['resource_id'] in self.servers.keys():
- self.convertTimes(booking)
- self.database.checkAddBooking(booking)
- self.checkBookings()
- except Exception:
- self.log.exception('%s', "failed to connect to dashboard")
-
- self.listen()
-
- def convertTimes(self, booking):
- """
-        This method takes the time reported by Pharos in the
-        format yyyy-mm-ddThh:mm:ssZ
-        and converts it into seconds since the epoch,
-        for easier management.
- """
- booking['start'] = self.pharosToEpoch(booking['start'])
- booking['end'] = self.pharosToEpoch(booking['end'])
-
- def pharosToEpoch(self, timeStr):
- """
- Converts the dates from the dashboard to epoch time.
- """
- time_struct = time.strptime(timeStr, '%Y-%m-%dT%H:%M:%SZ')
- epoch_time = calendar.timegm(time_struct)
- return epoch_time
-
- def checkBookings(self):
- """
- This method checks all the bookings in our database to see if any
- action is required.
- """
- # get all active bookings from database into a usable form
- bookings = self.database.getBookings()
- for booking in bookings:
- # first, check if booking is over
- if time.time() > booking[3]:
- self.log.info("ending the booking with id %i", booking[0])
- self.endBooking(booking)
- # Then check if booking has begun and the host is still idle
- elif time.time() > booking[2] and booking[7] < 1:
- self.log.info("starting the booking with id %i", booking[0])
- self.startBooking(booking)
-
- def startBooking(self, booking):
- """
- Starts the scheduled booking on the requested host with
- the correct config file.
-        The provisioning process gets spun up in a subprocess,
-        so the API listener is not interrupted.
- """
- try:
- host = self.servers[booking[1]]
- self.log.info("Detected a new booking started for host %s", host)
- config_file = self.conf['default_configs']["None"]
- try:
- config_file = self.conf['default_configs'][booking[4]]
- except KeyError:
- self.log.warning(
- "No installer detected in the booking request.")
- self.log.info("New booking started for host %s", host)
- self.database.setStatus(booking[0], 1) # mark booking started
- if not os.path.isfile(self.deploy_path):
-                error = "Cannot find the deployment script at %s"
- self.log.error(error, self.deploy_path)
- subprocess.Popen([
- '/usr/bin/python',
- self.deploy_path,
- '--config', config_file,
- '--host', host
- ])
- except Exception:
- self.log.exception("Failed to start booking for %s", host)
-
- def endBooking(self, booking):
- """
- Resets a host once its booking has ended.
- """
- try:
- try:
- config_file = self.conf['default_configs'][booking[4]]
- except KeyError:
- warn = "No installer detected in booking request"
- self.log.warning("%s", warn)
- config_file = self.conf['default_configs']["None"]
-
- host = self.servers[booking[1]]
- log = logging.getLogger(host)
- log.info('Lease expired. Resetting host %s', host)
- self.database.setStatus(booking[0], 3)
- if not os.path.isfile(self.deploy_path):
- err = "Cannot find deployment script at %s"
- self.log.error(err, self.deploy_path)
- subprocess.Popen([
- '/usr/bin/python',
- self.deploy_path,
- '--config', config_file,
- '--host', host,
- '--reset'
- ])
- self.database.removeBooking(booking[0])
- except Exception:
- self.log.exception("Failed to end booking for %s", host)
-
- def createLogger(self, name):
- return Utilities.createLogger(name, self.conf['logging_dir'])
-
-
-if __name__ == "__main__":
- if "--config" not in sys.argv:
- print "Specify config file with --config option"
- sys.exit(1)
- config = None
- try:
- config_file = sys.argv[1+sys.argv.index('--config')]
- config = yaml.safe_load(open(config_file))
- except Exception:
- sys.exit(1)
- api = Pharos_api(config)
- api.listen()
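As a side note, the timestamp handling in pharosToEpoch above reduces to the following standalone sketch (the timestamp value is made up):

    import calendar
    import time

    # Dashboard times arrive as yyyy-mm-ddThh:mm:ssZ (UTC); convert to epoch seconds.
    time_struct = time.strptime('2017-09-26T00:10:21Z', '%Y-%m-%dT%H:%M:%SZ')
    print(calendar.timegm(time_struct))  # 1506384621 (timegm interprets the struct as UTC)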
diff --git a/tools/laas-fog/source/pod_manager.py b/tools/laas-fog/source/pod_manager.py
deleted file mode 100755
index 3e1caa8e..00000000
--- a/tools/laas-fog/source/pod_manager.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/python
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import time
-import sys
-import yaml
-import os
-from api.fog import FOG_Handler
-from utilities import Utilities
-from deployment_manager import Deployment_Manager
-from database import HostDataBase
-from installers import fuel
-from installers import joid
-
-
-class Pod_Manager:
- """
- This is the 'main' class that chooses a host and provisions & deploys it.
-    This class can be run directly from the command line,
-    or it can be invoked by the Pharos dashboard listener when
- a deployment is requested.
- Either way, this file should be called with:
- ./pod_manager.py --config <CONFIG_FILE>
- """
-    # This dictionary maps the supported installers to their
-    # respective installer classes, for easier parsing of the config file
- INSTALLERS = {
- "fuel": fuel.Fuel_Installer,
- "joid": joid.Joid_Installer,
- "none": None
- }
-
- def __init__(self, conf, requested_host=None, reset=False):
- """
- init function.
- conf is the read and parsed config file for this deployment
- requested_host is the optional hostname of the host you request
- if reset, we just flash the host to a clean state and return.
- """
- self.conf = conf
- if self.conf['installer'] is not None:
- inst = Pod_Manager.INSTALLERS[self.conf['installer'].lower()]
- self.conf['installer'] = inst
- self.fog = FOG_Handler(self.conf['fog']['server'])
- # Sets the fog keys, either from the config file
- # or the secrets file the config points to
- if os.path.isfile(self.conf['fog']['api_key']):
- self.fog.getFogKeyFromFile(self.conf['fog']['api_key'])
- else:
- self.fog.setFogKey(self.conf['fog']['api_key'])
-
- if os.path.isfile(self.conf['fog']['user_key']):
- self.fog.getUserKeyFromFile(self.conf['fog']['user_key'])
- else:
- self.fog.setUserKey(self.conf['fog']['user_key'])
- self.database = HostDataBase(self.conf['database'])
- self.request = requested_host
- if reset:
- mac = self.fog.getHostMac(self.request)
- log = self.conf['dhcp_log']
- dhcp_serv = self.conf['dhcp_server']
- ip = Utilities.getIPfromMAC(mac, log, remote=dhcp_serv)
- self.flash_host(self.request, ip)
-
- def start_deploy(self):
- """
- Ghosts the machine with the proper disk image and hands off
- control to the deployment manager.
- """
- try:
- host = self.database.getHost(self.request)
- hostMac = self.fog.getHostMac(host)
- dhcp_log = self.conf['dhcp_log']
- dhcp_server = self.conf['dhcp_server']
- host_ip = Utilities.getIPfromMAC(
- hostMac, dhcp_log, remote=dhcp_server
- )
- util = Utilities(host_ip, host, self.conf)
- util.resetKnownHosts()
- log = Utilities.createLogger(host, self.conf['logging_dir'])
- self.fog.setLogger(log)
- log.info("Starting booking on host %s", host)
- log.info("host is reachable at %s", host_ip)
- log.info('ghosting host %s with clean image', host)
- self.flash_host(host, host_ip, util)
- log.info('Host %s imaging complete', host)
- inst = self.conf['installer']
- scenario = self.conf['scenario']
- Deployment_Manager(inst, scenario, util).go()
- except Exception:
- log.exception("Encountered an unexpected error")
-
- def flash_host(self, host, host_ip, util=None):
- """
- We do this using a FOG server, but you can use whatever fits into your
- lab infrastructure. This method should put the host into a state as if
-        CentOS was just freshly installed, updated,
-        and had the needed virtualization software installed.
-        This is the 'clean' starting point we work from.
- """
- self.fog.setImage(host, self.conf['fog']['image_id'])
- self.fog.imageHost(host)
- Utilities.restartRemoteHost(host_ip)
- self.fog.waitForHost(host)
- # if util is not given, then we are just
- # flashing to reset after a booking expires
- if util is not None:
- time.sleep(30)
- util.waitForBoot()
- util.checkHost()
- time.sleep(15)
- util.checkHost()
-
-
-if __name__ == "__main__":
- configFile = ""
- host = ""
- for i in range(len(sys.argv) - 1):
- if "--config" in sys.argv[i]:
- configFile = sys.argv[i+1]
- elif "--host" in sys.argv[i]:
- host = sys.argv[i+1]
- if len(configFile) < 1:
- print "No config file specified"
- sys.exit(1)
- configFile = yaml.safe_load(open(configFile))
- manager = Pod_Manager(configFile, requested_host=host)
- manager.start_deploy()
diff --git a/tools/laas-fog/source/resetDataBase.py b/tools/laas-fog/source/resetDataBase.py
deleted file mode 100755
index ff141e58..00000000
--- a/tools/laas-fog/source/resetDataBase.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/python
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import sys
-import os
-import yaml
-from api.fog import FOG_Handler
-from database import HostDataBase
-from database import BookingDataBase
-
-"""
-This file resets the host database so that it contains
-all the hosts known to FOG, with all of them
-marked as available.
-
-This file is just provided to make populating the host db easier.
-If you wanted to do this yourself, you could do the following in
-a python command prompt:
- from database import HostDataBase
- db = HostDataBase("/path/to/file")
- db.addHost("host-name")
- db.addHost("host-name")
- db.addHost("host-name")
-
-"""
-config = None
-if "--config" in sys.argv:
- i = sys.argv.index("--config")
- if len(sys.argv) > i+1 and os.path.isfile(sys.argv[i+1]):
- try:
- config = yaml.safe_load(open(sys.argv[i+1]))
- except Exception:
- print "failed to read config file. exiting"
- sys.exit(1)
- else:
- print "config file not found. exiting"
- sys.exit(1)
-else:
- print "no config file given. Specify file with '--config <FILE_PATH>'"
- sys.exit(1)
-
-host = False
-if "--host" in sys.argv or "--both" in sys.argv:
- host = True
-
-booking = False
-if "--booking" in sys.argv or "--both" in sys.argv:
- booking = True
-
-
-if host:
-
- fog = FOG_Handler(
- config['fog']['server']
- )
- if os.path.isfile(config['fog']['api_key']):
- fog.getFogKeyFromFile(config['fog']['api_key'])
- else:
- fog.setFogKey(config['fog']['api_key'])
-
- if os.path.isfile(config['fog']['user_key']):
- fog.getUserKeyFromFile(config['fog']['user_key'])
- else:
- fog.setUserKey(config['fog']['user_key'])
- hosts = fog.getHostsinGroup("vm")
- host_names = []
- for host in hosts:
- host_names.append(host['name'])
-
-    # creates the directory for the db, if it doesn't yet exist
- dbDir = os.path.dirname(config['database'])
- if not os.path.isdir(dbDir):
- os.makedirs(dbDir)
-
- db = HostDataBase(config['database'])
-
- # check if the table already exists or not
- try:
- db.cursor.execute("SELECT * FROM hosts")
- except Exception as err:
- if "no such table" in str(err):
- db.createTable()
-
- db.resetHosts(host_names)
-
-if booking:
- db = BookingDataBase(config['database'])
- db.createTable()
- db.close()
-
-if not host and not booking:
-    print "you must specify the '--host', '--booking', or '--both' option"
-    print "depending on which database you wish to reset"
-    sys.exit(1)
diff --git a/tools/laas-fog/source/stop.sh b/tools/laas-fog/source/stop.sh
deleted file mode 100755
index e7214829..00000000
--- a/tools/laas-fog/source/stop.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-
-
-# This just finds all processes from this program and kills them.
-
-
-PIDS=$(ps -ef | grep laas/source/ | grep python | awk '{print $2}')
-
-kill ${PIDS[*]}
diff --git a/tools/laas-fog/source/utilities.py b/tools/laas-fog/source/utilities.py
deleted file mode 100644
index bbe09467..00000000
--- a/tools/laas-fog/source/utilities.py
+++ /dev/null
@@ -1,346 +0,0 @@
-"""
-#############################################################################
-#Copyright 2017 Parker Berberian and others #
-# #
-#Licensed under the Apache License, Version 2.0 (the "License"); #
-#you may not use this file except in compliance with the License. #
-#You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-#Unless required by applicable law or agreed to in writing, software #
-#distributed under the License is distributed on an "AS IS" BASIS, #
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-#See the License for the specific language governing permissions and #
-#limitations under the License. #
-#############################################################################
-"""
-
-import os
-import logging
-import string
-import sys
-import subprocess
-import xml.dom
-import xml.dom.minidom
-import re
-import random
-import yaml
-from database import HostDataBase, BookingDataBase
-from api.vpn import VPN
-LOGGING_DIR = ""
-
-
-class Utilities:
- """
- This class defines some useful functions that may be needed
- throughout the provisioning and deployment stage.
- The utility object is carried through most of the deployment process.
- """
- def __init__(self, host_ip, hostname, conf):
- """
- init function
- host_ip is the ip of the target host
- hostname is the FOG hostname of the host
- conf is the parsed config file
- """
- self.host = host_ip
- self.hostname = hostname
- root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
- self.scripts = os.path.join(root_dir, "hostScripts/")
- self.remoteDir = "/root/hostScripts/"
- self.conf = conf
- self.logger = logging.getLogger(hostname)
-
- def execRemoteScript(self, script, args=[]):
- """
- executes the given script on the
- remote host with the given args.
- script must be found in laas/hostScripts
- """
- cmd = [self.remoteDir+script]
- for arg in args:
- cmd.append(arg)
- self.sshExec(cmd)
-
- def waitForBoot(self):
- """
- Continually pings the host, waiting for it to boot
- """
- i = 0
- while (not self.pingHost()) and i < 30:
- i += 1
- if i == 30:
- self.logger.error("Host %s has not booted", self.host)
- sys.exit(1)
-
- def checkHost(self):
- """
-        Returns True if the host responds to two pings.
-        Sometimes, while a host is PXE booting, it will
-        respond to one ping but quickly go back offline.
- """
- if self.pingHost() and self.pingHost():
- return True
- return False
-
- def pingHost(self):
- """
- returns true if the host responds to a ping
- """
- i = 0
- response = 1
- cmd = "ping -c 1 "+self.host
- cmd = cmd.split(' ')
- nul = open(os.devnull, 'w')
- while i < 10 and response != 0:
- response = subprocess.call(cmd, stdout=nul, stderr=nul)
- i = i + 1
- if response == 0:
- return True
- return False
-
- def copyDir(self, localDir, remoteDir):
- """
- uses scp to copy localDir to remoteDir on the
- remote host
- """
- cmd = "mkdir -p "+remoteDir
- self.sshExec(cmd.split(" "))
- cmd = "scp -o StrictHostKeyChecking=no -r "
- cmd += localDir+" root@"+self.host+":/root"
- cmd = cmd.split()
- nul = open(os.devnull, 'w')
- subprocess.call(cmd, stdout=nul, stderr=nul)
-
- def copyScripts(self):
- """
-        Copies the hostScripts dir to the remote host.
- """
- self.copyDir(self.scripts, self.remoteDir)
-
- def sshExec(self, args):
- """
- executes args as an ssh
- command on the remote host.
- """
- cmd = ['ssh', 'root@'+self.host]
- for arg in args:
- cmd.append(arg)
- nul = open(os.devnull, 'w')
- return subprocess.call(cmd, stdout=nul, stderr=nul)
-
- def resetKnownHosts(self):
- """
-        Edits the known_hosts file to remove the previous entry for this host.
-        Sometimes, the flashing process gives the remote host a new
-        signature, and ssh complains about it.
- """
- lines = []
- sshFile = open('/root/.ssh/known_hosts', 'r')
- lines = sshFile.read()
- sshFile.close()
- lines = lines.split('\n')
- sshFile = open('/root/.ssh/known_hosts', 'w')
- for line in lines:
- if self.host not in line:
- sshFile.write(line+'\n')
- sshFile.close()
-
- def restartHost(self):
- """
- restarts the remote host
- """
- cmd = ['shutdown', '-r', 'now']
- self.sshExec(cmd)
-
- @staticmethod
- def randoString(length):
- """
- this is an adapted version of the code found here:
- https://stackoverflow.com/questions/2257441/
- random-string-generation-with-upper-case-letters-and-digits-in-python
- generates a random alphanumeric string of length length.
- """
- randStr = ''
- chars = string.ascii_uppercase + string.digits
- for x in range(length):
- randStr += random.SystemRandom().choice(chars)
- return randStr
-
- def changePassword(self):
- """
- Sets the root password to a random string and returns it
- """
- paswd = self.randoString(15)
- command = "printf "+paswd+" | passwd --stdin root"
- self.sshExec(command.split(' '))
- return paswd
-
- def markHostDeployed(self):
- """
- Tells the database that this host has finished its deployment
- """
- db = HostDataBase(self.conf['database'])
- db.makeHostDeployed(self.hostname)
- db.close()
-
- def make_vpn_user(self):
- """
- Creates a vpn user and associates it with this booking
- """
- config = yaml.safe_load(open(self.conf['vpn_config']))
- myVpn = VPN(config)
- # name = dashboard.getUserName()
- u, p, uid = myVpn.makeNewUser() # may pass name arg if wanted
- self.logger.info("%s", "created new vpn user")
- self.logger.info("username: %s", u)
- self.logger.info("password: %s", p)
- self.logger.info("vpn user uid: %s", uid)
- self.add_vpn_user(uid)
-
- def add_vpn_user(self, uid):
- """
- Adds the dn of the vpn user to the database
- so that we can clean it once the booking ends
- """
- db = BookingDataBase(self.conf['database'])
- # converts from hostname to pharos resource id
- inventory = yaml.safe_load(open(self.conf['inventory']))
- host_id = -1
- for resource_id in inventory.keys():
- if inventory[resource_id] == self.hostname:
- host_id = resource_id
- break
- db.setVPN(host_id, uid)
-
- def finishDeployment(self):
- """
- Last method call once a host is finished being deployed.
- It notifies the database and changes the password to
- a random string
- """
- self.markHostDeployed()
- self.make_vpn_user()
- passwd = self.changePassword()
- self.logger.info("host %s provisioning done", self.hostname)
- self.logger.info("You may access the host at %s", self.host)
- self.logger.info("The password is %s", passwd)
- notice = "You should change all passwords for security"
- self.logger.warning('%s', notice)
-
- @staticmethod
- def restartRemoteHost(host_ip):
- """
- This method assumes that you already have ssh access to the target
- """
- nul = open(os.devnull, 'w')
- ret_code = subprocess.call([
- 'ssh', '-o', 'StrictHostKeyChecking=no',
- 'root@'+host_ip,
- 'shutdown', '-r', 'now'],
- stdout=nul, stderr=nul)
-
- return ret_code
-
- @staticmethod
- def getName(xmlString):
- """
-        Gets the name value from XML. For example:
- <name>Parker</name> returns Parker
- """
- xmlDoc = xml.dom.minidom.parseString(xmlString)
- nameNode = xmlDoc.documentElement.getElementsByTagName('name')
- name = str(nameNode[0].firstChild.nodeValue)
- return name
-
- @staticmethod
- def getXMLFiles(directory):
- """
- searches directory non-recursively and
- returns a list of all xml files
- """
- contents = os.listdir(directory)
- fileContents = []
- for item in contents:
- if os.path.isfile(os.path.join(directory, item)):
- fileContents.append(os.path.join(directory, item))
- xmlFiles = []
- for item in fileContents:
- if 'xml' in os.path.basename(item):
- xmlFiles.append(item)
- return xmlFiles
-
- @staticmethod
- def createLogger(name, log_dir=LOGGING_DIR):
- """
- Initializes the logger if it does not yet exist, and returns it.
-        Because of how Python logging works, calling logging.getLogger()
-        with the same name always returns a reference to the same logger.
-        So we can call this method from anywhere with the hostname as
-        the name argument and it will return the logger for that host.
- The formatting includes the level of importance and the time stamp
- """
- global LOGGING_DIR
- if log_dir != LOGGING_DIR:
- LOGGING_DIR = log_dir
- log = logging.getLogger(name)
- if len(log.handlers) > 0: # if this logger is already initialized
- return log
- log.setLevel(10)
- han = logging.FileHandler(os.path.join(log_dir, name+".log"))
- han.setLevel(10)
- log_format = '[%(levelname)s] %(asctime)s [#] %(message)s'
- formatter = logging.Formatter(fmt=log_format)
- han.setFormatter(formatter)
- log.addHandler(han)
- return log
-
- @staticmethod
- def getIPfromMAC(macAddr, logFile, remote=None):
- """
- searches through the dhcp logs for the given mac
- and returns the associated ip. Will retrieve the
- logFile from a remote host if remote is given.
-        If given, remote should be an IP address or hostname that
- we can ssh to.
- """
- if remote is not None:
- logFile = Utilities.retrieveFile(remote, logFile)
- ip = Utilities.getIPfromLog(macAddr, logFile)
- if remote is not None:
- os.remove(logFile)
- return ip
-
- @staticmethod
- def retrieveFile(host, remote_loc, local_loc=os.getcwd()):
- """
- Retrieves file from host and puts it in the current directory
- unless local_loc is given.
- """
- subprocess.call(['scp', 'root@'+host+':'+remote_loc, local_loc])
- return os.path.join(local_loc, os.path.basename(remote_loc))
-
- @staticmethod
- def getIPfromLog(macAddr, logFile):
- """
- Helper method for getIPfromMAC.
-        Uses a regex to find the IP address in the
- log
- """
- try:
- messagesFile = open(logFile, "r")
- allLines = messagesFile.readlines()
- except Exception:
- sys.exit(1)
- importantLines = []
- for line in allLines:
- if macAddr in line and "DHCPACK" in line:
- importantLines.append(line)
- ipRegex = r'(\d+\.\d+\.\d+\.\d+)'
- IPs = []
- for line in importantLines:
- IPs.append(re.findall(ipRegex, line))
- if len(IPs) > 0 and len(IPs[-1]) > 0:
- return IPs[-1][0]
- return None
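For reference, the DHCP-log matching that getIPfromLog above relies on boils down to the sketch below; the log line is a made-up dhcpd-style DHCPACK entry, not taken from a real deployment:

    import re

    line = 'Sep 26 00:10:21 jump dhcpd: DHCPACK on 10.10.100.12 to 52:54:00:aa:bb:cc via br-laas'
    mac = '52:54:00:aa:bb:cc'

    # getIPfromLog keeps only lines containing both the MAC and 'DHCPACK',
    # then pulls the dotted-quad address out with a regex.
    if mac in line and 'DHCPACK' in line:
        print(re.findall(r'(\d+\.\d+\.\d+\.\d+)', line)[0])  # 10.10.100.12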
diff --git a/tools/pharos-dashboard/.gitignore b/tools/pharos-dashboard/.gitignore
deleted file mode 100644
index 4154fdd3..00000000
--- a/tools/pharos-dashboard/.gitignore
+++ /dev/null
@@ -1,46 +0,0 @@
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-
-# C extensions
-*.so
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-.tox/
-.coverage
-.cache
-nosetests.xml
-coverage.xml
-
-# Django:
-*.log
-*.pot
-
-# Celery
-celerybeat-schedule.db
-
-# KDE:
-.directory
-
-# Pycharm:
-.idea/
-
-# Virtualenv:
-venv/
-
-# Vim:
-*.swp
-
-# Bower Components:
-bower_components/
-
-# Production settings
-config.env
-
-# rsa key files
-rsa.pem
-rsa.pub
diff --git a/tools/pharos-dashboard/Makefile b/tools/pharos-dashboard/Makefile
deleted file mode 100644
index 90709171..00000000
--- a/tools/pharos-dashboard/Makefile
+++ /dev/null
@@ -1,38 +0,0 @@
-build:
- docker-compose build
-
-up:
- docker-compose up -d
-
-start:
- docker-compose start
-
-stop:
- docker-compose stop
-
-data:
- docker volume create --name=pharos-data
-
-shell-nginx:
- docker exec -ti ng01 bash
-
-shell-web:
- docker exec -ti dg01 bash
-
-shell-db:
- docker exec -ti ps01 bash
-
-log-nginx:
- docker-compose logs nginx
-
-log-web:
- docker-compose logs web
-
-log-ps:
- docker-compose logs postgres
-
-log-rmq:
- docker-compose logs rabbitmq
-
-log-worker:
- docker-compose logs worker
diff --git a/tools/pharos-dashboard/__init__.py b/tools/pharos-dashboard/__init__.py
deleted file mode 100644
index ce1acf36..00000000
--- a/tools/pharos-dashboard/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-############################################################################## \ No newline at end of file
diff --git a/tools/pharos-dashboard/booking_communication_agent.py b/tools/pharos-dashboard/booking_communication_agent.py
deleted file mode 100644
index c52e98bd..00000000
--- a/tools/pharos-dashboard/booking_communication_agent.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from dashboard_notification.notification import Notification
-from dashboard_api.api import DashboardAPI
-
-CONFIG = {
- 'dashboard_ip': '127.0.0.1',
- 'dashboard_url': 'http://127.0.0.1',
- 'api_token': 'f33ff43c85ecb13f5d0632c05dbb0a7d85a5a8d1',
- 'user': 'opnfv',
- 'password': 'opnfvopnfv'
-}
-
-api = DashboardAPI(CONFIG['dashboard_url'], api_token=CONFIG['api_token'], verbose=True)
-
-
-def booking_start(message):
- content = message.content
- booking = api.get_booking(id=content['booking_id'])
-
- # do something here...
-
- # notify dashboard
- api.post_resource_status(resource_id=booking['resource_id'], type='info', title='pod setup',
- content='details')
-
-
-def booking_end(message):
- # do something here...
-
- # notify dashboard
- api.post_resource_status(resource_id=message.content['resource_id'], type='info',
- title='booking end', content='details')
-
-
-def main():
- with Notification(CONFIG['dashboard_ip'], CONFIG['user'], CONFIG['password']) as notification:
- notification.register(booking_start, 'Arm POD 2', 'booking_start')
- notification.register(booking_end, 'Arm POD 2', 'booking_end')
- notification.receive() # wait for notifications
-
-
-if __name__ == "__main__":
- main()
diff --git a/tools/pharos-dashboard/config.env.sample b/tools/pharos-dashboard/config.env.sample
deleted file mode 100644
index 060841c2..00000000
--- a/tools/pharos-dashboard/config.env.sample
+++ /dev/null
@@ -1,24 +0,0 @@
-DASHBOARD_URL=http://labs.opnfv.org
-
-# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG=False
-
-DB_NAME=sample_name
-DB_USER=sample_user
-DB_PASS=sample_pass
-DB_SERVICE=postgres
-DB_PORT=5432
-
-# SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY=http://www.miniwebtool.com/django-secret-key-generator/
-
-OAUTH_CONSUMER_KEY=sample_key
-OAUTH_CONSUMER_SECRET=sample_secret
-
-JIRA_URL=sample_url
-JIRA_USER_NAME=sample_jira_user
-JIRA_USER_PASSWORD=sample_jira_pass
-
-# Rabbitmq
-RABBITMQ_USER=opnfv
-RABBITMQ_PASSWORD=opnfvopnfv
diff --git a/tools/pharos-dashboard/config/nginx/pharos_dashboard.conf b/tools/pharos-dashboard/config/nginx/pharos_dashboard.conf
deleted file mode 100644
index 87b6f8e8..00000000
--- a/tools/pharos-dashboard/config/nginx/pharos_dashboard.conf
+++ /dev/null
@@ -1,24 +0,0 @@
-upstream web {
- ip_hash;
- server web:8000;
-}
-
-# portal
-server {
- listen 80;
- server_name localhost;
- charset utf-8;
-
- location /static {
- alias /static;
- }
-
- location /media {
- alias /media;
- }
-
- location / {
- proxy_set_header Host $host;
- proxy_pass http://web/;
- }
-}
diff --git a/tools/pharos-dashboard/config/postgres/docker-entrypoint-initdb.d/pharos_dashboard.sh b/tools/pharos-dashboard/config/postgres/docker-entrypoint-initdb.d/pharos_dashboard.sh
deleted file mode 100755
index 526228a8..00000000
--- a/tools/pharos-dashboard/config/postgres/docker-entrypoint-initdb.d/pharos_dashboard.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-
-psql -U postgres -c "CREATE USER $DB_USER PASSWORD '$DB_PASS'"
-psql -U postgres -c "CREATE DATABASE $DB_NAME OWNER $DB_USER"
diff --git a/tools/pharos-dashboard/dashboard_api/__init__.py b/tools/pharos-dashboard/dashboard_api/__init__.py
deleted file mode 100644
index ce1acf36..00000000
--- a/tools/pharos-dashboard/dashboard_api/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-############################################################################## \ No newline at end of file
diff --git a/tools/pharos-dashboard/dashboard_api/api.py b/tools/pharos-dashboard/dashboard_api/api.py
deleted file mode 100644
index d40e0aa4..00000000
--- a/tools/pharos-dashboard/dashboard_api/api.py
+++ /dev/null
@@ -1,91 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import logging
-
-import requests
-
-URLS = {
- 'resources': '/api/resources/',
- 'servers': '/api/servers/',
- 'bookings': '/api/bookings',
- 'resource_status': '/api/resource_status/',
-}
-
-class DashboardAPI(object):
- def __init__(self, dashboard_url, api_token='', verbose=False):
- self._api_token = api_token
- self._verbose = verbose
- self._resources_url = dashboard_url + URLS['resources']
- self._servers_url = dashboard_url + URLS['servers']
- self._bookings_url = dashboard_url + URLS['bookings']
- self._resources_status_url = dashboard_url + URLS['resource_status']
- self._logger = logging.getLogger(__name__)
-
- def get_all_resources(self):
- return self._get_json(self._resources_url)
-
- def get_resource(self, id='', name='', url=''):
- if url != '':
- return self._get_json(url)[0]
- url = self._resources_url + self._url_parameter(id=id, name=name)
- return self._get_json(url)[0]
-
- def get_all_bookings(self):
- return self._get_json(self._bookings_url)
-
- def get_resource_bookings(self, resource_id):
- url = self._bookings_url + self._url_parameter(resource_id=resource_id)
- return self._get_json(url)
-
- def get_booking(self, id):
- url = self._bookings_url + self._url_parameter(id=id)
- return self._get_json(url)[0]
-
- def post_resource_status(self, resource_id, type, title, content):
- data = {
- 'resource': resource_id,
- 'type': type,
- 'title': title,
- 'content': content
- }
- return self._post_json(self._resources_status_url, data)
-
- def get_url(self, url):
- return self._get_json(url)
-
- def _url_parameter(self, **kwargs):
- res = ''
- prefix = '?'
- for key, val in kwargs.items():
- res += prefix + key + '=' + str(val)
- prefix = '&'
- return res
-
- def _get_json(self, url):
- try:
- response = requests.get(url)
- if self._verbose:
- print('Get JSON: ' + url)
- print(response.status_code, response.content)
- return response.json()
- except requests.exceptions.RequestException as e:
- self._logger.exception(e)
- except ValueError as e:
- self._logger.exception(e)
-
- def _post_json(self, url, json):
- if self._api_token == '':
- raise Exception('Need api token to POST data.')
- response = requests.post(url, json, headers={'Authorization': 'Token ' + self._api_token})
- if self._verbose:
- print('Post JSON: ' + url)
- print(response.status_code, response.content)
- return response.status_code
diff --git a/tools/pharos-dashboard/dashboard_notification/__init__.py b/tools/pharos-dashboard/dashboard_notification/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/dashboard_notification/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/dashboard_notification/notification.py b/tools/pharos-dashboard/dashboard_notification/notification.py
deleted file mode 100644
index 6843c761..00000000
--- a/tools/pharos-dashboard/dashboard_notification/notification.py
+++ /dev/null
@@ -1,120 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import jsonpickle
-import pika
-
-
-class Message(object):
- def __init__(self, type, topic, content):
- self.type = type
- self.topic = topic
- self.content = content
-
-
-class Notification(object):
- """
- This class can be used by the dashboard and the labs to exchange notifications about booking
- events and pod status. It utilizes rabbitmq to communicate.
-
- Notifications are associated to an event and to a topic.
- Events are:
- [ 'booking_start', 'booking_end']
- The topic is usually a POD name, ie:
- 'Intel POD 2'
- """
-
- def __init__(self, dashboard_url, user=None, password=None, verbose=False):
- self.rabbitmq_broker = dashboard_url
- self.verbose = verbose
- if user is None and password is None:
- self._connection = pika.BlockingConnection(pika.ConnectionParameters(
- host=self.rabbitmq_broker))
- else:
- self.credentials = pika.PlainCredentials(user, password)
- self._connection = pika.BlockingConnection(pika.ConnectionParameters(
- credentials=self.credentials,
- host=self.rabbitmq_broker))
- self._registry = {}
- self._channel = self._connection.channel()
- self._channel.exchange_declare(exchange='notifications', type='topic')
- self._result = self._channel.queue_declare(exclusive=True, durable=True)
- self._queue_name = self._result.method.queue
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self._connection.close()
-
- def register(self, function, topic, type='all'):
- """
-        Registers a function to be called for the specified topic and event type.
-        :param function: the function to register
-        :param topic: the topic (usually a POD name) for which the function
-        will be called, for example:
-        'Intel POD 2'
-        :param type: the event type, for example 'booking_start' (default 'all')
- """
-
- if topic not in self._registry:
- self._registry[topic] = [(function, type)]
- else:
- self._registry[topic].append((function, type))
-
- def receive(self):
- """
-        Start receiving notifications. This is a blocking operation; when a
-        notification is received, the registered functions will be called.
- """
- if self.verbose:
- print('Start receiving Notifications. Keys: ', self._registry.keys())
- self._receive_message(self._registry.keys())
-
- def send(self, message):
- """
- Send an event notification.
- :param event: the event type
- :param topic: the pod name
- :param content: a JSON-serializable dictionary
- """
- self._send_message(message)
-
- def _send_message(self, message):
- routing_key = message.topic
- message_json = jsonpickle.encode(message)
- self._channel.basic_publish(exchange='notifications',
- routing_key=routing_key,
- body=message_json,
- properties=pika.BasicProperties(
- content_type='application/json',
- delivery_mode=2, # make message persistent
- ))
- if self.verbose:
- print(" [x] Sent %r:%r" % (routing_key, message_json))
-
- def _receive_message(self, binding_keys):
- for key in binding_keys:
- self._channel.queue_bind(exchange='notifications',
- queue=self._queue_name,
- routing_key=key)
- self._channel.basic_consume(self._message_callback,
- queue=self._queue_name)
- self._channel.start_consuming()
-
- def _message_callback(self, ch, method, properties, body):
- if self.verbose:
- print(" [x] Got %r:%r" % (method.routing_key, body))
- if method.routing_key not in self._registry:
- return
- for func, type in self._registry[method.routing_key]:
- message = jsonpickle.decode(body.decode())
- if message.type == type:
- func(message)
- ch.basic_ack(delivery_tag=method.delivery_tag)
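The receiving side of this class is exercised by booking_communication_agent.py earlier in this change; the sending side is not exercised there, so here is a minimal sketch with made-up broker address, credentials, and booking data (it assumes a reachable RabbitMQ instance and that the dashboard_notification package is importable):

    from dashboard_notification.notification import Message, Notification

    with Notification('127.0.0.1', user='opnfv', password='opnfvopnfv') as note:
        msg = Message(type='booking_start', topic='Arm POD 2',
                      content={'booking_id': 42, 'resource_id': 7})
        note.send(msg)  # published to the 'notifications' topic exchange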
diff --git a/tools/pharos-dashboard/docker-compose.yml b/tools/pharos-dashboard/docker-compose.yml
deleted file mode 100644
index 44a263f0..00000000
--- a/tools/pharos-dashboard/docker-compose.yml
+++ /dev/null
@@ -1,77 +0,0 @@
----
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-version: '2'
-services:
- nginx:
- restart: always
- image: nginx:latest
- container_name: ng01
- ports:
- - "80:80"
- volumes:
- - ./config/nginx:/etc/nginx/conf.d
- - /var/lib/pharos_dashboard/static:/static
- - /var/lib/pharos_dashboard/media:/media
- depends_on:
- - web
-
- web:
- restart: always
- build: ./web/
- container_name: dg01
- # yamllint disable rule:line-length
- command: bash -c "python manage.py migrate && python manage.py collectstatic --no-input && gunicorn pharos_dashboard.wsgi -b 0.0.0.0:8000"
- # yamllint enable rule:line-length
- depends_on:
- - postgres
- links:
- - postgres
- env_file: config.env
- volumes:
- - ./:/pharos_dashboard
- - /var/lib/pharos_dashboard/static:/static
- - /var/lib/pharos_dashboard/media:/media
- expose:
- - "8000"
-
- postgres:
- restart: always
- image: postgres:latest
- container_name: ps01
- env_file: config.env
- volumes:
- - ./config/postgres/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
- - pharos-data:/var/lib/postgresql/data
-
- rabbitmq:
- restart: always
- build: ./rabbitmq/
- container_name: rm01
- env_file: config.env
- ports:
- - "5672:5672"
-
- worker:
- restart: always
- build: ./worker/
- # yamllint disable rule:line-length
- command: bash -c "celery -A pharos_dashboard worker -l info -B --schedule=~/celerybeat-schedule"
- # yamllint enable rule:line-length
- env_file: config.env
- links:
- - postgres
- - rabbitmq
- volumes:
- - ./:/pharos_dashboard
-volumes:
- pharos-data:
- external: true
diff --git a/tools/pharos-dashboard/rabbitmq/Dockerfile b/tools/pharos-dashboard/rabbitmq/Dockerfile
deleted file mode 100644
index 71162a45..00000000
--- a/tools/pharos-dashboard/rabbitmq/Dockerfile
+++ /dev/null
@@ -1,4 +0,0 @@
-FROM rabbitmq
-
-ADD init.sh /init.sh
-CMD ["/init.sh"] \ No newline at end of file
diff --git a/tools/pharos-dashboard/rabbitmq/init.sh b/tools/pharos-dashboard/rabbitmq/init.sh
deleted file mode 100755
index 9d04dd11..00000000
--- a/tools/pharos-dashboard/rabbitmq/init.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-# Create Rabbitmq user
-( sleep 10 ; \
-rabbitmqctl add_user $RABBITMQ_USER $RABBITMQ_PASSWORD 2>/dev/null ; \
-rabbitmqctl set_user_tags $RABBITMQ_USER administrator ; \
-rabbitmqctl set_permissions -p / $RABBITMQ_USER ".*" ".*" ".*" ; \
-echo "*** User '$RABBITMQ_USER' with password '$RABBITMQ_PASSWORD' completed. ***") &
-
-rabbitmq-server $@
diff --git a/tools/pharos-dashboard/readme.txt b/tools/pharos-dashboard/readme.txt
deleted file mode 100644
index 2a259129..00000000
--- a/tools/pharos-dashboard/readme.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-The dashboard is deployed using docker-compose.
-Application / database files are saved in /var/lib/pharos_dashboard/.
-
-Deployment:
-
-- clone the repository
-- complete the config.env.sample file and save it as config.env
-- install docker, docker-compose and bower
-- run 'bower install' in ./src/static/ to fetch javascript dependencies
-- run 'make build' to build the containers
-- run 'make data'
-- run 'make up' to run the dashboard
-
-Updating:
-
-- make stop
-- git pull
-- run 'bower install' if javascript dependencies changed
-- make build
-- make start
-
-If there are migrations that need user input (like renaming a field), they need to be run manually!
-
-Logs / Shell access:
-
-- there are some shortcuts in the makefile
diff --git a/tools/pharos-dashboard/src/__init__.py b/tools/pharos-dashboard/src/__init__.py
deleted file mode 100644
index ce1acf36..00000000
--- a/tools/pharos-dashboard/src/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-############################################################################## \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/account/__init__.py b/tools/pharos-dashboard/src/account/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/account/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/account/admin.py b/tools/pharos-dashboard/src/account/admin.py
deleted file mode 100644
index 18b2e1a8..00000000
--- a/tools/pharos-dashboard/src/account/admin.py
+++ /dev/null
@@ -1,15 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.contrib import admin
-
-from account.models import UserProfile
-
-admin.site.register(UserProfile) \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/account/apps.py b/tools/pharos-dashboard/src/account/apps.py
deleted file mode 100644
index 9814648f..00000000
--- a/tools/pharos-dashboard/src/account/apps.py
+++ /dev/null
@@ -1,15 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.apps import AppConfig
-
-
-class AccountsConfig(AppConfig):
- name = 'account'
diff --git a/tools/pharos-dashboard/src/account/forms.py b/tools/pharos-dashboard/src/account/forms.py
deleted file mode 100644
index 7653e2b1..00000000
--- a/tools/pharos-dashboard/src/account/forms.py
+++ /dev/null
@@ -1,22 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import django.forms as forms
-import pytz as pytz
-
-from account.models import UserProfile
-
-
-class AccountSettingsForm(forms.ModelForm):
- class Meta:
- model = UserProfile
- fields = ['company', 'ssh_public_key', 'pgp_public_key', 'timezone']
-
- timezone = forms.ChoiceField(choices=[(x, x) for x in pytz.common_timezones], initial='UTC')
diff --git a/tools/pharos-dashboard/src/account/jira_util.py b/tools/pharos-dashboard/src/account/jira_util.py
deleted file mode 100644
index fdb87f77..00000000
--- a/tools/pharos-dashboard/src/account/jira_util.py
+++ /dev/null
@@ -1,65 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import base64
-import os
-
-import oauth2 as oauth
-from django.conf import settings
-from jira import JIRA
-from tlslite.utils import keyfactory
-
-
-class SignatureMethod_RSA_SHA1(oauth.SignatureMethod):
- name = 'RSA-SHA1'
-
- def signing_base(self, request, consumer, token):
- if not hasattr(request, 'normalized_url') or request.normalized_url is None:
- raise ValueError("Base URL for request is not set.")
-
- sig = (
- oauth.escape(request.method),
- oauth.escape(request.normalized_url),
- oauth.escape(request.get_normalized_parameters()),
- )
-
- key = '%s&' % oauth.escape(consumer.secret)
- if token:
- key += oauth.escape(token.secret)
- raw = '&'.join(sig)
- return key, raw
-
- def sign(self, request, consumer, token):
-        """Signs the request with the RSA private key."""
- key, raw = self.signing_base(request, consumer, token)
-
- module_dir = os.path.dirname(__file__) # get current directory
- with open(module_dir + '/rsa.pem', 'r') as f:
- data = f.read()
- privateKeyString = data.strip()
- privatekey = keyfactory.parsePrivateKey(privateKeyString)
- raw = str.encode(raw)
- signature = privatekey.hashAndSign(raw)
- return base64.b64encode(signature)
-
-
-def get_jira(user):
- module_dir = os.path.dirname(__file__) # get current directory
- with open(module_dir + '/rsa.pem', 'r') as f:
- key_cert = f.read()
-
- oauth_dict = {
- 'access_token': user.userprofile.oauth_token,
- 'access_token_secret': user.userprofile.oauth_secret,
- 'consumer_key': settings.OAUTH_CONSUMER_KEY,
- 'key_cert': key_cert
- }
-
- return JIRA(server=settings.JIRA_URL, oauth=oauth_dict) \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/account/middleware.py b/tools/pharos-dashboard/src/account/middleware.py
deleted file mode 100644
index 0f1dbd86..00000000
--- a/tools/pharos-dashboard/src/account/middleware.py
+++ /dev/null
@@ -1,32 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.utils import timezone
-from django.utils.deprecation import MiddlewareMixin
-
-from account.models import UserProfile
-
-
-class TimezoneMiddleware(MiddlewareMixin):
- """
- Activate the timezone from request.user.userprofile if user is authenticated,
- deactivate the timezone otherwise and use default (UTC)
- """
- def process_request(self, request):
- if request.user.is_authenticated:
- try:
- tz = request.user.userprofile.timezone
- timezone.activate(tz)
- except UserProfile.DoesNotExist:
- UserProfile.objects.create(user=request.user)
- tz = request.user.userprofile.timezone
- timezone.activate(tz)
- else:
- timezone.deactivate()
diff --git a/tools/pharos-dashboard/src/account/migrations/0001_initial.py b/tools/pharos-dashboard/src/account/migrations/0001_initial.py
deleted file mode 100644
index 591f7024..00000000
--- a/tools/pharos-dashboard/src/account/migrations/0001_initial.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10 on 2016-11-03 13:33
-from __future__ import unicode_literals
-
-import account.models
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ]
-
- operations = [
- migrations.CreateModel(
- name='UserProfile',
- fields=[
- ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('timezone', models.CharField(default='UTC', max_length=100)),
- ('ssh_public_key', models.FileField(blank=True, null=True, upload_to=account.models.upload_to)),
- ('pgp_public_key', models.FileField(blank=True, null=True, upload_to=account.models.upload_to)),
- ('company', models.CharField(max_length=200)),
- ('oauth_token', models.CharField(max_length=1024)),
- ('oauth_secret', models.CharField(max_length=1024)),
- ('jira_url', models.CharField(default='', max_length=100)),
- ('full_name', models.CharField(default='', max_length=100)),
- ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
- ],
- options={
- 'db_table': 'user_profile',
- },
- ),
- ]
diff --git a/tools/pharos-dashboard/src/account/migrations/__init__.py b/tools/pharos-dashboard/src/account/migrations/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/account/migrations/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/account/models.py b/tools/pharos-dashboard/src/account/models.py
deleted file mode 100644
index c2e99028..00000000
--- a/tools/pharos-dashboard/src/account/models.py
+++ /dev/null
@@ -1,35 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.contrib.auth.models import User
-from django.db import models
-
-
-def upload_to(object, filename):
- return object.user.username + '/' + filename
-
-class UserProfile(models.Model):
- user = models.OneToOneField(User, on_delete=models.CASCADE)
- timezone = models.CharField(max_length=100, blank=False, default='UTC')
- ssh_public_key = models.FileField(upload_to=upload_to, null=True, blank=True)
- pgp_public_key = models.FileField(upload_to=upload_to, null=True, blank=True)
- company = models.CharField(max_length=200, blank=False)
-
- oauth_token = models.CharField(max_length=1024, blank=False)
- oauth_secret = models.CharField(max_length=1024, blank=False)
-
- jira_url = models.CharField(max_length=100, default='')
- full_name = models.CharField(max_length=100, default='')
-
- class Meta:
- db_table = 'user_profile'
-
- def __str__(self):
- return self.user.username
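The deleted account.models module routes every uploaded SSH or PGP key into a per-user directory via upload_to(). A minimal standalone sketch of that path logic follows; the helper is mirrored here so it runs outside Django, and the username and filename are illustrative only:

from types import SimpleNamespace

def upload_to(obj, filename):
    # Mirrors the deleted helper: files land under '<username>/<filename>'
    # (joined with MEDIA_ROOT when the FileField is saved).
    return obj.user.username + '/' + filename

profile = SimpleNamespace(user=SimpleNamespace(username='jdoe'))   # stand-in for a UserProfile
print(upload_to(profile, 'id_rsa.pub'))                            # -> 'jdoe/id_rsa.pub'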
diff --git a/tools/pharos-dashboard/src/account/rsa.pem b/tools/pharos-dashboard/src/account/rsa.pem
deleted file mode 100644
index dbd4eedd..00000000
--- a/tools/pharos-dashboard/src/account/rsa.pem
+++ /dev/null
@@ -1,17 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBALRiMLAh9iimur8V
-A7qVvdqxevEuUkW4K+2KdMXmnQbG9Aa7k7eBjK1S+0LYmVjPKlJGNXHDGuy5Fw/d
-7rjVJ0BLB+ubPK8iA/Tw3hLQgXMRRGRXXCn8ikfuQfjUS1uZSatdLB81mydBETlJ
-hI6GH4twrbDJCR2Bwy/XWXgqgGRzAgMBAAECgYBYWVtleUzavkbrPjy0T5FMou8H
-X9u2AC2ry8vD/l7cqedtwMPp9k7TubgNFo+NGvKsl2ynyprOZR1xjQ7WgrgVB+mm
-uScOM/5HVceFuGRDhYTCObE+y1kxRloNYXnx3ei1zbeYLPCHdhxRYW7T0qcynNmw
-rn05/KO2RLjgQNalsQJBANeA3Q4Nugqy4QBUCEC09SqylT2K9FrrItqL2QKc9v0Z
-zO2uwllCbg0dwpVuYPYXYvikNHHg+aCWF+VXsb9rpPsCQQDWR9TT4ORdzoj+Nccn
-qkMsDmzt0EfNaAOwHOmVJ2RVBspPcxt5iN4HI7HNeG6U5YsFBb+/GZbgfBT3kpNG
-WPTpAkBI+gFhjfJvRw38n3g/+UeAkwMI2TJQS4n8+hid0uus3/zOjDySH3XHCUno
-cn1xOJAyZODBo47E+67R4jV1/gzbAkEAklJaspRPXP877NssM5nAZMU0/O/NGCZ+
-3jPgDUno6WbJn5cqm8MqWhW1xGkImgRk+fkDBquiq4gPiT898jusgQJAd5Zrr6Q8
-AO/0isr/3aa6O6NLQxISLKcPDk2NOccAfS/xOtfOz4sJYM3+Bs4Io9+dZGSDCA54
-Lw03eHTNQghS0A==
------END PRIVATE KEY-----
-
diff --git a/tools/pharos-dashboard/src/account/rsa.pub b/tools/pharos-dashboard/src/account/rsa.pub
deleted file mode 100644
index cc50e45e..00000000
--- a/tools/pharos-dashboard/src/account/rsa.pub
+++ /dev/null
@@ -1,6 +0,0 @@
------BEGIN PUBLIC KEY-----
-MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC0YjCwIfYoprq/FQO6lb3asXrx
-LlJFuCvtinTF5p0GxvQGu5O3gYytUvtC2JlYzypSRjVxwxrsuRcP3e641SdASwfr
-mzyvIgP08N4S0IFzEURkV1wp/IpH7kH41EtbmUmrXSwfNZsnQRE5SYSOhh+LcK2w
-yQkdgcMv11l4KoBkcwIDAQAB
------END PUBLIC KEY-----
diff --git a/tools/pharos-dashboard/src/account/tasks.py b/tools/pharos-dashboard/src/account/tasks.py
deleted file mode 100644
index bfb865dd..00000000
--- a/tools/pharos-dashboard/src/account/tasks.py
+++ /dev/null
@@ -1,34 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from celery import shared_task
-from django.contrib.auth.models import User
-from jira import JIRAError
-
-from account.jira_util import get_jira
-
-
-@shared_task
-def sync_jira_accounts():
- users = User.objects.all()
- for user in users:
- jira = get_jira(user)
- try:
- user_dict = jira.myself()
- except JIRAError:
- # User can be anonymous (local django admin account)
- continue
- user.email = user_dict['emailAddress']
- user.userprofile.url = user_dict['self']
- user.userprofile.full_name = user_dict['displayName']
- print(user_dict)
-
- user.userprofile.save()
-        user.save()
\ No newline at end of file
diff --git a/tools/pharos-dashboard/src/account/tests/__init__.py b/tools/pharos-dashboard/src/account/tests/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/account/tests/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/account/tests/test_general.py b/tools/pharos-dashboard/src/account/tests/test_general.py
deleted file mode 100644
index e8f483b5..00000000
--- a/tools/pharos-dashboard/src/account/tests/test_general.py
+++ /dev/null
@@ -1,60 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.contrib.auth.models import User
-from django.test import Client
-from django.test import TestCase
-from django.urls import reverse
-from django.utils import timezone
-
-from account.models import UserProfile
-
-
-class AccountMiddlewareTestCase(TestCase):
- def setUp(self):
- self.client = Client()
- self.user1 = User.objects.create(username='user1')
- self.user1.set_password('user1')
- self.user1profile = UserProfile.objects.create(user=self.user1)
- self.user1.save()
-
- def test_timezone_middleware(self):
- """
- The timezone should be UTC for anonymous users, for authenticated users it should be set
- to user.userprofile.timezone
- """
- #default
- self.assertEqual(timezone.get_current_timezone_name(), 'UTC')
-
- url = reverse('account:settings')
- # anonymous request
- self.client.get(url)
- self.assertEqual(timezone.get_current_timezone_name(), 'UTC')
-
- # authenticated user with UTC timezone (userprofile default)
- self.client.login(username='user1', password='user1')
- self.client.get(url)
- self.assertEqual(timezone.get_current_timezone_name(), 'UTC')
-
- # authenticated user with custom timezone (userprofile default)
- self.user1profile.timezone = 'Etc/Greenwich'
- self.user1profile.save()
- self.client.get(url)
- self.assertEqual(timezone.get_current_timezone_name(), 'Etc/Greenwich')
-
- # if there is no profile for a user, it should be created
- user2 = User.objects.create(username='user2')
- user2.set_password('user2')
- user2.save()
- self.client.login(username='user2', password='user2')
- self.client.get(url)
- self.assertTrue(user2.userprofile)
-
-
diff --git a/tools/pharos-dashboard/src/account/urls.py b/tools/pharos-dashboard/src/account/urls.py
deleted file mode 100644
index 3962a0c6..00000000
--- a/tools/pharos-dashboard/src/account/urls.py
+++ /dev/null
@@ -1,36 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-"""pharos_dashboard URL Configuration
-
-The `urlpatterns` list routes URLs to views. For more information please see:
- https://docs.djangoproject.com/en/1.10/topics/http/urls/
-Examples:
-Function views
- 1. Add an import: from my_app import views
- 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
-Class-based views
- 1. Add an import: from other_app.views import Home
- 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
-Including another URLconf
- 1. Import the include() function: from django.conf.urls import url, include
- 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
-"""
-from django.conf.urls import url
-
-from account.views import *
-
-urlpatterns = [
- url(r'^settings/', AccountSettingsView.as_view(), name='settings'),
- url(r'^authenticated/$', JiraAuthenticatedView.as_view(), name='authenticated'),
- url(r'^login/$', JiraLoginView.as_view(), name='login'),
- url(r'^logout/$', JiraLogoutView.as_view(), name='logout'),
- url(r'^users/$', UserListView.as_view(), name='users'),
-]
diff --git a/tools/pharos-dashboard/src/account/views.py b/tools/pharos-dashboard/src/account/views.py
deleted file mode 100644
index 17fbdc3a..00000000
--- a/tools/pharos-dashboard/src/account/views.py
+++ /dev/null
@@ -1,153 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import os
-import urllib
-
-import oauth2 as oauth
-from django.conf import settings
-from django.contrib import messages
-from django.contrib.auth import logout, authenticate, login
-from django.contrib.auth.decorators import login_required
-from django.contrib.auth.mixins import LoginRequiredMixin
-from django.contrib.auth.models import User
-from django.urls import reverse
-from django.utils.decorators import method_decorator
-from django.views.generic import RedirectView, TemplateView, UpdateView
-from jira import JIRA
-from rest_framework.authtoken.models import Token
-
-from account.forms import AccountSettingsForm
-from account.jira_util import SignatureMethod_RSA_SHA1
-from account.models import UserProfile
-
-
-@method_decorator(login_required, name='dispatch')
-class AccountSettingsView(UpdateView):
- model = UserProfile
- form_class = AccountSettingsForm
- template_name_suffix = '_update_form'
-
- def get_success_url(self):
- messages.add_message(self.request, messages.INFO,
- 'Settings saved')
- return '/'
-
- def get_object(self, queryset=None):
- return self.request.user.userprofile
-
- def get_context_data(self, **kwargs):
- token, created = Token.objects.get_or_create(user=self.request.user)
- context = super(AccountSettingsView, self).get_context_data(**kwargs)
- context.update({'title': "Settings", 'token': token})
- return context
-
-
-class JiraLoginView(RedirectView):
- def get_redirect_url(self, *args, **kwargs):
- consumer = oauth.Consumer(settings.OAUTH_CONSUMER_KEY, settings.OAUTH_CONSUMER_SECRET)
- client = oauth.Client(consumer)
- client.set_signature_method(SignatureMethod_RSA_SHA1())
-
- # Step 1. Get a request token from Jira.
- try:
- resp, content = client.request(settings.OAUTH_REQUEST_TOKEN_URL, "POST")
- except Exception as e:
- messages.add_message(self.request, messages.ERROR,
- 'Error: Connection to Jira failed. Please contact an Administrator')
- return '/'
- if resp['status'] != '200':
- messages.add_message(self.request, messages.ERROR,
- 'Error: Connection to Jira failed. Please contact an Administrator')
- return '/'
-
- # Step 2. Store the request token in a session for later use.
- self.request.session['request_token'] = dict(urllib.parse.parse_qsl(content.decode()))
- # Step 3. Redirect the user to the authentication URL.
- url = settings.OAUTH_AUTHORIZE_URL + '?oauth_token=' + \
- self.request.session['request_token']['oauth_token'] + \
- '&oauth_callback=' + settings.OAUTH_CALLBACK_URL
- return url
-
-
-class JiraLogoutView(LoginRequiredMixin, RedirectView):
- def get_redirect_url(self, *args, **kwargs):
- logout(self.request)
- return '/'
-
-
-class JiraAuthenticatedView(RedirectView):
- def get_redirect_url(self, *args, **kwargs):
- # Step 1. Use the request token in the session to build a new client.
- consumer = oauth.Consumer(settings.OAUTH_CONSUMER_KEY, settings.OAUTH_CONSUMER_SECRET)
- token = oauth.Token(self.request.session['request_token']['oauth_token'],
- self.request.session['request_token']['oauth_token_secret'])
- client = oauth.Client(consumer, token)
- client.set_signature_method(SignatureMethod_RSA_SHA1())
-
- # Step 2. Request the authorized access token from Jira.
- try:
- resp, content = client.request(settings.OAUTH_ACCESS_TOKEN_URL, "POST")
- except Exception as e:
- messages.add_message(self.request, messages.ERROR,
- 'Error: Connection to Jira failed. Please contact an Administrator')
- return '/'
- if resp['status'] != '200':
- messages.add_message(self.request, messages.ERROR,
- 'Error: Connection to Jira failed. Please contact an Administrator')
- return '/'
-
- access_token = dict(urllib.parse.parse_qsl(content.decode()))
-
- module_dir = os.path.dirname(__file__) # get current directory
- with open(module_dir + '/rsa.pem', 'r') as f:
- key_cert = f.read()
-
- oauth_dict = {
- 'access_token': access_token['oauth_token'],
- 'access_token_secret': access_token['oauth_token_secret'],
- 'consumer_key': settings.OAUTH_CONSUMER_KEY,
- 'key_cert': key_cert
- }
-
- jira = JIRA(server=settings.JIRA_URL, oauth=oauth_dict)
- username = jira.current_user()
- url = '/'
- # Step 3. Lookup the user or create them if they don't exist.
- try:
- user = User.objects.get(username=username)
- except User.DoesNotExist:
- # Save our permanent token and secret for later.
- user = User.objects.create_user(username=username,
- password=access_token['oauth_token_secret'])
- profile = UserProfile()
- profile.user = user
- profile.save()
- url = reverse('account:settings')
- user.userprofile.oauth_token = access_token['oauth_token']
- user.userprofile.oauth_secret = access_token['oauth_token_secret']
- user.userprofile.save()
- user.set_password(access_token['oauth_token_secret'])
- user.save()
- user = authenticate(username=username, password=access_token['oauth_token_secret'])
- login(self.request, user)
- # redirect user to settings page to complete profile
- return url
-
-
-@method_decorator(login_required, name='dispatch')
-class UserListView(TemplateView):
- template_name = "account/user_list.html"
-
- def get_context_data(self, **kwargs):
- users = User.objects.all()
- context = super(UserListView, self).get_context_data(**kwargs)
- context.update({'title': "Dashboard Users", 'users': users})
- return context
diff --git a/tools/pharos-dashboard/src/api/__init__.py b/tools/pharos-dashboard/src/api/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/api/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/api/migrations/__init__.py b/tools/pharos-dashboard/src/api/migrations/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/api/migrations/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/api/serializers.py b/tools/pharos-dashboard/src/api/serializers.py
deleted file mode 100644
index 237ca029..00000000
--- a/tools/pharos-dashboard/src/api/serializers.py
+++ /dev/null
@@ -1,39 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from rest_framework import serializers
-
-from booking.models import Booking
-from dashboard.models import Server, Resource, ResourceStatus
-
-class BookingSerializer(serializers.ModelSerializer):
- installer_name = serializers.CharField(source='installer.name')
- scenario_name = serializers.CharField(source='scenario.name')
-
- class Meta:
- model = Booking
- fields = ('id', 'resource_id', 'start', 'end', 'installer_name', 'scenario_name', 'purpose')
-
-
-class ServerSerializer(serializers.ModelSerializer):
- class Meta:
- model = Server
- fields = ('id', 'resource_id', 'name', 'model', 'cpu', 'ram', 'storage')
-
-
-class ResourceSerializer(serializers.ModelSerializer):
- class Meta:
- model = Resource
- fields = ('id', 'name', 'description', 'url', 'server_set')
-
-class ResourceStatusSerializer(serializers.ModelSerializer):
- class Meta:
- model = ResourceStatus
- fields = ('id', 'resource', 'timestamp','type', 'title', 'content')
diff --git a/tools/pharos-dashboard/src/api/urls.py b/tools/pharos-dashboard/src/api/urls.py
deleted file mode 100644
index a4a4b2ff..00000000
--- a/tools/pharos-dashboard/src/api/urls.py
+++ /dev/null
@@ -1,40 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-"""pharos_dashboard URL Configuration
-
-The `urlpatterns` list routes URLs to views. For more information please see:
- https://docs.djangoproject.com/en/1.10/topics/http/urls/
-Examples:
-Function views
- 1. Add an import: from my_app import views
- 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
-Class-based views
- 1. Add an import: from other_app.views import Home
- 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
-Including another URLconf
- 1. Import the include() function: from django.conf.urls import url, include
- 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
-"""
-from django.conf.urls import url, include
-from rest_framework import routers
-
-from api.views import *
-
-router = routers.DefaultRouter()
-router.register(r'resources', ResourceViewSet)
-router.register(r'servers', ServerViewSet)
-router.register(r'bookings', BookingViewSet)
-router.register(r'resource_status', ResourceStatusViewSet)
-
-urlpatterns = [
- url(r'^', include(router.urls)),
- url(r'^token$', GenerateTokenView.as_view(), name='generate_token'),
-]
\ No newline at end of file
diff --git a/tools/pharos-dashboard/src/api/views.py b/tools/pharos-dashboard/src/api/views.py
deleted file mode 100644
index 84fa1b52..00000000
--- a/tools/pharos-dashboard/src/api/views.py
+++ /dev/null
@@ -1,53 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.contrib.auth.decorators import login_required
-from django.shortcuts import redirect
-from django.utils.decorators import method_decorator
-from django.views import View
-from rest_framework import viewsets
-from rest_framework.authtoken.models import Token
-
-from api.serializers import *
-from booking.models import Booking
-from dashboard.models import Resource, Server, ResourceStatus
-
-
-class BookingViewSet(viewsets.ModelViewSet):
- queryset = Booking.objects.all()
- serializer_class = BookingSerializer
- filter_fields = ('resource', 'id')
-
-
-class ServerViewSet(viewsets.ModelViewSet):
- queryset = Server.objects.all()
- serializer_class = ServerSerializer
- filter_fields = ('resource', 'name')
-
-
-class ResourceViewSet(viewsets.ModelViewSet):
- queryset = Resource.objects.all()
- serializer_class = ResourceSerializer
- filter_fields = ('name', 'id')
-
-class ResourceStatusViewSet(viewsets.ModelViewSet):
- queryset = ResourceStatus.objects.all()
- serializer_class = ResourceStatusSerializer
-
-
-@method_decorator(login_required, name='dispatch')
-class GenerateTokenView(View):
- def get(self, request, *args, **kwargs):
- user = self.request.user
- token, created = Token.objects.get_or_create(user=user)
- if not created:
- token.delete()
- Token.objects.create(user=user)
- return redirect('account:settings')
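The deleted api package exposed bookings, resources and servers through django-rest-framework viewsets, authenticated with the per-user token that GenerateTokenView rotates and AccountSettingsView displays. A hedged sketch of a client call; the host name, the /api/ URL prefix, token authentication being enabled in the REST_FRAMEWORK settings, and an unpaginated list response are all assumptions:

import requests

TOKEN = '...'                                    # value shown on the account settings page
headers = {'Authorization': 'Token ' + TOKEN}

resp = requests.get('https://dashboard.example.org/api/bookings/', headers=headers)
resp.raise_for_status()
for booking in resp.json():
    # Fields per BookingSerializer: id, resource_id, start, end,
    # installer_name, scenario_name, purpose.
    print(booking['resource_id'], booking['start'], booking['end'])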
diff --git a/tools/pharos-dashboard/src/booking/__init__.py b/tools/pharos-dashboard/src/booking/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/booking/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/booking/admin.py b/tools/pharos-dashboard/src/booking/admin.py
deleted file mode 100644
index d883be1c..00000000
--- a/tools/pharos-dashboard/src/booking/admin.py
+++ /dev/null
@@ -1,17 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.contrib import admin
-
-from booking.models import *
-
-admin.site.register(Booking)
-admin.site.register(Installer)
-admin.site.register(Scenario)
\ No newline at end of file
diff --git a/tools/pharos-dashboard/src/booking/apps.py b/tools/pharos-dashboard/src/booking/apps.py
deleted file mode 100644
index 99bf115f..00000000
--- a/tools/pharos-dashboard/src/booking/apps.py
+++ /dev/null
@@ -1,15 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.apps import AppConfig
-
-
-class BookingConfig(AppConfig):
- name = 'booking'
diff --git a/tools/pharos-dashboard/src/booking/forms.py b/tools/pharos-dashboard/src/booking/forms.py
deleted file mode 100644
index 2dbfacb0..00000000
--- a/tools/pharos-dashboard/src/booking/forms.py
+++ /dev/null
@@ -1,23 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import django.forms as forms
-
-from booking.models import Installer, Scenario
-
-
-class BookingForm(forms.Form):
- fields = ['start', 'end', 'purpose', 'installer', 'scenario']
-
- start = forms.DateTimeField()
- end = forms.DateTimeField()
- purpose = forms.CharField(max_length=300)
- installer = forms.ModelChoiceField(queryset=Installer.objects.all(), required=False)
-    scenario = forms.ModelChoiceField(queryset=Scenario.objects.all(), required=False)
\ No newline at end of file
diff --git a/tools/pharos-dashboard/src/booking/migrations/0001_initial.py b/tools/pharos-dashboard/src/booking/migrations/0001_initial.py
deleted file mode 100644
index 6932daea..00000000
--- a/tools/pharos-dashboard/src/booking/migrations/0001_initial.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10 on 2016-11-03 13:33
-from __future__ import unicode_literals
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ('dashboard', '0001_initial'),
- migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Booking',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('start', models.DateTimeField()),
- ('end', models.DateTimeField()),
- ('jira_issue_id', models.IntegerField(null=True)),
- ('jira_issue_status', models.CharField(max_length=50)),
- ('purpose', models.CharField(max_length=300)),
- ],
- options={
- 'db_table': 'booking',
- },
- ),
- migrations.CreateModel(
- name='Installer',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('name', models.CharField(max_length=30)),
- ],
- ),
- migrations.CreateModel(
- name='Scenario',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('name', models.CharField(max_length=300)),
- ],
- ),
- migrations.AddField(
- model_name='booking',
- name='installer',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='booking.Installer'),
- ),
- migrations.AddField(
- model_name='booking',
- name='resource',
- field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dashboard.Resource'),
- ),
- migrations.AddField(
- model_name='booking',
- name='scenario',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='booking.Scenario'),
- ),
- migrations.AddField(
- model_name='booking',
- name='user',
- field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
- ),
- ]
diff --git a/tools/pharos-dashboard/src/booking/migrations/__init__.py b/tools/pharos-dashboard/src/booking/migrations/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/booking/migrations/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/booking/models.py b/tools/pharos-dashboard/src/booking/models.py
deleted file mode 100644
index 0b3fa3b1..00000000
--- a/tools/pharos-dashboard/src/booking/models.py
+++ /dev/null
@@ -1,77 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.conf import settings
-from django.contrib.auth.models import User
-from django.db import models
-from jira import JIRA
-from jira import JIRAError
-
-from dashboard.models import Resource
-
-
-class Installer(models.Model):
- id = models.AutoField(primary_key=True)
- name = models.CharField(max_length=30)
-
- def __str__(self):
- return self.name
-
-class Scenario(models.Model):
- id = models.AutoField(primary_key=True)
- name = models.CharField(max_length=300)
-
- def __str__(self):
- return self.name
-
-
-class Booking(models.Model):
- id = models.AutoField(primary_key=True)
- user = models.ForeignKey(User, models.CASCADE) # delete if user is deleted
- resource = models.ForeignKey(Resource, models.PROTECT)
- start = models.DateTimeField()
- end = models.DateTimeField()
- jira_issue_id = models.IntegerField(null=True)
- jira_issue_status = models.CharField(max_length=50)
-
- installer = models.ForeignKey(Installer, models.DO_NOTHING, null=True)
- scenario = models.ForeignKey(Scenario, models.DO_NOTHING, null=True)
- purpose = models.CharField(max_length=300, blank=False)
-
- class Meta:
- db_table = 'booking'
-
- def get_jira_issue(self):
- try:
- jira = JIRA(server=settings.JIRA_URL,
- basic_auth=(settings.JIRA_USER_NAME, settings.JIRA_USER_PASSWORD))
- issue = jira.issue(self.jira_issue_id)
- return issue
- except JIRAError:
- return None
-
- def save(self, *args, **kwargs):
- """
- Save the booking if self.user is authorized and there is no overlapping booking.
- Raise PermissionError if the user is not authorized
- Raise ValueError if there is an overlapping booking
- """
- if self.start >= self.end:
- raise ValueError('Start date is after end date')
- # conflicts end after booking starts, and start before booking ends
- conflicting_dates = Booking.objects.filter(resource=self.resource).exclude(id=self.id)
- conflicting_dates = conflicting_dates.filter(end__gt=self.start)
- conflicting_dates = conflicting_dates.filter(start__lt=self.end)
- if conflicting_dates.count() > 0:
- raise ValueError('This booking overlaps with another booking')
- return super(Booking, self).save(*args, **kwargs)
-
- def __str__(self):
- return str(self.resource) + ' from ' + str(self.start) + ' until ' + str(self.end)
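Booking.save() above rejects zero-length and overlapping reservations on the same resource, as its docstring describes. A sketch of that behaviour as it would appear in a configured Django shell for this project; pod and alice stand for an existing Resource and User and are assumptions:

from datetime import timedelta
from django.utils import timezone

from booking.models import Booking

start = timezone.now()
end = start + timedelta(weeks=1)
Booking.objects.create(resource=pod, user=alice, start=start, end=end,
                       purpose='scenario testing')

try:
    # Overlaps the booking above on the same resource, so save() raises.
    Booking.objects.create(resource=pod, user=alice,
                           start=start + timedelta(days=1),
                           end=end + timedelta(days=1),
                           purpose='conflicting booking')
except ValueError as err:
    print(err)    # 'This booking overlaps with another booking'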
diff --git a/tools/pharos-dashboard/src/booking/tests/__init__.py b/tools/pharos-dashboard/src/booking/tests/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/booking/tests/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/booking/tests/test_models.py b/tools/pharos-dashboard/src/booking/tests/test_models.py
deleted file mode 100644
index b4cd1133..00000000
--- a/tools/pharos-dashboard/src/booking/tests/test_models.py
+++ /dev/null
@@ -1,94 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-
-from django.contrib.auth.models import Permission
-from django.test import TestCase
-from django.utils import timezone
-
-from booking.models import *
-from dashboard.models import Resource
-from jenkins.models import JenkinsSlave
-
-
-class BookingModelTestCase(TestCase):
- def setUp(self):
- self.slave = JenkinsSlave.objects.create(name='test', url='test')
- self.owner = User.objects.create(username='owner')
-
- self.res1 = Resource.objects.create(name='res1', slave=self.slave, description='x',
- url='x',owner=self.owner)
- self.res2 = Resource.objects.create(name='res2', slave=self.slave, description='x',
- url='x',owner=self.owner)
-
- self.user1 = User.objects.create(username='user1')
-
- self.add_booking_perm = Permission.objects.get(codename='add_booking')
- self.user1.user_permissions.add(self.add_booking_perm)
-
- self.user1 = User.objects.get(pk=self.user1.id)
-
- self.installer = Installer.objects.create(name='TestInstaller')
- self.scenario = Scenario.objects.create(name='TestScenario')
-
- def test_start_end(self):
- """
- if the start of a booking is greater or equal then the end, saving should raise a
- ValueException
- """
- start = timezone.now()
- end = start - timedelta(weeks=1)
- self.assertRaises(ValueError, Booking.objects.create, start=start, end=end,
- resource=self.res1, user=self.user1)
- end = start
- self.assertRaises(ValueError, Booking.objects.create, start=start, end=end,
- resource=self.res1, user=self.user1)
-
- def test_conflicts(self):
- """
- saving an overlapping booking on the same resource should raise a ValueException
- saving for different resources should succeed
- """
- start = timezone.now()
- end = start + timedelta(weeks=1)
- self.assertTrue(
- Booking.objects.create(start=start, end=end, user=self.user1, resource=self.res1))
-
- self.assertRaises(ValueError, Booking.objects.create, start=start,
- end=end, resource=self.res1, user=self.user1)
- self.assertRaises(ValueError, Booking.objects.create, start=start + timedelta(days=1),
- end=end - timedelta(days=1), resource=self.res1, user=self.user1)
-
- self.assertRaises(ValueError, Booking.objects.create, start=start - timedelta(days=1),
- end=end, resource=self.res1, user=self.user1)
- self.assertRaises(ValueError, Booking.objects.create, start=start - timedelta(days=1),
- end=end - timedelta(days=1), resource=self.res1, user=self.user1)
-
- self.assertRaises(ValueError, Booking.objects.create, start=start,
- end=end + timedelta(days=1), resource=self.res1, user=self.user1)
- self.assertRaises(ValueError, Booking.objects.create, start=start + timedelta(days=1),
- end=end + timedelta(days=1), resource=self.res1, user=self.user1)
-
- self.assertTrue(Booking.objects.create(start=start - timedelta(days=1), end=start,
- user=self.user1, resource=self.res1))
- self.assertTrue(Booking.objects.create(start=end, end=end + timedelta(days=1),
- user=self.user1, resource=self.res1))
-
- self.assertTrue(
- Booking.objects.create(start=start - timedelta(days=2), end=start - timedelta(days=1),
- user=self.user1, resource=self.res1))
- self.assertTrue(
- Booking.objects.create(start=end + timedelta(days=1), end=end + timedelta(days=2),
- user=self.user1, resource=self.res1))
- self.assertTrue(
- Booking.objects.create(start=start, end=end,
- user=self.user1, resource=self.res2, scenario=self.scenario,
-                                   installer=self.installer))
\ No newline at end of file
diff --git a/tools/pharos-dashboard/src/booking/tests/test_views.py b/tools/pharos-dashboard/src/booking/tests/test_views.py
deleted file mode 100644
index c1da013c..00000000
--- a/tools/pharos-dashboard/src/booking/tests/test_views.py
+++ /dev/null
@@ -1,106 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-
-from django.test import Client
-from django.test import TestCase
-from django.urls import reverse
-from django.utils import timezone
-from django.utils.encoding import force_text
-from registration.forms import User
-
-from account.models import UserProfile
-from booking.models import Booking
-from dashboard.models import Resource
-from jenkins.models import JenkinsSlave
-
-
-class BookingViewTestCase(TestCase):
- def setUp(self):
- self.client = Client()
- self.slave = JenkinsSlave.objects.create(name='test', url='test')
- self.owner = User.objects.create(username='owner')
- self.res1 = Resource.objects.create(name='res1', slave=self.slave, description='x',
- url='x',owner=self.owner)
- self.user1 = User.objects.create(username='user1')
- self.user1.set_password('user1')
- self.user1profile = UserProfile.objects.create(user=self.user1)
- self.user1.save()
-
- self.user1 = User.objects.get(pk=self.user1.id)
-
-
- def test_resource_bookings_json(self):
- url = reverse('booking:bookings_json', kwargs={'resource_id': 0})
- self.assertEqual(self.client.get(url).status_code, 404)
-
- url = reverse('booking:bookings_json', kwargs={'resource_id': self.res1.id})
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertJSONEqual(force_text(response.content), {"bookings": []})
- booking1 = Booking.objects.create(start=timezone.now(),
- end=timezone.now() + timedelta(weeks=1), user=self.user1,
- resource=self.res1)
- response = self.client.get(url)
- json = response.json()
- self.assertEqual(response.status_code, 200)
- self.assertIn('bookings', json)
- self.assertEqual(len(json['bookings']), 1)
- self.assertIn('start', json['bookings'][0])
- self.assertIn('end', json['bookings'][0])
- self.assertIn('id', json['bookings'][0])
- self.assertIn('purpose', json['bookings'][0])
-
- def test_booking_form_view(self):
- url = reverse('booking:create', kwargs={'resource_id': 0})
- self.assertEqual(self.client.get(url).status_code, 404)
-
- # authenticated user
- url = reverse('booking:create', kwargs={'resource_id': self.res1.id})
- self.client.login(username='user1',password='user1')
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertTemplateUsed('booking/booking_calendar.html')
- self.assertTemplateUsed('booking/booking_form.html')
- self.assertIn('resource', response.context)
-
-
- def test_booking_view(self):
- start = timezone.now()
- end = start + timedelta(weeks=1)
- booking = Booking.objects.create(start=start, end=end, user=self.user1, resource=self.res1)
-
- url = reverse('booking:detail', kwargs={'booking_id':0})
- response = self.client.get(url)
- self.assertEqual(response.status_code, 404)
-
- url = reverse('booking:detail', kwargs={'booking_id':booking.id})
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertTemplateUsed('booking/booking_detail.html')
- self.assertIn('booking', response.context)
-
- def test_booking_list_view(self):
- start = timezone.now() - timedelta(weeks=2)
- end = start + timedelta(weeks=1)
- Booking.objects.create(start=start, end=end, user=self.user1, resource=self.res1)
-
- url = reverse('booking:list')
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertTemplateUsed('booking/booking_list.html')
- self.assertTrue(len(response.context['bookings']) == 0)
-
- start = timezone.now()
- end = start + timedelta(weeks=1)
- Booking.objects.create(start=start, end=end, user=self.user1, resource=self.res1)
- response = self.client.get(url)
-        self.assertTrue(len(response.context['bookings']) == 1)
\ No newline at end of file
diff --git a/tools/pharos-dashboard/src/booking/urls.py b/tools/pharos-dashboard/src/booking/urls.py
deleted file mode 100644
index 9e013164..00000000
--- a/tools/pharos-dashboard/src/booking/urls.py
+++ /dev/null
@@ -1,39 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-"""pharos_dashboard URL Configuration
-
-The `urlpatterns` list routes URLs to views. For more information please see:
- https://docs.djangoproject.com/en/1.10/topics/http/urls/
-Examples:
-Function views
- 1. Add an import: from my_app import views
- 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
-Class-based views
- 1. Add an import: from other_app.views import Home
- 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
-Including another URLconf
- 1. Import the include() function: from django.conf.urls import url, include
- 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
-"""
-from django.conf.urls import url
-
-from booking.views import *
-
-urlpatterns = [
- url(r'^(?P<resource_id>[0-9]+)/$', BookingFormView.as_view(), name='create'),
- url(r'^(?P<resource_id>[0-9]+)/bookings_json/$', ResourceBookingsJSON.as_view(),
- name='bookings_json'),
-
- url(r'^detail/$', BookingView.as_view(), name='detail_prefix'),
- url(r'^detail/(?P<booking_id>[0-9]+)/$', BookingView.as_view(), name='detail'),
-
- url(r'^list/$', BookingListView.as_view(), name='list')
-]
diff --git a/tools/pharos-dashboard/src/booking/views.py b/tools/pharos-dashboard/src/booking/views.py
deleted file mode 100644
index 6fdca0e0..00000000
--- a/tools/pharos-dashboard/src/booking/views.py
+++ /dev/null
@@ -1,122 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.conf import settings
-from django.contrib import messages
-from django.contrib.auth.mixins import LoginRequiredMixin
-from django.http import JsonResponse
-from django.shortcuts import get_object_or_404
-from django.urls import reverse
-from django.utils import timezone
-from django.views import View
-from django.views.generic import FormView
-from django.views.generic import TemplateView
-from jira import JIRAError
-
-from account.jira_util import get_jira
-from booking.forms import BookingForm
-from booking.models import Booking
-from dashboard.models import Resource
-
-
-def create_jira_ticket(user, booking):
- jira = get_jira(user)
- issue_dict = {
- 'project': 'PHAROS',
- 'summary': str(booking.resource) + ': Access Request',
- 'description': booking.purpose,
- 'issuetype': {'name': 'Task'},
- 'components': [{'name': 'POD Access Request'}],
- 'assignee': {'name': booking.resource.owner.username}
- }
- issue = jira.create_issue(fields=issue_dict)
- jira.add_attachment(issue, user.userprofile.pgp_public_key)
- jira.add_attachment(issue, user.userprofile.ssh_public_key)
- booking.jira_issue_id = issue.id
- booking.save()
-
-
-class BookingFormView(FormView):
- template_name = "booking/booking_calendar.html"
- form_class = BookingForm
-
- def dispatch(self, request, *args, **kwargs):
- self.resource = get_object_or_404(Resource, id=self.kwargs['resource_id'])
- return super(BookingFormView, self).dispatch(request, *args, **kwargs)
-
- def get_context_data(self, **kwargs):
- title = 'Booking: ' + self.resource.name
- context = super(BookingFormView, self).get_context_data(**kwargs)
- context.update({'title': title, 'resource': self.resource})
- return context
-
- def get_success_url(self):
- return reverse('booking:create', kwargs=self.kwargs)
-
- def form_valid(self, form):
- if not self.request.user.is_authenticated:
- messages.add_message(self.request, messages.ERROR,
- 'You need to be logged in to book a Pod.')
- return super(BookingFormView, self).form_invalid(form)
-
- user = self.request.user
- booking = Booking(start=form.cleaned_data['start'],
- end=form.cleaned_data['end'],
- purpose=form.cleaned_data['purpose'],
- installer=form.cleaned_data['installer'],
- scenario=form.cleaned_data['scenario'],
- resource=self.resource, user=user)
- try:
- booking.save()
- except ValueError as err:
- messages.add_message(self.request, messages.ERROR, err)
- return super(BookingFormView, self).form_invalid(form)
- try:
- if settings.CREATE_JIRA_TICKET:
- create_jira_ticket(user, booking)
- except JIRAError:
- messages.add_message(self.request, messages.ERROR, 'Failed to create Jira Ticket. '
- 'Please check your Jira '
- 'permissions.')
- booking.delete()
- return super(BookingFormView, self).form_invalid(form)
- messages.add_message(self.request, messages.SUCCESS, 'Booking saved')
- return super(BookingFormView, self).form_valid(form)
-
-
-class BookingView(TemplateView):
- template_name = "booking/booking_detail.html"
-
- def get_context_data(self, **kwargs):
- booking = get_object_or_404(Booking, id=self.kwargs['booking_id'])
- title = 'Booking Details'
- context = super(BookingView, self).get_context_data(**kwargs)
- context.update({'title': title, 'booking': booking})
- return context
-
-
-class BookingListView(TemplateView):
- template_name = "booking/booking_list.html"
-
- def get_context_data(self, **kwargs):
- bookings = Booking.objects.filter(end__gte=timezone.now())
- title = 'Search Booking'
- context = super(BookingListView, self).get_context_data(**kwargs)
- context.update({'title': title, 'bookings': bookings})
- return context
-
-
-class ResourceBookingsJSON(View):
- def get(self, request, *args, **kwargs):
- resource = get_object_or_404(Resource, id=self.kwargs['resource_id'])
- bookings = resource.booking_set.get_queryset().values('id', 'start', 'end', 'purpose',
- 'jira_issue_status',
- 'installer__name', 'scenario__name')
- return JsonResponse({'bookings': list(bookings)})
diff --git a/tools/pharos-dashboard/src/dashboard/__init__.py b/tools/pharos-dashboard/src/dashboard/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/dashboard/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/dashboard/admin.py b/tools/pharos-dashboard/src/dashboard/admin.py
deleted file mode 100644
index 0bfdef8f..00000000
--- a/tools/pharos-dashboard/src/dashboard/admin.py
+++ /dev/null
@@ -1,20 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.contrib import admin
-
-from dashboard.models import *
-
-admin.site.site_header = "Pharos Dashboard Administration"
-admin.site.site_title = "Pharos Dashboard"
-
-admin.site.register(Resource)
-admin.site.register(Server)
-admin.site.register(ResourceStatus)
diff --git a/tools/pharos-dashboard/src/dashboard/apps.py b/tools/pharos-dashboard/src/dashboard/apps.py
deleted file mode 100644
index e0c4f442..00000000
--- a/tools/pharos-dashboard/src/dashboard/apps.py
+++ /dev/null
@@ -1,15 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.apps import AppConfig
-
-
-class DashboardConfig(AppConfig):
- name = 'dashboard'
diff --git a/tools/pharos-dashboard/src/dashboard/fixtures/dashboard.json b/tools/pharos-dashboard/src/dashboard/fixtures/dashboard.json
deleted file mode 100644
index f0ac3b2f..00000000
--- a/tools/pharos-dashboard/src/dashboard/fixtures/dashboard.json
+++ /dev/null
@@ -1,164 +0,0 @@
-[
-{
- "model": "dashboard.resource",
- "pk": 1,
- "fields": {
- "name": "Linux Foundation POD 1",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Lf+Lab"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 2,
- "fields": {
- "name": "Linux Foundation POD 2",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Lf+Lab"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 3,
- "fields": {
- "name": "Ericsson POD 2",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Ericsson+Hosting+and+Request+Process"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 4,
- "fields": {
- "name": "Intel POD 2",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Intel+Pod2"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 5,
- "fields": {
- "name": "Intel POD 5",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Intel+Pod5"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 6,
- "fields": {
- "name": "Intel POD 6",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Intel+Pod6"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 7,
- "fields": {
- "name": "Intel POD 8",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Intel+Pod8"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 8,
- "fields": {
- "name": "Huawei POD 1",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Huawei+Hosting"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 9,
- "fields": {
- "name": "Intel POD 3",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Intel+Pod3"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 10,
- "fields": {
- "name": "Dell POD 1",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Dell+Hosting"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 11,
- "fields": {
- "name": "Dell POD 2",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Dell+Hosting"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 12,
- "fields": {
- "name": "Orange POD 2",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Opnfv-orange-pod2"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 13,
- "fields": {
- "name": "Arm POD 1",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Enea-pharos-lab"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 14,
- "fields": {
- "name": "Ericsson POD 1",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Ericsson+Hosting+and+Request+Process"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 15,
- "fields": {
- "name": "Huawei POD 2",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Huawei+Hosting"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 16,
- "fields": {
- "name": "Huawei POD 3",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Huawei+Hosting"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 17,
- "fields": {
- "name": "Huawei POD 4",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Huawei+Hosting"
- }
-},
-{
- "model": "dashboard.resource",
- "pk": 18,
- "fields": {
- "name": "Intel POD 9",
- "description": "Some description",
- "url": "https://wiki.opnfv.org/display/pharos/Intel+Pod9"
- }
-}
-]
diff --git a/tools/pharos-dashboard/src/dashboard/migrations/0001_initial.py b/tools/pharos-dashboard/src/dashboard/migrations/0001_initial.py
deleted file mode 100644
index aaf3945f..00000000
--- a/tools/pharos-dashboard/src/dashboard/migrations/0001_initial.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10 on 2016-11-03 13:33
-from __future__ import unicode_literals
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ('jenkins', '0001_initial'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Resource',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('name', models.CharField(max_length=100, unique=True)),
- ('description', models.CharField(blank=True, max_length=300, null=True)),
- ('url', models.CharField(blank=True, max_length=100, null=True)),
- ('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_lab_owner', to=settings.AUTH_USER_MODEL)),
- ('slave', models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='jenkins.JenkinsSlave')),
- ('vpn_users', models.ManyToManyField(blank=True, related_name='user_vpn_users', to=settings.AUTH_USER_MODEL)),
- ],
- options={
- 'db_table': 'resource',
- },
- ),
- migrations.CreateModel(
- name='ResourceStatus',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('timestamp', models.DateTimeField(auto_now_add=True)),
- ('type', models.CharField(max_length=20)),
- ('title', models.CharField(max_length=50)),
- ('content', models.CharField(max_length=5000)),
- ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dashboard.Resource')),
- ],
- options={
- 'db_table': 'resource_status',
- },
- ),
- migrations.CreateModel(
- name='Server',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('name', models.CharField(blank=True, max_length=100)),
- ('model', models.CharField(blank=True, max_length=100)),
- ('cpu', models.CharField(blank=True, max_length=100)),
- ('ram', models.CharField(blank=True, max_length=100)),
- ('storage', models.CharField(blank=True, max_length=100)),
- ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dashboard.Resource')),
- ],
- options={
- 'db_table': 'server',
- },
- ),
- ]
diff --git a/tools/pharos-dashboard/src/dashboard/migrations/0002_auto_20170505_0815.py b/tools/pharos-dashboard/src/dashboard/migrations/0002_auto_20170505_0815.py
deleted file mode 100644
index 4285b887..00000000
--- a/tools/pharos-dashboard/src/dashboard/migrations/0002_auto_20170505_0815.py
+++ /dev/null
@@ -1,42 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10 on 2017-05-05 08:15
-from __future__ import unicode_literals
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0001_initial'),
- ]
-
- operations = [
- migrations.AddField(
- model_name='resource',
- name='dev_pod',
- field=models.BooleanField(default=False),
- ),
- migrations.AlterField(
- model_name='resource',
- name='owner',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_lab_owner', to=settings.AUTH_USER_MODEL),
- ),
- migrations.AlterField(
- model_name='resource',
- name='slave',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='jenkins.JenkinsSlave'),
- ),
- ]
diff --git a/tools/pharos-dashboard/src/dashboard/migrations/__init__.py b/tools/pharos-dashboard/src/dashboard/migrations/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/dashboard/migrations/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/dashboard/models.py b/tools/pharos-dashboard/src/dashboard/models.py
deleted file mode 100644
index 3de7db3d..00000000
--- a/tools/pharos-dashboard/src/dashboard/models.py
+++ /dev/null
@@ -1,95 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-
-from django.contrib.auth.models import User
-from django.db import models
-from django.utils import timezone
-
-from jenkins.models import JenkinsSlave
-
-
-class Resource(models.Model):
- id = models.AutoField(primary_key=True)
- name = models.CharField(max_length=100, unique=True)
- description = models.CharField(max_length=300, blank=True, null=True)
- url = models.CharField(max_length=100, blank=True, null=True)
- owner = models.ForeignKey(User, related_name='user_lab_owner', null=True, blank=True)
- vpn_users = models.ManyToManyField(User, related_name='user_vpn_users', blank=True)
- slave = models.ForeignKey(JenkinsSlave, on_delete=models.DO_NOTHING, null=True, blank=True)
- dev_pod = models.BooleanField(default=False)
-
- def get_booking_utilization(self, weeks):
- """
-        Return a dictionary with the counts of booked and available seconds for this resource
-        in the window [now, now + weeks] if weeks is positive,
-        or [now - |weeks|, now] if weeks is negative.
- """
-
- length = timedelta(weeks=abs(weeks))
- now = timezone.now()
-
- start = now
- end = now + length
- if weeks < 0:
- start = now - length
- end = now
-
- bookings = self.booking_set.filter(start__lt=start + length, end__gt=start)
-
- booked_seconds = 0
- for booking in bookings:
- booking_start = booking.start
- booking_end = booking.end
- if booking_start < start:
- booking_start = start
- if booking_end > end:
- booking_end = start + length
- total = booking_end - booking_start
- booked_seconds += total.total_seconds()
-
- return {'booked_seconds': booked_seconds,
- 'available_seconds': length.total_seconds() - booked_seconds}
-
- class Meta:
- db_table = 'resource'
-
- def __str__(self):
- return self.name
-
-class Server(models.Model):
- id = models.AutoField(primary_key=True)
- resource = models.ForeignKey(Resource, on_delete=models.CASCADE)
- name = models.CharField(max_length=100, blank=True)
- model = models.CharField(max_length=100, blank=True)
- cpu = models.CharField(max_length=100, blank=True)
- ram = models.CharField(max_length=100, blank=True)
- storage = models.CharField(max_length=100, blank=True)
-
- class Meta:
- db_table = 'server'
-
- def __str__(self):
- return self.name
-
-class ResourceStatus(models.Model):
- id = models.AutoField(primary_key=True)
- resource = models.ForeignKey(Resource, on_delete=models.CASCADE)
- timestamp = models.DateTimeField(auto_now_add=True)
- type = models.CharField(max_length=20)
- title = models.CharField(max_length=50)
- content = models.CharField(max_length=5000)
-
- class Meta:
- db_table = 'resource_status'
-
- def __str__(self):
- return self.resource.name + ': ' + self.title + ' ' + str(self.timestamp)
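
Resource.get_booking_utilization above boils down to clamping each booking to the queried window and summing the overlap. A minimal, framework-free sketch of that clamping step (all names here are illustrative, not part of the dashboard code):

    from datetime import datetime, timedelta

    def clamped_overlap_seconds(window_start, window_end, booking_start, booking_end):
        """Seconds of a booking that fall inside [window_start, window_end]."""
        overlap_start = max(booking_start, window_start)
        overlap_end = min(booking_end, window_end)
        return max((overlap_end - overlap_start).total_seconds(), 0)

    now = datetime(2017, 5, 5, 8, 0)
    window_start, window_end = now, now + timedelta(weeks=1)
    booking_start, booking_end = now - timedelta(days=1), now + timedelta(days=1)

    booked = clamped_overlap_seconds(window_start, window_end, booking_start, booking_end)
    available = timedelta(weeks=1).total_seconds() - booked
    # booked == 86400.0 (the one day inside the window); the remaining six days count as available
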
diff --git a/tools/pharos-dashboard/src/dashboard/tasks.py b/tools/pharos-dashboard/src/dashboard/tasks.py
deleted file mode 100644
index c5ef5054..00000000
--- a/tools/pharos-dashboard/src/dashboard/tasks.py
+++ /dev/null
@@ -1,24 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-
-from celery import shared_task
-from django.utils import timezone
-
-from jenkins.models import JenkinsStatistic
-from notification.models import BookingNotification
-
-
-@shared_task
-def database_cleanup():
- now = timezone.now()
- JenkinsStatistic.objects.filter(timestamp__lt=now - timedelta(weeks=4)).delete()
- BookingNotification.objects.filter(submit_time__lt=now - timedelta(weeks=4)).delete() \ No newline at end of file
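
database_cleanup above only runs if a beat schedule triggers it; the project keeps its periodic tasks in CELERYBEAT_SCHEDULE in pharos_dashboard/settings.py (its tail appears further down in this diff and only shows the 'sync-jenkins' entry). A hypothetical entry for this task, assuming a daily interval; the entry name and interval are not taken from the project settings:

    from datetime import timedelta

    # Hypothetical beat entry for the cleanup task defined above.
    CELERYBEAT_SCHEDULE = {
        'database-cleanup': {
            'task': 'dashboard.tasks.database_cleanup',
            'schedule': timedelta(hours=24),
        },
    }
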
diff --git a/tools/pharos-dashboard/src/dashboard/templatetags/__init__.py b/tools/pharos-dashboard/src/dashboard/templatetags/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/dashboard/templatetags/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/dashboard/templatetags/jenkins_filters.py b/tools/pharos-dashboard/src/dashboard/templatetags/jenkins_filters.py
deleted file mode 100644
index e7e14257..00000000
--- a/tools/pharos-dashboard/src/dashboard/templatetags/jenkins_filters.py
+++ /dev/null
@@ -1,38 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.template.defaultfilters import register
-
-
-@register.filter
-def jenkins_job_color(job_result):
- if job_result == 'SUCCESS':
- return '#5cb85c'
- if job_result == 'FAILURE':
- return '#d9534f'
- if job_result == 'UNSTABLE':
- return '#EDD62B'
- return '#646F73' # job is still building
-
-
-@register.filter
-def jenkins_status_color(slave_status):
- if slave_status == 'offline':
- return '#d9534f'
- if slave_status == 'online':
- return '#5cb85c'
- if slave_status == 'online / idle':
- return '#5bc0de'
-
-
-@register.filter
-def jenkins_job_blink(job_result):
- if job_result == '': # job is still building
- return 'class=blink_me'
diff --git a/tools/pharos-dashboard/src/dashboard/templatetags/jira_filters.py b/tools/pharos-dashboard/src/dashboard/templatetags/jira_filters.py
deleted file mode 100644
index 9a97c1d5..00000000
--- a/tools/pharos-dashboard/src/dashboard/templatetags/jira_filters.py
+++ /dev/null
@@ -1,17 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.conf import settings
-from django.template.defaultfilters import register
-
-
-@register.filter
-def jira_issue_url(issue):
- return settings.JIRA_URL + '/browse/' + str(issue)
diff --git a/tools/pharos-dashboard/src/dashboard/tests/__init__.py b/tools/pharos-dashboard/src/dashboard/tests/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/dashboard/tests/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/dashboard/tests/test_models.py b/tools/pharos-dashboard/src/dashboard/tests/test_models.py
deleted file mode 100644
index 3a3aeab1..00000000
--- a/tools/pharos-dashboard/src/dashboard/tests/test_models.py
+++ /dev/null
@@ -1,69 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-from math import ceil, floor
-
-from django.test import TestCase
-from django.utils import timezone
-
-from booking.models import *
-from dashboard.models import Resource
-from jenkins.models import JenkinsSlave
-
-
-class ResourceModelTestCase(TestCase):
- def setUp(self):
- self.slave = JenkinsSlave.objects.create(name='test', url='test')
- self.owner = User.objects.create(username='owner')
-
- self.res1 = Resource.objects.create(name='res1', slave=self.slave, description='x',
- url='x', owner=self.owner)
-
- def test_booking_utilization(self):
- utilization = self.res1.get_booking_utilization(1)
- self.assertTrue(utilization['booked_seconds'] == 0)
- self.assertTrue(utilization['available_seconds'] == timedelta(weeks=1).total_seconds())
-
- start = timezone.now() + timedelta(days=1)
- end = start + timedelta(days=1)
- booking = Booking.objects.create(start=start, end=end, purpose='test', resource=self.res1,
- user=self.owner)
-
- utilization = self.res1.get_booking_utilization(1)
- booked_seconds = timedelta(days=1).total_seconds()
- self.assertEqual(utilization['booked_seconds'], booked_seconds)
-
- utilization = self.res1.get_booking_utilization(-1)
- self.assertEqual(utilization['booked_seconds'], 0)
-
- booking.delete()
- start = timezone.now() - timedelta(days=1)
- end = start + timedelta(days=2)
- booking = Booking.objects.create(start=start, end=end, purpose='test', resource=self.res1,
- user=self.owner)
- booked_seconds = self.res1.get_booking_utilization(1)['booked_seconds']
- # use ceil because a fraction of the booked time has already passed now
- booked_seconds = ceil(booked_seconds)
- self.assertEqual(booked_seconds, timedelta(days=1).total_seconds())
-
- booking.delete()
- start = timezone.now() + timedelta(days=6)
- end = start + timedelta(days=2)
- booking = Booking.objects.create(start=start, end=end, purpose='test', resource=self.res1,
- user=self.owner)
- booked_seconds = self.res1.get_booking_utilization(1)['booked_seconds']
- booked_seconds = floor(booked_seconds)
- self.assertEqual(booked_seconds, timedelta(days=1).total_seconds())
-
-
-
-
-
diff --git a/tools/pharos-dashboard/src/dashboard/tests/test_views.py b/tools/pharos-dashboard/src/dashboard/tests/test_views.py
deleted file mode 100644
index f5e17c2a..00000000
--- a/tools/pharos-dashboard/src/dashboard/tests/test_views.py
+++ /dev/null
@@ -1,75 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.test import TestCase
-from django.urls import reverse
-
-from dashboard.models import Resource
-from jenkins.models import JenkinsSlave
-
-
-class DashboardViewTestCase(TestCase):
- def setUp(self):
- self.slave_active = JenkinsSlave.objects.create(name='slave_active', url='x', active=True)
- self.slave_inactive = JenkinsSlave.objects.create(name='slave_inactive', url='x',
- active=False)
- self.res_active = Resource.objects.create(name='res_active', slave=self.slave_active,
- description='x', url='x')
- self.res_inactive = Resource.objects.create(name='res_inactive', slave=self.slave_inactive,
- description='x', url='x')
-
- def test_booking_utilization_json(self):
- url = reverse('dashboard:booking_utilization', kwargs={'resource_id': 0, 'weeks': 0})
- self.assertEqual(self.client.get(url).status_code, 404)
-
- url = reverse('dashboard:booking_utilization', kwargs={'resource_id': self.res_active.id,
- 'weeks': 0})
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertContains(response, 'data')
-
- def test_jenkins_utilization_json(self):
- url = reverse('dashboard:jenkins_utilization', kwargs={'resource_id': 0, 'weeks': 0})
- self.assertEqual(self.client.get(url).status_code, 404)
-
- url = reverse('dashboard:jenkins_utilization', kwargs={'resource_id': self.res_active.id,
- 'weeks': 0})
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertContains(response, 'data')
-
- def test_jenkins_slaves_view(self):
- url = reverse('dashboard:jenkins_slaves')
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertIn(self.slave_active, response.context['slaves'])
- self.assertNotIn(self.slave_inactive, response.context['slaves'])
-
- def test_ci_pods_view(self):
- url = reverse('dashboard:ci_pods')
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(len(response.context['ci_pods']), 0)
-
- self.slave_active.ci_slave = True
- self.slave_inactive.ci_slave = True
- self.slave_active.save()
- self.slave_inactive.save()
-
- response = self.client.get(url)
- self.assertIn(self.res_active, response.context['ci_pods'])
- self.assertNotIn(self.res_inactive, response.context['ci_pods'])
-
- def test_dev_pods_view(self):
- url = reverse('dashboard:dev_pods')
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(len(response.context['dev_pods']), 0)
-
diff --git a/tools/pharos-dashboard/src/dashboard/urls.py b/tools/pharos-dashboard/src/dashboard/urls.py
deleted file mode 100644
index 609e5d6f..00000000
--- a/tools/pharos-dashboard/src/dashboard/urls.py
+++ /dev/null
@@ -1,41 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-"""pharos_dashboard URL Configuration
-
-The `urlpatterns` list routes URLs to views. For more information please see:
- https://docs.djangoproject.com/en/1.10/topics/http/urls/
-Examples:
-Function views
- 1. Add an import: from my_app import views
- 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
-Class-based views
- 1. Add an import: from other_app.views import Home
- 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
-Including another URLconf
- 1. Import the include() function: from django.conf.urls import url, include
- 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
-"""
-from django.conf.urls import url
-
-from dashboard.views import *
-
-urlpatterns = [
- url(r'^ci_pods/$', CIPodsView.as_view(), name='ci_pods'),
- url(r'^dev_pods/$', DevelopmentPodsView.as_view(), name='dev_pods'),
- url(r'^jenkins_slaves/$', JenkinsSlavesView.as_view(), name='jenkins_slaves'),
- url(r'^resource/all/$', LabOwnerView.as_view(), name='resources'),
- url(r'^resource/(?P<resource_id>[0-9]+)/$', ResourceView.as_view(), name='resource'),
- url(r'^resource/(?P<resource_id>[0-9]+)/booking_utilization/(?P<weeks>-?\d+)/$',
- BookingUtilizationJSON.as_view(), name='booking_utilization'),
- url(r'^resource/(?P<resource_id>[0-9]+)/jenkins_utilization/(?P<weeks>-?\d+)/$',
- JenkinsUtilizationJSON.as_view(), name='jenkins_utilization'),
- url(r'^$', DevelopmentPodsView.as_view(), name="index"),
-]
diff --git a/tools/pharos-dashboard/src/dashboard/views.py b/tools/pharos-dashboard/src/dashboard/views.py
deleted file mode 100644
index 62a9f830..00000000
--- a/tools/pharos-dashboard/src/dashboard/views.py
+++ /dev/null
@@ -1,141 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-
-from django.http import JsonResponse
-from django.shortcuts import get_object_or_404
-from django.utils import timezone
-from django.views import View
-from django.views.generic import TemplateView
-
-from booking.models import Booking
-from dashboard.models import Resource
-from jenkins.models import JenkinsSlave
-
-
-class JenkinsSlavesView(TemplateView):
- template_name = "dashboard/jenkins_slaves.html"
-
- def get_context_data(self, **kwargs):
- slaves = JenkinsSlave.objects.filter(active=True)
- context = super(JenkinsSlavesView, self).get_context_data(**kwargs)
- context.update({'title': "Jenkins Slaves", 'slaves': slaves})
- return context
-
-
-class CIPodsView(TemplateView):
- template_name = "dashboard/ci_pods.html"
-
- def get_context_data(self, **kwargs):
- ci_pods = Resource.objects.filter(slave__ci_slave=True, slave__active=True)
- context = super(CIPodsView, self).get_context_data(**kwargs)
- context.update({'title': "CI Pods", 'ci_pods': ci_pods})
- return context
-
-
-class DevelopmentPodsView(TemplateView):
- template_name = "dashboard/dev_pods.html"
-
- def get_context_data(self, **kwargs):
- resources = Resource.objects.filter(dev_pod=True)
-
- bookings = Booking.objects.filter(start__lte=timezone.now())
- bookings = bookings.filter(end__gt=timezone.now())
-
- dev_pods = []
- for resource in resources:
- booking_utilization = resource.get_booking_utilization(weeks=4)
- total = booking_utilization['booked_seconds'] + booking_utilization['available_seconds']
- try:
- utilization_percentage = "%d%%" % (float(booking_utilization['booked_seconds']) /
- total * 100)
- except (ValueError, ZeroDivisionError):
-                utilization_percentage = "N/A"  # skip the ratio rather than abort the whole view
-
- dev_pod = (resource, None, utilization_percentage)
- for booking in bookings:
- if booking.resource == resource:
- dev_pod = (resource, booking, utilization_percentage)
- dev_pods.append(dev_pod)
-
- context = super(DevelopmentPodsView, self).get_context_data(**kwargs)
- context.update({'title': "Development Pods", 'dev_pods': dev_pods})
- return context
-
-
-class ResourceView(TemplateView):
- template_name = "dashboard/resource.html"
-
- def get_context_data(self, **kwargs):
- resource = get_object_or_404(Resource, id=self.kwargs['resource_id'])
- bookings = Booking.objects.filter(resource=resource, end__gt=timezone.now())
- context = super(ResourceView, self).get_context_data(**kwargs)
- context.update({'title': str(resource), 'resource': resource, 'bookings': bookings})
- return context
-
-
-class LabOwnerView(TemplateView):
- template_name = "dashboard/resource_all.html"
-
- def get_context_data(self, **kwargs):
- resources = Resource.objects.filter(slave__dev_pod=True, slave__active=True)
- pods = []
- for resource in resources:
- utilization = resource.slave.get_utilization(timedelta(days=7))
- bookings = Booking.objects.filter(resource=resource, end__gt=timezone.now())
- pods.append((resource, utilization, bookings))
- context = super(LabOwnerView, self).get_context_data(**kwargs)
- context.update({'title': "Overview", 'pods': pods})
- return context
-
-
-class BookingUtilizationJSON(View):
- def get(self, request, *args, **kwargs):
- resource = get_object_or_404(Resource, id=kwargs['resource_id'])
- utilization = resource.get_booking_utilization(int(kwargs['weeks']))
- utilization = [
- {
- 'label': 'Booked',
- 'data': utilization['booked_seconds'],
- 'color': '#d9534f'
- },
- {
- 'label': 'Available',
- 'data': utilization['available_seconds'],
- 'color': '#5cb85c'
- },
- ]
- return JsonResponse({'data': utilization})
-
-
-class JenkinsUtilizationJSON(View):
- def get(self, request, *args, **kwargs):
- resource = get_object_or_404(Resource, id=kwargs['resource_id'])
- weeks = int(kwargs['weeks'])
- utilization = resource.slave.get_utilization(timedelta(weeks=weeks))
- utilization = [
- {
- 'label': 'Offline',
- 'data': utilization['offline'],
- 'color': '#d9534f'
- },
- {
- 'label': 'Online',
- 'data': utilization['online'],
- 'color': '#5cb85c'
- },
- {
- 'label': 'Idle',
- 'data': utilization['idle'],
- 'color': '#5bc0de'
- },
- ]
- return JsonResponse({'data': utilization})
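
DevelopmentPodsView above turns the booked/available split into a percentage string, and BookingUtilizationJSON reshapes the same numbers into labelled series for the resource page chart. A small stand-alone sketch of both transformations with hard-coded sample values (function names are illustrative):

    def utilization_percentage(booked_seconds, available_seconds):
        """Mirror of the percentage maths in DevelopmentPodsView, without the ORM."""
        total = booked_seconds + available_seconds
        try:
            return "%d%%" % (float(booked_seconds) / total * 100)
        except ZeroDivisionError:
            return "N/A"  # an empty window yields no meaningful ratio

    def booking_chart_series(utilization):
        """Labelled series in the shape returned by BookingUtilizationJSON."""
        return [
            {'label': 'Booked', 'data': utilization['booked_seconds'], 'color': '#d9534f'},
            {'label': 'Available', 'data': utilization['available_seconds'], 'color': '#5cb85c'},
        ]

    sample = {'booked_seconds': 86400, 'available_seconds': 518400}
    print(utilization_percentage(**sample))   # 14% (one booked day in a one-week window)
    print(booking_chart_series(sample))
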
diff --git a/tools/pharos-dashboard/src/jenkins/__init__.py b/tools/pharos-dashboard/src/jenkins/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/jenkins/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/jenkins/adapter.py b/tools/pharos-dashboard/src/jenkins/adapter.py
deleted file mode 100644
index edf502f6..00000000
--- a/tools/pharos-dashboard/src/jenkins/adapter.py
+++ /dev/null
@@ -1,134 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import logging
-import re
-
-import requests
-from django.core.cache import cache
-
-logger = logging.getLogger(__name__)
-
-# TODO: implement caching decorator, cache get_* functions
-def get_json(url):
- if cache.get(url) is None:
- try:
- response = requests.get(url)
- json = response.json()
- cache.set(url, json, 180) # cache result for 180 seconds
- return json
- except requests.exceptions.RequestException as e:
- logger.exception(e)
- except ValueError as e:
- logger.exception(e)
- else:
- return cache.get(url)
-
-
-def get_all_slaves():
- url = "https://build.opnfv.org/ci/computer/api/json?tree=computer[displayName,offline,idle]"
- json = get_json(url)
- if json is not None:
- return json['computer'] # return list of dictionaries
- return []
-
-
-def get_slave(slavename):
- slaves = get_all_slaves()
- for slave in slaves:
- if slave['displayName'] == slavename:
- return slave
- return {}
-
-
-def get_ci_slaves():
- url = "https://build.opnfv.org/ci/label/ci-pod/api/json?tree=nodes[nodeName,offline,idle]"
- json = get_json(url)
- if json is not None:
- return json['nodes']
- return []
-
-
-def get_all_jobs():
- url = "https://build.opnfv.org/ci/api/json?tree=jobs[displayName,url,lastBuild[fullDisplayName,building,builtOn,timestamp,result]]"
- json = get_json(url)
- if json is not None:
- return json['jobs'] # return list of dictionaries
- return []
-
-
-def get_jenkins_job(slavename):
- jobs = get_all_jobs()
- max_time = 0
- last_job = None
- for job in jobs:
- if job['lastBuild'] is not None:
- if job['lastBuild']['builtOn'] == slavename:
- if job['lastBuild']['building'] is True:
- return job # return active build
- if job['lastBuild']['timestamp'] > max_time:
- last_job = job
- max_time = job['lastBuild']['timestamp']
- return last_job
-
-
-def is_ci_slave(slavename):
- ci_slaves = get_ci_slaves()
- for ci_slave in ci_slaves:
- if ci_slave['nodeName'] == slavename:
- return True
- return False
-
-
-def is_dev_pod(slavename):
- if is_ci_slave(slavename):
- return False
- if slavename.find('pod') != -1:
- return True
- return False
-
-
-def parse_job(job):
- result = parse_job_string(job['lastBuild']['fullDisplayName'])
- result['building'] = job['lastBuild']['building']
- result['result'] = ''
- if not job['lastBuild']['building']:
- result['result'] = job['lastBuild']['result']
- result['url'] = job['url']
- return result
-
-
-def parse_job_string(full_displayname):
- job = {}
- job['scenario'] = ''
- job['installer'] = ''
- job['branch'] = ''
- tokens = re.split(r'[ -]', full_displayname)
- for i in range(len(tokens)):
- if tokens[i] == 'os':
- job['scenario'] = '-'.join(tokens[i: i + 4])
- elif tokens[i] in ['fuel', 'joid', 'apex', 'compass']:
- job['installer'] = tokens[i]
- elif tokens[i] in ['master', 'arno', 'brahmaputra', 'colorado']:
- job['branch'] = tokens[i]
- tokens = full_displayname.split(' ')
- job['name'] = tokens[0]
- return job
-
-def get_slave_url(slave):
- return 'https://build.opnfv.org/ci/computer/' + slave['displayName']
-
-
-def get_slave_status(slave):
- if not slave['offline'] and slave['idle']:
- return 'online / idle'
- if not slave['offline']:
- return 'online'
- return 'offline'
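
The TODO at the top of adapter.py asks for a caching decorator so the get_* helpers stop repeating the cache.get/cache.set pattern by hand. One possible shape for such a decorator, sketched with a plain in-process dict and TTL; in the dashboard it would delegate to django.core.cache instead (decorator name and TTL handling are assumptions, not project code):

    import time
    from functools import wraps

    def cached_json(ttl=180):
        """Cache a one-argument URL fetcher for ttl seconds, keyed by URL."""
        store = {}

        def decorator(func):
            @wraps(func)
            def wrapper(url):
                hit = store.get(url)
                if hit is not None and time.time() - hit[0] < ttl:
                    return hit[1]
                result = func(url)
                if result is not None:
                    store[url] = (time.time(), result)
                return result
            return wrapper
        return decorator

    @cached_json(ttl=180)
    def fetch(url):
        print("fetching", url)      # only printed on a cache miss
        return {"computer": []}     # stand-in for requests.get(url).json()

    fetch("https://build.opnfv.org/ci/computer/api/json")
    fetch("https://build.opnfv.org/ci/computer/api/json")  # served from the cache
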
diff --git a/tools/pharos-dashboard/src/jenkins/admin.py b/tools/pharos-dashboard/src/jenkins/admin.py
deleted file mode 100644
index c4996702..00000000
--- a/tools/pharos-dashboard/src/jenkins/admin.py
+++ /dev/null
@@ -1,17 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.conf import settings
-from django.contrib import admin
-
-from jenkins.models import JenkinsSlave
-
-if settings.DEBUG:
- admin.site.register(JenkinsSlave) \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/jenkins/apps.py b/tools/pharos-dashboard/src/jenkins/apps.py
deleted file mode 100644
index 41faf600..00000000
--- a/tools/pharos-dashboard/src/jenkins/apps.py
+++ /dev/null
@@ -1,15 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.apps import AppConfig
-
-
-class JenkinsConfig(AppConfig):
- name = 'jenkins'
diff --git a/tools/pharos-dashboard/src/jenkins/migrations/0001_initial.py b/tools/pharos-dashboard/src/jenkins/migrations/0001_initial.py
deleted file mode 100644
index b1c78897..00000000
--- a/tools/pharos-dashboard/src/jenkins/migrations/0001_initial.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10 on 2016-11-03 13:33
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ]
-
- operations = [
- migrations.CreateModel(
- name='JenkinsSlave',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('name', models.CharField(max_length=100, unique=True)),
- ('status', models.CharField(default='offline', max_length=30)),
- ('url', models.CharField(max_length=1024)),
- ('ci_slave', models.BooleanField(default=False)),
- ('dev_pod', models.BooleanField(default=False)),
- ('building', models.BooleanField(default=False)),
- ('last_job_name', models.CharField(default='', max_length=1024)),
- ('last_job_url', models.CharField(default='', max_length=1024)),
- ('last_job_scenario', models.CharField(default='', max_length=50)),
- ('last_job_branch', models.CharField(default='', max_length=50)),
- ('last_job_installer', models.CharField(default='', max_length=50)),
- ('last_job_result', models.CharField(default='', max_length=30)),
- ('active', models.BooleanField(default=False)),
- ],
- options={
- 'db_table': 'jenkins_slave',
- },
- ),
- migrations.CreateModel(
- name='JenkinsStatistic',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('offline', models.BooleanField(default=False)),
- ('idle', models.BooleanField(default=False)),
- ('online', models.BooleanField(default=False)),
- ('timestamp', models.DateTimeField(auto_now_add=True)),
- ('slave', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jenkins.JenkinsSlave')),
- ],
- options={
- 'db_table': 'jenkins_statistic',
- },
- ),
- ]
diff --git a/tools/pharos-dashboard/src/jenkins/migrations/__init__.py b/tools/pharos-dashboard/src/jenkins/migrations/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/jenkins/migrations/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/jenkins/models.py b/tools/pharos-dashboard/src/jenkins/models.py
deleted file mode 100644
index 8254ff39..00000000
--- a/tools/pharos-dashboard/src/jenkins/models.py
+++ /dev/null
@@ -1,62 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.db import models
-from django.utils import timezone
-
-
-class JenkinsSlave(models.Model):
- id = models.AutoField(primary_key=True)
- name = models.CharField(max_length=100, unique=True)
- status = models.CharField(max_length=30, default='offline')
- url = models.CharField(max_length=1024)
- ci_slave = models.BooleanField(default=False)
- dev_pod = models.BooleanField(default=False)
-
- building = models.BooleanField(default=False)
-
- last_job_name = models.CharField(max_length=1024, default='')
- last_job_url = models.CharField(max_length=1024, default='')
- last_job_scenario = models.CharField(max_length=50, default='')
- last_job_branch = models.CharField(max_length=50, default='')
- last_job_installer = models.CharField(max_length=50, default='')
- last_job_result = models.CharField(max_length=30, default='')
-
- active = models.BooleanField(default=False)
-
- def get_utilization(self, timedelta):
- """
- Return a dictionary containing the count of idle, online and offline measurements in the time from
- now-timedelta to now
- """
- utilization = {'idle': 0, 'online': 0, 'offline': 0}
- statistics = self.jenkinsstatistic_set.filter(timestamp__gte=timezone.now() - timedelta)
- utilization['idle'] = statistics.filter(idle=True).count()
- utilization['online'] = statistics.filter(online=True).count()
- utilization['offline'] = statistics.filter(offline=True).count()
- return utilization
-
- class Meta:
- db_table = 'jenkins_slave'
-
- def __str__(self):
- return self.name
-
-
-class JenkinsStatistic(models.Model):
- id = models.AutoField(primary_key=True)
- slave = models.ForeignKey(JenkinsSlave, on_delete=models.CASCADE)
- offline = models.BooleanField(default=False)
- idle = models.BooleanField(default=False)
- online = models.BooleanField(default=False)
- timestamp = models.DateTimeField(auto_now_add=True)
-
- class Meta:
- db_table = 'jenkins_statistic'
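
JenkinsSlave.get_utilization above counts JenkinsStatistic samples rather than measuring wall-clock time, so the online/offline/idle split is only as fine-grained as the sync interval that writes those samples. Converting the counts into percentage shares (helper name is illustrative):

    def utilization_shares(utilization):
        """Turn the sample counts from get_utilization() into percentage shares."""
        total = sum(utilization.values())
        if total == 0:
            return {key: 0.0 for key in utilization}
        return {key: 100.0 * count / total for key, count in utilization.items()}

    print(utilization_shares({'idle': 2, 'online': 6, 'offline': 2}))
    # {'idle': 20.0, 'online': 60.0, 'offline': 20.0}
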
diff --git a/tools/pharos-dashboard/src/jenkins/tasks.py b/tools/pharos-dashboard/src/jenkins/tasks.py
deleted file mode 100644
index ea986c1f..00000000
--- a/tools/pharos-dashboard/src/jenkins/tasks.py
+++ /dev/null
@@ -1,64 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from celery import shared_task
-
-from dashboard.models import Resource
-from jenkins.models import JenkinsSlave, JenkinsStatistic
-from .adapter import *
-
-
-@shared_task
-def sync_jenkins():
- update_jenkins_slaves()
-
-
-def update_jenkins_slaves():
- JenkinsSlave.objects.all().update(active=False)
-
- jenkins_slaves = get_all_slaves()
- for slave in jenkins_slaves:
- jenkins_slave, created = JenkinsSlave.objects.get_or_create(name=slave['displayName'],
- url=get_slave_url(slave))
- jenkins_slave.active = True
- jenkins_slave.ci_slave = is_ci_slave(slave['displayName'])
- jenkins_slave.dev_pod = is_dev_pod(slave['displayName'])
- jenkins_slave.status = get_slave_status(slave)
-
- # if this is a new slave and a pod, check if there is a resource for it, create one if not
- if created and 'pod' in slave['displayName']:
- # parse resource name from slave name
- # naming example: orange-pod1, resource name: Orange POD 1
- tokens = slave['displayName'].split('-')
-            name = tokens[0].capitalize() + ' POD '  # company name
- name += tokens[1][3:] # remove 'pod'
- resource, created = Resource.objects.get_or_create(name=name)
- resource.slave = jenkins_slave
- resource.save()
-
- last_job = get_jenkins_job(jenkins_slave.name)
- if last_job is not None:
- last_job = parse_job(last_job)
- jenkins_slave.last_job_name = last_job['name']
- jenkins_slave.last_job_url = last_job['url']
- jenkins_slave.last_job_scenario = last_job['scenario']
- jenkins_slave.last_job_branch = last_job['branch']
- jenkins_slave.last_job_installer = last_job['installer']
- jenkins_slave.last_job_result = last_job['result']
- jenkins_slave.save()
-
- jenkins_statistic = JenkinsStatistic(slave=jenkins_slave)
- if jenkins_slave.status == 'online' or jenkins_slave.status == 'building':
- jenkins_statistic.online = True
- if jenkins_slave.status == 'offline':
- jenkins_statistic.offline = True
- if jenkins_slave.status == 'online / idle':
- jenkins_statistic.idle = True
- jenkins_statistic.save()
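
update_jenkins_slaves above derives a resource name from a new slave's Jenkins name (the inline comment gives 'orange-pod1' -> 'Orange POD 1'). The same two lines as a standalone helper with a couple of checks (helper name is illustrative):

    def resource_name_from_slave(slave_name):
        """'orange-pod1' -> 'Orange POD 1': capitalise the company part, strip 'pod'."""
        tokens = slave_name.split('-')
        return tokens[0].capitalize() + ' POD ' + tokens[1][3:]

    assert resource_name_from_slave('orange-pod1') == 'Orange POD 1'
    assert resource_name_from_slave('lf-pod2') == 'Lf POD 2'  # acronym prefixes keep only a leading capital
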
diff --git a/tools/pharos-dashboard/src/jenkins/tests.py b/tools/pharos-dashboard/src/jenkins/tests.py
deleted file mode 100644
index 3723cd38..00000000
--- a/tools/pharos-dashboard/src/jenkins/tests.py
+++ /dev/null
@@ -1,129 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-from unittest import TestCase
-
-import jenkins.adapter as jenkins
-from jenkins.models import *
-
-
-# Tests that the data we get with the jenkins adapter contains all the
-# data we need. These tests will fail if:
-# - there is no internet connection
-# - the opnfv jenkins url has changed
-# - the jenkins api has changed
-# - jenkins is not set up / there is no data
-class JenkinsAdapterTestCase(TestCase):
- def test_get_all_slaves(self):
- slaves = jenkins.get_all_slaves()
- self.assertTrue(len(slaves) > 0)
- for slave in slaves:
- self.assertTrue('displayName' in slave)
- self.assertTrue('idle' in slave)
- self.assertTrue('offline' in slave)
-
- def test_get_slave(self):
- slaves = jenkins.get_all_slaves()
- self.assertEqual(slaves[0], jenkins.get_slave(slaves[0]['displayName']))
- self.assertEqual({}, jenkins.get_slave('098f6bcd4621d373cade4e832627b4f6'))
-
- def test_get_ci_slaves(self):
- slaves = jenkins.get_ci_slaves()
- self.assertTrue(len(slaves) > 0)
- for slave in slaves:
- self.assertTrue('nodeName' in slave)
-
- def test_get_jenkins_job(self):
- slaves = jenkins.get_ci_slaves()
- job = None
- for slave in slaves:
- job = jenkins.get_jenkins_job(slave['nodeName'])
- if job is not None:
- break
- # We need to test at least one job
- self.assertNotEqual(job, None)
-
- def test_get_all_jobs(self):
- jobs = jenkins.get_all_jobs()
- lastBuild = False
- self.assertTrue(len(jobs) > 0)
- for job in jobs:
- self.assertTrue('displayName' in job)
- self.assertTrue('url' in job)
- self.assertTrue('lastBuild' in job)
- if job['lastBuild'] is not None:
- lastBuild = True
- self.assertTrue('building' in job['lastBuild'])
- self.assertTrue('fullDisplayName' in job['lastBuild'])
- self.assertTrue('result' in job['lastBuild'])
- self.assertTrue('timestamp' in job['lastBuild'])
- self.assertTrue('builtOn' in job['lastBuild'])
- self.assertTrue(lastBuild)
-
- def test_parse_job(self):
- job = {
- "displayName": "apex-deploy-baremetal-os-nosdn-fdio-noha-colorado",
- "url": "https://build.opnfv.org/ci/job/apex-deploy-baremetal-os-nosdn-fdio-noha-colorado/",
- "lastBuild": {
- "building": False,
- "fullDisplayName": "apex-deploy-baremetal-os-nosdn-fdio-noha-colorado #37",
- "result": "SUCCESS",
- "timestamp": 1476283629917,
- "builtOn": "lf-pod1"
- }
- }
-
- job = jenkins.parse_job(job)
- self.assertEqual(job['scenario'], 'os-nosdn-fdio-noha')
- self.assertEqual(job['installer'], 'apex')
- self.assertEqual(job['branch'], 'colorado')
- self.assertEqual(job['result'], 'SUCCESS')
- self.assertEqual(job['building'], False)
- self.assertEqual(job['url'],
- "https://build.opnfv.org/ci/job/apex-deploy-baremetal-os-nosdn-fdio-noha-colorado/")
- self.assertEqual(job['name'],
- 'apex-deploy-baremetal-os-nosdn-fdio-noha-colorado')
-
- def test_get_slave_status(self):
- slave = {
- 'offline': True,
- 'idle': False
- }
- self.assertEqual(jenkins.get_slave_status(slave), 'offline')
- slave = {
- 'offline': False,
- 'idle': False
- }
- self.assertEqual(jenkins.get_slave_status(slave), 'online')
- slave = {
- 'offline': False,
- 'idle': True
- }
- self.assertEqual(jenkins.get_slave_status(slave), 'online / idle')
-
-
-class JenkinsModelTestCase(TestCase):
- def test_get_utilization(self):
- jenkins_slave = JenkinsSlave.objects.create(name='test', status='offline', url='')
- utilization = jenkins_slave.get_utilization(timedelta(weeks=1))
- self.assertEqual(utilization['idle'], 0)
- self.assertEqual(utilization['offline'], 0)
- self.assertEqual(utilization['online'], 0)
-
- for i in range(10):
- JenkinsStatistic.objects.create(slave=jenkins_slave,
- offline=True, idle=True,
- online=True)
-
- utilization = jenkins_slave.get_utilization(timedelta(weeks=1))
- self.assertEqual(utilization['idle'], 10)
- self.assertEqual(utilization['offline'], 10)
- self.assertEqual(utilization['online'], 10)
diff --git a/tools/pharos-dashboard/src/manage.py b/tools/pharos-dashboard/src/manage.py
deleted file mode 100644
index 80c496f3..00000000
--- a/tools/pharos-dashboard/src/manage.py
+++ /dev/null
@@ -1,32 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-#!/usr/bin/env python
-import os
-import sys
-
-if __name__ == "__main__":
- os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pharos_dashboard.settings")
- try:
- from django.core.management import execute_from_command_line
- except ImportError:
- # The above import may fail for some other reason. Ensure that the
- # issue is really that Django is missing to avoid masking other
- # exceptions on Python 2.
- try:
- import django
- except ImportError:
- raise ImportError(
- "Couldn't import Django. Are you sure it's installed and "
- "available on your PYTHONPATH environment variable? Did you "
- "forget to activate a virtual environment?"
- )
- raise
- execute_from_command_line(sys.argv)
diff --git a/tools/pharos-dashboard/src/notification/__init__.py b/tools/pharos-dashboard/src/notification/__init__.py
deleted file mode 100644
index 37dcbddf..00000000
--- a/tools/pharos-dashboard/src/notification/__init__.py
+++ /dev/null
@@ -1,11 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-default_app_config = 'notification.apps.NotificationConfig' \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/notification/admin.py b/tools/pharos-dashboard/src/notification/admin.py
deleted file mode 100644
index bcaa1ab7..00000000
--- a/tools/pharos-dashboard/src/notification/admin.py
+++ /dev/null
@@ -1,17 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.conf import settings
-from django.contrib import admin
-
-from notification.models import BookingNotification
-
-if settings.DEBUG:
- admin.site.register(BookingNotification) \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/notification/apps.py b/tools/pharos-dashboard/src/notification/apps.py
deleted file mode 100644
index 2de22c4e..00000000
--- a/tools/pharos-dashboard/src/notification/apps.py
+++ /dev/null
@@ -1,18 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.apps import AppConfig
-
-
-class NotificationConfig(AppConfig):
- name = 'notification'
-
- def ready(self):
- import notification.signals #noqa \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/notification/migrations/0001_initial.py b/tools/pharos-dashboard/src/notification/migrations/0001_initial.py
deleted file mode 100644
index 8b8414e6..00000000
--- a/tools/pharos-dashboard/src/notification/migrations/0001_initial.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10 on 2016-11-03 13:33
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ('booking', '0001_initial'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='BookingNotification',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('type', models.CharField(max_length=100)),
- ('submit_time', models.DateTimeField()),
- ('submitted', models.BooleanField(default=False)),
- ('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='booking.Booking')),
- ],
- ),
- ]
diff --git a/tools/pharos-dashboard/src/notification/migrations/__init__.py b/tools/pharos-dashboard/src/notification/migrations/__init__.py
deleted file mode 100644
index b5914ce7..00000000
--- a/tools/pharos-dashboard/src/notification/migrations/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
diff --git a/tools/pharos-dashboard/src/notification/models.py b/tools/pharos-dashboard/src/notification/models.py
deleted file mode 100644
index 89b30234..00000000
--- a/tools/pharos-dashboard/src/notification/models.py
+++ /dev/null
@@ -1,33 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.db import models
-
-class BookingNotification(models.Model):
- id = models.AutoField(primary_key=True)
- type = models.CharField(max_length=100)
- booking = models.ForeignKey('booking.Booking', on_delete=models.CASCADE)
- submit_time = models.DateTimeField()
- submitted = models.BooleanField(default=False)
-
- def get_content(self):
- return {
- 'resource_id': self.booking.resource.id,
- 'booking_id': self.booking.id,
- 'user': self.booking.user.username,
- 'user_id': self.booking.user.id,
- }
-
- def save(self, *args, **kwargs):
- notifications = self.booking.bookingnotification_set.filter(type=self.type).exclude(
- id=self.id)
-        if notifications.count() > 0:
-            raise ValueError('Doubled Notification')
- return super(BookingNotification, self).save(*args, **kwargs)
diff --git a/tools/pharos-dashboard/src/notification/signals.py b/tools/pharos-dashboard/src/notification/signals.py
deleted file mode 100644
index 936c25ba..00000000
--- a/tools/pharos-dashboard/src/notification/signals.py
+++ /dev/null
@@ -1,25 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from django.db.models.signals import post_save
-from django.dispatch import receiver
-
-from booking.models import Booking
-from notification.models import BookingNotification
-
-
-@receiver(post_save, sender=Booking)
-def booking_notification_handler(sender, instance, **kwargs):
- BookingNotification.objects.update_or_create(
- booking=instance, type='booking_start', defaults={'submit_time': instance.start}
- )
- BookingNotification.objects.update_or_create(
- booking=instance, type='booking_end', defaults={'submit_time': instance.end}
- ) \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/notification/tasks.py b/tools/pharos-dashboard/src/notification/tasks.py
deleted file mode 100644
index 7f737625..00000000
--- a/tools/pharos-dashboard/src/notification/tasks.py
+++ /dev/null
@@ -1,49 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import os
-import sys
-from datetime import timedelta
-
-from celery import shared_task
-from django.conf import settings
-from django.utils import timezone
-
-from notification.models import BookingNotification
-
-# Add the top-level directory to the Python path so that the bundled
-# dashboard_notification library can be imported.
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
-
-from dashboard_notification.notification import Notification, Message
-
-
-@shared_task
-def send_booking_notifications():
- with Notification(dashboard_url=settings.RABBITMQ_URL, user=settings.RABBITMQ_USER, password=settings.RABBITMQ_PASSWORD) as messaging:
- now = timezone.now()
- notifications = BookingNotification.objects.filter(submitted=False,
- submit_time__gt=now - timedelta(minutes=1),
- submit_time__lt=now + timedelta(minutes=5))
- for notification in notifications:
- message = Message(type=notification.type, topic=notification.booking.resource.name,
- content=notification.get_content())
- messaging.send(message)
- notification.submitted = True
- notification.save()
-
-@shared_task
-def notification_debug():
- with Notification(dashboard_url=settings.RABBITMQ_URL) as messaging:
- notifications = BookingNotification.objects.all()
- for notification in notifications:
- message = Message(type=notification.type, topic=notification.booking.resource.name,
- content=notification.get_content())
- messaging.send(message)
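
send_booking_notifications above publishes each due notification to RabbitMQ through the bundled dashboard_notification library. A minimal standalone publisher using only the calls visible in that task (Notification as a context manager, Message, send); the connection details and message values below are placeholders:

    from dashboard_notification.notification import Message, Notification

    # Placeholder connection details; the task above reads them from Django settings.
    with Notification(dashboard_url='rabbitmq', user='guest', password='guest') as messaging:
        message = Message(type='booking_start',
                          topic='Orange POD 1',   # resource name, as used in the task above
                          content={'resource_id': 1, 'booking_id': 42,
                                   'user': 'jdoe', 'user_id': 7})
        messaging.send(message)
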
diff --git a/tools/pharos-dashboard/src/notification/tests.py b/tools/pharos-dashboard/src/notification/tests.py
deleted file mode 100644
index 9df9aa60..00000000
--- a/tools/pharos-dashboard/src/notification/tests.py
+++ /dev/null
@@ -1,41 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-from datetime import timedelta
-from unittest import TestCase
-
-from django.contrib.auth.models import User
-from django.utils import timezone
-
-from booking.models import Booking
-from dashboard.models import Resource
-from jenkins.models import JenkinsSlave
-from notification.models import *
-
-
-class BookingNotificationTestCase(TestCase):
- def setUp(self):
- self.slave = JenkinsSlave.objects.create(name='test1', url='test')
- self.res1 = Resource.objects.create(name='res1', slave=self.slave, description='x',
- url='x')
- self.user1 = User.objects.create(username='user1')
-
- start = timezone.now()
- end = start + timedelta(days=1)
- self.booking = Booking.objects.create(start=start, end=end, purpose='test',
- resource=self.res1, user=self.user1)
-
- def test_booking_notification(self):
- BookingNotification.objects.create(type='test', booking=self.booking,
- submit_time=timezone.now())
-
- self.assertRaises(ValueError, BookingNotification.objects.create, type='test',
- booking=self.booking,
- submit_time=timezone.now())
diff --git a/tools/pharos-dashboard/src/pharos_dashboard/__init__.py b/tools/pharos-dashboard/src/pharos_dashboard/__init__.py
deleted file mode 100644
index f104c4db..00000000
--- a/tools/pharos-dashboard/src/pharos_dashboard/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-# This will make sure the app is always imported when
-# Django starts so that shared_task will use this app.
-from .celery import app as celery_app # noqa
diff --git a/tools/pharos-dashboard/src/pharos_dashboard/celery.py b/tools/pharos-dashboard/src/pharos_dashboard/celery.py
deleted file mode 100644
index f60f2433..00000000
--- a/tools/pharos-dashboard/src/pharos_dashboard/celery.py
+++ /dev/null
@@ -1,30 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-import os
-
-from celery import Celery
-
-# set the default Django settings module for the 'celery' program.
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pharos_dashboard.settings')
-
-from django.conf import settings # noqa
-
-app = Celery('pharos_dashboard')
-
-# Using a string here means the worker will not have to
-# pickle the object when using Windows.
-app.config_from_object('django.conf:settings')
-app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
-
-
-@app.task(bind=True)
-def debug_task(self):
- print('Request: {0!r}'.format(self.request)) \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/pharos_dashboard/settings.py b/tools/pharos-dashboard/src/pharos_dashboard/settings.py
deleted file mode 100644
index 546b174e..00000000
--- a/tools/pharos-dashboard/src/pharos_dashboard/settings.py
+++ /dev/null
@@ -1,184 +0,0 @@
-import os
-from datetime import timedelta
-
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-
-# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = True
-
-# Application definition
-
-INSTALLED_APPS = [
- 'dashboard',
- 'booking',
- 'account',
- 'jenkins',
- 'notification',
- 'django.contrib.admin',
- 'django.contrib.auth',
- 'django.contrib.contenttypes',
- 'django.contrib.sessions',
- 'django.contrib.messages',
- 'django.contrib.staticfiles',
- 'django.contrib.humanize',
- 'bootstrap3',
- 'crispy_forms',
- 'rest_framework',
- 'rest_framework.authtoken',
-]
-
-MIDDLEWARE = [
- 'django.middleware.security.SecurityMiddleware',
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
- 'django.middleware.clickjacking.XFrameOptionsMiddleware',
- 'account.middleware.TimezoneMiddleware',
-]
-
-ROOT_URLCONF = 'pharos_dashboard.urls'
-
-TEMPLATES = [
- {
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
-        'DIRS': [os.path.join(BASE_DIR, 'templates')],
- 'APP_DIRS': True,
- 'OPTIONS': {
- 'context_processors': [
- 'django.template.context_processors.debug',
- 'django.template.context_processors.request',
- 'django.contrib.auth.context_processors.auth',
- 'django.contrib.messages.context_processors.messages',
- ],
- },
- },
-]
-
-WSGI_APPLICATION = 'pharos_dashboard.wsgi.application'
-
-# Password validation
-# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
-
-AUTH_PASSWORD_VALIDATORS = [
- {
- 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
- },
- {
- 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
- },
- {
- 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
- },
- {
- 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
- },
-]
-
-# Internationalization
-# https://docs.djangoproject.com/en/1.10/topics/i18n/
-
-LANGUAGE_CODE = 'en-us'
-
-TIME_ZONE = 'UTC'
-
-USE_I18N = True
-
-USE_L10N = True
-
-USE_TZ = True
-
-# Static files (CSS, JavaScript, Images)
-# https://docs.djangoproject.com/en/1.10/howto/static-files/
-MEDIA_URL = '/media/'
-STATIC_URL = '/static/'
-
-# Static files (CSS, JavaScript, Images)
-# https://docs.djangoproject.com/en/1.10/howto/static-files/
-STATICFILES_DIRS = [
- os.path.join(BASE_DIR, "static"),
-]
-
-LOGIN_REDIRECT_URL = '/'
-
-# SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = os.environ['SECRET_KEY']
-
-BOOTSTRAP3 = {
- 'set_placeholder': False,
-}
-
-ALLOWED_HOSTS = ['*']
-
-# Database
-# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
-DATABASES = {
- 'default': {
- 'ENGINE': 'django.db.backends.postgresql',
- 'NAME': os.environ['DB_NAME'],
- 'USER': os.environ['DB_USER'],
- 'PASSWORD': os.environ['DB_PASS'],
- 'HOST': os.environ['DB_SERVICE'],
- 'PORT': os.environ['DB_PORT']
- }
-}
-
-
-# Rest API Settings
-REST_FRAMEWORK = {
- 'DEFAULT_PERMISSION_CLASSES': [
- 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
- ],
- 'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',),
- 'DEFAULT_AUTHENTICATION_CLASSES': (
- 'rest_framework.authentication.SessionAuthentication',
- 'rest_framework.authentication.TokenAuthentication',
- )
-}
-
-MEDIA_ROOT = '/media'
-STATIC_ROOT = '/static'
-
-# Jira Settings
-CREATE_JIRA_TICKET = False
-
-JIRA_URL = os.environ['JIRA_URL']
-
-JIRA_USER_NAME = os.environ['JIRA_USER_NAME']
-JIRA_USER_PASSWORD = os.environ['JIRA_USER_PASSWORD']
-
-OAUTH_CONSUMER_KEY = os.environ['OAUTH_CONSUMER_KEY']
-OAUTH_CONSUMER_SECRET = os.environ['OAUTH_CONSUMER_SECRET']
-
-OAUTH_REQUEST_TOKEN_URL = JIRA_URL + '/plugins/servlet/oauth/request-token'
-OAUTH_ACCESS_TOKEN_URL = JIRA_URL + '/plugins/servlet/oauth/access-token'
-OAUTH_AUTHORIZE_URL = JIRA_URL + '/plugins/servlet/oauth/authorize'
-
-OAUTH_CALLBACK_URL = os.environ['DASHBOARD_URL'] + '/accounts/authenticated'
-
-# Celery Settings
-CELERY_TIMEZONE = 'UTC'
-
-RABBITMQ_URL = 'rabbitmq'
-RABBITMQ_USER = os.environ['RABBITMQ_USER']
-RABBITMQ_PASSWORD = os.environ['RABBITMQ_PASSWORD']
-
-BROKER_URL = 'amqp://' + RABBITMQ_USER + ':' + RABBITMQ_PASSWORD + '@rabbitmq:5672//'
-
-CELERYBEAT_SCHEDULE = {
- 'sync-jenkins': {
- 'task': 'jenkins.tasks.sync_jenkins',
- 'schedule': timedelta(minutes=5)
- },
- 'send-booking-notifications': {
- 'task': 'notification.tasks.send_booking_notifications',
- 'schedule': timedelta(minutes=5)
- },
- 'clean-database': {
- 'task': 'dashboard.tasks.database_cleanup',
- 'schedule': timedelta(hours=24)
- },
-}
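Because settings.py indexes os.environ directly, any missing variable raises KeyError before Django finishes loading. A hypothetical pre-flight check (not part of the deleted tree) that lists the variables the settings above require:

    # check_env.py -- hypothetical helper, assuming the variable names used in settings.py
    import os
    import sys

    REQUIRED = [
        'SECRET_KEY', 'DB_NAME', 'DB_USER', 'DB_PASS', 'DB_SERVICE', 'DB_PORT',
        'JIRA_URL', 'JIRA_USER_NAME', 'JIRA_USER_PASSWORD',
        'OAUTH_CONSUMER_KEY', 'OAUTH_CONSUMER_SECRET', 'DASHBOARD_URL',
        'RABBITMQ_USER', 'RABBITMQ_PASSWORD',
    ]

    missing = [name for name in REQUIRED if name not in os.environ]
    if missing:
        sys.exit('missing environment variables: ' + ', '.join(missing))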
diff --git a/tools/pharos-dashboard/src/pharos_dashboard/urls.py b/tools/pharos-dashboard/src/pharos_dashboard/urls.py
deleted file mode 100644
index adcb5b8f..00000000
--- a/tools/pharos-dashboard/src/pharos_dashboard/urls.py
+++ /dev/null
@@ -1,44 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-"""pharos_dashboard URL Configuration
-
-The `urlpatterns` list routes URLs to views. For more information please see:
- https://docs.djangoproject.com/en/1.10/topics/http/urls/
-Examples:
-Function views
- 1. Add an import: from my_app import views
- 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
-Class-based views
- 1. Add an import: from other_app.views import Home
- 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
-Including another URLconf
- 1. Import the include() function: from django.conf.urls import url, include
- 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
-"""
-from django.conf import settings
-from django.conf.urls import url, include
-from django.conf.urls.static import static
-from django.contrib import admin
-
-
-urlpatterns = [
- url(r'^', include('dashboard.urls', namespace='dashboard')),
- url(r'^booking/', include('booking.urls', namespace='booking')),
- url(r'^accounts/', include('account.urls', namespace='account')),
-
- url(r'^admin/', admin.site.urls),
- url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
-
- url(r'^api/', include('api.urls'))
-]
-
-if settings.DEBUG is True:
- urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) \ No newline at end of file
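The docstring above spells out the recipe for extending urlpatterns; applied to this file, a function view would be wired in as sketched below. The view and route are illustrative only and were not part of the deleted code.

    # Hypothetical addition to the deleted urls.py, following its own "Function views" recipe
    from django.conf.urls import url
    from django.http import HttpResponse

    def health(request):
        return HttpResponse('ok')

    urlpatterns += [
        url(r'^health/$', health, name='health'),
    ]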
diff --git a/tools/pharos-dashboard/src/pharos_dashboard/wsgi.py b/tools/pharos-dashboard/src/pharos_dashboard/wsgi.py
deleted file mode 100644
index 3d43361b..00000000
--- a/tools/pharos-dashboard/src/pharos_dashboard/wsgi.py
+++ /dev/null
@@ -1,26 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Max Breitenfeldt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-"""
-WSGI config for pharos_dashboard project.
-
-It exposes the WSGI callable as a module-level variable named ``application``.
-
-For more information on this file, see
-https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
-"""
-
-import os
-
-from django.core.wsgi import get_wsgi_application
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pharos_dashboard.settings")
-
-application = get_wsgi_application()
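wsgi.py only exposes the application callable; serving it was left to a WSGI server (gunicorn is pinned in the requirements files below). For a quick local smoke test the standard library would also do, assuming the deleted tree is importable and the environment variables required by settings.py are set:

    # run_local.py -- hypothetical smoke test, not a production setup
    from wsgiref.simple_server import make_server

    from pharos_dashboard.wsgi import application

    if __name__ == '__main__':
        # Serve the Django WSGI callable on localhost:8000.
        make_server('127.0.0.1', 8000, application).serve_forever()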
diff --git a/tools/pharos-dashboard/src/static/bower.json b/tools/pharos-dashboard/src/static/bower.json
deleted file mode 100644
index f473747f..00000000
--- a/tools/pharos-dashboard/src/static/bower.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
- "name": "pharos-dashboard-dependencies",
- "authors": [
- "maxbr <maxbr@mi.fu-berlin.de>"
- ],
- "description": "This package contains all the Js/CSS dependencies needed to run the Pharos Dashboard.",
- "main": "",
- "license": "Apache2",
- "homepage": "",
- "private": true,
- "ignore": [
- "**/.*",
- "node_modules",
- "bower_components",
- "test",
- "tests"
- ],
- "dependencies": {
- "eonasdan-bootstrap-datetimepicker": "^4.17.37",
- "fullcalendar": "^2.9.0",
- "jquery-migrate": "^3.0.0",
- "startbootstrap-sb-admin-2-blackrockdigital": "^3.3.7"
- }
-}
diff --git a/tools/pharos-dashboard/src/static/css/theme.css b/tools/pharos-dashboard/src/static/css/theme.css
deleted file mode 100644
index bd156372..00000000
--- a/tools/pharos-dashboard/src/static/css/theme.css
+++ /dev/null
@@ -1,13 +0,0 @@
-.blink_me {
- animation: blinker 1.5s linear infinite;
-}
-
-@keyframes blinker {
- 20% {
- opacity: 0.4;
- }
-}
-
-.modal p {
- word-wrap: break-word;
-} \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/static/js/booking-calendar.js b/tools/pharos-dashboard/src/static/js/booking-calendar.js
deleted file mode 100644
index f634293e..00000000
--- a/tools/pharos-dashboard/src/static/js/booking-calendar.js
+++ /dev/null
@@ -1,58 +0,0 @@
-/*****************************************************************************
- * Copyright (c) 2016 Max Breitenfeldt and others.
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Apache License, Version 2.0
- * which accompanies this distribution, and is available at
- * http://www.apache.org/licenses/LICENSE-2.0
- *****************************************************************************/
-
-
-function parseCalendarEvents(bookings) {
- var events = [];
- for (var i = 0; i < bookings.length; i++) {
- // convert ISO 8601 timestring to moment, needed for timezone handling
- start = moment(bookings[i]['start']);
- end = moment(bookings[i]['end']);
-
- installer = bookings[i]['installer__name'];
- if (installer === null) {
- installer = '';
- }
-
- scenario = bookings[i]['scenario__name'];
- if (scenario === null) {
- scenario = '';
- }
- title = bookings[i]['purpose'] + ' ' + installer + ' ' + scenario;
-
- event = {
- id: bookings[i]['id'],
- title: title,
- start: start,
- end: end,
- };
- events.push(event);
- }
- return events;
-}
-
-function loadEvents(url) {
- $.ajax({
- url: url,
- type: 'get',
- success: function (data) {
- $('#calendar').fullCalendar('addEventSource', parseCalendarEvents(data['bookings']));
- },
- failure: function (data) {
- alert('Error loading booking data');
- }
- });
-}
-
-$(document).ready(function () {
- $('#calendar').fullCalendar(calendarOptions);
- loadEvents(bookings_url);
- $('#starttimepicker').datetimepicker(timepickerOptions);
- $('#endtimepicker').datetimepicker(timepickerOptions);
-});
diff --git a/tools/pharos-dashboard/src/static/js/dataTables-sort.js b/tools/pharos-dashboard/src/static/js/dataTables-sort.js
deleted file mode 100644
index 3072d2f1..00000000
--- a/tools/pharos-dashboard/src/static/js/dataTables-sort.js
+++ /dev/null
@@ -1,36 +0,0 @@
-/*****************************************************************************
-* Copyright (c) 2016 Max Breitenfeldt and others.
-*
-* All rights reserved. This program and the accompanying materials
-* are made available under the terms of the Apache License, Version 2.0
-* which accompanies this distribution, and is available at
-* http://www.apache.org/licenses/LICENSE-2.0
-*****************************************************************************/
-
-
-/**
- * This is a sort function for dataTables to sort tables by the status column.
- * The order should be: online < online/idle < offline
- */
-jQuery.extend(jQuery.fn.dataTableExt.oSort, {
- "status-pre": function (a) {
- switch (a) {
- case 'online':
- return 1;
- case 'online / idle':
- return 2;
- case 'offline':
- return 3;
- default:
- return a;
- }
- },
-
- "status-asc": function (a, b) {
- return ((a < b) ? -1 : ((a > b) ? 1 : 0));
- },
-
- "status-desc": function (a, b) {
- return ((a < b) ? 1 : ((a > b) ? -1 : 0));
- }
-}); \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/static/js/datetimepicker-options.js b/tools/pharos-dashboard/src/static/js/datetimepicker-options.js
deleted file mode 100644
index d43f5fb8..00000000
--- a/tools/pharos-dashboard/src/static/js/datetimepicker-options.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/*****************************************************************************
-* Copyright (c) 2016 Max Breitenfeldt and others.
-*
-* All rights reserved. This program and the accompanying materials
-* are made available under the terms of the Apache License, Version 2.0
-* which accompanies this distribution, and is available at
-* http://www.apache.org/licenses/LICENSE-2.0
-*****************************************************************************/
-
-
-var timepickerOptions = {
- format: 'MM/DD/YYYY HH:00'
-}; \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/static/js/flot-pie-chart.js b/tools/pharos-dashboard/src/static/js/flot-pie-chart.js
deleted file mode 100644
index 3b80b2a2..00000000
--- a/tools/pharos-dashboard/src/static/js/flot-pie-chart.js
+++ /dev/null
@@ -1,30 +0,0 @@
-/*****************************************************************************
-* Copyright (c) 2016 Max Breitenfeldt and others.
-*
-* All rights reserved. This program and the accompanying materials
-* are made available under the terms of the Apache License, Version 2.0
-* which accompanies this distribution, and is available at
-* http://www.apache.org/licenses/LICENSE-2.0
-*****************************************************************************/
-
-
-function loadChartData(chart_id, url) {
- $.ajax({
- url: url,
- type: 'get',
- success: function (data) {
- var data = data['data'];
- var plotObj = $.plot($("#" + chart_id), data, {
- series: {
- pie: {
- show: true
- }
- }
- });
- },
- failure: function (data) {
- alert('Error loading data');
- }
- });
-
-} \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/static/js/fullcalendar-options.js b/tools/pharos-dashboard/src/static/js/fullcalendar-options.js
deleted file mode 100644
index 22a1b95e..00000000
--- a/tools/pharos-dashboard/src/static/js/fullcalendar-options.js
+++ /dev/null
@@ -1,101 +0,0 @@
-/*****************************************************************************
-* Copyright (c) 2016 Max Breitenfeldt and others.
-*
-* All rights reserved. This program and the accompanying materials
-* are made available under the terms of the Apache License, Version 2.0
-* which accompanies this distribution, and is available at
-* http://www.apache.org/licenses/LICENSE-2.0
-*****************************************************************************/
-
-
-var tmpevent;
-
-function sendEventToForm(event) {
- $('#starttimepicker').data("DateTimePicker").date(event.start);
- $('#endtimepicker').data("DateTimePicker").date(event.end);
-}
-
-var calendarOptions = {
- height: 600,
- header: {
- left: 'prev,next today',
- center: 'title',
- right: 'agendaWeek,month'
- },
- timezone: user_timezone, // set in booking_calendar.html
- defaultView: 'month',
- slotDuration: '00:60:00',
- slotLabelFormat: "HH:mm",
- firstDay: 1,
- allDaySlot: false,
- selectOverlap: false,
- eventOverlap: false,
- selectable: true,
- editable: false,
- eventLimit: true, // allow "more" link when too many events
- timeFormat: 'H(:mm)', // uppercase H for 24-hour clock
- unselectAuto: true,
- nowIndicator: true,
-
- // selectHelper is only working in the agendaWeek view, this is a workaround:
- // if an event is selected, the existing selection is removed and a temporary event is added
- // to the calendar
- select: function (start, end) {
- if (tmpevent != undefined) {
- $('#calendar').fullCalendar('removeEvents', tmpevent.id);
- $('#calendar').fullCalendar('rerenderEvents');
- tmpevent = undefined;
- }
- // the times need to be converted here to make them show up in the agendaWeek view if they
- // are created in the month view. If they are not converted, the tmpevent will only show
- // up in the (deactivated) allDaySlot
- start = moment(start);
- end = moment(end);
-
- tmpevent = {
- id: '537818f62bc63518ece15338fb86c8be',
- title: 'New Booking',
- start: start,
- end: end,
- editable: true
- };
-
- $('#calendar').fullCalendar('renderEvent', tmpevent, true);
- sendEventToForm(tmpevent);
- },
-
- eventClick: function (event) {
- if (tmpevent != undefined) {
- if (event.id != tmpevent.id) {
- $('#calendar').fullCalendar('removeEvents', tmpevent.id);
- $('#calendar').fullCalendar('rerenderEvents');
- tmpevent = undefined;
- }
- }
-
- // tmpevent is deleted if a real event is clicked, load event details
- if (tmpevent == undefined) {
- var booking_detail_url = booking_detail_prefix + event.id;
-
- $.ajax({
- url: booking_detail_url,
- type: 'get',
- success: function (data) {
- $('#booking_detail_content').html(data);
- },
- failure: function (data) {
- alert('Error loading booking details');
- }
- });
- $('#booking_detail_modal').modal('show');
- }
- },
-
- eventDrop: function (event) {
- sendEventToForm(event);
- },
-
- eventResize: function (event) {
- sendEventToForm(event);
- }
-}; \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/templates/account/user_list.html b/tools/pharos-dashboard/src/templates/account/user_list.html
deleted file mode 100644
index 68178ebe..00000000
--- a/tools/pharos-dashboard/src/templates/account/user_list.html
+++ /dev/null
@@ -1,55 +0,0 @@
-{% extends "dashboard/table.html" %}
-{% load staticfiles %}
-
-{% block table %}
- <thead>
- <tr>
- <th>Username</th>
- <th>Full Name</th>
- <th>Email</th>
- <th>Company</th>
- <th>SSH Key</th>
- <th>GPG Key</th>
- </tr>
- </thead>
- <tbody>
- {% for user in users %}
- <tr>
- <td>
- {{ user.username }}
- </td>
- <td>
- {{ user.userprofile.full_name }}
- </td>
- <td>
- {{ user.email }}
- </td>
- <td>
- {{ user.userprofile.company }}
- </td>
- <td>
- {% if user.userprofile.ssh_public_key %}
- <a href={{ user.userprofile.ssh_public_key.url }}>SSH</a>
- {% endif %}
- </td>
- <td>
- {% if user.userprofile.pgp_public_key %}
- <a href={{ user.userprofile.pgp_public_key.url }}>GPG</a>
- {% endif %}
- </td>
- </tr>
- {% endfor %}
- </tbody>
-{% endblock table %}
-
-
-{% block tablejs %}
- <script type="text/javascript">
- $(document).ready(function () {
- $('#table').DataTable({
- scrollX: true,
- "order": [[0, "asc"]]
- });
- });
- </script>
-{% endblock tablejs %}
diff --git a/tools/pharos-dashboard/src/templates/account/userprofile_update_form.html b/tools/pharos-dashboard/src/templates/account/userprofile_update_form.html
deleted file mode 100644
index f4bb7b55..00000000
--- a/tools/pharos-dashboard/src/templates/account/userprofile_update_form.html
+++ /dev/null
@@ -1,38 +0,0 @@
-{% extends "layout.html" %}
-{% load bootstrap3 %}
-
-{% block basecontent %}
- <div class="container">
- <div class="row">
- <div class="col-md-4 col-md-offset-4">
- {% bootstrap_messages %}
- <div class="login-panel panel panel-default">
- <div class="panel-heading">
- <h3 class="panel-title">
- {{ title }}
- </h3>
- </div>
- <div class="panel-body">
- <form enctype="multipart/form-data" method="post">
- {% csrf_token %}
- {% bootstrap_form form %}
- <p><b>API Token</b>
- <a href="{% url 'generate_token' %}" class="btn btn-default">
- Generate
- </a>
- </p>
- <p style="word-wrap: break-word;">{{ token.key }}</p>
-
- <p></p>
- {% buttons %}
- <button type="submit" class="btn btn btn-success">
- Save Profile
- </button>
- {% endbuttons %}
- </form>
- </div>
- </div>
- </div>
- </div>
- </div>
-{% endblock basecontent %}
diff --git a/tools/pharos-dashboard/src/templates/base.html b/tools/pharos-dashboard/src/templates/base.html
deleted file mode 100644
index 4d8530ad..00000000
--- a/tools/pharos-dashboard/src/templates/base.html
+++ /dev/null
@@ -1,111 +0,0 @@
-{% extends "layout.html" %}
-{% load bootstrap3 %}
-
-{% block basecontent %}
- <div id="wrapper">
- <!-- Navigation -->
- <nav class="navbar navbar-default navbar-static-top" role="navigation"
- style="margin-bottom: 0">
- <div class="navbar-header">
- <button type="button" class="navbar-toggle" data-toggle="collapse"
- data-target=".navbar-collapse">
- <span class="sr-only">Toggle navigation</span>
- <span class="icon-bar"></span>
- <span class="icon-bar"></span>
- <span class="icon-bar"></span>
- </button>
- <a href="https://www.opnfv.org/" class="navbar-left"><img
- src="http://artifacts.opnfv.org/apex/review/14099/installation-instructions/_static/opnfv-logo.png"></a>
- <a class="navbar-brand" href={% url 'dashboard:index' %}>Pharos Dashboard</a>
- </div>
- <!-- /.navbar-header -->
-
- <ul class="nav navbar-top-links navbar-right">
- <li class="dropdown">
- <a class="dropdown-toggle" data-toggle="dropdown" href="#">
- <i class="fa fa-user fa-fw"></i> <i class="fa fa-caret-down"></i>
- </a>
- <ul class="dropdown-menu dropdown-user">
- {% if user.is_authenticated %}
- <li><a href="{% url 'account:settings' %}"><i
- class="fa fa-gear fa-fw"></i>
- Settings</a>
- </li>
- <li class="divider"></li>
- <li><a href="{% url 'account:logout' %}?next={{ request.path }}"><i
- class="fa fa-sign-out fa-fw"></i>
- Logout</a>
- </li>
- {% else %}
- <li><a href="{% url 'account:login' %}"><i
- class="fa fa-sign-in fa-fw"></i>
- Login with Jira</a>
-                            </li>
- {% endif %}
- </ul>
- <!-- /.dropdown-user -->
- </li>
- <!-- /.dropdown -->
- </ul>
- <!-- /.navbar-top-links -->
-
- <div class="navbar-default sidebar" role="navigation">
- <div class="sidebar-nav navbar-collapse">
- <ul class="nav" id="side-menu">
- <li>
- <a href="{% url 'dashboard:ci_pods' %}"><i
- class="fa fa-fw"></i>CI-Pods</a>
- </li>
- <li>
- <a href="{% url 'dashboard:dev_pods' %}"><i
- class="fa fa-fw"></i>Development
- Pods</a>
- </li>
- <li>
- <a href="{% url 'dashboard:jenkins_slaves' %}"><i
- class="fa fa-fw"></i>Jenkins
- Slaves</a>
- </li>
- <li>
- {% if user.is_authenticated %}
- <a href="{% url 'account:users' %}"><i
- class="fa fa-fw"></i>User List
- </a>
- {% endif %}
- </li>
- <li>
- <a href="{% url 'booking:list' %}"><i
- class="fa fa-fw"></i>Booking List
- </a>
- </li>
- <li>
- <a href="{% url 'api-root' %}"><i
- class="fa fa-fw"></i>API
- </a>
- </li>
- </ul>
- </div>
- <!-- /.sidebar-collapse -->
- </div>
- <!-- /.navbar-static-side -->
- </nav>
-
- <!-- Page Content -->
- <div id="page-wrapper">
- <div class="row">
- <div class="col-lg-12">
- <h1 class="page-header">{{ title }}</h1>
- </div>
- <!-- /.col-lg-12 -->
- </div>
-
- {% bootstrap_messages %}
-
- {% block content %}
-
- {% endblock content %}
- </div>
- <!-- /#page-wrapper -->
- </div>
- <!-- /#wrapper -->
-{% endblock basecontent %}
diff --git a/tools/pharos-dashboard/src/templates/booking/booking_calendar.html b/tools/pharos-dashboard/src/templates/booking/booking_calendar.html
deleted file mode 100644
index 4644e855..00000000
--- a/tools/pharos-dashboard/src/templates/booking/booking_calendar.html
+++ /dev/null
@@ -1,103 +0,0 @@
-{% extends "base.html" %}
-{% load staticfiles %}
-
-{% load bootstrap3 %}
-
-{% block extrahead %}
- <link href="{% static "bower_components/fullcalendar/dist/fullcalendar.css" %}"
- rel='stylesheet'/>
- <link href="{% static "bower_components/eonasdan-bootstrap-datetimepicker/build/css/bootstrap-datetimepicker.min.css" %}"
- rel='stylesheet'/>
-{% endblock extrahead %}
-
-{% block content %}
- <div class="col-lg-8">
- <div class="container-fluid">
- <div class="panel panel-default">
- <div class="panel-heading">
- <i class="fa fa-calendar fa-fw"></i>Calendar
- </div>
- <div class="panel-body">
- <div id='calendar'>
- </div>
- </div>
- <!-- /.panel-body -->
- </div>
- <!-- /.panel -->
- </div>
- </div>
-
- <div class="col-lg-4">
- <div class="panel panel-default">
- <div class="panel-heading">
- <i class="fa fa-edit fa-fw"></i>Booking
- </div>
- <div class="panel-body">
- {% if user.is_authenticated %}
- <div id="booking_form_div">
- {% bootstrap_form_errors form type='non_fields' %}
- <form method="post" action="" class="form" id="bookingform">
- {% csrf_token %}
-
- <div class='input-group' id='starttimepicker'>
- {% bootstrap_field form.start addon_after='<span class="glyphicon glyphicon-calendar"></span>' %}
- </div>
- <div class='input-group' id='endtimepicker'>
- {% bootstrap_field form.end addon_after='<span class="glyphicon glyphicon-calendar"></span>' %}
- </div>
- {% bootstrap_field form.purpose %}
- {% bootstrap_field form.installer %}
- {% bootstrap_field form.scenario %}
- {% buttons %}
- <button type="submit" class="btn btn btn-success">
- Book
- </button>
- {% endbuttons %}
- </form>
- </div>
- {% else %}
- <p>Please
- <a href="{% url 'account:login' %}">
- login with Jira</a>
- to book this Pod</p>
- {% endif %}
- </div>
- </div>
- </div>
-
- <div id="booking_detail_modal" class="modal fade" role="dialog">
- <div class="modal-dialog">
-
- <!-- Modal content-->
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal">&times;</button>
- <h4 class="modal-title">Booking Detail</h4>
- </div>
- <div class="modal-body" id="booking_detail_content">
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" data-dismiss="modal">Close
- </button>
- </div>
- </div>
-
- </div>
- </div>
-{% endblock content %}
-
-{% block extrajs %}
- <script type="text/javascript">
- var bookings_url = "{% url 'booking:bookings_json' resource_id=resource.id %}";
- var booking_detail_prefix = "{% url 'booking:detail_prefix' %}";
- var user_timezone = "{{ request.user.userprofile.timezone }}"
- </script>
-
- <script src={% static "bower_components/moment/moment.js" %}></script>
- <script src={% static "bower_components/fullcalendar/dist/fullcalendar.js" %}></script>
- <script type="text/javascript"
- src={% static "bower_components/eonasdan-bootstrap-datetimepicker/build/js/bootstrap-datetimepicker.min.js" %}></script>
- <script src={% static "js/fullcalendar-options.js" %}></script>
- <script src={% static "js/datetimepicker-options.js" %}></script>
- <script src={% static "js/booking-calendar.js" %}></script>
-{% endblock extrajs %} \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/templates/booking/booking_detail.html b/tools/pharos-dashboard/src/templates/booking/booking_detail.html
deleted file mode 100644
index 4b016b29..00000000
--- a/tools/pharos-dashboard/src/templates/booking/booking_detail.html
+++ /dev/null
@@ -1,26 +0,0 @@
-{% load jira_filters %}
-
-<p>
- <b>Resource: </b>
- <a href="{{ booking.resource.url }}">
- {{ booking.resource.name }}
- </a>
-</p>
-<p>
- <b>User: </b> {{ booking.user.username }}
-</p>
-<p>
- <b>Start: </b> {{ booking.start }}
-</p>
-<p>
- <b>End: </b> {{ booking.end }}
-</p>
-<p>
- <b>Purpose: </b> {{ booking.purpose }}
-</p>
-<p>
- <b>Installer: </b> {{ booking.installer }}
-</p>
-<p>
- <b>Scenario: </b> {{ booking.scenario }}
-</p> \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/templates/booking/booking_list.html b/tools/pharos-dashboard/src/templates/booking/booking_list.html
deleted file mode 100644
index ccdc46d7..00000000
--- a/tools/pharos-dashboard/src/templates/booking/booking_list.html
+++ /dev/null
@@ -1,48 +0,0 @@
-{% extends "base.html" %}
-{% load staticfiles %}
-
-{% block extrahead %}
- <!-- DataTables CSS -->
- <link href="{% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css" %}"
- rel="stylesheet">
-
- <!-- DataTables Responsive CSS -->
- <link href="{% static "bower_components/datatables-responsive/css/dataTables.responsive.css" %}"
- rel="stylesheet">
-{% endblock extrahead %}
-
-{% block content %}
- <div class="row">
- <div class="panel-body">
- <div class="dataTables_wrapper">
- <table class="table table-striped table-bordered table-hover" id="table"
- cellspacing="0"
- width="100%">
- {% include "booking/booking_table.html" %}
- </table>
- </div>
- <!-- /.table-responsive -->
- <!-- /.panel-body -->
- <!-- /.panel -->
- </div>
- <!-- /.col-lg-12 -->
- </div>
-{% endblock content %}
-
-{% block extrajs %}
- <!-- DataTables JavaScript -->
- <link href="{% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css" %}"
- rel="stylesheet">
-
-
- <script src={% static "bower_components/datatables/media/js/jquery.dataTables.min.js" %}></script>
- <script src={% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.min.js" %}></script>
-
- <script type="text/javascript">
- $(document).ready(function () {
- $('#table').DataTable({
- scrollX: true,
- });
- });
- </script>
-{% endblock extrajs %}
diff --git a/tools/pharos-dashboard/src/templates/booking/booking_table.html b/tools/pharos-dashboard/src/templates/booking/booking_table.html
deleted file mode 100644
index 655b0131..00000000
--- a/tools/pharos-dashboard/src/templates/booking/booking_table.html
+++ /dev/null
@@ -1,37 +0,0 @@
-{% load jira_filters %}
-
-
-<thead>
-<tr>
- <th>User</th>
- <th>Purpose</th>
- <th>Start</th>
- <th>End</th>
- <th>Installer</th>
- <th>Scenario</th>
-</tr>
-</thead>
-<tbody>
-{% for booking in bookings %}
- <tr>
- <td>
- {{ booking.user.username }}
- </td>
- <td>
- {{ booking.purpose }}
- </td>
- <td>
- {{ booking.start }}
- </td>
- <td>
- {{ booking.end }}
- </td>
- <td>
- {{ booking.installer }}
- </td>
- <td>
- {{ booking.scenario }}
- </td>
- </tr>
-{% endfor %}
-</tbody> \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/templates/dashboard/ci_pods.html b/tools/pharos-dashboard/src/templates/dashboard/ci_pods.html
deleted file mode 100644
index a20be957..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/ci_pods.html
+++ /dev/null
@@ -1,61 +0,0 @@
-{% extends "dashboard/table.html" %}
-{% load staticfiles %}
-{% load jenkins_filters %}
-
-{% block table %}
- <thead>
- <tr>
- <th>Name</th>
- <th>Slave Name</th>
- <th>Status</th>
- <th>Installer</th>
- <th>Scenario</th>
- <th>Branch</th>
- <th>Job</th>
- </tr>
- </thead>
- <tbody>
- {% for pod in ci_pods %}
- <tr>
- <td>
- <a target='_blank' href={{ pod.url }}>{{ pod.name }}</a>
- </td>
- <td>
- <a target='_blank' href={{ pod.slave.url }}>{{ pod.slave.name }}</a>
- </td>
- <td style="background-color:{{ pod.slave.status | jenkins_status_color }}">
- {{ pod.slave.status }}
- </td>
- <td {{ pod.slave.last_job_result | jenkins_job_blink }}>
- {{ pod.slave.last_job_installer }}
- </td>
- <td {{ pod.slave.last_job_result | jenkins_job_blink }}>
- {{ pod.slave.last_job_scenario }}
- </td>
- <td {{ pod.slave.last_job_result | jenkins_job_blink }}>
- {{ pod.slave.last_job_branch }}
- </td>
- <td><a {{ pod.slave.last_job_result | jenkins_job_blink }}
- style="color:{{ pod.slave.last_job_result | jenkins_job_color }}"
- target='_blank'
- href={{ pod.slave.last_job_url }}>{{ pod.slave.last_job_name }}</a>
- </td>
- </tr>
- {% endfor %}
- </tbody>
-{% endblock table %}
-
-
-{% block tablejs %}
- <script type="text/javascript">
- $(document).ready(function () {
- $('#table').DataTable({
- scrollX: true,
- columnDefs: [
- {type: 'status', targets: 2}
- ],
- "order": [[2, "asc"]]
- });
- });
- </script>
-{% endblock tablejs %}
diff --git a/tools/pharos-dashboard/src/templates/dashboard/dev_pods.html b/tools/pharos-dashboard/src/templates/dashboard/dev_pods.html
deleted file mode 100644
index a6f3b2ee..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/dev_pods.html
+++ /dev/null
@@ -1,70 +0,0 @@
-{% extends "dashboard/table.html" %}
-{% load staticfiles %}
-{% load jenkins_filters %}
-
-{% block table %}
- <thead>
- <tr>
- <th>Name</th>
- <th>Slave Name</th>
- <th>Booked by</th>
- <th>Booked until</th>
- <th>Purpose</th>
- <th>Utilization</th>
- <th>Status</th>
- <th></th>
- <th></th>
- </tr>
- </thead>
- <tbody>
- {% for pod, booking, utilization in dev_pods %}
- <tr>
- <td>
- <a href={% url 'dashboard:resource' resource_id=pod.id %}>{{ pod.name }}</a>
- </td>
- <td>
- <a target='_blank' href={{ pod.slave.url }}>{{ pod.slave.name }}</a>
- </td>
- <td>
- {{ booking.user.username }}
- </td>
- <td>
- {{ booking.end }}
- </td>
- <td>
- {{ booking.purpose }}
- </td>
- <td>
- {{ utilization }}
- </td>
- <td style="background-color:{{ pod.slave.status | jenkins_status_color }}">
- {{ pod.slave.status }}
- </td>
- <td>
- <a href="{% url 'booking:create' resource_id=pod.id %}" class="btn btn-primary">
- Book
- </a>
- </td>
- <td>
- <a href="{% url 'dashboard:resource' resource_id=pod.id %}" class="btn btn-primary">
- Info
- </a>
- </td>
- </tr>
- {% endfor %}
- </tbody>
-{% endblock table %}
-
-{% block tablejs %}
- <script type="text/javascript">
- $(document).ready(function () {
- $('#table').DataTable({
- scrollX: true,
- columnDefs: [
- {type: 'status', targets: 6}
- ],
- "order": [[6, "asc"]]
- });
- });
- </script>
-{% endblock tablejs %}
diff --git a/tools/pharos-dashboard/src/templates/dashboard/jenkins_slaves.html b/tools/pharos-dashboard/src/templates/dashboard/jenkins_slaves.html
deleted file mode 100644
index fa361b13..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/jenkins_slaves.html
+++ /dev/null
@@ -1,46 +0,0 @@
-{% extends "dashboard/table.html" %}
-{% load staticfiles %}
-
-{% load jenkins_filters %}
-
-{% block table %}
- <thead>
- <tr>
- <th>Slave name</th>
- <th>Status</th>
- <th>Job</th>
- </tr>
- </thead>
- <tbody>
- {% for slave in slaves %}
- <tr>
- <td><a target='_blank'
- href={{ slave.url }}>{{ slave.name }}</a>
- </td>
- <td style="background-color:{{ slave.status | jenkins_status_color }}">
- {{ slave.status }}
- </td>
- <td><a {{ slave.last_job_result | jenkins_job_blink }}
- style="color:{{ slave.last_job_result | jenkins_job_color }}"
- target="_blank" href={{ slave.last_job_url }}>
- {{ slave.last_job_name }}</a>
- </td>
- </tr>
- {% endfor %}
- </tbody>
-{% endblock table %}
-
-
-{% block tablejs %}
- <script type="text/javascript">
- $(document).ready(function () {
- $('#table').DataTable({
- scrollX: true,
- columnDefs: [
- {type: 'status', targets: 1}
- ],
- "order": [[1, "asc"]]
- });
- });
- </script>
-{% endblock tablejs %}
diff --git a/tools/pharos-dashboard/src/templates/dashboard/resource.html b/tools/pharos-dashboard/src/templates/dashboard/resource.html
deleted file mode 100644
index c9e57354..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/resource.html
+++ /dev/null
@@ -1,58 +0,0 @@
-{% extends "base.html" %}
-{% load staticfiles %}
-
-{% block extrahead %}
- <!-- Morris Charts CSS -->
- <link href="{% static "bower_components/morrisjs/morris.css" %}" rel="stylesheet">
-
- <!-- DataTables CSS -->
- <link href="{% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css" %}"
- rel="stylesheet">
-
- <!-- DataTables Responsive CSS -->
- <link href="{% static "bower_components/datatables-responsive/css/dataTables.responsive.css" %}"
- rel="stylesheet">
-{% endblock extrahead %}
-
-
-{% block content %}
- {% include "dashboard/resource_detail.html" %}
-{% endblock content %}
-
-
-{% block extrajs %}
- <!-- DataTables JavaScript -->
- <link href="{% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css" %}"
- rel="stylesheet">
-
- <script src={% static "bower_components/datatables/media/js/jquery.dataTables.min.js" %}></script>
- <script src={% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.min.js" %}></script>
-
-
-
- <!-- Flot Charts JavaScript -->
- <script src="{% static "bower_components/flot/excanvas.min.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.pie.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.resize.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.time.js" %}"></script>
- <script src="{% static "bower_components/flot.tooltip/js/jquery.flot.tooltip.min.js" %}"></script>
-
- <script src="{% static "js/flot-pie-chart.js" %}"></script>
-
- <script type="text/javascript">
- $(document).ready(function () {
- $('#{{ resource.id }}_server_table').DataTable({});
- $('#{{ resource.id }}_bookings_table').DataTable({});
- $('#{{ resource.id }}_vpn_user_table').DataTable({});
-
- var chart_id = "{{ resource.id }}_booking_utilization";
- var utilization_url = "{% url 'dashboard:booking_utilization' resource_id=resource.id weeks=4 %}";
- loadChartData(chart_id, utilization_url);
-
- var chart_id = "{{ resource.id }}_jenkins_utilization";
- var utilization_url = "{% url 'dashboard:jenkins_utilization' resource_id=resource.id weeks=1 %}";
- loadChartData(chart_id, utilization_url);
- });
- </script>
-{% endblock extrajs %} \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/templates/dashboard/resource_all.html b/tools/pharos-dashboard/src/templates/dashboard/resource_all.html
deleted file mode 100644
index a770d4e8..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/resource_all.html
+++ /dev/null
@@ -1,73 +0,0 @@
-{% extends "base.html" %}
-{% load staticfiles %}
-
-{% block extrahead %}
- <!-- Morris Charts CSS -->
- <link href="{% static "bower_components/morrisjs/morris.css" %}" rel="stylesheet">
-
- <!-- DataTables CSS -->
- <link href="{% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css" %}"
- rel="stylesheet">
-
- <!-- DataTables Responsive CSS -->
- <link href="{% static "bower_components/datatables-responsive/css/dataTables.responsive.css" %}"
- rel="stylesheet">
-{% endblock extrahead %}
-
-
-{% block content %}
- {% for resource, utilization, bookings in pods %}
- <div class="row">
- <div class="col-lg-12">
- <div class="panel panel-default">
- <div class="panel-heading">
- {{ resource.name }}
- </div>
- <div class="panel-body">
- {% include "dashboard/resource_detail.html" %}
- </div>
- </div>
- </div>
- </div>
- {% endfor %}
-{% endblock content %}
-
-
-{% block extrajs %}
- <!-- DataTables JavaScript -->
- <link href="{% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css" %}"
- rel="stylesheet">
-
- <script src={% static "bower_components/datatables/media/js/jquery.dataTables.min.js" %}></script>
- <script src={% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.min.js" %}></script>
-
-
-
- <!-- Flot Charts JavaScript -->
- <script src="{% static "bower_components/flot/excanvas.min.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.pie.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.resize.js" %}"></script>
- <script src="{% static "bower_components/flot/jquery.flot.time.js" %}"></script>
- <script src="{% static "bower_components/flot.tooltip/js/jquery.flot.tooltip.min.js" %}"></script>
-    <script src="{% static "js/flot-pie-chart.js" %}"></script>
-
- <script type="text/javascript">
- $(document).ready(function () {
- {% for resource, utilization, bookings in pods %}
-
- $('#{{ resource.id }}_server_table').DataTable({});
- $('#{{ resource.id }}_bookings_table').DataTable({});
- $('#{{ resource.id }}_vpn_user_table').DataTable({});
-
- var chart_id = "{{ resource.id }}_booking_utilization";
- var utilization_url = "{% url 'dashboard:booking_utilization' resource_id=resource.id weeks=4 %}";
- loadChartData(chart_id, utilization_url);
-
- var chart_id = "{{ resource.id }}_jenkins_utilization";
- var utilization_url = "{% url 'dashboard:jenkins_utilization' resource_id=resource.id weeks=1 %}";
- loadChartData(chart_id, utilization_url);
- {% endfor %}
- });
- </script>
-{% endblock extrajs %} \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/templates/dashboard/resource_detail.html b/tools/pharos-dashboard/src/templates/dashboard/resource_detail.html
deleted file mode 100644
index 740dd259..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/resource_detail.html
+++ /dev/null
@@ -1,205 +0,0 @@
-{% load jenkins_filters %}
-
-<div class="row">
- <div class="col-lg-3">
- <div class="panel panel-default">
- <div class="panel-heading">
- Jenkins Utilization
- <div class="pull-right">
- <div class="form-group">
- <select onchange="loadChartData('{{ resource.id }}_jenkins_utilization', this.value);">
- <option value="{% url 'dashboard:jenkins_utilization' resource_id=resource.id weeks=1 %}">
- Last Week
- </option>
- <option value="{% url 'dashboard:jenkins_utilization' resource_id=resource.id weeks=4 %}">
- Last Month
- </option>
- </select>
- </div>
- </div>
- </div>
- <div class="panel-body">
- <div class="flot-chart">
- <div class="flot-chart-content"
- id="{{ resource.id }}_jenkins_utilization"></div>
- </div>
- </div>
- </div>
- </div>
-
- <div class="col-lg-9">
- <div class="panel panel-default">
- <div class="panel-heading">
- Status
- </div>
- <div class="panel-body">
- <div class="list-group pre-scrollable">
- {% for status in resource.resourcestatus_set.all %}
- <a href="#" class="list-group-item">
- <i class="fa fa-info fa-fw"></i> {{ status.title }}
- <span class="pull-right text-muted small">
- <em>{{ status.timestamp }}</em>
- </span>
- </a>
- {% endfor %}
- </div>
- </div>
- </div>
- </div>
- <div class="col-lg-9">
- <div class="panel panel-default">
- <div class="panel-heading">
- Servers
- </div>
- <div class="panel-body">
- <div class="dataTables_wrapper">
- <table class="table table-striped table-bordered table-hover"
- id="{{ resource.id }}_server_table" cellspacing="0"
- width="100%">
- {% include "dashboard/server_table.html" %}
- </table>
- </div>
- </div>
- </div>
- </div>
-</div>
-<div class="row">
- <div class="col-lg-3">
- <div class="panel panel-default">
- <div class="panel-heading">
- Booking Utilization
- <div class="pull-right">
- <div class="form-group">
- <select onchange="loadChartData('{{ resource.id }}_booking_utilization', this.value);">
- <option value="{% url 'dashboard:booking_utilization' resource_id=resource.id weeks=-4 %}">
- Last Month
- </option>
- <option value="{% url 'dashboard:booking_utilization' resource_id=resource.id weeks=-1 %}">
- Last Week
- </option>
- <option value="{% url 'dashboard:booking_utilization' resource_id=resource.id weeks=1 %}">
- Next Week
- </option>
- <option selected="selected"
- value="{% url 'dashboard:booking_utilization' resource_id=resource.id weeks=4 %}">
- Next Month
- </option>
- </select>
- </div>
- </div>
- </div>
- <div class="panel-body">
- <div class="flot-chart">
- <div class="flot-chart-content"
- id="{{ resource.id }}_booking_utilization"></div>
- </div>
- </div>
- </div>
- </div>
- <div class="col-lg-9">
- <div class="panel panel-default">
- <div class="panel-heading">
- Bookings
- </div>
- <div class="panel-body">
- <div class="dataTables_wrapper">
- <table class="table table-striped table-bordered table-hover"
- id="{{ resource.id }}_bookings_table" cellspacing="0"
- width="100%">
- {% include "booking/booking_table.html" %}
- </table>
- </div>
- </div>
- </div>
- </div>
-</div>
-<div class="row">
- <div class="col-lg-3">
- <div class="panel panel-default">
- <div class="panel-heading">
- Contact
- </div>
- <div class="panel-body">
- <p>
- <b>Lab Owner: </b>
- {{ resource.owner.username }}
- </p>
- <p>
- <b>Email: </b>
- {{ resource.owner.email }}
- </p>
- <p>
- <a href="{% url 'booking:create' resource_id=resource.id %}" class="btn
- btn-primary">
- Booking
- </a>
- <a href="{{ resource.url }}" class="btn
- btn-primary">
- OPNFV Wiki
- </a>
- </p>
- </div>
- </div>
- </div>
- <div class="col-lg-3">
- <div class="panel panel-default">
- <div class="panel-heading">
- Jenkins Status
- </div>
- <div class="panel-body">
- <p>
- <b>Slave Name: </b>
- <a target='_blank'
- href={{ resource.slave.url }}>{{ resource.slave.name }}</a>
- </p>
- <p>
- <b>Status: </b>
- {{ resource.slave.status }}
- </p>
- <p>
- <b>Last Job: </b>
- <a href="{{ resource.slave.last_job_url }}">
- {{ resource.slave.last_job_name }}
- </a>
- </p>
- </div>
- </div>
- </div>
- <div class="col-lg-6">
- <div class="panel panel-default">
- <div class="panel-heading">
- VPN Users
- </div>
- <div class="panel-body">
- <div class="dataTables_wrapper">
- <table class="table table-striped table-bordered table-hover"
- id="{{ resource.id }}_vpn_user_table" cellspacing="0"
- width="100%">
- <thead>
- <tr>
- <th>User</th>
- <th>Email</th>
- <th>Company</th>
- </tr>
- </thead>
- <tbody>
- {% for user in resource.vpn_users.all %}
- <tr>
- <td>
- {{ user.username }}
- </td>
- <td>
- {{ user.email }}
- </td>
- <td>
- {{ user.userprofile.company }}
- </td>
- </tr>
- {% endfor %}
-                        </tbody>
-                    </table>
- </div>
- </div>
- </div>
- </div>
-</div>
diff --git a/tools/pharos-dashboard/src/templates/dashboard/server_table.html b/tools/pharos-dashboard/src/templates/dashboard/server_table.html
deleted file mode 100644
index f01bd603..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/server_table.html
+++ /dev/null
@@ -1,30 +0,0 @@
-<thead>
-<tr>
- <th>Server</th>
- <th>Model</th>
- <th>CPU</th>
- <th>RAM</th>
- <th>Storage</th>
-</tr>
-</thead>
-<tbody>
-{% for server in resource.server_set.all %}
- <tr>
- <td>
- {{ server.name }}
- </td>
- <td>
- {{ server.model }}
- </td>
- <td>
- {{ server.cpu }}
- </td>
- <td>
- {{ server.ram }}
- </td>
- <td>
- {{ server.storage }}
- </td>
- </tr>
-{% endfor %}
-</tbody> \ No newline at end of file
diff --git a/tools/pharos-dashboard/src/templates/dashboard/table.html b/tools/pharos-dashboard/src/templates/dashboard/table.html
deleted file mode 100644
index d59f0e34..00000000
--- a/tools/pharos-dashboard/src/templates/dashboard/table.html
+++ /dev/null
@@ -1,43 +0,0 @@
-{% extends "base.html" %}
-{% load staticfiles %}
-
-{% block extrahead %}
- <!-- DataTables CSS -->
- <link href="{% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css" %}"
- rel="stylesheet">
-
- <!-- DataTables Responsive CSS -->
- <link href="{% static "bower_components/datatables-responsive/css/dataTables.responsive.css" %}" rel="stylesheet">
-{% endblock extrahead %}
-
-{% block content %}
- <div class="row">
- <div class="col-lg-12">
- <div class="dataTables_wrapper">
- <table class="table table-striped table-bordered table-hover" id="table" cellspacing="0"
- width="100%">
-
- {% block table %}
- {% endblock table %}
-
- </table>
- </div>
- <!-- /.table-responsive -->
- <!-- /.panel-body -->
- <!-- /.panel -->
- </div>
- <!-- /.col-lg-12 -->
- </div>
-{% endblock content %}
-
-{% block extrajs %}
- <!-- DataTables JavaScript -->
-
- <script src={% static "bower_components/datatables/media/js/jquery.dataTables.min.js" %}></script>
- <script src={% static "bower_components/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.min.js" %}></script>
-
- <script src={% static "js/dataTables-sort.js" %}></script>
-
- {% block tablejs %}
- {% endblock tablejs %}
-{% endblock extrajs %}
diff --git a/tools/pharos-dashboard/src/templates/layout.html b/tools/pharos-dashboard/src/templates/layout.html
deleted file mode 100644
index 9578e155..00000000
--- a/tools/pharos-dashboard/src/templates/layout.html
+++ /dev/null
@@ -1,73 +0,0 @@
-{% load staticfiles %}
-<!DOCTYPE html>
-<html lang="en">
-
-<head>
-
- <meta charset="utf-8">
- <meta http-equiv="X-UA-Compatible" content="IE=edge">
- <meta name="viewport" content="width=device-width, initial-scale=1">
- <meta name="description" content="">
- <meta name="author" content="">
-
- <title>OPNFV Pharos {{ title }}</title>
-
- <!-- Bootstrap Core CSS -->
- <link href="{% static "bower_components/bootstrap/dist/css/bootstrap.min.css" %}"
- rel="stylesheet">
-
- <!-- MetisMenu CSS -->
- <link href="{% static "bower_components/metisMenu/dist/metisMenu.min.css" %}" rel="stylesheet">
-
- <!-- Custom CSS -->
- <link href="{% static "bower_components/startbootstrap-sb-admin-2-blackrockdigital/dist/css/sb-admin-2.min.css" %}"
- rel="stylesheet">
- <link href="{% static "css/theme.css" %}" rel="stylesheet">
-
- <!-- Custom Fonts -->
- <link href="{% static "bower_components/font-awesome/css/font-awesome.min.css" %}"
- rel="stylesheet" type="text/css">
-
- <!-- Favicon -->
- <link rel="shortcut icon" href="{% static 'favicon.ico' %}">
-
- {% block extrahead %}
- {% endblock extrahead %}
-
- <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
- <!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
- <!--[if lt IE 9]>
- <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script>
- <script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script>
- <![endif]-->
-
-</head>
-
-{% block extrastyle %}
-{% endblock extrastyle %}
-
-<body>
-{% block basecontent %}
-{% endblock basecontent %}
-
-
-<script src="https://code.jquery.com/jquery-2.2.4.min.js"
- integrity="sha256-BbhdlvQf/xTY9gja0Dq3HiwQF8LaCRTXxZKRutelT44=" crossorigin="anonymous"></script>
-{#<!-- jQuery -->#}
-{#<script src="{% static "bower_components/jquery/dist/jquery.min.js" %}"></script>#}
-{#<script src="{% static "bower_components/jquery-migrate/jquery-migrate.min.js" %}"></script>#}
-
-{#<script src="https://code.jquery.com/jquery-2.2.0.min.js"></script>#}
-<!-- Bootstrap Core JavaScript -->
-<script src="{% static "bower_components/bootstrap/dist/js/bootstrap.min.js" %}"></script>
-
-<!-- Metis Menu Plugin JavaScript -->
-<script src="{% static "bower_components/metisMenu/dist/metisMenu.min.js" %}"></script>
-
-<!-- Custom Theme JavaScript -->
-<script src="{% static "bower_components/startbootstrap-sb-admin-2-blackrockdigital/dist/js/sb-admin-2.min.js" %}"></script>
-
-{% block extrajs %}
-{% endblock extrajs %}
-</body>
-</html>
diff --git a/tools/pharos-dashboard/src/templates/rest_framework/api.html b/tools/pharos-dashboard/src/templates/rest_framework/api.html
deleted file mode 100644
index 9c6c4f7d..00000000
--- a/tools/pharos-dashboard/src/templates/rest_framework/api.html
+++ /dev/null
@@ -1,9 +0,0 @@
-{% extends "rest_framework/base.html" %}
-
-{% block title %}Pharos Dashboard API{% endblock %}
-
-{% block branding %}
- <a class='navbar-brand' rel="nofollow" href=#>
- Pharos Dashboard API
- </a>
-{% endblock %} \ No newline at end of file
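Together with 'rest_framework.authtoken' in INSTALLED_APPS, the TokenAuthentication class in REST_FRAMEWORK, and the "Generate" token button in the profile template, this API template implies token-based access to the /api/ routes. A hypothetical client call using requests (pinned in the requirements files below); the host and token are placeholders, not values from the deleted code:

    # Hypothetical API client sketch; host, path and token are placeholders
    import requests

    DASHBOARD = 'http://localhost:8000'                 # assumed local instance
    TOKEN = 'paste-the-token-from-the-profile-page'

    resp = requests.get(
        DASHBOARD + '/api/',
        headers={'Authorization': 'Token ' + TOKEN},    # DRF TokenAuthentication header
    )
    resp.raise_for_status()
    print(resp.json())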
diff --git a/tools/pharos-dashboard/web/Dockerfile b/tools/pharos-dashboard/web/Dockerfile
deleted file mode 100644
index 228b0b0f..00000000
--- a/tools/pharos-dashboard/web/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM python:3.5
-ENV PYTHONUNBUFFERED 1
-RUN mkdir /config
-ADD ./requirements.txt /config/
-RUN pip install -r /config/requirements.txt
-RUN mkdir -p /pharos_dashboard/src
-WORKDIR /pharos_dashboard/src
diff --git a/tools/pharos-dashboard/web/requirements.txt b/tools/pharos-dashboard/web/requirements.txt
deleted file mode 100644
index f80f1c07..00000000
--- a/tools/pharos-dashboard/web/requirements.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-celery==3.1.23
-cryptography==1.4
-Django==1.10
-django-bootstrap3==7.0.1
-django-crispy-forms==1.6.0
-django-filter==0.14.0
-django-registration==2.1.2
-djangorestframework==3.4.6
-gunicorn==19.6.0
-jira==1.0.7
-jsonpickle==0.9.3
-oauth2==1.9.0.post1
-oauthlib==1.1.2
-pika==0.10.0
-psycopg2==2.6.2
-PyJWT==1.4.2
-requests==2.11.0
diff --git a/tools/pharos-dashboard/worker/Dockerfile b/tools/pharos-dashboard/worker/Dockerfile
deleted file mode 100644
index c1e8aff7..00000000
--- a/tools/pharos-dashboard/worker/Dockerfile
+++ /dev/null
@@ -1,8 +0,0 @@
-FROM python:3.5
-ENV PYTHONUNBUFFERED 1
-RUN mkdir /config
-ADD ./requirements.txt /config/
-RUN pip install -r /config/requirements.txt
-RUN useradd -ms /bin/bash celery
-USER celery
-WORKDIR /pharos_dashboard/src
diff --git a/tools/pharos-dashboard/worker/requirements.txt b/tools/pharos-dashboard/worker/requirements.txt
deleted file mode 100644
index f80f1c07..00000000
--- a/tools/pharos-dashboard/worker/requirements.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-celery==3.1.23
-cryptography==1.4
-Django==1.10
-django-bootstrap3==7.0.1
-django-crispy-forms==1.6.0
-django-filter==0.14.0
-django-registration==2.1.2
-djangorestframework==3.4.6
-gunicorn==19.6.0
-jira==1.0.7
-jsonpickle==0.9.3
-oauth2==1.9.0.post1
-oauthlib==1.1.2
-pika==0.10.0
-psycopg2==2.6.2
-PyJWT==1.4.2
-requests==2.11.0
diff --git a/tools/pharos-validator/.gitignore b/tools/pharos-validator/.gitignore
deleted file mode 100644
index 3d553289..00000000
--- a/tools/pharos-validator/.gitignore
+++ /dev/null
@@ -1,63 +0,0 @@
-# Other gitignore files may be used in each directory under /src
-
-# Compiled #
-############
-*.pyc
-*__pycache__
-*.class
-*.dll
-*.exe
-*.o
-*.so
-
-# OS generated files #
-######################
-.DS_Store
-.DS_Store?
-._*
-.Spotlight-V100
-.Trashes
-ehthumbs.db
-Thumbs.db
-
-# Archive/Image files #
-###################
-.7z
-*.dmg
-*.gz
-*.iso
-*.jar
-*.rar
-*.tar
-*.zip
-*.pkg
-*.img
-*.iso
-*.cpio
-
-# logs and databases #
-######################
-*.aux
-*.log
-*.sql
-*.sqlite
diff --git a/tools/pharos-validator/LICENSE b/tools/pharos-validator/LICENSE
deleted file mode 100644
index 7a7c11af..00000000
--- a/tools/pharos-validator/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2016 Todd Gaunt and others
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/tools/pharos-validator/Makefile b/tools/pharos-validator/Makefile
deleted file mode 100644
index 55695020..00000000
--- a/tools/pharos-validator/Makefile
+++ /dev/null
@@ -1,56 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-include config.mk
-
-# Variables passed down to subfolder make commands
-export DESTDIR
-export PREFIX
-
-SHELL=/bin/sh
-
-# Source paths
-SRCDIR="src/"
-PXE_INITRD=${SRCDIR}/pxe_initrd/
-VALIDATION_TOOL=${SRCDIR}/validation_tool/
-
-# makefile names
-PXE_MAKE=pxe_initrd.mk
-
-
-all: validation_tool pxe_initrd
-
-##########################################
-# The following forces rebuilding
-##########################################
-.PHONY: validation_tool ${VALIDATION_TOOL}
-validation_tool: ${VALIDATION_TOOL}
-
-${VALIDATION_TOOL}:
- pushd $@ && ./setup.py build && popd
-
-##########################################
-# The following forces rebuilding
-##########################################
-.PHONY: pxe_initrd ${PXE_INITRD}
-pxe_initrd: ${PXE_INITRD}
-
-${PXE_INITRD}:
- ${MAKE} -C $@ -f ${PXE_MAKE} all
-##########################################
-
-install: all
- # Install the programs to the passed in PREFIX dir
- ${MAKE} -C ${PXE_INITRD} -f ${PXE_MAKE} install
- pushd ${VALIDATION_TOOL} && ./setup.py install --prefix=${DESTDIR}/${PREFIX} && popd
-
-.PHONY: clean
-clean:
- ${MAKE} -C ${PXE_INITRD} -f ${PXE_MAKE} clean
- pushd ${VALIDATION_TOOL} && ./setup.py clean --all && popd
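A typical invocation of this Makefile, as a sketch (install paths come from config.mk and can be overridden on the command line; root is needed because the pxe_initrd build uses mknod and chroot):

    sudo make                                      # builds validation_tool and the PXE initrd
    sudo make install DESTDIR=/ PREFIX=/usr/local  # installs both components
    make clean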
diff --git a/tools/pharos-validator/config.mk b/tools/pharos-validator/config.mk
deleted file mode 100644
index ccb32b15..00000000
--- a/tools/pharos-validator/config.mk
+++ /dev/null
@@ -1,5 +0,0 @@
-# Customize below to fit your system
-
-# paths
-PREFIX=/usr/local/
-MANPREFIX=${PREFIX}/share/man
diff --git a/tools/pharos-validator/docs/howto/virt-manager/HOWTO b/tools/pharos-validator/docs/howto/virt-manager/HOWTO
deleted file mode 100644
index bed105a7..00000000
--- a/tools/pharos-validator/docs/howto/virt-manager/HOWTO
+++ /dev/null
@@ -1,50 +0,0 @@
-Syntax guide:
- 1. [[ denotes commands / code ]]
-	2. <> denotes a bullet; sub-bullets have an extra > appended according to their sub-level
-	3. ${denotes variables that the user is expected to fill out for their specific needs}
-
-Tutorials:
- 1. Configure host machine for virtualization
- 2. Make a Virtual Machine with storage
- 3. Make a blank virtual machine awaiting PXE
- 4. Install and save default VM image
- 5. Configure Networking with VMs
-;
-
-1 --
- <> Install Host OS (CentOS7)
-	<> Use the package manager (yum) to install qemu, kvm, virt-install, and virt-manager
-	<> Add "kvm-intel.nested=1" to the kernel command line and/or edit /etc/modprobe.d/kvm-intel.conf to contain "options kvm-intel nested=1". This enables nested virtualization so that nested guests perform reasonably; a quick check is sketched after this step.
- <>> A command to do this is [[ echo "options kvm-intel nested=1" | sudo tee /etc/modprobe.d/kvm-intel.conf ]]
- <>
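A quick way to check that nesting actually took effect, as a minimal sketch (assumes an Intel CPU and the kvm-intel module; adjust for AMD):

    sudo modprobe -r kvm_intel && sudo modprobe kvm_intel   # reload so the new option is read
    cat /sys/module/kvm_intel/parameters/nested             # prints Y (or 1 on newer kernels) when nesting is enabled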
-
-2 --
- <> Creating new disks uses the command [[ qemu-img create -f raw ${image-name}.img ${size} ]], where image-name is the name of your vm's disk, and size is the size of the disk you want (e.g. 2G will be a 2 Gigabyte disk, 512M will be 512 Megabytes)
- <> Download some installation media (e.g. CentOS7-DVD.iso)
-	<> Install onto that disk with the virt-install tool: [[ virt-install -n name_of_vm --graphics none --vcpus=2 -l /path/to/installation.iso --ram=512 --disk path=/path/to/disk.img,cache=none --extra-args="console=ttyS0" ]]. Omit --extra-args="console=ttyS0" if you would rather have the VM use the X display instead of a serial console.
- <>
-
-3 --
- <> TODO
-
-4 --
- <> Either script the install or make a template of the VM
-
-5 --
-	<> [[ virsh attach-interface --domain ${name} --type network --source default --model virtio --mac ${mac-address} --config --live ]] where ${name} is the name of the virtual machine as known to virsh, and ${mac-address} is any randomly generated MAC address.
-	<> Each node requires at least 3 runs of the above command (with different MAC addresses) so that it has 3 NICs in addition to the default one, as the Pharos specification requires; a short loop doing this is sketched after this step.
- <> You can verify the addition of the above NICs with [[ virsh domiflist ${name} ]] where ${name} is the virtual machine you would like to see the NICs of.
- <> These NICs may be detached with the command [[ virsh detach-interface --domain ${name} --type network --mac ${mac-address} --config ]] where ${name} is the vm you're targeting and ${mac-address} is the NIC's specific MAC address.
-
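A minimal sketch of the loop mentioned in the last bullet above (the VM name and MAC addresses are placeholders; MACs can be generated with genmac.sh in this directory):

    #!/bin/bash
    # Attach three extra NICs to one guest so it meets the Pharos NIC count.
    vm=jump-host-centos7_0
    for mac in DE:AD:BE:EF:00:01 DE:AD:BE:EF:00:02 DE:AD:BE:EF:00:03; do
        virsh attach-interface --domain "$vm" --type network --source default \
              --model virtio --mac "$mac" --config --live
    done
    virsh domiflist "$vm"   # should now list four interfaces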
-6 --
- <> Add a virtual interface to a bridge by editing the configuration file that qemu generates.
- <> Change the line from this:
- <interface type='network'>
- <mac address='00:11:22:33:44:55'/>
- <source network='default'/>
- </interface>
- <> To this:
- <interface type='bridge'>
- <mac address='00:11:22:33:44:55'/>
- <source bridge='br0'/>
- </interface>
diff --git a/tools/pharos-validator/docs/howto/virt-manager/bridgevm.sh b/tools/pharos-validator/docs/howto/virt-manager/bridgevm.sh
deleted file mode 100755
index 370132b5..00000000
--- a/tools/pharos-validator/docs/howto/virt-manager/bridgevm.sh
+++ /dev/null
@@ -1 +0,0 @@
-sudo /usr/libexec/qemu-kvm -hda /vm/template/jump-host.img -device e1000,netdev=net0,mac=DE:AD:BE:EF:FE:7A -netdev tap,id=net0
diff --git a/tools/pharos-validator/docs/howto/virt-manager/genmac.sh b/tools/pharos-validator/docs/howto/virt-manager/genmac.sh
deleted file mode 100755
index 10b12f92..00000000
--- a/tools/pharos-validator/docs/howto/virt-manager/genmac.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-# generate a random mac address for the qemu nic
-printf 'DE:AD:BE:EF:%02X:%02X\n' $((RANDOM%256)) $((RANDOM%256))
diff --git a/tools/pharos-validator/docs/howto/virt-manager/jump-server.sh b/tools/pharos-validator/docs/howto/virt-manager/jump-server.sh
deleted file mode 100755
index 465ea132..00000000
--- a/tools/pharos-validator/docs/howto/virt-manager/jump-server.sh
+++ /dev/null
@@ -1 +0,0 @@
-sudo virt-install -n jump-host-centos7_0 --graphics none --vcpus=2 --ram=512 --os-type=linux -l /iso/CentOS-7-x86_64-DVD-1511.iso --disk path=/vm/template/jump-host.img,cache=none --extra-args console=ttyS0
diff --git a/tools/pharos-validator/docs/howto/virt-manager/node-cycle.sh b/tools/pharos-validator/docs/howto/virt-manager/node-cycle.sh
deleted file mode 100755
index 5b945dc7..00000000
--- a/tools/pharos-validator/docs/howto/virt-manager/node-cycle.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-for i in $(seq 1 5); do
- qemu-kvm -m 512M -boot n -enable-kvm -net nic -net user,tftp=/srv/tftp/,bootfile=/pxelinux.0 &
-done
diff --git a/tools/pharos-validator/docs/howto/virt-manager/virsh-commands.txt b/tools/pharos-validator/docs/howto/virt-manager/virsh-commands.txt
deleted file mode 100644
index 45f81856..00000000
--- a/tools/pharos-validator/docs/howto/virt-manager/virsh-commands.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-# Installing an OS on a new VM
-virt-install -n jump-host-centos7 --graphics none --vcpus=2 --ram=512 --os-type=linux -l /iso/CentOS-7-x86_64-DVD-1511.iso --disk path=/vm/template/jump-host.img,cache=none --extra-args="console=ttyS0"
-
-# PXE booting a new vm
-virt-install --name jump-host-centos7 --graphics none --vcpus 2 --ram=512 --os-type=linux --os-variant=centos7 --network=bridge:"network_bridge_name" --pxe
-
-# Unused option for pxe
-#--disk path=/vm/template/jump-host.img,cache=none
-
-# Can't delete a VM? Here are some troubleshooting options
-Remember to log in as root if you need to destroy virtual machines that were created by root
-
-# Command to add network interfaces to VM guest
-attach-interface jump-host-centos7_0 --type network --source default --model virtio --mac DE:AD:BE:EF:B4:EF --config --live
diff --git a/tools/pharos-validator/docs/initial_proposal.txt b/tools/pharos-validator/docs/initial_proposal.txt
deleted file mode 100644
index c607ccb3..00000000
--- a/tools/pharos-validator/docs/initial_proposal.txt
+++ /dev/null
@@ -1,49 +0,0 @@
-##OPNFV - Pharos Qualification Tool Project Proposal
-
-Todd Gaunt
-
-May 20, 2016
-
-##Summary
-This proposal is for a project to develop a Pharos qualification tool over the course
-of 3 months, based on the requirements listed in the OPNFV Wiki (https://wiki.opnfv.org/display/DEV/Intern+Project%3A+Pharos+Qualification+Tool). I believe I am well suited for the job, as developing a tool that probes a machine for data is in line with my skill set. I work on Linux boxes daily and have a good understanding of automating predictable, repeatable tasks such as probing for information and deploying the software that does the job. The tool for testing a
-POD against the Pharos specification could be a simple command-line suite of scripts written in a simple, ubiquitous language such as sh, bash, or Python, and deployed onto a server using a container such as Docker, or with an even simpler model of a tar.gz package temporarily "installed" with symbolic links by a tool like GNU stow. Additional information on the validation of the POD, per the requirements list, is included in the Proposed
-Design section below.
-
-##Proposed Design
-###Deployment of Test Tools to the POD
-Utilize a container solution such as Docker with a small base Linux image: either Alpine Linux (about a 5 MB base image, 52 MB with python3) or CentOS 7 (the native system according to the Pharos specification, roughly a 197 MB base image, 334.5 MB with python3). This allows easy installation of the tool without pulling in external dependencies, so administrators do not have to worry about dependency resolution or having the proper package version installed. The base image will be built with a python3 interpreter and its dependencies to run the qualification tool. If a Docker image approach is not feasible for some reason, a traditional package format such as rpm can be used. A rough sketch of the container approach follows this paragraph.
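As a rough sketch of the container approach described above (the image tag and build context are hypothetical; note that /proc/cpuinfo and /proc/meminfo inside a container already reflect the host):

    docker build -t pharos-validator .                        # hypothetical image built from the Alpine or CentOS base
    docker run --rm --net=host --privileged pharos-validator  # host networking and privileges so the tool can probe the machine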
-
-###Qualification of POD Resources
-Load the system configuration/inventory files that the Pharos spec states the machines should have; each machine should have an easily machine-readable file available with its system configuration information. The tool will gather as much hardware information as possible using the Linux filesystem and standard tools, and compare against (or fall back to) the inventory file.
-
-####Required Compute
-Poll /proc/cpuinfo for CPU specifications; the target is an Intel Xeon E5-2600v2 series processor or equivalent in processing power (the Intel brand is not a requirement). A probe sketch follows this list.
-- 64-bit
-- 4 cores
-- 1.8 GHz (this may be a softer requirement, e.g. 1.5 GHz or less, since different architectures can perform equivalently at different clock speeds)
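A minimal sketch of the /proc/cpuinfo probing described above:

    grep -c '^processor' /proc/cpuinfo                       # number of logical CPUs
    grep -m1 '^model name' /proc/cpuinfo                     # CPU model string
    grep -m1 '^cpu MHz' /proc/cpuinfo                        # current clock speed
    grep -m1 -wq lm /proc/cpuinfo && echo "64-bit capable"   # 'lm' flag = long mode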
-
-####Required RAM
-Poll /proc/meminfo for memory specifications; a sketch of the check follows this list.
-- 32 GB RAM minimum; anything less than that and the test will fail.
-- ECC memory is nice to have but not required.
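For example, a sketch of the RAM check described above (MemTotal in /proc/meminfo is reported in kB, and sits slightly below the installed amount, so a small margin may be needed in practice):

    memkb=$(awk '/^MemTotal:/ {print $2}' /proc/meminfo)
    if [ "$memkb" -lt $((32 * 1024 * 1024)) ]; then
        echo "FAIL: only $((memkb / 1024 / 1024)) GiB of RAM, 32 GiB required"
    else
        echo "PASS: RAM requirement met"
    fi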
-
-####Required Software
-The jump host requires CentOS 7 with OPNFV/OpenStack virtualized inside it, so that an initial connection can be established and further connections can be made to the nodes within the POD.
-
-####Required Storage
-Poll /sys/block/ for all block devices and their sizes; properties of the storage can be discovered there as well, e.g. disk size is in /sys/block/sdX/size and disk type is in /sys/block/sdX/queue/rotational (a probing sketch follows this list). The passing metric is the minimum requirements as defined here: http://artifacts.opnfv.org/pharos/docs/specification/hardwarespec.html
-- Disks: 2 x 1TB HDD + 1 x 100GB SSD (or greater capacity)
-- The first HDD should be used for OS & additional software/tool installation
-- The second HDD is configured for CEPH object storage
-- The SSD should be used as the CEPH journal
-- Performance testing requires a mix of compute nodes with CEPH (Swift+Cinder) and without CEPH storage
-- Virtual ISO boot capabilities or a separate PXE boot server (DHCP/tftp or Cobbler)
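A sketch of the /sys/block probing described above (sizes in /sys/block/<dev>/size are counted in 512-byte sectors):

    for dev in /sys/block/sd*; do
        name=$(basename "$dev")
        sectors=$(cat "$dev/size")                 # 512-byte sectors
        rotational=$(cat "$dev/queue/rotational")  # 1 = spinning disk, 0 = SSD
        echo "$name: $((sectors / 2 / 1024 / 1024)) GiB, rotational=$rotational"
    done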
-
-####Required Network Connectivity
-Use a tool such as ifconfig/iproute2 to find the network interfaces and attempt to connect back to the jump host and the other nodes; a minimal sketch follows. Check for a valid GPG key certified by someone with administrative access to LF infrastructure for VPNs. This VPN information is laid out in the inventory file provided by the POD.
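A minimal sketch of the interface discovery and reachability check (the jump host address is a placeholder):

    ip -o link show | awk -F': ' '{print $2}'                # list interface names
    ping -c 3 -W 2 192.0.2.10 && echo "jump host reachable"  # placeholder jump host address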
-
-####Inventory of System(s) via IPMI
-Utilize IPMI interfaces to manage rebooting of the PODs the tool connects to. If available, this may also be a way to find the system information/configuration needed for the report; an ipmitool sketch follows this section.
-####Definition of Results (pass/fail evaluation)
-Utilize the Jenkins build automation server to build and deploy the test tool and receive results. If a test fails, describe at which step and why it failed.
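For instance, with ipmitool (a sketch; the BMC address and credentials are placeholders):

    ipmitool -I lanplus -H 10.0.0.101 -U admin -P secret chassis power status   # power state
    ipmitool -I lanplus -H 10.0.0.101 -U admin -P secret fru print              # hardware inventory for the report
    ipmitool -I lanplus -H 10.0.0.101 -U admin -P secret chassis power cycle    # reboot the node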
diff --git a/tools/pharos-validator/rpm/buildasroot.sh b/tools/pharos-validator/rpm/buildasroot.sh
deleted file mode 100755
index 94076c26..00000000
--- a/tools/pharos-validator/rpm/buildasroot.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#! /usr/bin/env bash
-# Builds the pharosvalidator package as root (it must be built as root)
-# then copies the finished package to /tmp. The script can either be called
-# as root or with sudo
-
-tarball=pharos-validator-1.tar.gz
-specfile=pharosvalidator.spec
-arch=x86_64
-
-if [ "$(id -u)" != 0 ]; then
- echo "This script must be run as root (or with sudo)"
- exit 1
-fi;
-
-rpmdev-setuptree
-
-cp "$specfile" ~/rpmbuild/SPECS/
-
-# Prepare the "source package" for the rpm build process using .spec file
-pushd ../../
-tar -cvf "$tarball" pharos-validator
-cp "$tarball" ~/rpmbuild/SOURCES
-popd
-
-
-cd ~/rpmbuild/
-
-rpmlint SPECS/"$specfile"
-printf "\n\nRPM BUILD PROCESS\n\n"
-rpmbuild -ba SPECS/"$specfile"
-
-cp -rfv RPMS/"$arch"/* /tmp/
diff --git a/tools/pharos-validator/rpm/installpython3.sh b/tools/pharos-validator/rpm/installpython3.sh
deleted file mode 100755
index 8f2d8334..00000000
--- a/tools/pharos-validator/rpm/installpython3.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-# This script is to be handed out (possibly pointing at a different python3 source later) so that server provisioners may install python3 on CentOS 7
-
-sudo yum install -y https://centos7.iuscommunity.org/ius-release.rpm
-sudo yum install -y python35u python35u-pip
diff --git a/tools/pharos-validator/src/pxe_initrd/.gitignore b/tools/pharos-validator/src/pxe_initrd/.gitignore
deleted file mode 100644
index b1ec7a83..00000000
--- a/tools/pharos-validator/src/pxe_initrd/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-tests/*.xml
-/tmp
-/initrd-build/
-/Linux*
-/linux*
diff --git a/tools/pharos-validator/src/pxe_initrd/LICENSE-3RD-PARTY.txt b/tools/pharos-validator/src/pxe_initrd/LICENSE-3RD-PARTY.txt
deleted file mode 100644
index abd1a5ae..00000000
--- a/tools/pharos-validator/src/pxe_initrd/LICENSE-3RD-PARTY.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-This module of the tool uses Alpine linux and various tools included with it.
-For any questions regarding licenses please refer to https://alpinelinux.org/.
diff --git a/tools/pharos-validator/src/pxe_initrd/MIRRORS.txt b/tools/pharos-validator/src/pxe_initrd/MIRRORS.txt
deleted file mode 100644
index 96d6969d..00000000
--- a/tools/pharos-validator/src/pxe_initrd/MIRRORS.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-http://dl-cdn.alpinelinux.org/alpine/
-http://nl.alpinelinux.org/alpine/
-http://dl-2.alpinelinux.org/alpine/
-http://dl-3.alpinelinux.org/alpine/
-http://dl-4.alpinelinux.org/alpine/
-http://dl-5.alpinelinux.org/alpine/
-http://dl-6.alpinelinux.org/alpine/
-http://dl-8.alpinelinux.org/alpine/
-http://distrib-coffee.ipsl.jussieu.fr/pub/linux/alpine/alpine/
-http://mirror.yandex.ru/mirrors/alpine/
-http://mirrors.gigenet.com/alpinelinux/
-http://repos.lax-noc.com/alpine/
-http://repos.dfw.lax-noc.com/alpine/
-http://repos.mia.lax-noc.com/alpine/
-http://mirror1.hs-esslingen.de/pub/Mirrors/alpine/
-http://liskamm.alpinelinux.uk/
-http://mirrors.2f30.org/alpine/
-http://mirror.leaseweb.com/alpine/
-http://repository.fit.cvut.cz/mirrors/alpine/
-http://alpine.mirror.far.fi/
-http://lasca.ic.unicamp.br/pub/alpine/
-http://alpinelinux.c3sl.ufpr.br/
diff --git a/tools/pharos-validator/src/pxe_initrd/config.mk b/tools/pharos-validator/src/pxe_initrd/config.mk
deleted file mode 100644
index 7ff47b83..00000000
--- a/tools/pharos-validator/src/pxe_initrd/config.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-# pxe_initrd version
-VERSION=1
-
-# Customize below to fit your system
-
-# paths
-PREFIX=/usr/
-MANPREFIX=${PREFIX}/share/man
-TFTPPATH=/var/lib/tftpboot
-DESTDIR=/
-
-# See MIRRORS.txt for available mirrors as of 2016-06-21, a new version of this file can be found at http://nl.alpinelinux.org/alpine/MIRRORS.txt
-mirror="http://dl-cdn.alpinelinux.org/alpine/"
-
-# For latest bleeding edge software
-version="2.6.7-r1"
-branch="edge"
-
-# For a stable versioned release
-#version="2.6.7-r0"
-#branch="latest-stable"
-
diff --git a/tools/pharos-validator/src/pxe_initrd/etc/boot.msg b/tools/pharos-validator/src/pxe_initrd/etc/boot.msg
deleted file mode 100755
index 018dc7f3..00000000
--- a/tools/pharos-validator/src/pxe_initrd/etc/boot.msg
+++ /dev/null
@@ -1,2 +0,0 @@
-Pharos-test node image
--> test-tool (DEFAULT)
diff --git a/tools/pharos-validator/src/pxe_initrd/etc/ldlinux.c32 b/tools/pharos-validator/src/pxe_initrd/etc/ldlinux.c32
deleted file mode 100755
index f9acb516..00000000
--- a/tools/pharos-validator/src/pxe_initrd/etc/ldlinux.c32
+++ /dev/null
Binary files differ
diff --git a/tools/pharos-validator/src/pxe_initrd/etc/pxelinux.0 b/tools/pharos-validator/src/pxe_initrd/etc/pxelinux.0
deleted file mode 100755
index 61a38889..00000000
--- a/tools/pharos-validator/src/pxe_initrd/etc/pxelinux.0
+++ /dev/null
Binary files differ
diff --git a/tools/pharos-validator/src/pxe_initrd/etc/pxelinux.cfg/default b/tools/pharos-validator/src/pxe_initrd/etc/pxelinux.cfg/default
deleted file mode 100755
index 06d3fdd2..00000000
--- a/tools/pharos-validator/src/pxe_initrd/etc/pxelinux.cfg/default
+++ /dev/null
@@ -1,15 +0,0 @@
-prompt 0
-display boot.msg
-F1 boot.msg
-timeout 20
-ontimeout test-tool
-
-DEFAULT test-tool
-
-LABEL test-tool
- MENU LABEL test-tool 64 bit (pharos-livecd)
- KERNEL vmlinuz
- INITRD initrd.gz
-
-LABEL Local-Boot
- localboot 0
diff --git a/tools/pharos-validator/src/pxe_initrd/etc/vmlinuz b/tools/pharos-validator/src/pxe_initrd/etc/vmlinuz
deleted file mode 100755
index c6e3e6d0..00000000
--- a/tools/pharos-validator/src/pxe_initrd/etc/vmlinuz
+++ /dev/null
Binary files differ
diff --git a/tools/pharos-validator/src/pxe_initrd/pxe_initrd.mk b/tools/pharos-validator/src/pxe_initrd/pxe_initrd.mk
deleted file mode 100644
index 6e8a1e81..00000000
--- a/tools/pharos-validator/src/pxe_initrd/pxe_initrd.mk
+++ /dev/null
@@ -1,79 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-include config.mk
-
-# Don't customize these
-CHROOT=initrd-build
-DATADIR=src
-ETC=etc
-
-# Perform all but install commands
-all: initrd.gz
-
-ready-source:
- mkdir -p tmp
- pushd tmp/ && wget -N \
- "${mirror}/${branch}/main/x86_64/apk-tools-static-${version}.apk" \
- && tar -xzf apk-tools-static-*.apk && popd
-
-initrd.gz: ready-source
- # Run the alpine installer
- ./tmp/sbin/apk.static -X "${mirror}"/"${branch}"/main \
- -U --allow-untrusted --root "${CHROOT}" --initdb add alpine-base
- # Virtual devices for /dev
- if ! [ -a "${CHROOT}"/dev/ram0 ];then mknod -m 666 "${CHROOT}"/dev/ram0 b 1 1 ;fi
- if ! [ -a "${CHROOT}"/dev/zero ];then mknod -m 666 "${CHROOT}"/dev/zero c 1 5 ;fi
- if ! [ -a "${CHROOT}"/dev/full ];then mknod -m 666 "${CHROOT}"/dev/full c 1 7 ;fi
- if ! [ -a "${CHROOT}"/dev/random ];then mknod -m 666 "${CHROOT}"/dev/random c 1 8 ;fi
- if ! [ -a "${CHROOT}"/dev/urandom ];then mknod -m 644 "${CHROOT}"/dev/urandom c 1 9 ;fi
- if ! [ -a "${CHROOT}"/dev/tty1 ];then mknod -m 666 "${CHROOT}"/dev/tty1 c 4 1 ;fi
- if ! [ -a "${CHROOT}"/dev/tty2 ];then mknod -m 666 "${CHROOT}"/dev/tty2 c 4 2 ;fi
- if ! [ -a "${CHROOT}"/dev/tty3 ];then mknod -m 666 "${CHROOT}"/dev/tty3 c 4 3 ;fi
- if ! [ -a "${CHROOT}"/dev/tty4 ];then mknod -m 666 "${CHROOT}"/dev/tty4 c 4 4 ;fi
- if ! [ -a "${CHROOT}"/dev/tty5 ];then mknod -m 666 "${CHROOT}"/dev/tty5 c 4 5 ;fi
- if ! [ -a "${CHROOT}"/dev/tty6 ];then mknod -m 666 "${CHROOT}"/dev/tty6 c 4 6 ;fi
- if ! [ -a "${CHROOT}"/dev/tty ];then mknod -m 666 "${CHROOT}"/dev/tty c 5 0 ;fi
- if ! [ -a "${CHROOT}"/dev/console ];then mknod -m 666 "${CHROOT}"/dev/console c 5 1 ;fi
- if ! [ -a "${CHROOT}"/dev/ptmx ];then mknod -m 666 "${CHROOT}"/dev/ptmx c 5 2 ;fi
-	# link /usr/bin to /bin for package installation purposes
- chroot "${CHROOT}" /bin/ln -sf /bin /usr/bin
- # Get the latest alpine mirror
- mkdir -p "${CHROOT}/etc/apk"
- echo "${mirror}/${branch}/main" > "${CHROOT}/etc/apk/repositories"
- ######################################
- # Update all packages and custom files
- ######################################
- mkdir -p "${CHROOT}/usr/src/"
- # Copy over custom scripts/files
- cp -rfv "../validation_tool/" "${CHROOT}/usr/src/"
- echo "${DATADIR}/"
- cp -rfv "${DATADIR}"/* "${CHROOT}/"
- # Run a script to update all packages
- chroot "${CHROOT}" /bin/update_pkgs.sh
- # Installs the validation tool into the chroot directory
- chroot "${CHROOT}" /bin/install_validation_tool.sh
- # Enables required services with initrd's service manager
- chroot "${CHROOT}" /bin/enable_services.sh
- ######################################
- # Create the initrd.gz
- ######################################
- cd "${CHROOT}" && find . | cpio -o -H newc | gzip > ../initrd.gz
-
-install: all
- mkdir -p "${DESTDIR}"/"${TFTPPATH}"
- cp -rf "${ETC}"/* "${DESTDIR}"/"${TFTPPATH}"/
- cp -rf initrd.gz "${DESTDIR}"/"${TFTPPATH}"/
-
-.PHONY: clean
-clean:
- rm -f initrd.gz
- rm -rf "${CHROOT}"
- rm -f apk-tools-static-*.apk
- rm -rf tmp/
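This makefile is normally driven from the top-level Makefile, but it can also be run on its own; it must run as root because of the mknod and chroot steps (a sketch):

    cd src/pxe_initrd
    sudo make -f pxe_initrd.mk all        # bootstraps Alpine and builds initrd.gz
    sudo make -f pxe_initrd.mk install    # copies the PXE files and initrd.gz to ${DESTDIR}${TFTPPATH}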
diff --git a/tools/pharos-validator/src/pxe_initrd/src/bin/enable_services.sh b/tools/pharos-validator/src/pxe_initrd/src/bin/enable_services.sh
deleted file mode 100755
index f2560af5..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/bin/enable_services.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/sh
-
-# Source profile for $PATH just in case it wasn't auto-loaded
-source /etc/profile
-
-rc-update add mdev sysinit
-rc-update add devfs sysinit
-rc-update add dmesg sysinit
-rc-update add hostname sysinit
-rc-update add sysctl sysinit
-rc-update add syslog sysinit
-rc-update add initialnetwork sysinit
-#rc-update add networking sysinit
-#rc-update add bootmisc sysinit
-#rc-update add hwclock sysinit
-
-rc-update add mount-ro shutdown
-rc-update add killprocs shutdown
-rc-update add savecache shutdown
-
-rc-update add sshd default
diff --git a/tools/pharos-validator/src/pxe_initrd/src/bin/initial_network.py b/tools/pharos-validator/src/pxe_initrd/src/bin/initial_network.py
deleted file mode 100755
index 6c98f6fe..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/bin/initial_network.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3
-# bin/setup_interface
-
-# -----------------------------------------------------------------------
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-
-# http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# ------------------------------------------------------------------------
-
-# Author: Todd Gaunt, toddgaunt@iol.unh.edu or toddgaunt@gmail.com
-# License: Apache v2.0
-# Description: Script for setting up initial network interfaces
-# it activates dhcp on all interfaces in order to at least get the admin
-# network up
-
-import os
-import subprocess
-import netifaces
-
-def generate_interfaces_file(ifaces, os_network_file):
-    """Builds and returns the text of a network interfaces file for the
-    given interfaces (the os_network_file argument is currently unused)"""
- interfaces = ""
- for i in ifaces:
- n = "auto " + str(i) + "\n" \
- + "iface " + str(i) + " inet dhcp\n"
- interfaces += n
- return interfaces
-
-def set_interfaces_up(ifaces):
- """Uses ifup command to put network devices up according to
- interfaces file"""
- for iface in ifaces:
- ifupcmd = [ \
- "ifup",
- iface]
- ifdowncmd = [ \
- "ifdown",
- iface]
- with open(os.devnull, 'w') as fn:
- status = subprocess.Popen(ifdowncmd, stdout=fn, stderr=fn).wait()
- status = subprocess.Popen(ifupcmd, stdout=fn, stderr=fn).wait()
- print(str(iface) + " " + str(status))
-
-def main():
- os_network_file="/etc/network/interfaces"
- ifaces = netifaces.interfaces()
- interfaces = generate_interfaces_file(ifaces, os_network_file)
- with open(os_network_file, 'w') as fd:
- fd.write(interfaces)
- set_interfaces_up(ifaces)
-
-if __name__ == "__main__":
- main()
diff --git a/tools/pharos-validator/src/pxe_initrd/src/bin/install_validation_tool.sh b/tools/pharos-validator/src/pxe_initrd/src/bin/install_validation_tool.sh
deleted file mode 100755
index a6688663..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/bin/install_validation_tool.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-cd /usr/src/validation_tool/ && ./setup.py install
diff --git a/tools/pharos-validator/src/pxe_initrd/src/bin/update_pkgs.sh b/tools/pharos-validator/src/pxe_initrd/src/bin/update_pkgs.sh
deleted file mode 100755
index 2ac095fd..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/bin/update_pkgs.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-
-source /etc/profile
-
-# Update package list and upgrade all packages to the latest version
-apk update && apk upgrade
-
-# Install python3 and development tools to install a python module
-apk add build-base gcc make abuild binutils linux-headers musl-dev python3-dev python3 openssh
-pip3 install --upgrade pip netifaces watchdog
-
-# Remove all the build tools to make the initrd smaller
-apk del build-base gcc make abuild binutils linux-headers musl-dev python3-dev
diff --git a/tools/pharos-validator/src/pxe_initrd/src/etc/init.d/initialnetwork b/tools/pharos-validator/src/pxe_initrd/src/etc/init.d/initialnetwork
deleted file mode 100755
index 233c0b75..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/etc/init.d/initialnetwork
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/sbin/openrc-run
-#
-#
-
-depend()
-{
- need localmount
- after firewall
-}
-
-start()
-{
- python3 /bin/initial_network.py
- return 0
-}
-
-stop()
-{
- return 0
-}
diff --git a/tools/pharos-validator/src/pxe_initrd/src/etc/init.d/tmpfs b/tools/pharos-validator/src/pxe_initrd/src/etc/init.d/tmpfs
deleted file mode 100755
index cea765cb..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/etc/init.d/tmpfs
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/sbin/openrc-run
-#
-#
-
-depend()
-{
- need localmount
- after firewall
-}
-
-start()
-{
- mount -t tmpfs tmp /tmp
- return 0
-}
-
-stop()
-{
- return 0
-}
diff --git a/tools/pharos-validator/src/pxe_initrd/src/etc/profile b/tools/pharos-validator/src/pxe_initrd/src/etc/profile
deleted file mode 100644
index 34802483..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/etc/profile
+++ /dev/null
@@ -1,5 +0,0 @@
-export CHARSET=UTF-8
-export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
-export PAGER=less
-export PS1='\h:\w\$ '
-umask 022
diff --git a/tools/pharos-validator/src/pxe_initrd/src/etc/resolv.conf b/tools/pharos-validator/src/pxe_initrd/src/etc/resolv.conf
deleted file mode 100644
index 0ffa7a23..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/etc/resolv.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-# Change this to what you need
-nameserver 8.8.8.8
-nameserver 8.8.4.4
diff --git a/tools/pharos-validator/src/pxe_initrd/src/etc/ssh/sshd_config b/tools/pharos-validator/src/pxe_initrd/src/etc/ssh/sshd_config
deleted file mode 100644
index 856c2cd5..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/etc/ssh/sshd_config
+++ /dev/null
@@ -1,132 +0,0 @@
-# $OpenBSD: sshd_config,v 1.99 2016/07/11 03:19:44 tedu Exp $
-
-# This is the sshd server system-wide configuration file. See
-# sshd_config(5) for more information.
-
-# This sshd was compiled with PATH=/bin:/usr/bin:/sbin:/usr/sbin
-
-# The strategy used for options in the default sshd_config shipped with
-# OpenSSH is to specify options with their default value where
-# possible, but leave them commented. Uncommented options override the
-# default value.
-
-#Port 22
-#AddressFamily any
-#ListenAddress 0.0.0.0
-#ListenAddress ::
-
-# The default requires explicit activation of protocol 1
-#Protocol 2
-
-# HostKey for protocol version 1
-#HostKey /etc/ssh/ssh_host_key
-# HostKeys for protocol version 2
-#HostKey /etc/ssh/ssh_host_rsa_key
-#HostKey /etc/ssh/ssh_host_dsa_key
-#HostKey /etc/ssh/ssh_host_ecdsa_key
-#HostKey /etc/ssh/ssh_host_ed25519_key
-
-# Lifetime and size of ephemeral version 1 server key
-#KeyRegenerationInterval 1h
-#ServerKeyBits 1024
-
-# Ciphers and keying
-#RekeyLimit default none
-
-# Logging
-#SyslogFacility AUTH
-#LogLevel INFO
-
-# Authentication:
-
-#LoginGraceTime 2m
-PermitRootLogin yes
-#StrictModes yes
-#MaxAuthTries 6
-#MaxSessions 10
-
-#RSAAuthentication yes
-#PubkeyAuthentication yes
-
-# The default is to check both .ssh/authorized_keys and .ssh/authorized_keys2
-# but this is overridden so installations will only check .ssh/authorized_keys
-AuthorizedKeysFile .ssh/authorized_keys
-
-#AuthorizedPrincipalsFile none
-
-#AuthorizedKeysCommand none
-#AuthorizedKeysCommandUser nobody
-
-# For this to work you will also need host keys in /etc/ssh/ssh_known_hosts
-#RhostsRSAAuthentication no
-# similar for protocol version 2
-#HostbasedAuthentication no
-# Change to yes if you don't trust ~/.ssh/known_hosts for
-# RhostsRSAAuthentication and HostbasedAuthentication
-#IgnoreUserKnownHosts no
-# Don't read the user's ~/.rhosts and ~/.shosts files
-#IgnoreRhosts yes
-
-# To disable tunneled clear text passwords, change to no here!
-#PasswordAuthentication yes
-#PermitEmptyPasswords no
-
-# Change to no to disable s/key passwords
-#ChallengeResponseAuthentication yes
-
-# Kerberos options
-#KerberosAuthentication no
-#KerberosOrLocalPasswd yes
-#KerberosTicketCleanup yes
-#KerberosGetAFSToken no
-
-# GSSAPI options
-#GSSAPIAuthentication no
-#GSSAPICleanupCredentials yes
-
-# Set this to 'yes' to enable PAM authentication, account processing,
-# and session processing. If this is enabled, PAM authentication will
-# be allowed through the ChallengeResponseAuthentication and
-# PasswordAuthentication. Depending on your PAM configuration,
-# PAM authentication via ChallengeResponseAuthentication may bypass
-# the setting of "PermitRootLogin without-password".
-# If you just want the PAM account and session checks to run without
-# PAM authentication, then enable this but set PasswordAuthentication
-# and ChallengeResponseAuthentication to 'no'.
-#UsePAM no
-
-#AllowAgentForwarding yes
-#AllowTcpForwarding yes
-#GatewayPorts no
-#X11Forwarding no
-#X11DisplayOffset 10
-#X11UseLocalhost yes
-#PermitTTY yes
-#PrintMotd yes
-#PrintLastLog yes
-#TCPKeepAlive yes
-#UseLogin no
-#UsePrivilegeSeparation sandbox
-#PermitUserEnvironment no
-#Compression delayed
-#ClientAliveInterval 0
-#ClientAliveCountMax 3
-#UseDNS no
-#PidFile /run/sshd.pid
-#MaxStartups 10:30:100
-#PermitTunnel no
-#ChrootDirectory none
-#VersionAddendum none
-
-# no default banner path
-#Banner none
-
-# override default of no subsystems
-Subsystem sftp /usr/lib/ssh/sftp-server
-
-# Example of overriding settings on a per-user basis
-#Match User anoncvs
-# X11Forwarding no
-# AllowTcpForwarding no
-# PermitTTY no
-# ForceCommand cvs server
diff --git a/tools/pharos-validator/src/pxe_initrd/src/init b/tools/pharos-validator/src/pxe_initrd/src/init
deleted file mode 120000
index a0b71977..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/init
+++ /dev/null
@@ -1 +0,0 @@
-/sbin/init \ No newline at end of file
diff --git a/tools/pharos-validator/src/pxe_initrd/src/root/.profile b/tools/pharos-validator/src/pxe_initrd/src/root/.profile
deleted file mode 100644
index 34802483..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/root/.profile
+++ /dev/null
@@ -1,5 +0,0 @@
-export CHARSET=UTF-8
-export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
-export PAGER=less
-export PS1='\h:\w\$ '
-umask 022
diff --git a/tools/pharos-validator/src/pxe_initrd/src/root/.ssh/authorized_keys b/tools/pharos-validator/src/pxe_initrd/src/root/.ssh/authorized_keys
deleted file mode 100644
index 00d8ae5d..00000000
--- a/tools/pharos-validator/src/pxe_initrd/src/root/.ssh/authorized_keys
+++ /dev/null
@@ -1 +0,0 @@
-ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDk7UWhzibHSz4zdt8hQ/5j4tT++gfsrRsqKGBnI16G2nDotSsuQGcUe7BygT4t6U/H/lSxt3eYeThJG3ad40sC7x4cNgzojmD7k+bwqhjVgw9brnrlymCqhwuhBW1dulKQV1qOO21XbOUNj7NwJ0A3cihvQ3kSvqBefdo/FloRUiRYAv1BFC6Pmkm7hGIp0bXchrmSXMcVdOMv7GclFkdUWXAIb9NrLpNLlpLVYqy2ogTVGDmxQE/0Nnwffug0YEhS8mIzmNktL6kydAruTi472HCB/KxZLAeYP7levusfryTqWWu7/NA34S5mb0QodIEKsSgKB0H+vE/O6hG0QBCx root@d121025.iol.unh.edu
diff --git a/tools/pharos-validator/src/pxe_initrd/vmlinux/config b/tools/pharos-validator/src/pxe_initrd/vmlinux/config
deleted file mode 100644
index 20c25745..00000000
--- a/tools/pharos-validator/src/pxe_initrd/vmlinux/config
+++ /dev/null
@@ -1,7521 +0,0 @@
-#
-# Automatically generated file; DO NOT EDIT.
-# Linux/x86 4.4.23 Kernel Configuration
-#
-CONFIG_64BIT=y
-CONFIG_X86_64=y
-CONFIG_X86=y
-CONFIG_INSTRUCTION_DECODER=y
-CONFIG_PERF_EVENTS_INTEL_UNCORE=y
-CONFIG_OUTPUT_FORMAT="elf64-x86-64"
-CONFIG_ARCH_DEFCONFIG="arch/x86/configs/x86_64_defconfig"
-CONFIG_LOCKDEP_SUPPORT=y
-CONFIG_STACKTRACE_SUPPORT=y
-CONFIG_HAVE_LATENCYTOP_SUPPORT=y
-CONFIG_MMU=y
-CONFIG_NEED_DMA_MAP_STATE=y
-CONFIG_NEED_SG_DMA_LENGTH=y
-CONFIG_GENERIC_ISA_DMA=y
-CONFIG_GENERIC_BUG=y
-CONFIG_GENERIC_BUG_RELATIVE_POINTERS=y
-CONFIG_GENERIC_HWEIGHT=y
-CONFIG_ARCH_MAY_HAVE_PC_FDC=y
-CONFIG_RWSEM_XCHGADD_ALGORITHM=y
-CONFIG_GENERIC_CALIBRATE_DELAY=y
-CONFIG_ARCH_HAS_CPU_RELAX=y
-CONFIG_ARCH_HAS_CACHE_LINE_SIZE=y
-CONFIG_HAVE_SETUP_PER_CPU_AREA=y
-CONFIG_NEED_PER_CPU_EMBED_FIRST_CHUNK=y
-CONFIG_NEED_PER_CPU_PAGE_FIRST_CHUNK=y
-CONFIG_ARCH_HIBERNATION_POSSIBLE=y
-CONFIG_ARCH_SUSPEND_POSSIBLE=y
-CONFIG_ARCH_WANT_HUGE_PMD_SHARE=y
-CONFIG_ARCH_WANT_GENERAL_HUGETLB=y
-CONFIG_ZONE_DMA32=y
-CONFIG_AUDIT_ARCH=y
-CONFIG_ARCH_SUPPORTS_OPTIMIZED_INLINING=y
-CONFIG_ARCH_SUPPORTS_DEBUG_PAGEALLOC=y
-CONFIG_HAVE_INTEL_TXT=y
-CONFIG_X86_64_SMP=y
-CONFIG_ARCH_HWEIGHT_CFLAGS="-fcall-saved-rdi -fcall-saved-rsi -fcall-saved-rdx -fcall-saved-rcx -fcall-saved-r8 -fcall-saved-r9 -fcall-saved-r10 -fcall-saved-r11"
-CONFIG_ARCH_SUPPORTS_UPROBES=y
-CONFIG_FIX_EARLYCON_MEM=y
-CONFIG_PGTABLE_LEVELS=4
-CONFIG_DEFCONFIG_LIST="/lib/modules/$UNAME_RELEASE/.config"
-CONFIG_IRQ_WORK=y
-CONFIG_BUILDTIME_EXTABLE_SORT=y
-
-#
-# General setup
-#
-CONFIG_INIT_ENV_ARG_LIMIT=32
-CONFIG_CROSS_COMPILE=""
-CONFIG_COMPILE_TEST=y
-CONFIG_LOCALVERSION=""
-# CONFIG_LOCALVERSION_AUTO is not set
-CONFIG_HAVE_KERNEL_GZIP=y
-CONFIG_HAVE_KERNEL_BZIP2=y
-CONFIG_HAVE_KERNEL_LZMA=y
-CONFIG_HAVE_KERNEL_XZ=y
-CONFIG_HAVE_KERNEL_LZO=y
-CONFIG_HAVE_KERNEL_LZ4=y
-CONFIG_KERNEL_GZIP=y
-# CONFIG_KERNEL_BZIP2 is not set
-# CONFIG_KERNEL_LZMA is not set
-# CONFIG_KERNEL_XZ is not set
-# CONFIG_KERNEL_LZO is not set
-# CONFIG_KERNEL_LZ4 is not set
-CONFIG_DEFAULT_HOSTNAME="(none)"
-CONFIG_SWAP=y
-CONFIG_SYSVIPC=y
-CONFIG_SYSVIPC_SYSCTL=y
-CONFIG_POSIX_MQUEUE=y
-CONFIG_POSIX_MQUEUE_SYSCTL=y
-CONFIG_CROSS_MEMORY_ATTACH=y
-CONFIG_FHANDLE=y
-# CONFIG_USELIB is not set
-CONFIG_AUDIT=y
-CONFIG_HAVE_ARCH_AUDITSYSCALL=y
-CONFIG_AUDITSYSCALL=y
-CONFIG_AUDIT_WATCH=y
-CONFIG_AUDIT_TREE=y
-
-#
-# IRQ subsystem
-#
-CONFIG_GENERIC_IRQ_PROBE=y
-CONFIG_GENERIC_IRQ_SHOW=y
-CONFIG_GENERIC_PENDING_IRQ=y
-CONFIG_IRQ_DOMAIN=y
-CONFIG_IRQ_DOMAIN_HIERARCHY=y
-CONFIG_GENERIC_MSI_IRQ=y
-CONFIG_GENERIC_MSI_IRQ_DOMAIN=y
-# CONFIG_IRQ_DOMAIN_DEBUG is not set
-CONFIG_IRQ_FORCED_THREADING=y
-CONFIG_SPARSE_IRQ=y
-CONFIG_CLOCKSOURCE_WATCHDOG=y
-CONFIG_ARCH_CLOCKSOURCE_DATA=y
-CONFIG_CLOCKSOURCE_VALIDATE_LAST_CYCLE=y
-CONFIG_GENERIC_TIME_VSYSCALL=y
-CONFIG_GENERIC_CLOCKEVENTS=y
-CONFIG_GENERIC_CLOCKEVENTS_BROADCAST=y
-CONFIG_GENERIC_CLOCKEVENTS_MIN_ADJUST=y
-CONFIG_GENERIC_CMOS_UPDATE=y
-
-#
-# Timers subsystem
-#
-CONFIG_TICK_ONESHOT=y
-CONFIG_NO_HZ_COMMON=y
-# CONFIG_HZ_PERIODIC is not set
-# CONFIG_NO_HZ_IDLE is not set
-CONFIG_NO_HZ_FULL=y
-# CONFIG_NO_HZ_FULL_ALL is not set
-# CONFIG_NO_HZ_FULL_SYSIDLE is not set
-CONFIG_NO_HZ=y
-CONFIG_HIGH_RES_TIMERS=y
-
-#
-# CPU/Task time and stats accounting
-#
-CONFIG_VIRT_CPU_ACCOUNTING=y
-CONFIG_VIRT_CPU_ACCOUNTING_GEN=y
-CONFIG_BSD_PROCESS_ACCT=y
-CONFIG_BSD_PROCESS_ACCT_V3=y
-CONFIG_TASKSTATS=y
-CONFIG_TASK_DELAY_ACCT=y
-CONFIG_TASK_XACCT=y
-CONFIG_TASK_IO_ACCOUNTING=y
-
-#
-# RCU Subsystem
-#
-CONFIG_TREE_RCU=y
-# CONFIG_RCU_EXPERT is not set
-CONFIG_SRCU=y
-CONFIG_TASKS_RCU=y
-CONFIG_RCU_STALL_COMMON=y
-CONFIG_CONTEXT_TRACKING=y
-# CONFIG_CONTEXT_TRACKING_FORCE is not set
-# CONFIG_TREE_RCU_TRACE is not set
-CONFIG_RCU_NOCB_CPU=y
-# CONFIG_RCU_NOCB_CPU_NONE is not set
-# CONFIG_RCU_NOCB_CPU_ZERO is not set
-CONFIG_RCU_NOCB_CPU_ALL=y
-# CONFIG_RCU_EXPEDITE_BOOT is not set
-CONFIG_BUILD_BIN2C=y
-CONFIG_IKCONFIG=y
-CONFIG_IKCONFIG_PROC=y
-CONFIG_LOG_BUF_SHIFT=18
-CONFIG_LOG_CPU_MAX_BUF_SHIFT=12
-CONFIG_HAVE_UNSTABLE_SCHED_CLOCK=y
-CONFIG_ARCH_SUPPORTS_NUMA_BALANCING=y
-CONFIG_ARCH_WANT_BATCHED_UNMAP_TLB_FLUSH=y
-CONFIG_ARCH_SUPPORTS_INT128=y
-CONFIG_NUMA_BALANCING=y
-CONFIG_NUMA_BALANCING_DEFAULT_ENABLED=y
-CONFIG_CGROUPS=y
-# CONFIG_CGROUP_DEBUG is not set
-CONFIG_CGROUP_FREEZER=y
-CONFIG_CGROUP_PIDS=y
-CONFIG_CGROUP_DEVICE=y
-CONFIG_CPUSETS=y
-CONFIG_PROC_PID_CPUSET=y
-CONFIG_CGROUP_CPUACCT=y
-CONFIG_PAGE_COUNTER=y
-CONFIG_MEMCG=y
-CONFIG_MEMCG_SWAP=y
-CONFIG_MEMCG_SWAP_ENABLED=y
-# CONFIG_MEMCG_KMEM is not set
-CONFIG_CGROUP_HUGETLB=y
-CONFIG_CGROUP_PERF=y
-CONFIG_CGROUP_SCHED=y
-CONFIG_FAIR_GROUP_SCHED=y
-CONFIG_CFS_BANDWIDTH=y
-# CONFIG_RT_GROUP_SCHED is not set
-CONFIG_BLK_CGROUP=y
-# CONFIG_DEBUG_BLK_CGROUP is not set
-CONFIG_CGROUP_WRITEBACK=y
-# CONFIG_CHECKPOINT_RESTORE is not set
-CONFIG_NAMESPACES=y
-CONFIG_UTS_NS=y
-CONFIG_IPC_NS=y
-CONFIG_USER_NS=y
-CONFIG_PID_NS=y
-CONFIG_NET_NS=y
-CONFIG_SCHED_AUTOGROUP=y
-# CONFIG_SYSFS_DEPRECATED is not set
-CONFIG_RELAY=y
-CONFIG_BLK_DEV_INITRD=y
-CONFIG_INITRAMFS_SOURCE=""
-CONFIG_RD_GZIP=y
-CONFIG_RD_BZIP2=y
-CONFIG_RD_LZMA=y
-CONFIG_RD_XZ=y
-CONFIG_RD_LZO=y
-CONFIG_RD_LZ4=y
-# CONFIG_CC_OPTIMIZE_FOR_SIZE is not set
-CONFIG_SYSCTL=y
-CONFIG_ANON_INODES=y
-CONFIG_HAVE_UID16=y
-CONFIG_SYSCTL_EXCEPTION_TRACE=y
-CONFIG_HAVE_PCSPKR_PLATFORM=y
-CONFIG_BPF=y
-# CONFIG_EXPERT is not set
-CONFIG_UID16=y
-CONFIG_MULTIUSER=y
-CONFIG_SGETMASK_SYSCALL=y
-CONFIG_SYSFS_SYSCALL=y
-# CONFIG_SYSCTL_SYSCALL is not set
-CONFIG_KALLSYMS=y
-CONFIG_KALLSYMS_ALL=y
-CONFIG_PRINTK=y
-CONFIG_BUG=y
-CONFIG_ELF_CORE=y
-CONFIG_PCSPKR_PLATFORM=y
-CONFIG_BASE_FULL=y
-CONFIG_FUTEX=y
-CONFIG_EPOLL=y
-CONFIG_SIGNALFD=y
-CONFIG_TIMERFD=y
-CONFIG_EVENTFD=y
-CONFIG_BPF_SYSCALL=y
-CONFIG_SHMEM=y
-CONFIG_AIO=y
-CONFIG_ADVISE_SYSCALLS=y
-CONFIG_USERFAULTFD=y
-CONFIG_PCI_QUIRKS=y
-CONFIG_MEMBARRIER=y
-# CONFIG_EMBEDDED is not set
-CONFIG_HAVE_PERF_EVENTS=y
-
-#
-# Kernel Performance Events And Counters
-#
-CONFIG_PERF_EVENTS=y
-# CONFIG_DEBUG_PERF_USE_VMALLOC is not set
-CONFIG_VM_EVENT_COUNTERS=y
-CONFIG_SLUB_DEBUG=y
-# CONFIG_COMPAT_BRK is not set
-# CONFIG_SLAB is not set
-CONFIG_SLUB=y
-CONFIG_SLUB_CPU_PARTIAL=y
-CONFIG_SYSTEM_DATA_VERIFICATION=y
-CONFIG_PROFILING=y
-CONFIG_TRACEPOINTS=y
-CONFIG_KEXEC_CORE=y
-# CONFIG_OPROFILE is not set
-CONFIG_HAVE_OPROFILE=y
-CONFIG_OPROFILE_NMI_TIMER=y
-CONFIG_KPROBES=y
-CONFIG_JUMP_LABEL=y
-# CONFIG_STATIC_KEYS_SELFTEST is not set
-CONFIG_OPTPROBES=y
-CONFIG_KPROBES_ON_FTRACE=y
-CONFIG_UPROBES=y
-# CONFIG_HAVE_64BIT_ALIGNED_ACCESS is not set
-CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS=y
-CONFIG_ARCH_USE_BUILTIN_BSWAP=y
-CONFIG_KRETPROBES=y
-CONFIG_USER_RETURN_NOTIFIER=y
-CONFIG_HAVE_IOREMAP_PROT=y
-CONFIG_HAVE_KPROBES=y
-CONFIG_HAVE_KRETPROBES=y
-CONFIG_HAVE_OPTPROBES=y
-CONFIG_HAVE_KPROBES_ON_FTRACE=y
-CONFIG_HAVE_ARCH_TRACEHOOK=y
-CONFIG_HAVE_DMA_ATTRS=y
-CONFIG_HAVE_DMA_CONTIGUOUS=y
-CONFIG_GENERIC_SMP_IDLE_THREAD=y
-CONFIG_ARCH_WANTS_DYNAMIC_TASK_STRUCT=y
-CONFIG_HAVE_REGS_AND_STACK_ACCESS_API=y
-CONFIG_HAVE_CLK=y
-CONFIG_HAVE_DMA_API_DEBUG=y
-CONFIG_HAVE_HW_BREAKPOINT=y
-CONFIG_HAVE_MIXED_BREAKPOINTS_REGS=y
-CONFIG_HAVE_USER_RETURN_NOTIFIER=y
-CONFIG_HAVE_PERF_EVENTS_NMI=y
-CONFIG_HAVE_PERF_REGS=y
-CONFIG_HAVE_PERF_USER_STACK_DUMP=y
-CONFIG_HAVE_ARCH_JUMP_LABEL=y
-CONFIG_ARCH_HAVE_NMI_SAFE_CMPXCHG=y
-CONFIG_HAVE_ALIGNED_STRUCT_PAGE=y
-CONFIG_HAVE_CMPXCHG_LOCAL=y
-CONFIG_HAVE_CMPXCHG_DOUBLE=y
-CONFIG_ARCH_WANT_COMPAT_IPC_PARSE_VERSION=y
-CONFIG_ARCH_WANT_OLD_COMPAT_IPC=y
-CONFIG_HAVE_ARCH_SECCOMP_FILTER=y
-CONFIG_SECCOMP_FILTER=y
-CONFIG_HAVE_CC_STACKPROTECTOR=y
-CONFIG_CC_STACKPROTECTOR=y
-# CONFIG_CC_STACKPROTECTOR_NONE is not set
-# CONFIG_CC_STACKPROTECTOR_REGULAR is not set
-CONFIG_CC_STACKPROTECTOR_STRONG=y
-CONFIG_HAVE_CONTEXT_TRACKING=y
-CONFIG_HAVE_VIRT_CPU_ACCOUNTING_GEN=y
-CONFIG_HAVE_IRQ_TIME_ACCOUNTING=y
-CONFIG_HAVE_ARCH_TRANSPARENT_HUGEPAGE=y
-CONFIG_HAVE_ARCH_HUGE_VMAP=y
-CONFIG_HAVE_ARCH_SOFT_DIRTY=y
-CONFIG_MODULES_USE_ELF_RELA=y
-CONFIG_HAVE_IRQ_EXIT_ON_IRQ_STACK=y
-CONFIG_ARCH_HAS_ELF_RANDOMIZE=y
-CONFIG_HAVE_COPY_THREAD_TLS=y
-CONFIG_OLD_SIGSUSPEND3=y
-CONFIG_COMPAT_OLD_SIGACTION=y
-
-#
-# GCOV-based kernel profiling
-#
-# CONFIG_GCOV_KERNEL is not set
-CONFIG_ARCH_HAS_GCOV_PROFILE_ALL=y
-# CONFIG_HAVE_GENERIC_DMA_COHERENT is not set
-CONFIG_SLABINFO=y
-CONFIG_RT_MUTEXES=y
-CONFIG_BASE_SMALL=0
-CONFIG_MODULES=y
-# CONFIG_MODULE_FORCE_LOAD is not set
-CONFIG_MODULE_UNLOAD=y
-# CONFIG_MODULE_FORCE_UNLOAD is not set
-# CONFIG_MODVERSIONS is not set
-# CONFIG_MODULE_SRCVERSION_ALL is not set
-CONFIG_MODULE_SIG=y
-# CONFIG_MODULE_SIG_FORCE is not set
-CONFIG_MODULE_SIG_ALL=y
-# CONFIG_MODULE_SIG_SHA1 is not set
-# CONFIG_MODULE_SIG_SHA224 is not set
-CONFIG_MODULE_SIG_SHA256=y
-# CONFIG_MODULE_SIG_SHA384 is not set
-# CONFIG_MODULE_SIG_SHA512 is not set
-CONFIG_MODULE_SIG_HASH="sha256"
-# CONFIG_MODULE_COMPRESS is not set
-CONFIG_MODULES_TREE_LOOKUP=y
-CONFIG_BLOCK=y
-CONFIG_BLK_DEV_BSG=y
-CONFIG_BLK_DEV_BSGLIB=y
-CONFIG_BLK_DEV_INTEGRITY=y
-CONFIG_BLK_DEV_THROTTLING=y
-# CONFIG_BLK_CMDLINE_PARSER is not set
-
-#
-# Partition Types
-#
-CONFIG_PARTITION_ADVANCED=y
-# CONFIG_ACORN_PARTITION is not set
-CONFIG_AIX_PARTITION=y
-CONFIG_OSF_PARTITION=y
-CONFIG_AMIGA_PARTITION=y
-# CONFIG_ATARI_PARTITION is not set
-CONFIG_MAC_PARTITION=y
-CONFIG_MSDOS_PARTITION=y
-CONFIG_BSD_DISKLABEL=y
-CONFIG_MINIX_SUBPARTITION=y
-CONFIG_SOLARIS_X86_PARTITION=y
-CONFIG_UNIXWARE_DISKLABEL=y
-CONFIG_LDM_PARTITION=y
-# CONFIG_LDM_DEBUG is not set
-CONFIG_SGI_PARTITION=y
-# CONFIG_ULTRIX_PARTITION is not set
-CONFIG_SUN_PARTITION=y
-CONFIG_KARMA_PARTITION=y
-CONFIG_EFI_PARTITION=y
-# CONFIG_SYSV68_PARTITION is not set
-# CONFIG_CMDLINE_PARTITION is not set
-CONFIG_BLOCK_COMPAT=y
-
-#
-# IO Schedulers
-#
-CONFIG_IOSCHED_NOOP=y
-CONFIG_IOSCHED_DEADLINE=y
-CONFIG_IOSCHED_CFQ=y
-CONFIG_CFQ_GROUP_IOSCHED=y
-# CONFIG_DEFAULT_DEADLINE is not set
-CONFIG_DEFAULT_CFQ=y
-# CONFIG_DEFAULT_NOOP is not set
-CONFIG_DEFAULT_IOSCHED="cfq"
-CONFIG_PREEMPT_NOTIFIERS=y
-CONFIG_PADATA=y
-CONFIG_ASN1=y
-CONFIG_INLINE_SPIN_UNLOCK_IRQ=y
-CONFIG_INLINE_READ_UNLOCK=y
-CONFIG_INLINE_READ_UNLOCK_IRQ=y
-CONFIG_INLINE_WRITE_UNLOCK=y
-CONFIG_INLINE_WRITE_UNLOCK_IRQ=y
-CONFIG_ARCH_SUPPORTS_ATOMIC_RMW=y
-CONFIG_MUTEX_SPIN_ON_OWNER=y
-CONFIG_RWSEM_SPIN_ON_OWNER=y
-CONFIG_LOCK_SPIN_ON_OWNER=y
-CONFIG_ARCH_USE_QUEUED_SPINLOCKS=y
-CONFIG_QUEUED_SPINLOCKS=y
-CONFIG_ARCH_USE_QUEUED_RWLOCKS=y
-CONFIG_QUEUED_RWLOCKS=y
-CONFIG_FREEZER=y
-
-#
-# Processor type and features
-#
-CONFIG_ZONE_DMA=y
-CONFIG_SMP=y
-CONFIG_X86_FEATURE_NAMES=y
-CONFIG_X86_X2APIC=y
-CONFIG_X86_MPPARSE=y
-CONFIG_X86_EXTENDED_PLATFORM=y
-CONFIG_X86_NUMACHIP=y
-# CONFIG_X86_VSMP is not set
-CONFIG_X86_UV=y
-# CONFIG_X86_GOLDFISH is not set
-CONFIG_X86_INTEL_LPSS=y
-# CONFIG_X86_AMD_PLATFORM_DEVICE is not set
-CONFIG_IOSF_MBI=y
-# CONFIG_IOSF_MBI_DEBUG is not set
-CONFIG_X86_SUPPORTS_MEMORY_FAILURE=y
-CONFIG_SCHED_OMIT_FRAME_POINTER=y
-CONFIG_HYPERVISOR_GUEST=y
-CONFIG_PARAVIRT=y
-# CONFIG_PARAVIRT_DEBUG is not set
-# CONFIG_PARAVIRT_SPINLOCKS is not set
-CONFIG_XEN=y
-CONFIG_XEN_DOM0=y
-CONFIG_XEN_PVHVM=y
-CONFIG_XEN_512GB=y
-CONFIG_XEN_SAVE_RESTORE=y
-CONFIG_XEN_DEBUG_FS=y
-CONFIG_XEN_PVH=y
-CONFIG_KVM_GUEST=y
-# CONFIG_KVM_DEBUG_FS is not set
-CONFIG_PARAVIRT_TIME_ACCOUNTING=y
-CONFIG_PARAVIRT_CLOCK=y
-CONFIG_NO_BOOTMEM=y
-# CONFIG_MK8 is not set
-# CONFIG_MPSC is not set
-# CONFIG_MCORE2 is not set
-# CONFIG_MATOM is not set
-CONFIG_GENERIC_CPU=y
-CONFIG_X86_INTERNODE_CACHE_SHIFT=6
-CONFIG_X86_L1_CACHE_SHIFT=6
-CONFIG_X86_TSC=y
-CONFIG_X86_CMPXCHG64=y
-CONFIG_X86_CMOV=y
-CONFIG_X86_MINIMUM_CPU_FAMILY=64
-CONFIG_X86_DEBUGCTLMSR=y
-CONFIG_CPU_SUP_INTEL=y
-CONFIG_CPU_SUP_AMD=y
-CONFIG_CPU_SUP_CENTAUR=y
-CONFIG_HPET_TIMER=y
-CONFIG_HPET_EMULATE_RTC=y
-CONFIG_DMI=y
-# CONFIG_GART_IOMMU is not set
-# CONFIG_CALGARY_IOMMU is not set
-CONFIG_SWIOTLB=y
-CONFIG_IOMMU_HELPER=y
-# CONFIG_MAXSMP is not set
-CONFIG_NR_CPUS=64
-CONFIG_SCHED_SMT=y
-CONFIG_SCHED_MC=y
-# CONFIG_PREEMPT_NONE is not set
-CONFIG_PREEMPT_VOLUNTARY=y
-# CONFIG_PREEMPT is not set
-CONFIG_X86_LOCAL_APIC=y
-CONFIG_X86_IO_APIC=y
-CONFIG_X86_REROUTE_FOR_BROKEN_BOOT_IRQS=y
-CONFIG_X86_MCE=y
-CONFIG_X86_MCE_INTEL=y
-CONFIG_X86_MCE_AMD=y
-CONFIG_X86_MCE_THRESHOLD=y
-CONFIG_X86_MCE_INJECT=m
-CONFIG_X86_THERMAL_VECTOR=y
-# CONFIG_VM86 is not set
-CONFIG_X86_16BIT=y
-CONFIG_X86_ESPFIX64=y
-CONFIG_X86_VSYSCALL_EMULATION=y
-CONFIG_I8K=m
-CONFIG_MICROCODE=y
-CONFIG_MICROCODE_INTEL=y
-CONFIG_MICROCODE_AMD=y
-CONFIG_MICROCODE_OLD_INTERFACE=y
-CONFIG_X86_MSR=y
-CONFIG_X86_CPUID=y
-CONFIG_ARCH_PHYS_ADDR_T_64BIT=y
-CONFIG_ARCH_DMA_ADDR_T_64BIT=y
-CONFIG_X86_DIRECT_GBPAGES=y
-CONFIG_NUMA=y
-CONFIG_AMD_NUMA=y
-CONFIG_X86_64_ACPI_NUMA=y
-CONFIG_NODES_SPAN_OTHER_NODES=y
-# CONFIG_NUMA_EMU is not set
-CONFIG_NODES_SHIFT=9
-CONFIG_ARCH_SPARSEMEM_ENABLE=y
-CONFIG_ARCH_SPARSEMEM_DEFAULT=y
-CONFIG_ARCH_SELECT_MEMORY_MODEL=y
-# CONFIG_ARCH_MEMORY_PROBE is not set
-CONFIG_ARCH_PROC_KCORE_TEXT=y
-CONFIG_ILLEGAL_POINTER_VALUE=0xdead000000000000
-CONFIG_SELECT_MEMORY_MODEL=y
-CONFIG_SPARSEMEM_MANUAL=y
-CONFIG_SPARSEMEM=y
-CONFIG_NEED_MULTIPLE_NODES=y
-CONFIG_HAVE_MEMORY_PRESENT=y
-CONFIG_SPARSEMEM_EXTREME=y
-CONFIG_SPARSEMEM_VMEMMAP_ENABLE=y
-CONFIG_SPARSEMEM_ALLOC_MEM_MAP_TOGETHER=y
-CONFIG_SPARSEMEM_VMEMMAP=y
-CONFIG_HAVE_MEMBLOCK=y
-CONFIG_HAVE_MEMBLOCK_NODE_MAP=y
-CONFIG_ARCH_DISCARD_MEMBLOCK=y
-CONFIG_MEMORY_ISOLATION=y
-# CONFIG_MOVABLE_NODE is not set
-CONFIG_HAVE_BOOTMEM_INFO_NODE=y
-CONFIG_MEMORY_HOTPLUG=y
-CONFIG_MEMORY_HOTPLUG_SPARSE=y
-CONFIG_MEMORY_HOTREMOVE=y
-CONFIG_SPLIT_PTLOCK_CPUS=4
-CONFIG_ARCH_ENABLE_SPLIT_PMD_PTLOCK=y
-CONFIG_MEMORY_BALLOON=y
-CONFIG_BALLOON_COMPACTION=y
-CONFIG_COMPACTION=y
-CONFIG_MIGRATION=y
-CONFIG_ARCH_ENABLE_HUGEPAGE_MIGRATION=y
-CONFIG_PHYS_ADDR_T_64BIT=y
-CONFIG_ZONE_DMA_FLAG=1
-CONFIG_BOUNCE=y
-CONFIG_VIRT_TO_BUS=y
-CONFIG_MMU_NOTIFIER=y
-CONFIG_KSM=y
-CONFIG_DEFAULT_MMAP_MIN_ADDR=65536
-CONFIG_ARCH_SUPPORTS_MEMORY_FAILURE=y
-CONFIG_MEMORY_FAILURE=y
-CONFIG_HWPOISON_INJECT=m
-CONFIG_TRANSPARENT_HUGEPAGE=y
-# CONFIG_TRANSPARENT_HUGEPAGE_ALWAYS is not set
-CONFIG_TRANSPARENT_HUGEPAGE_MADVISE=y
-CONFIG_CLEANCACHE=y
-CONFIG_FRONTSWAP=y
-CONFIG_CMA=y
-# CONFIG_CMA_DEBUG is not set
-# CONFIG_CMA_DEBUGFS is not set
-CONFIG_CMA_AREAS=7
-CONFIG_ZSWAP=y
-CONFIG_ZPOOL=y
-CONFIG_ZBUD=y
-CONFIG_ZSMALLOC=y
-# CONFIG_PGTABLE_MAPPING is not set
-# CONFIG_ZSMALLOC_STAT is not set
-CONFIG_GENERIC_EARLY_IOREMAP=y
-CONFIG_ARCH_SUPPORTS_DEFERRED_STRUCT_PAGE_INIT=y
-# CONFIG_DEFERRED_STRUCT_PAGE_INIT is not set
-# CONFIG_IDLE_PAGE_TRACKING is not set
-CONFIG_FRAME_VECTOR=y
-CONFIG_X86_PMEM_LEGACY_DEVICE=y
-CONFIG_X86_PMEM_LEGACY=y
-CONFIG_X86_CHECK_BIOS_CORRUPTION=y
-# CONFIG_X86_BOOTPARAM_MEMORY_CORRUPTION_CHECK is not set
-CONFIG_X86_RESERVE_LOW=64
-CONFIG_MTRR=y
-CONFIG_MTRR_SANITIZER=y
-CONFIG_MTRR_SANITIZER_ENABLE_DEFAULT=0
-CONFIG_MTRR_SANITIZER_SPARE_REG_NR_DEFAULT=1
-CONFIG_X86_PAT=y
-CONFIG_ARCH_USES_PG_UNCACHED=y
-CONFIG_ARCH_RANDOM=y
-CONFIG_X86_SMAP=y
-CONFIG_X86_INTEL_MPX=y
-CONFIG_EFI=y
-CONFIG_EFI_STUB=y
-CONFIG_EFI_MIXED=y
-CONFIG_SECCOMP=y
-# CONFIG_HZ_100 is not set
-# CONFIG_HZ_250 is not set
-# CONFIG_HZ_300 is not set
-CONFIG_HZ_1000=y
-CONFIG_HZ=1000
-CONFIG_SCHED_HRTICK=y
-CONFIG_KEXEC=y
-CONFIG_KEXEC_FILE=y
-CONFIG_KEXEC_VERIFY_SIG=y
-CONFIG_KEXEC_BZIMAGE_VERIFY_SIG=y
-CONFIG_CRASH_DUMP=y
-CONFIG_PHYSICAL_START=0x1000000
-CONFIG_RELOCATABLE=y
-CONFIG_RANDOMIZE_BASE=y
-CONFIG_RANDOMIZE_BASE_MAX_OFFSET=0x40000000
-CONFIG_X86_NEED_RELOCS=y
-CONFIG_PHYSICAL_ALIGN=0x1000000
-CONFIG_HOTPLUG_CPU=y
-# CONFIG_BOOTPARAM_HOTPLUG_CPU0 is not set
-# CONFIG_DEBUG_HOTPLUG_CPU0 is not set
-# CONFIG_COMPAT_VDSO is not set
-# CONFIG_LEGACY_VSYSCALL_NATIVE is not set
-CONFIG_LEGACY_VSYSCALL_EMULATE=y
-# CONFIG_LEGACY_VSYSCALL_NONE is not set
-# CONFIG_CMDLINE_BOOL is not set
-CONFIG_MODIFY_LDT_SYSCALL=y
-CONFIG_HAVE_LIVEPATCH=y
-# CONFIG_LIVEPATCH is not set
-CONFIG_ARCH_ENABLE_MEMORY_HOTPLUG=y
-CONFIG_ARCH_ENABLE_MEMORY_HOTREMOVE=y
-CONFIG_USE_PERCPU_NUMA_NODE_ID=y
-
-#
-# Power management and ACPI options
-#
-# CONFIG_SUSPEND is not set
-CONFIG_HIBERNATE_CALLBACKS=y
-# CONFIG_HIBERNATION is not set
-CONFIG_PM_SLEEP=y
-CONFIG_PM_SLEEP_SMP=y
-# CONFIG_PM_AUTOSLEEP is not set
-# CONFIG_PM_WAKELOCKS is not set
-CONFIG_PM=y
-# CONFIG_PM_DEBUG is not set
-CONFIG_PM_CLK=y
-# CONFIG_WQ_POWER_EFFICIENT_DEFAULT is not set
-CONFIG_ACPI=y
-CONFIG_ACPI_LEGACY_TABLES_LOOKUP=y
-CONFIG_ARCH_MIGHT_HAVE_ACPI_PDC=y
-CONFIG_ACPI_SYSTEM_POWER_STATES_SUPPORT=y
-# CONFIG_ACPI_DEBUGGER is not set
-# CONFIG_ACPI_PROCFS_POWER is not set
-# CONFIG_ACPI_REV_OVERRIDE_POSSIBLE is not set
-CONFIG_ACPI_EC_DEBUGFS=m
-CONFIG_ACPI_AC=y
-CONFIG_ACPI_BATTERY=y
-CONFIG_ACPI_BUTTON=y
-CONFIG_ACPI_VIDEO=m
-CONFIG_ACPI_FAN=y
-CONFIG_ACPI_DOCK=y
-CONFIG_ACPI_CPU_FREQ_PSS=y
-CONFIG_ACPI_PROCESSOR_IDLE=y
-CONFIG_ACPI_PROCESSOR=y
-CONFIG_ACPI_IPMI=m
-CONFIG_ACPI_HOTPLUG_CPU=y
-CONFIG_ACPI_PROCESSOR_AGGREGATOR=m
-CONFIG_ACPI_THERMAL=y
-CONFIG_ACPI_NUMA=y
-# CONFIG_ACPI_CUSTOM_DSDT is not set
-# CONFIG_ACPI_INITRD_TABLE_OVERRIDE is not set
-# CONFIG_ACPI_DEBUG is not set
-CONFIG_ACPI_PCI_SLOT=y
-CONFIG_X86_PM_TIMER=y
-CONFIG_ACPI_CONTAINER=y
-CONFIG_ACPI_HOTPLUG_MEMORY=y
-CONFIG_ACPI_HOTPLUG_IOAPIC=y
-CONFIG_ACPI_SBS=m
-CONFIG_ACPI_HED=y
-CONFIG_ACPI_CUSTOM_METHOD=m
-CONFIG_ACPI_BGRT=y
-# CONFIG_ACPI_REDUCED_HARDWARE_ONLY is not set
-CONFIG_ACPI_NFIT=m
-# CONFIG_ACPI_NFIT_DEBUG is not set
-CONFIG_HAVE_ACPI_APEI=y
-CONFIG_HAVE_ACPI_APEI_NMI=y
-CONFIG_ACPI_APEI=y
-CONFIG_ACPI_APEI_GHES=y
-CONFIG_ACPI_APEI_PCIEAER=y
-CONFIG_ACPI_APEI_MEMORY_FAILURE=y
-# CONFIG_ACPI_APEI_EINJ is not set
-# CONFIG_ACPI_APEI_ERST_DEBUG is not set
-# CONFIG_ACPI_EXTLOG is not set
-CONFIG_PMIC_OPREGION=y
-CONFIG_CRC_PMIC_OPREGION=y
-CONFIG_SFI=y
-
-#
-# CPU Frequency scaling
-#
-CONFIG_CPU_FREQ=y
-CONFIG_CPU_FREQ_GOV_COMMON=y
-CONFIG_CPU_FREQ_STAT=m
-CONFIG_CPU_FREQ_STAT_DETAILS=y
-# CONFIG_CPU_FREQ_DEFAULT_GOV_PERFORMANCE is not set
-# CONFIG_CPU_FREQ_DEFAULT_GOV_POWERSAVE is not set
-# CONFIG_CPU_FREQ_DEFAULT_GOV_USERSPACE is not set
-CONFIG_CPU_FREQ_DEFAULT_GOV_ONDEMAND=y
-# CONFIG_CPU_FREQ_DEFAULT_GOV_CONSERVATIVE is not set
-CONFIG_CPU_FREQ_GOV_PERFORMANCE=y
-CONFIG_CPU_FREQ_GOV_POWERSAVE=y
-CONFIG_CPU_FREQ_GOV_USERSPACE=y
-CONFIG_CPU_FREQ_GOV_ONDEMAND=y
-CONFIG_CPU_FREQ_GOV_CONSERVATIVE=y
-
-#
-# CPU frequency scaling drivers
-#
-CONFIG_X86_INTEL_PSTATE=y
-CONFIG_X86_PCC_CPUFREQ=m
-CONFIG_X86_ACPI_CPUFREQ=m
-CONFIG_X86_ACPI_CPUFREQ_CPB=y
-CONFIG_X86_POWERNOW_K8=m
-CONFIG_X86_AMD_FREQ_SENSITIVITY=m
-# CONFIG_X86_SPEEDSTEP_CENTRINO is not set
-CONFIG_X86_P4_CLOCKMOD=m
-
-#
-# shared options
-#
-CONFIG_X86_SPEEDSTEP_LIB=m
-
-#
-# CPU Idle
-#
-CONFIG_CPU_IDLE=y
-# CONFIG_CPU_IDLE_GOV_LADDER is not set
-CONFIG_CPU_IDLE_GOV_MENU=y
-# CONFIG_ARCH_NEEDS_CPU_IDLE_COUPLED is not set
-CONFIG_INTEL_IDLE=y
-
-#
-# Memory power savings
-#
-CONFIG_I7300_IDLE_IOAT_CHANNEL=y
-CONFIG_I7300_IDLE=m
-
-#
-# Bus options (PCI etc.)
-#
-CONFIG_PCI=y
-CONFIG_PCI_DIRECT=y
-CONFIG_PCI_MMCONFIG=y
-CONFIG_PCI_XEN=y
-CONFIG_PCI_DOMAINS=y
-CONFIG_PCIEPORTBUS=y
-CONFIG_HOTPLUG_PCI_PCIE=y
-CONFIG_PCIEAER=y
-CONFIG_PCIE_ECRC=y
-CONFIG_PCIEAER_INJECT=m
-CONFIG_PCIEASPM=y
-# CONFIG_PCIEASPM_DEBUG is not set
-CONFIG_PCIEASPM_DEFAULT=y
-# CONFIG_PCIEASPM_POWERSAVE is not set
-# CONFIG_PCIEASPM_PERFORMANCE is not set
-CONFIG_PCIE_PME=y
-CONFIG_PCI_BUS_ADDR_T_64BIT=y
-CONFIG_PCI_MSI=y
-CONFIG_PCI_MSI_IRQ_DOMAIN=y
-# CONFIG_PCI_DEBUG is not set
-# CONFIG_PCI_REALLOC_ENABLE_AUTO is not set
-CONFIG_PCI_STUB=y
-CONFIG_XEN_PCIDEV_FRONTEND=m
-CONFIG_HT_IRQ=y
-CONFIG_PCI_ATS=y
-CONFIG_PCI_IOV=y
-CONFIG_PCI_PRI=y
-CONFIG_PCI_PASID=y
-CONFIG_PCI_LABEL=y
-
-#
-# PCI host controller drivers
-#
-CONFIG_ISA_DMA_API=y
-CONFIG_AMD_NB=y
-CONFIG_PCCARD=y
-CONFIG_PCMCIA=y
-CONFIG_PCMCIA_LOAD_CIS=y
-CONFIG_CARDBUS=y
-
-#
-# PC-card bridges
-#
-CONFIG_YENTA=m
-CONFIG_YENTA_O2=y
-CONFIG_YENTA_RICOH=y
-CONFIG_YENTA_TI=y
-CONFIG_YENTA_ENE_TUNE=y
-CONFIG_YENTA_TOSHIBA=y
-CONFIG_PD6729=m
-CONFIG_I82092=m
-CONFIG_PCCARD_NONSTATIC=y
-CONFIG_HOTPLUG_PCI=y
-CONFIG_HOTPLUG_PCI_ACPI=y
-CONFIG_HOTPLUG_PCI_ACPI_IBM=m
-# CONFIG_HOTPLUG_PCI_CPCI is not set
-CONFIG_HOTPLUG_PCI_SHPC=m
-# CONFIG_RAPIDIO is not set
-# CONFIG_X86_SYSFB is not set
-
-#
-# Executable file formats / Emulations
-#
-CONFIG_BINFMT_ELF=y
-CONFIG_COMPAT_BINFMT_ELF=y
-CONFIG_CORE_DUMP_DEFAULT_ELF_HEADERS=y
-CONFIG_BINFMT_SCRIPT=y
-# CONFIG_HAVE_AOUT is not set
-CONFIG_BINFMT_MISC=m
-CONFIG_COREDUMP=y
-CONFIG_IA32_EMULATION=y
-# CONFIG_IA32_AOUT is not set
-# CONFIG_X86_X32 is not set
-CONFIG_COMPAT=y
-CONFIG_COMPAT_FOR_U64_ALIGNMENT=y
-CONFIG_SYSVIPC_COMPAT=y
-CONFIG_KEYS_COMPAT=y
-CONFIG_X86_DEV_DMA_OPS=y
-CONFIG_PMC_ATOM=y
-CONFIG_NET=y
-CONFIG_COMPAT_NETLINK_MESSAGES=y
-CONFIG_NET_INGRESS=y
-
-#
-# Networking options
-#
-CONFIG_PACKET=y
-CONFIG_PACKET_DIAG=m
-CONFIG_UNIX=y
-CONFIG_UNIX_DIAG=m
-CONFIG_XFRM=y
-CONFIG_XFRM_ALGO=y
-CONFIG_XFRM_USER=y
-CONFIG_XFRM_SUB_POLICY=y
-CONFIG_XFRM_MIGRATE=y
-CONFIG_XFRM_STATISTICS=y
-CONFIG_XFRM_IPCOMP=m
-CONFIG_NET_KEY=m
-CONFIG_NET_KEY_MIGRATE=y
-CONFIG_INET=y
-CONFIG_IP_MULTICAST=y
-CONFIG_IP_ADVANCED_ROUTER=y
-CONFIG_IP_FIB_TRIE_STATS=y
-CONFIG_IP_MULTIPLE_TABLES=y
-CONFIG_IP_ROUTE_MULTIPATH=y
-CONFIG_IP_ROUTE_VERBOSE=y
-CONFIG_IP_ROUTE_CLASSID=y
-# CONFIG_IP_PNP is not set
-CONFIG_NET_IPIP=m
-CONFIG_NET_IPGRE_DEMUX=m
-CONFIG_NET_IP_TUNNEL=m
-CONFIG_NET_IPGRE=m
-CONFIG_NET_IPGRE_BROADCAST=y
-CONFIG_IP_MROUTE=y
-CONFIG_IP_MROUTE_MULTIPLE_TABLES=y
-CONFIG_IP_PIMSM_V1=y
-CONFIG_IP_PIMSM_V2=y
-CONFIG_SYN_COOKIES=y
-CONFIG_NET_IPVTI=m
-CONFIG_NET_UDP_TUNNEL=m
-CONFIG_NET_FOU=m
-CONFIG_NET_FOU_IP_TUNNELS=y
-CONFIG_INET_AH=m
-CONFIG_INET_ESP=m
-CONFIG_INET_IPCOMP=m
-CONFIG_INET_XFRM_TUNNEL=m
-CONFIG_INET_TUNNEL=m
-CONFIG_INET_XFRM_MODE_TRANSPORT=m
-CONFIG_INET_XFRM_MODE_TUNNEL=m
-CONFIG_INET_XFRM_MODE_BEET=m
-CONFIG_INET_LRO=y
-CONFIG_INET_DIAG=m
-CONFIG_INET_TCP_DIAG=m
-CONFIG_INET_UDP_DIAG=m
-CONFIG_TCP_CONG_ADVANCED=y
-CONFIG_TCP_CONG_BIC=m
-CONFIG_TCP_CONG_CUBIC=y
-CONFIG_TCP_CONG_WESTWOOD=m
-CONFIG_TCP_CONG_HTCP=m
-CONFIG_TCP_CONG_HSTCP=m
-CONFIG_TCP_CONG_HYBLA=m
-CONFIG_TCP_CONG_VEGAS=m
-CONFIG_TCP_CONG_SCALABLE=m
-CONFIG_TCP_CONG_LP=m
-CONFIG_TCP_CONG_VENO=m
-CONFIG_TCP_CONG_YEAH=m
-CONFIG_TCP_CONG_ILLINOIS=m
-CONFIG_TCP_CONG_DCTCP=m
-CONFIG_TCP_CONG_CDG=m
-CONFIG_DEFAULT_CUBIC=y
-# CONFIG_DEFAULT_RENO is not set
-CONFIG_DEFAULT_TCP_CONG="cubic"
-CONFIG_TCP_MD5SIG=y
-CONFIG_IPV6=y
-CONFIG_IPV6_ROUTER_PREF=y
-CONFIG_IPV6_ROUTE_INFO=y
-CONFIG_IPV6_OPTIMISTIC_DAD=y
-CONFIG_INET6_AH=m
-CONFIG_INET6_ESP=m
-CONFIG_INET6_IPCOMP=m
-CONFIG_IPV6_MIP6=y
-CONFIG_IPV6_ILA=m
-CONFIG_INET6_XFRM_TUNNEL=m
-CONFIG_INET6_TUNNEL=m
-CONFIG_INET6_XFRM_MODE_TRANSPORT=m
-CONFIG_INET6_XFRM_MODE_TUNNEL=m
-CONFIG_INET6_XFRM_MODE_BEET=m
-CONFIG_INET6_XFRM_MODE_ROUTEOPTIMIZATION=m
-CONFIG_IPV6_VTI=m
-CONFIG_IPV6_SIT=m
-CONFIG_IPV6_SIT_6RD=y
-CONFIG_IPV6_NDISC_NODETYPE=y
-CONFIG_IPV6_TUNNEL=m
-# CONFIG_IPV6_GRE is not set
-CONFIG_IPV6_MULTIPLE_TABLES=y
-CONFIG_IPV6_SUBTREES=y
-CONFIG_IPV6_MROUTE=y
-CONFIG_IPV6_MROUTE_MULTIPLE_TABLES=y
-CONFIG_IPV6_PIMSM_V2=y
-CONFIG_NETLABEL=y
-CONFIG_NETWORK_SECMARK=y
-CONFIG_NET_PTP_CLASSIFY=y
-CONFIG_NETWORK_PHY_TIMESTAMPING=y
-CONFIG_NETFILTER=y
-# CONFIG_NETFILTER_DEBUG is not set
-CONFIG_NETFILTER_ADVANCED=y
-CONFIG_BRIDGE_NETFILTER=m
-
-#
-# Core Netfilter Configuration
-#
-CONFIG_NETFILTER_INGRESS=y
-CONFIG_NETFILTER_NETLINK=m
-CONFIG_NETFILTER_NETLINK_ACCT=m
-CONFIG_NETFILTER_NETLINK_QUEUE=m
-CONFIG_NETFILTER_NETLINK_LOG=m
-CONFIG_NF_CONNTRACK=m
-CONFIG_NF_LOG_COMMON=m
-CONFIG_NF_CONNTRACK_MARK=y
-CONFIG_NF_CONNTRACK_SECMARK=y
-CONFIG_NF_CONNTRACK_ZONES=y
-CONFIG_NF_CONNTRACK_PROCFS=y
-CONFIG_NF_CONNTRACK_EVENTS=y
-# CONFIG_NF_CONNTRACK_TIMEOUT is not set
-CONFIG_NF_CONNTRACK_TIMESTAMP=y
-CONFIG_NF_CONNTRACK_LABELS=y
-CONFIG_NF_CT_PROTO_DCCP=m
-CONFIG_NF_CT_PROTO_GRE=m
-CONFIG_NF_CT_PROTO_SCTP=m
-CONFIG_NF_CT_PROTO_UDPLITE=m
-CONFIG_NF_CONNTRACK_AMANDA=m
-CONFIG_NF_CONNTRACK_FTP=m
-CONFIG_NF_CONNTRACK_H323=m
-CONFIG_NF_CONNTRACK_IRC=m
-CONFIG_NF_CONNTRACK_BROADCAST=m
-CONFIG_NF_CONNTRACK_NETBIOS_NS=m
-CONFIG_NF_CONNTRACK_SNMP=m
-CONFIG_NF_CONNTRACK_PPTP=m
-CONFIG_NF_CONNTRACK_SANE=m
-CONFIG_NF_CONNTRACK_SIP=m
-CONFIG_NF_CONNTRACK_TFTP=m
-CONFIG_NF_CT_NETLINK=m
-# CONFIG_NF_CT_NETLINK_TIMEOUT is not set
-# CONFIG_NETFILTER_NETLINK_GLUE_CT is not set
-CONFIG_NF_NAT=m
-CONFIG_NF_NAT_NEEDED=y
-CONFIG_NF_NAT_PROTO_DCCP=m
-CONFIG_NF_NAT_PROTO_UDPLITE=m
-CONFIG_NF_NAT_PROTO_SCTP=m
-CONFIG_NF_NAT_AMANDA=m
-CONFIG_NF_NAT_FTP=m
-CONFIG_NF_NAT_IRC=m
-CONFIG_NF_NAT_SIP=m
-CONFIG_NF_NAT_TFTP=m
-CONFIG_NF_NAT_REDIRECT=m
-CONFIG_NETFILTER_SYNPROXY=m
-CONFIG_NF_TABLES=m
-CONFIG_NF_TABLES_INET=m
-CONFIG_NF_TABLES_NETDEV=m
-CONFIG_NFT_EXTHDR=m
-CONFIG_NFT_META=m
-CONFIG_NFT_CT=m
-CONFIG_NFT_RBTREE=m
-CONFIG_NFT_HASH=m
-CONFIG_NFT_COUNTER=m
-CONFIG_NFT_LOG=m
-CONFIG_NFT_LIMIT=m
-CONFIG_NFT_MASQ=m
-CONFIG_NFT_REDIR=m
-CONFIG_NFT_NAT=m
-CONFIG_NFT_QUEUE=m
-CONFIG_NFT_REJECT=m
-CONFIG_NFT_REJECT_INET=m
-CONFIG_NFT_COMPAT=m
-CONFIG_NETFILTER_XTABLES=y
-
-#
-# Xtables combined modules
-#
-CONFIG_NETFILTER_XT_MARK=m
-CONFIG_NETFILTER_XT_CONNMARK=m
-CONFIG_NETFILTER_XT_SET=m
-
-#
-# Xtables targets
-#
-CONFIG_NETFILTER_XT_TARGET_AUDIT=m
-CONFIG_NETFILTER_XT_TARGET_CHECKSUM=m
-CONFIG_NETFILTER_XT_TARGET_CLASSIFY=m
-CONFIG_NETFILTER_XT_TARGET_CONNMARK=m
-CONFIG_NETFILTER_XT_TARGET_CONNSECMARK=m
-CONFIG_NETFILTER_XT_TARGET_CT=m
-CONFIG_NETFILTER_XT_TARGET_DSCP=m
-CONFIG_NETFILTER_XT_TARGET_HL=m
-CONFIG_NETFILTER_XT_TARGET_HMARK=m
-CONFIG_NETFILTER_XT_TARGET_IDLETIMER=m
-CONFIG_NETFILTER_XT_TARGET_LED=m
-CONFIG_NETFILTER_XT_TARGET_LOG=m
-CONFIG_NETFILTER_XT_TARGET_MARK=m
-CONFIG_NETFILTER_XT_NAT=m
-CONFIG_NETFILTER_XT_TARGET_NETMAP=m
-CONFIG_NETFILTER_XT_TARGET_NFLOG=m
-CONFIG_NETFILTER_XT_TARGET_NFQUEUE=m
-CONFIG_NETFILTER_XT_TARGET_NOTRACK=m
-CONFIG_NETFILTER_XT_TARGET_RATEEST=m
-CONFIG_NETFILTER_XT_TARGET_REDIRECT=m
-CONFIG_NETFILTER_XT_TARGET_TEE=m
-CONFIG_NETFILTER_XT_TARGET_TPROXY=m
-CONFIG_NETFILTER_XT_TARGET_TRACE=m
-CONFIG_NETFILTER_XT_TARGET_SECMARK=m
-CONFIG_NETFILTER_XT_TARGET_TCPMSS=m
-CONFIG_NETFILTER_XT_TARGET_TCPOPTSTRIP=m
-
-#
-# Xtables matches
-#
-CONFIG_NETFILTER_XT_MATCH_ADDRTYPE=m
-CONFIG_NETFILTER_XT_MATCH_BPF=m
-CONFIG_NETFILTER_XT_MATCH_CGROUP=m
-CONFIG_NETFILTER_XT_MATCH_CLUSTER=m
-CONFIG_NETFILTER_XT_MATCH_COMMENT=m
-CONFIG_NETFILTER_XT_MATCH_CONNBYTES=m
-CONFIG_NETFILTER_XT_MATCH_CONNLABEL=m
-CONFIG_NETFILTER_XT_MATCH_CONNLIMIT=m
-CONFIG_NETFILTER_XT_MATCH_CONNMARK=m
-CONFIG_NETFILTER_XT_MATCH_CONNTRACK=m
-CONFIG_NETFILTER_XT_MATCH_CPU=m
-CONFIG_NETFILTER_XT_MATCH_DCCP=m
-CONFIG_NETFILTER_XT_MATCH_DEVGROUP=m
-CONFIG_NETFILTER_XT_MATCH_DSCP=m
-CONFIG_NETFILTER_XT_MATCH_ECN=m
-CONFIG_NETFILTER_XT_MATCH_ESP=m
-CONFIG_NETFILTER_XT_MATCH_HASHLIMIT=m
-CONFIG_NETFILTER_XT_MATCH_HELPER=m
-CONFIG_NETFILTER_XT_MATCH_HL=m
-CONFIG_NETFILTER_XT_MATCH_IPCOMP=m
-CONFIG_NETFILTER_XT_MATCH_IPRANGE=m
-CONFIG_NETFILTER_XT_MATCH_IPVS=m
-CONFIG_NETFILTER_XT_MATCH_L2TP=m
-CONFIG_NETFILTER_XT_MATCH_LENGTH=m
-CONFIG_NETFILTER_XT_MATCH_LIMIT=m
-CONFIG_NETFILTER_XT_MATCH_MAC=m
-CONFIG_NETFILTER_XT_MATCH_MARK=m
-CONFIG_NETFILTER_XT_MATCH_MULTIPORT=m
-CONFIG_NETFILTER_XT_MATCH_NFACCT=m
-CONFIG_NETFILTER_XT_MATCH_OSF=m
-CONFIG_NETFILTER_XT_MATCH_OWNER=m
-CONFIG_NETFILTER_XT_MATCH_POLICY=m
-CONFIG_NETFILTER_XT_MATCH_PHYSDEV=m
-CONFIG_NETFILTER_XT_MATCH_PKTTYPE=m
-CONFIG_NETFILTER_XT_MATCH_QUOTA=m
-CONFIG_NETFILTER_XT_MATCH_RATEEST=m
-CONFIG_NETFILTER_XT_MATCH_REALM=m
-CONFIG_NETFILTER_XT_MATCH_RECENT=m
-CONFIG_NETFILTER_XT_MATCH_SCTP=m
-CONFIG_NETFILTER_XT_MATCH_SOCKET=m
-CONFIG_NETFILTER_XT_MATCH_STATE=m
-CONFIG_NETFILTER_XT_MATCH_STATISTIC=m
-CONFIG_NETFILTER_XT_MATCH_STRING=m
-CONFIG_NETFILTER_XT_MATCH_TCPMSS=m
-CONFIG_NETFILTER_XT_MATCH_TIME=m
-CONFIG_NETFILTER_XT_MATCH_U32=m
-CONFIG_IP_SET=m
-CONFIG_IP_SET_MAX=256
-CONFIG_IP_SET_BITMAP_IP=m
-CONFIG_IP_SET_BITMAP_IPMAC=m
-CONFIG_IP_SET_BITMAP_PORT=m
-CONFIG_IP_SET_HASH_IP=m
-CONFIG_IP_SET_HASH_IPMARK=m
-CONFIG_IP_SET_HASH_IPPORT=m
-CONFIG_IP_SET_HASH_IPPORTIP=m
-CONFIG_IP_SET_HASH_IPPORTNET=m
-CONFIG_IP_SET_HASH_MAC=m
-CONFIG_IP_SET_HASH_NETPORTNET=m
-CONFIG_IP_SET_HASH_NET=m
-CONFIG_IP_SET_HASH_NETNET=m
-CONFIG_IP_SET_HASH_NETPORT=m
-CONFIG_IP_SET_HASH_NETIFACE=m
-CONFIG_IP_SET_LIST_SET=m
-CONFIG_IP_VS=m
-CONFIG_IP_VS_IPV6=y
-# CONFIG_IP_VS_DEBUG is not set
-CONFIG_IP_VS_TAB_BITS=12
-
-#
-# IPVS transport protocol load balancing support
-#
-CONFIG_IP_VS_PROTO_TCP=y
-CONFIG_IP_VS_PROTO_UDP=y
-CONFIG_IP_VS_PROTO_AH_ESP=y
-CONFIG_IP_VS_PROTO_ESP=y
-CONFIG_IP_VS_PROTO_AH=y
-CONFIG_IP_VS_PROTO_SCTP=y
-
-#
-# IPVS scheduler
-#
-CONFIG_IP_VS_RR=m
-CONFIG_IP_VS_WRR=m
-CONFIG_IP_VS_LC=m
-CONFIG_IP_VS_WLC=m
-CONFIG_IP_VS_FO=m
-CONFIG_IP_VS_OVF=m
-CONFIG_IP_VS_LBLC=m
-CONFIG_IP_VS_LBLCR=m
-CONFIG_IP_VS_DH=m
-CONFIG_IP_VS_SH=m
-CONFIG_IP_VS_SED=m
-CONFIG_IP_VS_NQ=m
-
-#
-# IPVS SH scheduler
-#
-CONFIG_IP_VS_SH_TAB_BITS=8
-
-#
-# IPVS application helper
-#
-CONFIG_IP_VS_FTP=m
-CONFIG_IP_VS_NFCT=y
-CONFIG_IP_VS_PE_SIP=m
-
-#
-# IP: Netfilter Configuration
-#
-CONFIG_NF_DEFRAG_IPV4=m
-CONFIG_NF_CONNTRACK_IPV4=m
-# CONFIG_NF_CONNTRACK_PROC_COMPAT is not set
-CONFIG_NF_TABLES_IPV4=m
-CONFIG_NFT_CHAIN_ROUTE_IPV4=m
-CONFIG_NFT_REJECT_IPV4=m
-CONFIG_NFT_DUP_IPV4=m
-CONFIG_NF_TABLES_ARP=m
-CONFIG_NF_DUP_IPV4=m
-CONFIG_NF_LOG_ARP=m
-CONFIG_NF_LOG_IPV4=m
-CONFIG_NF_REJECT_IPV4=y
-CONFIG_NF_NAT_IPV4=m
-CONFIG_NFT_CHAIN_NAT_IPV4=m
-CONFIG_NF_NAT_MASQUERADE_IPV4=m
-CONFIG_NFT_MASQ_IPV4=m
-CONFIG_NFT_REDIR_IPV4=m
-CONFIG_NF_NAT_SNMP_BASIC=m
-CONFIG_NF_NAT_PROTO_GRE=m
-CONFIG_NF_NAT_PPTP=m
-CONFIG_NF_NAT_H323=m
-CONFIG_IP_NF_IPTABLES=y
-CONFIG_IP_NF_MATCH_AH=m
-CONFIG_IP_NF_MATCH_ECN=m
-CONFIG_IP_NF_MATCH_RPFILTER=m
-CONFIG_IP_NF_MATCH_TTL=m
-CONFIG_IP_NF_FILTER=y
-CONFIG_IP_NF_TARGET_REJECT=y
-CONFIG_IP_NF_TARGET_SYNPROXY=m
-CONFIG_IP_NF_NAT=m
-CONFIG_IP_NF_TARGET_MASQUERADE=m
-CONFIG_IP_NF_TARGET_NETMAP=m
-CONFIG_IP_NF_TARGET_REDIRECT=m
-CONFIG_IP_NF_MANGLE=m
-CONFIG_IP_NF_TARGET_CLUSTERIP=m
-CONFIG_IP_NF_TARGET_ECN=m
-CONFIG_IP_NF_TARGET_TTL=m
-CONFIG_IP_NF_RAW=m
-CONFIG_IP_NF_SECURITY=m
-CONFIG_IP_NF_ARPTABLES=m
-CONFIG_IP_NF_ARPFILTER=m
-CONFIG_IP_NF_ARP_MANGLE=m
-
-#
-# IPv6: Netfilter Configuration
-#
-CONFIG_NF_DEFRAG_IPV6=m
-CONFIG_NF_CONNTRACK_IPV6=m
-CONFIG_NF_TABLES_IPV6=m
-CONFIG_NFT_CHAIN_ROUTE_IPV6=m
-CONFIG_NFT_REJECT_IPV6=m
-CONFIG_NFT_DUP_IPV6=m
-CONFIG_NF_DUP_IPV6=m
-CONFIG_NF_REJECT_IPV6=m
-CONFIG_NF_LOG_IPV6=m
-CONFIG_NF_NAT_IPV6=m
-CONFIG_NFT_CHAIN_NAT_IPV6=m
-CONFIG_NF_NAT_MASQUERADE_IPV6=m
-CONFIG_NFT_MASQ_IPV6=m
-CONFIG_NFT_REDIR_IPV6=m
-CONFIG_IP6_NF_IPTABLES=m
-CONFIG_IP6_NF_MATCH_AH=m
-CONFIG_IP6_NF_MATCH_EUI64=m
-CONFIG_IP6_NF_MATCH_FRAG=m
-CONFIG_IP6_NF_MATCH_OPTS=m
-CONFIG_IP6_NF_MATCH_HL=m
-CONFIG_IP6_NF_MATCH_IPV6HEADER=m
-CONFIG_IP6_NF_MATCH_MH=m
-CONFIG_IP6_NF_MATCH_RPFILTER=m
-CONFIG_IP6_NF_MATCH_RT=m
-CONFIG_IP6_NF_TARGET_HL=m
-CONFIG_IP6_NF_FILTER=m
-CONFIG_IP6_NF_TARGET_REJECT=m
-CONFIG_IP6_NF_TARGET_SYNPROXY=m
-CONFIG_IP6_NF_MANGLE=m
-CONFIG_IP6_NF_RAW=m
-CONFIG_IP6_NF_SECURITY=m
-CONFIG_IP6_NF_NAT=m
-CONFIG_IP6_NF_TARGET_MASQUERADE=m
-# CONFIG_IP6_NF_TARGET_NPT is not set
-CONFIG_NF_TABLES_BRIDGE=m
-CONFIG_NFT_BRIDGE_META=m
-CONFIG_NFT_BRIDGE_REJECT=m
-CONFIG_NF_LOG_BRIDGE=m
-CONFIG_BRIDGE_NF_EBTABLES=m
-CONFIG_BRIDGE_EBT_BROUTE=m
-CONFIG_BRIDGE_EBT_T_FILTER=m
-CONFIG_BRIDGE_EBT_T_NAT=m
-CONFIG_BRIDGE_EBT_802_3=m
-CONFIG_BRIDGE_EBT_AMONG=m
-CONFIG_BRIDGE_EBT_ARP=m
-CONFIG_BRIDGE_EBT_IP=m
-CONFIG_BRIDGE_EBT_IP6=m
-CONFIG_BRIDGE_EBT_LIMIT=m
-CONFIG_BRIDGE_EBT_MARK=m
-CONFIG_BRIDGE_EBT_PKTTYPE=m
-CONFIG_BRIDGE_EBT_STP=m
-CONFIG_BRIDGE_EBT_VLAN=m
-CONFIG_BRIDGE_EBT_ARPREPLY=m
-CONFIG_BRIDGE_EBT_DNAT=m
-CONFIG_BRIDGE_EBT_MARK_T=m
-CONFIG_BRIDGE_EBT_REDIRECT=m
-CONFIG_BRIDGE_EBT_SNAT=m
-CONFIG_BRIDGE_EBT_LOG=m
-CONFIG_BRIDGE_EBT_NFLOG=m
-CONFIG_IP_DCCP=m
-CONFIG_INET_DCCP_DIAG=m
-
-#
-# DCCP CCIDs Configuration
-#
-# CONFIG_IP_DCCP_CCID2_DEBUG is not set
-CONFIG_IP_DCCP_CCID3=y
-# CONFIG_IP_DCCP_CCID3_DEBUG is not set
-CONFIG_IP_DCCP_TFRC_LIB=y
-
-#
-# DCCP Kernel Hacking
-#
-# CONFIG_IP_DCCP_DEBUG is not set
-# CONFIG_NET_DCCPPROBE is not set
-CONFIG_IP_SCTP=m
-CONFIG_NET_SCTPPROBE=m
-# CONFIG_SCTP_DBG_OBJCNT is not set
-# CONFIG_SCTP_DEFAULT_COOKIE_HMAC_MD5 is not set
-CONFIG_SCTP_DEFAULT_COOKIE_HMAC_SHA1=y
-# CONFIG_SCTP_DEFAULT_COOKIE_HMAC_NONE is not set
-CONFIG_SCTP_COOKIE_HMAC_MD5=y
-CONFIG_SCTP_COOKIE_HMAC_SHA1=y
-CONFIG_RDS=m
-CONFIG_RDS_RDMA=m
-CONFIG_RDS_TCP=m
-# CONFIG_RDS_DEBUG is not set
-CONFIG_TIPC=m
-# CONFIG_TIPC_MEDIA_IB is not set
-CONFIG_TIPC_MEDIA_UDP=y
-CONFIG_ATM=m
-CONFIG_ATM_CLIP=m
-# CONFIG_ATM_CLIP_NO_ICMP is not set
-CONFIG_ATM_LANE=m
-# CONFIG_ATM_MPOA is not set
-CONFIG_ATM_BR2684=m
-# CONFIG_ATM_BR2684_IPFILTER is not set
-CONFIG_L2TP=m
-CONFIG_L2TP_DEBUGFS=m
-CONFIG_L2TP_V3=y
-CONFIG_L2TP_IP=m
-CONFIG_L2TP_ETH=m
-CONFIG_STP=m
-CONFIG_GARP=m
-CONFIG_MRP=m
-CONFIG_BRIDGE=m
-CONFIG_BRIDGE_IGMP_SNOOPING=y
-CONFIG_BRIDGE_VLAN_FILTERING=y
-CONFIG_HAVE_NET_DSA=y
-CONFIG_NET_DSA=m
-CONFIG_NET_DSA_HWMON=y
-CONFIG_NET_DSA_TAG_BRCM=y
-CONFIG_NET_DSA_TAG_TRAILER=y
-CONFIG_VLAN_8021Q=m
-CONFIG_VLAN_8021Q_GVRP=y
-CONFIG_VLAN_8021Q_MVRP=y
-# CONFIG_DECNET is not set
-CONFIG_LLC=m
-# CONFIG_LLC2 is not set
-CONFIG_IPX=m
-# CONFIG_IPX_INTERN is not set
-CONFIG_ATALK=m
-CONFIG_DEV_APPLETALK=m
-CONFIG_IPDDP=m
-CONFIG_IPDDP_ENCAP=y
-# CONFIG_X25 is not set
-# CONFIG_LAPB is not set
-# CONFIG_PHONET is not set
-CONFIG_6LOWPAN=m
-CONFIG_6LOWPAN_NHC=m
-CONFIG_6LOWPAN_NHC_DEST=m
-CONFIG_6LOWPAN_NHC_FRAGMENT=m
-CONFIG_6LOWPAN_NHC_HOP=m
-CONFIG_6LOWPAN_NHC_IPV6=m
-CONFIG_6LOWPAN_NHC_MOBILITY=m
-CONFIG_6LOWPAN_NHC_ROUTING=m
-CONFIG_6LOWPAN_NHC_UDP=m
-CONFIG_IEEE802154=m
-# CONFIG_IEEE802154_NL802154_EXPERIMENTAL is not set
-CONFIG_IEEE802154_SOCKET=m
-CONFIG_IEEE802154_6LOWPAN=m
-CONFIG_MAC802154=m
-CONFIG_NET_SCHED=y
-
-#
-# Queueing/Scheduling
-#
-CONFIG_NET_SCH_CBQ=m
-CONFIG_NET_SCH_HTB=m
-CONFIG_NET_SCH_HFSC=m
-CONFIG_NET_SCH_ATM=m
-CONFIG_NET_SCH_PRIO=m
-CONFIG_NET_SCH_MULTIQ=m
-CONFIG_NET_SCH_RED=m
-CONFIG_NET_SCH_SFB=m
-CONFIG_NET_SCH_SFQ=m
-CONFIG_NET_SCH_TEQL=m
-CONFIG_NET_SCH_TBF=m
-CONFIG_NET_SCH_GRED=m
-CONFIG_NET_SCH_DSMARK=m
-CONFIG_NET_SCH_NETEM=m
-CONFIG_NET_SCH_DRR=m
-CONFIG_NET_SCH_MQPRIO=m
-CONFIG_NET_SCH_CHOKE=m
-CONFIG_NET_SCH_QFQ=m
-CONFIG_NET_SCH_CODEL=m
-CONFIG_NET_SCH_FQ_CODEL=y
-CONFIG_NET_SCH_FQ=m
-CONFIG_NET_SCH_HHF=m
-CONFIG_NET_SCH_PIE=m
-CONFIG_NET_SCH_INGRESS=m
-CONFIG_NET_SCH_PLUG=m
-
-#
-# Classification
-#
-CONFIG_NET_CLS=y
-CONFIG_NET_CLS_BASIC=m
-CONFIG_NET_CLS_TCINDEX=m
-CONFIG_NET_CLS_ROUTE4=m
-CONFIG_NET_CLS_FW=m
-CONFIG_NET_CLS_U32=m
-CONFIG_CLS_U32_PERF=y
-CONFIG_CLS_U32_MARK=y
-CONFIG_NET_CLS_RSVP=m
-CONFIG_NET_CLS_RSVP6=m
-CONFIG_NET_CLS_FLOW=m
-CONFIG_NET_CLS_CGROUP=y
-CONFIG_NET_CLS_BPF=m
-CONFIG_NET_CLS_FLOWER=m
-CONFIG_NET_EMATCH=y
-CONFIG_NET_EMATCH_STACK=32
-CONFIG_NET_EMATCH_CMP=m
-CONFIG_NET_EMATCH_NBYTE=m
-CONFIG_NET_EMATCH_U32=m
-CONFIG_NET_EMATCH_META=m
-CONFIG_NET_EMATCH_TEXT=m
-CONFIG_NET_EMATCH_CANID=m
-CONFIG_NET_EMATCH_IPSET=m
-CONFIG_NET_CLS_ACT=y
-CONFIG_NET_ACT_POLICE=m
-CONFIG_NET_ACT_GACT=m
-CONFIG_GACT_PROB=y
-CONFIG_NET_ACT_MIRRED=m
-CONFIG_NET_ACT_IPT=m
-CONFIG_NET_ACT_NAT=m
-CONFIG_NET_ACT_PEDIT=m
-CONFIG_NET_ACT_SIMP=m
-CONFIG_NET_ACT_SKBEDIT=m
-CONFIG_NET_ACT_CSUM=m
-CONFIG_NET_ACT_VLAN=m
-CONFIG_NET_ACT_BPF=m
-CONFIG_NET_ACT_CONNMARK=m
-CONFIG_NET_CLS_IND=y
-CONFIG_NET_SCH_FIFO=y
-CONFIG_DCB=y
-CONFIG_DNS_RESOLVER=m
-CONFIG_BATMAN_ADV=m
-CONFIG_BATMAN_ADV_BLA=y
-CONFIG_BATMAN_ADV_DAT=y
-CONFIG_BATMAN_ADV_NC=y
-CONFIG_BATMAN_ADV_MCAST=y
-# CONFIG_BATMAN_ADV_DEBUG is not set
-CONFIG_OPENVSWITCH=m
-CONFIG_OPENVSWITCH_GRE=m
-CONFIG_OPENVSWITCH_VXLAN=m
-CONFIG_OPENVSWITCH_GENEVE=m
-CONFIG_VSOCKETS=m
-CONFIG_VMWARE_VMCI_VSOCKETS=m
-# CONFIG_NETLINK_MMAP is not set
-CONFIG_NETLINK_DIAG=m
-CONFIG_MPLS=y
-CONFIG_NET_MPLS_GSO=m
-CONFIG_MPLS_ROUTING=m
-CONFIG_MPLS_IPTUNNEL=m
-# CONFIG_HSR is not set
-CONFIG_NET_SWITCHDEV=y
-# CONFIG_NET_L3_MASTER_DEV is not set
-CONFIG_RPS=y
-CONFIG_RFS_ACCEL=y
-CONFIG_XPS=y
-CONFIG_CGROUP_NET_PRIO=y
-CONFIG_CGROUP_NET_CLASSID=y
-CONFIG_NET_RX_BUSY_POLL=y
-CONFIG_BQL=y
-CONFIG_BPF_JIT=y
-CONFIG_NET_FLOW_LIMIT=y
-
-#
-# Network testing
-#
-CONFIG_NET_PKTGEN=m
-# CONFIG_NET_TCPPROBE is not set
-CONFIG_NET_DROP_MONITOR=y
-CONFIG_HAMRADIO=y
-
-#
-# Packet Radio protocols
-#
-CONFIG_AX25=m
-CONFIG_AX25_DAMA_SLAVE=y
-CONFIG_NETROM=m
-CONFIG_ROSE=m
-
-#
-# AX.25 network device drivers
-#
-CONFIG_MKISS=m
-CONFIG_6PACK=m
-CONFIG_BPQETHER=m
-CONFIG_BAYCOM_SER_FDX=m
-CONFIG_BAYCOM_SER_HDX=m
-CONFIG_BAYCOM_PAR=m
-CONFIG_YAM=m
-CONFIG_CAN=m
-CONFIG_CAN_RAW=m
-CONFIG_CAN_BCM=m
-CONFIG_CAN_GW=m
-
-#
-# CAN Device Drivers
-#
-CONFIG_CAN_VCAN=m
-CONFIG_CAN_SLCAN=m
-CONFIG_CAN_DEV=m
-CONFIG_CAN_CALC_BITTIMING=y
-CONFIG_CAN_LEDS=y
-# CONFIG_CAN_AT91 is not set
-# CONFIG_PCH_CAN is not set
-# CONFIG_CAN_SUN4I is not set
-# CONFIG_CAN_XILINXCAN is not set
-CONFIG_CAN_SJA1000=m
-# CONFIG_CAN_SJA1000_ISA is not set
-CONFIG_CAN_SJA1000_PLATFORM=m
-# CONFIG_CAN_EMS_PCMCIA is not set
-CONFIG_CAN_EMS_PCI=m
-# CONFIG_CAN_PEAK_PCMCIA is not set
-CONFIG_CAN_PEAK_PCI=m
-CONFIG_CAN_PEAK_PCIEC=y
-CONFIG_CAN_KVASER_PCI=m
-CONFIG_CAN_PLX_PCI=m
-CONFIG_CAN_C_CAN=m
-CONFIG_CAN_C_CAN_PLATFORM=m
-CONFIG_CAN_C_CAN_PCI=m
-CONFIG_CAN_M_CAN=m
-CONFIG_CAN_CC770=m
-# CONFIG_CAN_CC770_ISA is not set
-CONFIG_CAN_CC770_PLATFORM=m
-
-#
-# CAN SPI interfaces
-#
-# CONFIG_CAN_MCP251X is not set
-
-#
-# CAN USB interfaces
-#
-CONFIG_CAN_EMS_USB=m
-CONFIG_CAN_ESD_USB2=m
-CONFIG_CAN_GS_USB=m
-CONFIG_CAN_KVASER_USB=m
-CONFIG_CAN_PEAK_USB=m
-CONFIG_CAN_8DEV_USB=m
-CONFIG_CAN_SOFTING=m
-# CONFIG_CAN_SOFTING_CS is not set
-# CONFIG_CAN_DEBUG_DEVICES is not set
-CONFIG_IRDA=m
-
-#
-# IrDA protocols
-#
-CONFIG_IRLAN=m
-CONFIG_IRNET=m
-CONFIG_IRCOMM=m
-# CONFIG_IRDA_ULTRA is not set
-
-#
-# IrDA options
-#
-CONFIG_IRDA_CACHE_LAST_LSAP=y
-CONFIG_IRDA_FAST_RR=y
-# CONFIG_IRDA_DEBUG is not set
-
-#
-# Infrared-port device drivers
-#
-
-#
-# SIR device drivers
-#
-CONFIG_IRTTY_SIR=m
-
-#
-# Dongle support
-#
-CONFIG_DONGLE=y
-CONFIG_ESI_DONGLE=m
-CONFIG_ACTISYS_DONGLE=m
-CONFIG_TEKRAM_DONGLE=m
-CONFIG_TOIM3232_DONGLE=m
-CONFIG_LITELINK_DONGLE=m
-CONFIG_MA600_DONGLE=m
-CONFIG_GIRBIL_DONGLE=m
-CONFIG_MCP2120_DONGLE=m
-CONFIG_OLD_BELKIN_DONGLE=m
-CONFIG_ACT200L_DONGLE=m
-CONFIG_KINGSUN_DONGLE=m
-CONFIG_KSDAZZLE_DONGLE=m
-CONFIG_KS959_DONGLE=m
-
-#
-# FIR device drivers
-#
-CONFIG_USB_IRDA=m
-CONFIG_SIGMATEL_FIR=m
-CONFIG_NSC_FIR=m
-CONFIG_WINBOND_FIR=m
-CONFIG_SMC_IRCC_FIR=m
-CONFIG_ALI_FIR=m
-CONFIG_VLSI_FIR=m
-CONFIG_VIA_FIR=m
-CONFIG_MCS_FIR=m
-# CONFIG_SH_IRDA is not set
-CONFIG_BT=m
-CONFIG_BT_BREDR=y
-CONFIG_BT_RFCOMM=m
-CONFIG_BT_RFCOMM_TTY=y
-CONFIG_BT_BNEP=m
-CONFIG_BT_BNEP_MC_FILTER=y
-CONFIG_BT_BNEP_PROTO_FILTER=y
-CONFIG_BT_CMTP=m
-CONFIG_BT_HIDP=m
-CONFIG_BT_HS=y
-CONFIG_BT_LE=y
-CONFIG_BT_6LOWPAN=m
-# CONFIG_BT_SELFTEST is not set
-# CONFIG_BT_DEBUGFS is not set
-
-#
-# Bluetooth device drivers
-#
-CONFIG_BT_INTEL=m
-CONFIG_BT_BCM=m
-CONFIG_BT_RTL=m
-CONFIG_BT_QCA=m
-CONFIG_BT_HCIBTUSB=m
-CONFIG_BT_HCIBTUSB_BCM=y
-CONFIG_BT_HCIBTUSB_RTL=y
-CONFIG_BT_HCIBTSDIO=m
-CONFIG_BT_HCIUART=m
-CONFIG_BT_HCIUART_H4=y
-CONFIG_BT_HCIUART_BCSP=y
-CONFIG_BT_HCIUART_ATH3K=y
-CONFIG_BT_HCIUART_LL=y
-CONFIG_BT_HCIUART_3WIRE=y
-CONFIG_BT_HCIUART_INTEL=y
-CONFIG_BT_HCIUART_BCM=y
-CONFIG_BT_HCIUART_QCA=y
-CONFIG_BT_HCIBCM203X=m
-CONFIG_BT_HCIBPA10X=m
-CONFIG_BT_HCIBFUSB=m
-CONFIG_BT_HCIDTL1=m
-CONFIG_BT_HCIBT3C=m
-CONFIG_BT_HCIBLUECARD=m
-CONFIG_BT_HCIBTUART=m
-CONFIG_BT_HCIVHCI=m
-CONFIG_BT_MRVL=m
-CONFIG_BT_MRVL_SDIO=m
-CONFIG_BT_ATH3K=m
-# CONFIG_AF_RXRPC is not set
-CONFIG_FIB_RULES=y
-CONFIG_WIRELESS=y
-CONFIG_WIRELESS_EXT=y
-CONFIG_WEXT_CORE=y
-CONFIG_WEXT_PROC=y
-CONFIG_WEXT_SPY=y
-CONFIG_WEXT_PRIV=y
-CONFIG_CFG80211=m
-# CONFIG_NL80211_TESTMODE is not set
-# CONFIG_CFG80211_DEVELOPER_WARNINGS is not set
-# CONFIG_CFG80211_REG_DEBUG is not set
-CONFIG_CFG80211_DEFAULT_PS=y
-CONFIG_CFG80211_DEBUGFS=y
-# CONFIG_CFG80211_INTERNAL_REGDB is not set
-CONFIG_CFG80211_CRDA_SUPPORT=y
-CONFIG_CFG80211_WEXT=y
-CONFIG_CFG80211_WEXT_EXPORT=y
-CONFIG_LIB80211=m
-CONFIG_LIB80211_CRYPT_WEP=m
-CONFIG_LIB80211_CRYPT_CCMP=m
-CONFIG_LIB80211_CRYPT_TKIP=m
-# CONFIG_LIB80211_DEBUG is not set
-CONFIG_MAC80211=m
-CONFIG_MAC80211_HAS_RC=y
-CONFIG_MAC80211_RC_MINSTREL=y
-CONFIG_MAC80211_RC_MINSTREL_HT=y
-# CONFIG_MAC80211_RC_MINSTREL_VHT is not set
-CONFIG_MAC80211_RC_DEFAULT_MINSTREL=y
-CONFIG_MAC80211_RC_DEFAULT="minstrel_ht"
-CONFIG_MAC80211_MESH=y
-CONFIG_MAC80211_LEDS=y
-CONFIG_MAC80211_DEBUGFS=y
-# CONFIG_MAC80211_MESSAGE_TRACING is not set
-# CONFIG_MAC80211_DEBUG_MENU is not set
-CONFIG_MAC80211_STA_HASH_MAX_SIZE=0
-# CONFIG_WIMAX is not set
-CONFIG_RFKILL=m
-CONFIG_RFKILL_LEDS=y
-CONFIG_RFKILL_INPUT=y
-CONFIG_RFKILL_GPIO=m
-CONFIG_NET_9P=m
-CONFIG_NET_9P_VIRTIO=m
-CONFIG_NET_9P_RDMA=m
-# CONFIG_NET_9P_DEBUG is not set
-# CONFIG_CAIF is not set
-CONFIG_CEPH_LIB=m
-# CONFIG_CEPH_LIB_PRETTYDEBUG is not set
-# CONFIG_CEPH_LIB_USE_DNS_RESOLVER is not set
-CONFIG_NFC=m
-CONFIG_NFC_DIGITAL=m
-CONFIG_NFC_NCI=m
-# CONFIG_NFC_NCI_SPI is not set
-# CONFIG_NFC_NCI_UART is not set
-CONFIG_NFC_HCI=m
-CONFIG_NFC_SHDLC=y
-
-#
-# Near Field Communication (NFC) devices
-#
-CONFIG_NFC_PN533=m
-CONFIG_NFC_TRF7970A=m
-CONFIG_NFC_MEI_PHY=m
-CONFIG_NFC_SIM=m
-CONFIG_NFC_PORT100=m
-# CONFIG_NFC_FDP is not set
-CONFIG_NFC_PN544=m
-CONFIG_NFC_PN544_I2C=m
-CONFIG_NFC_PN544_MEI=m
-CONFIG_NFC_MICROREAD=m
-CONFIG_NFC_MICROREAD_I2C=m
-CONFIG_NFC_MICROREAD_MEI=m
-CONFIG_NFC_MRVL=m
-CONFIG_NFC_MRVL_USB=m
-# CONFIG_NFC_MRVL_I2C is not set
-CONFIG_NFC_ST21NFCA=m
-CONFIG_NFC_ST21NFCA_I2C=m
-# CONFIG_NFC_ST_NCI is not set
-CONFIG_NFC_NXP_NCI=m
-CONFIG_NFC_NXP_NCI_I2C=m
-# CONFIG_NFC_S3FWRN5_I2C is not set
-CONFIG_LWTUNNEL=y
-CONFIG_HAVE_BPF_JIT=y
-
-#
-# Device Drivers
-#
-
-#
-# Generic Driver Options
-#
-# CONFIG_UEVENT_HELPER is not set
-CONFIG_DEVTMPFS=y
-CONFIG_DEVTMPFS_MOUNT=y
-CONFIG_STANDALONE=y
-CONFIG_PREVENT_FIRMWARE_BUILD=y
-CONFIG_FW_LOADER=y
-# CONFIG_FIRMWARE_IN_KERNEL is not set
-CONFIG_EXTRA_FIRMWARE=""
-# CONFIG_FW_LOADER_USER_HELPER_FALLBACK is not set
-CONFIG_WANT_DEV_COREDUMP=y
-CONFIG_ALLOW_DEV_COREDUMP=y
-CONFIG_DEV_COREDUMP=y
-# CONFIG_DEBUG_DRIVER is not set
-CONFIG_DEBUG_DEVRES=y
-CONFIG_SYS_HYPERVISOR=y
-# CONFIG_GENERIC_CPU_DEVICES is not set
-CONFIG_GENERIC_CPU_AUTOPROBE=y
-CONFIG_REGMAP=y
-CONFIG_REGMAP_I2C=y
-CONFIG_REGMAP_SPI=m
-CONFIG_REGMAP_IRQ=y
-CONFIG_DMA_SHARED_BUFFER=y
-# CONFIG_FENCE_TRACE is not set
-# CONFIG_DMA_CMA is not set
-
-#
-# Bus devices
-#
-CONFIG_CONNECTOR=y
-CONFIG_PROC_EVENTS=y
-CONFIG_MTD=m
-# CONFIG_MTD_TESTS is not set
-# CONFIG_MTD_REDBOOT_PARTS is not set
-# CONFIG_MTD_CMDLINE_PARTS is not set
-# CONFIG_MTD_AR7_PARTS is not set
-
-#
-# User Modules And Translation Layers
-#
-CONFIG_MTD_BLKDEVS=m
-CONFIG_MTD_BLOCK=m
-# CONFIG_MTD_BLOCK_RO is not set
-# CONFIG_FTL is not set
-# CONFIG_NFTL is not set
-# CONFIG_INFTL is not set
-# CONFIG_RFD_FTL is not set
-# CONFIG_SSFDC is not set
-# CONFIG_SM_FTL is not set
-# CONFIG_MTD_OOPS is not set
-# CONFIG_MTD_SWAP is not set
-# CONFIG_MTD_PARTITIONED_MASTER is not set
-
-#
-# RAM/ROM/Flash chip drivers
-#
-# CONFIG_MTD_CFI is not set
-# CONFIG_MTD_JEDECPROBE is not set
-CONFIG_MTD_MAP_BANK_WIDTH_1=y
-CONFIG_MTD_MAP_BANK_WIDTH_2=y
-CONFIG_MTD_MAP_BANK_WIDTH_4=y
-# CONFIG_MTD_MAP_BANK_WIDTH_8 is not set
-# CONFIG_MTD_MAP_BANK_WIDTH_16 is not set
-# CONFIG_MTD_MAP_BANK_WIDTH_32 is not set
-CONFIG_MTD_CFI_I1=y
-CONFIG_MTD_CFI_I2=y
-# CONFIG_MTD_CFI_I4 is not set
-# CONFIG_MTD_CFI_I8 is not set
-# CONFIG_MTD_RAM is not set
-# CONFIG_MTD_ROM is not set
-# CONFIG_MTD_ABSENT is not set
-
-#
-# Mapping drivers for chip access
-#
-# CONFIG_MTD_COMPLEX_MAPPINGS is not set
-# CONFIG_MTD_TS5500 is not set
-# CONFIG_MTD_INTEL_VR_NOR is not set
-# CONFIG_MTD_PLATRAM is not set
-
-#
-# Self-contained MTD device drivers
-#
-# CONFIG_MTD_PMC551 is not set
-# CONFIG_MTD_DATAFLASH is not set
-# CONFIG_MTD_SST25L is not set
-# CONFIG_MTD_SLRAM is not set
-# CONFIG_MTD_PHRAM is not set
-# CONFIG_MTD_MTDRAM is not set
-# CONFIG_MTD_BLOCK2MTD is not set
-
-#
-# Disk-On-Chip Device Drivers
-#
-# CONFIG_MTD_DOCG3 is not set
-# CONFIG_MTD_NAND is not set
-# CONFIG_MTD_ONENAND is not set
-
-#
-# LPDDR & LPDDR2 PCM memory drivers
-#
-# CONFIG_MTD_LPDDR is not set
-# CONFIG_MTD_SPI_NOR is not set
-CONFIG_MTD_UBI=m
-CONFIG_MTD_UBI_WL_THRESHOLD=4096
-CONFIG_MTD_UBI_BEB_LIMIT=20
-# CONFIG_MTD_UBI_FASTMAP is not set
-# CONFIG_MTD_UBI_GLUEBI is not set
-# CONFIG_MTD_UBI_BLOCK is not set
-# CONFIG_OF is not set
-CONFIG_ARCH_MIGHT_HAVE_PC_PARPORT=y
-CONFIG_PARPORT=m
-CONFIG_PARPORT_PC=m
-CONFIG_PARPORT_SERIAL=m
-# CONFIG_PARPORT_PC_FIFO is not set
-# CONFIG_PARPORT_PC_SUPERIO is not set
-CONFIG_PARPORT_PC_PCMCIA=m
-# CONFIG_PARPORT_GSC is not set
-# CONFIG_PARPORT_AX88796 is not set
-CONFIG_PARPORT_1284=y
-CONFIG_PARPORT_NOT_PC=y
-CONFIG_PNP=y
-# CONFIG_PNP_DEBUG_MESSAGES is not set
-
-#
-# Protocols
-#
-CONFIG_PNPACPI=y
-CONFIG_BLK_DEV=y
-CONFIG_BLK_DEV_NULL_BLK=m
-CONFIG_BLK_DEV_FD=m
-# CONFIG_PARIDE is not set
-CONFIG_BLK_DEV_PCIESSD_MTIP32XX=m
-CONFIG_ZRAM=m
-# CONFIG_ZRAM_LZ4_COMPRESS is not set
-CONFIG_BLK_CPQ_CISS_DA=m
-CONFIG_CISS_SCSI_TAPE=y
-CONFIG_BLK_DEV_DAC960=m
-CONFIG_BLK_DEV_UMEM=m
-# CONFIG_BLK_DEV_COW_COMMON is not set
-CONFIG_BLK_DEV_LOOP=m
-CONFIG_BLK_DEV_LOOP_MIN_COUNT=0
-# CONFIG_BLK_DEV_CRYPTOLOOP is not set
-CONFIG_BLK_DEV_DRBD=m
-# CONFIG_DRBD_FAULT_INJECTION is not set
-CONFIG_BLK_DEV_NBD=m
-CONFIG_BLK_DEV_SKD=m
-CONFIG_BLK_DEV_OSD=m
-CONFIG_BLK_DEV_SX8=m
-CONFIG_BLK_DEV_RAM=m
-CONFIG_BLK_DEV_RAM_COUNT=16
-CONFIG_BLK_DEV_RAM_SIZE=16384
-CONFIG_BLK_DEV_RAM_DAX=y
-CONFIG_CDROM_PKTCDVD=m
-CONFIG_CDROM_PKTCDVD_BUFFERS=8
-# CONFIG_CDROM_PKTCDVD_WCACHE is not set
-CONFIG_ATA_OVER_ETH=m
-CONFIG_XEN_BLKDEV_FRONTEND=m
-CONFIG_XEN_BLKDEV_BACKEND=m
-CONFIG_VIRTIO_BLK=m
-# CONFIG_BLK_DEV_HD is not set
-CONFIG_BLK_DEV_RBD=m
-# CONFIG_BLK_DEV_RSXX is not set
-CONFIG_BLK_DEV_NVME=y
-
-#
-# Misc devices
-#
-CONFIG_SENSORS_LIS3LV02D=m
-# CONFIG_AD525X_DPOT is not set
-# CONFIG_DUMMY_IRQ is not set
-# CONFIG_IBM_ASM is not set
-# CONFIG_PHANTOM is not set
-# CONFIG_INTEL_MID_PTI is not set
-CONFIG_SGI_IOC4=m
-CONFIG_TIFM_CORE=m
-CONFIG_TIFM_7XX1=m
-# CONFIG_ICS932S401 is not set
-# CONFIG_ATMEL_SSC is not set
-CONFIG_ENCLOSURE_SERVICES=m
-CONFIG_SGI_XP=m
-CONFIG_HP_ILO=m
-# CONFIG_QCOM_COINCELL is not set
-CONFIG_SGI_GRU=m
-# CONFIG_SGI_GRU_DEBUG is not set
-CONFIG_APDS9802ALS=m
-CONFIG_ISL29003=m
-CONFIG_ISL29020=m
-CONFIG_SENSORS_TSL2550=m
-# CONFIG_SENSORS_BH1780 is not set
-CONFIG_SENSORS_BH1770=m
-CONFIG_SENSORS_APDS990X=m
-# CONFIG_HMC6352 is not set
-# CONFIG_DS1682 is not set
-# CONFIG_TI_DAC7512 is not set
-CONFIG_VMWARE_BALLOON=m
-# CONFIG_BMP085_I2C is not set
-# CONFIG_BMP085_SPI is not set
-# CONFIG_PCH_PHUB is not set
-# CONFIG_USB_SWITCH_FSA9480 is not set
-# CONFIG_LATTICE_ECP3_CONFIG is not set
-# CONFIG_SRAM is not set
-# CONFIG_C2PORT is not set
-
-#
-# EEPROM support
-#
-CONFIG_EEPROM_AT24=m
-# CONFIG_EEPROM_AT25 is not set
-CONFIG_EEPROM_LEGACY=m
-CONFIG_EEPROM_MAX6875=m
-CONFIG_EEPROM_93CX6=m
-# CONFIG_EEPROM_93XX46 is not set
-CONFIG_CB710_CORE=m
-# CONFIG_CB710_DEBUG is not set
-CONFIG_CB710_DEBUG_ASSUMPTIONS=y
-
-#
-# Texas Instruments shared transport line discipline
-#
-# CONFIG_TI_ST is not set
-CONFIG_SENSORS_LIS3_I2C=m
-
-#
-# Altera FPGA firmware download module
-#
-CONFIG_ALTERA_STAPL=m
-CONFIG_INTEL_MEI=m
-CONFIG_INTEL_MEI_ME=m
-CONFIG_INTEL_MEI_TXE=m
-CONFIG_VMWARE_VMCI=m
-
-#
-# Intel MIC Bus Driver
-#
-CONFIG_INTEL_MIC_BUS=m
-
-#
-# SCIF Bus Driver
-#
-CONFIG_SCIF_BUS=m
-
-#
-# Intel MIC Host Driver
-#
-CONFIG_INTEL_MIC_HOST=m
-
-#
-# Intel MIC Card Driver
-#
-CONFIG_INTEL_MIC_CARD=m
-
-#
-# SCIF Driver
-#
-CONFIG_SCIF=m
-
-#
-# Intel MIC Coprocessor State Management (COSM) Drivers
-#
-CONFIG_MIC_COSM=m
-# CONFIG_GENWQE is not set
-CONFIG_ECHO=m
-# CONFIG_CXL_BASE is not set
-# CONFIG_CXL_KERNEL_API is not set
-# CONFIG_CXL_EEH is not set
-CONFIG_HAVE_IDE=y
-# CONFIG_IDE is not set
-
-#
-# SCSI device support
-#
-CONFIG_SCSI_MOD=y
-CONFIG_RAID_ATTRS=m
-CONFIG_SCSI=y
-CONFIG_SCSI_DMA=y
-CONFIG_SCSI_NETLINK=y
-# CONFIG_SCSI_MQ_DEFAULT is not set
-CONFIG_SCSI_PROC_FS=y
-
-#
-# SCSI support type (disk, tape, CD-ROM)
-#
-CONFIG_BLK_DEV_SD=y
-CONFIG_CHR_DEV_ST=m
-CONFIG_CHR_DEV_OSST=m
-CONFIG_BLK_DEV_SR=y
-CONFIG_BLK_DEV_SR_VENDOR=y
-CONFIG_CHR_DEV_SG=y
-CONFIG_CHR_DEV_SCH=m
-CONFIG_SCSI_ENCLOSURE=m
-CONFIG_SCSI_CONSTANTS=y
-CONFIG_SCSI_LOGGING=y
-CONFIG_SCSI_SCAN_ASYNC=y
-
-#
-# SCSI Transports
-#
-CONFIG_SCSI_SPI_ATTRS=m
-CONFIG_SCSI_FC_ATTRS=m
-CONFIG_SCSI_ISCSI_ATTRS=m
-CONFIG_SCSI_SAS_ATTRS=m
-CONFIG_SCSI_SAS_LIBSAS=m
-CONFIG_SCSI_SAS_ATA=y
-CONFIG_SCSI_SAS_HOST_SMP=y
-CONFIG_SCSI_SRP_ATTRS=m
-CONFIG_SCSI_LOWLEVEL=y
-CONFIG_ISCSI_TCP=m
-CONFIG_ISCSI_BOOT_SYSFS=m
-CONFIG_SCSI_CXGB3_ISCSI=m
-CONFIG_SCSI_CXGB4_ISCSI=m
-CONFIG_SCSI_BNX2_ISCSI=m
-CONFIG_SCSI_BNX2X_FCOE=m
-CONFIG_BE2ISCSI=m
-CONFIG_BLK_DEV_3W_XXXX_RAID=m
-CONFIG_SCSI_HPSA=m
-CONFIG_SCSI_3W_9XXX=m
-CONFIG_SCSI_3W_SAS=m
-CONFIG_SCSI_ACARD=m
-CONFIG_SCSI_AACRAID=m
-CONFIG_SCSI_AIC7XXX=m
-CONFIG_AIC7XXX_CMDS_PER_DEVICE=4
-CONFIG_AIC7XXX_RESET_DELAY_MS=15000
-# CONFIG_AIC7XXX_DEBUG_ENABLE is not set
-CONFIG_AIC7XXX_DEBUG_MASK=0
-# CONFIG_AIC7XXX_REG_PRETTY_PRINT is not set
-CONFIG_SCSI_AIC79XX=m
-CONFIG_AIC79XX_CMDS_PER_DEVICE=4
-CONFIG_AIC79XX_RESET_DELAY_MS=15000
-# CONFIG_AIC79XX_DEBUG_ENABLE is not set
-CONFIG_AIC79XX_DEBUG_MASK=0
-# CONFIG_AIC79XX_REG_PRETTY_PRINT is not set
-# CONFIG_SCSI_AIC94XX is not set
-CONFIG_SCSI_MVSAS=m
-# CONFIG_SCSI_MVSAS_DEBUG is not set
-CONFIG_SCSI_MVSAS_TASKLET=y
-CONFIG_SCSI_MVUMI=m
-# CONFIG_SCSI_DPT_I2O is not set
-CONFIG_SCSI_ADVANSYS=m
-CONFIG_SCSI_ARCMSR=m
-CONFIG_SCSI_ESAS2R=m
-CONFIG_MEGARAID_NEWGEN=y
-CONFIG_MEGARAID_MM=m
-CONFIG_MEGARAID_MAILBOX=m
-CONFIG_MEGARAID_LEGACY=m
-CONFIG_MEGARAID_SAS=m
-CONFIG_SCSI_MPT3SAS=m
-CONFIG_SCSI_MPT2SAS_MAX_SGE=128
-CONFIG_SCSI_MPT3SAS_MAX_SGE=128
-CONFIG_SCSI_MPT2SAS=m
-CONFIG_SCSI_UFSHCD=m
-CONFIG_SCSI_UFSHCD_PCI=m
-# CONFIG_SCSI_UFSHCD_PLATFORM is not set
-CONFIG_SCSI_HPTIOP=m
-CONFIG_SCSI_BUSLOGIC=m
-CONFIG_SCSI_FLASHPOINT=y
-CONFIG_VMWARE_PVSCSI=m
-CONFIG_XEN_SCSI_FRONTEND=m
-CONFIG_HYPERV_STORAGE=m
-CONFIG_LIBFC=m
-CONFIG_LIBFCOE=m
-CONFIG_FCOE=m
-CONFIG_FCOE_FNIC=m
-CONFIG_SCSI_SNIC=m
-# CONFIG_SCSI_SNIC_DEBUG_FS is not set
-CONFIG_SCSI_DMX3191D=m
-# CONFIG_SCSI_EATA is not set
-# CONFIG_SCSI_FUTURE_DOMAIN is not set
-CONFIG_SCSI_GDTH=m
-CONFIG_SCSI_ISCI=m
-CONFIG_SCSI_IPS=m
-CONFIG_SCSI_INITIO=m
-CONFIG_SCSI_INIA100=m
-# CONFIG_SCSI_PPA is not set
-# CONFIG_SCSI_IMM is not set
-CONFIG_SCSI_STEX=m
-CONFIG_SCSI_SYM53C8XX_2=m
-CONFIG_SCSI_SYM53C8XX_DMA_ADDRESSING_MODE=1
-CONFIG_SCSI_SYM53C8XX_DEFAULT_TAGS=16
-CONFIG_SCSI_SYM53C8XX_MAX_TAGS=64
-CONFIG_SCSI_SYM53C8XX_MMIO=y
-CONFIG_SCSI_IPR=m
-CONFIG_SCSI_IPR_TRACE=y
-CONFIG_SCSI_IPR_DUMP=y
-CONFIG_SCSI_QLOGIC_1280=m
-CONFIG_SCSI_QLA_FC=m
-CONFIG_TCM_QLA2XXX=m
-CONFIG_SCSI_QLA_ISCSI=m
-CONFIG_SCSI_LPFC=m
-# CONFIG_SCSI_LPFC_DEBUG_FS is not set
-CONFIG_SCSI_DC395x=m
-CONFIG_SCSI_AM53C974=m
-CONFIG_SCSI_WD719X=m
-CONFIG_SCSI_DEBUG=m
-CONFIG_SCSI_PMCRAID=m
-CONFIG_SCSI_PM8001=m
-CONFIG_SCSI_BFA_FC=m
-CONFIG_SCSI_VIRTIO=m
-CONFIG_SCSI_CHELSIO_FCOE=m
-# CONFIG_SCSI_LOWLEVEL_PCMCIA is not set
-CONFIG_SCSI_DH=y
-CONFIG_SCSI_DH_RDAC=m
-CONFIG_SCSI_DH_HP_SW=m
-CONFIG_SCSI_DH_EMC=m
-CONFIG_SCSI_DH_ALUA=m
-CONFIG_SCSI_OSD_INITIATOR=m
-CONFIG_SCSI_OSD_ULD=m
-CONFIG_SCSI_OSD_DPRINT_SENSE=1
-# CONFIG_SCSI_OSD_DEBUG is not set
-CONFIG_ATA=y
-# CONFIG_ATA_NONSTANDARD is not set
-CONFIG_ATA_VERBOSE_ERROR=y
-CONFIG_ATA_ACPI=y
-# CONFIG_SATA_ZPODD is not set
-CONFIG_SATA_PMP=y
-
-#
-# Controllers with non-SFF native interface
-#
-CONFIG_SATA_AHCI=y
-CONFIG_SATA_AHCI_PLATFORM=m
-CONFIG_SATA_INIC162X=m
-CONFIG_SATA_ACARD_AHCI=m
-CONFIG_SATA_SIL24=m
-CONFIG_ATA_SFF=y
-
-#
-# SFF controllers with custom DMA interface
-#
-CONFIG_PDC_ADMA=m
-CONFIG_SATA_QSTOR=m
-CONFIG_SATA_SX4=m
-CONFIG_ATA_BMDMA=y
-
-#
-# SATA SFF controllers with BMDMA
-#
-CONFIG_ATA_PIIX=y
-# CONFIG_SATA_HIGHBANK is not set
-CONFIG_SATA_MV=m
-CONFIG_SATA_NV=m
-CONFIG_SATA_PROMISE=m
-# CONFIG_SATA_RCAR is not set
-CONFIG_SATA_SIL=m
-CONFIG_SATA_SIS=m
-CONFIG_SATA_SVW=m
-CONFIG_SATA_ULI=m
-CONFIG_SATA_VIA=m
-CONFIG_SATA_VITESSE=m
-
-#
-# PATA SFF controllers with BMDMA
-#
-CONFIG_PATA_ALI=m
-CONFIG_PATA_AMD=m
-# CONFIG_PATA_ARASAN_CF is not set
-CONFIG_PATA_ARTOP=m
-CONFIG_PATA_ATIIXP=m
-CONFIG_PATA_ATP867X=m
-CONFIG_PATA_CMD64X=m
-# CONFIG_PATA_CS5520 is not set
-# CONFIG_PATA_CS5530 is not set
-# CONFIG_PATA_CS5536 is not set
-CONFIG_PATA_CYPRESS=m
-CONFIG_PATA_EFAR=m
-CONFIG_PATA_HPT366=m
-CONFIG_PATA_HPT37X=m
-CONFIG_PATA_HPT3X2N=m
-CONFIG_PATA_HPT3X3=m
-# CONFIG_PATA_HPT3X3_DMA is not set
-CONFIG_PATA_IT8213=m
-CONFIG_PATA_IT821X=m
-CONFIG_PATA_JMICRON=m
-CONFIG_PATA_MARVELL=m
-CONFIG_PATA_NETCELL=m
-CONFIG_PATA_NINJA32=m
-CONFIG_PATA_NS87415=m
-CONFIG_PATA_OLDPIIX=m
-CONFIG_PATA_OPTIDMA=m
-CONFIG_PATA_PDC2027X=m
-CONFIG_PATA_PDC_OLD=m
-# CONFIG_PATA_RADISYS is not set
-CONFIG_PATA_RDC=m
-# CONFIG_PATA_SC1200 is not set
-CONFIG_PATA_SCH=m
-CONFIG_PATA_SERVERWORKS=m
-CONFIG_PATA_SIL680=m
-CONFIG_PATA_SIS=m
-CONFIG_PATA_TOSHIBA=m
-CONFIG_PATA_TRIFLEX=m
-CONFIG_PATA_VIA=m
-CONFIG_PATA_WINBOND=m
-
-#
-# PIO-only SFF controllers
-#
-CONFIG_PATA_CMD640_PCI=m
-CONFIG_PATA_MPIIX=m
-CONFIG_PATA_NS87410=m
-CONFIG_PATA_OPTI=m
-CONFIG_PATA_PCMCIA=m
-# CONFIG_PATA_RZ1000 is not set
-
-#
-# Generic fallback / legacy drivers
-#
-CONFIG_PATA_ACPI=m
-CONFIG_ATA_GENERIC=m
-# CONFIG_PATA_LEGACY is not set
-CONFIG_MD=y
-CONFIG_BLK_DEV_MD=y
-CONFIG_MD_AUTODETECT=y
-CONFIG_MD_LINEAR=m
-CONFIG_MD_RAID0=m
-CONFIG_MD_RAID1=m
-CONFIG_MD_RAID10=m
-CONFIG_MD_RAID456=m
-CONFIG_MD_MULTIPATH=m
-CONFIG_MD_FAULTY=m
-# CONFIG_MD_CLUSTER is not set
-CONFIG_BCACHE=m
-# CONFIG_BCACHE_DEBUG is not set
-# CONFIG_BCACHE_CLOSURES_DEBUG is not set
-CONFIG_BLK_DEV_DM_BUILTIN=y
-CONFIG_BLK_DEV_DM=y
-# CONFIG_DM_MQ_DEFAULT is not set
-CONFIG_DM_DEBUG=y
-CONFIG_DM_BUFIO=y
-CONFIG_DM_BIO_PRISON=m
-CONFIG_DM_PERSISTENT_DATA=m
-# CONFIG_DM_DEBUG_BLOCK_STACK_TRACING is not set
-CONFIG_DM_CRYPT=m
-CONFIG_DM_SNAPSHOT=y
-CONFIG_DM_THIN_PROVISIONING=m
-CONFIG_DM_CACHE=m
-CONFIG_DM_CACHE_MQ=m
-CONFIG_DM_CACHE_SMQ=m
-CONFIG_DM_CACHE_CLEANER=m
-# CONFIG_DM_ERA is not set
-CONFIG_DM_MIRROR=y
-CONFIG_DM_LOG_USERSPACE=m
-CONFIG_DM_RAID=m
-CONFIG_DM_ZERO=y
-CONFIG_DM_MULTIPATH=m
-CONFIG_DM_MULTIPATH_QL=m
-CONFIG_DM_MULTIPATH_ST=m
-CONFIG_DM_DELAY=m
-CONFIG_DM_UEVENT=y
-CONFIG_DM_FLAKEY=m
-CONFIG_DM_VERITY=m
-CONFIG_DM_SWITCH=m
-CONFIG_DM_LOG_WRITES=m
-CONFIG_TARGET_CORE=m
-CONFIG_TCM_IBLOCK=m
-CONFIG_TCM_FILEIO=m
-CONFIG_TCM_PSCSI=m
-CONFIG_TCM_USER2=m
-CONFIG_LOOPBACK_TARGET=m
-CONFIG_TCM_FC=m
-CONFIG_ISCSI_TARGET=m
-CONFIG_SBP_TARGET=m
-CONFIG_FUSION=y
-CONFIG_FUSION_SPI=m
-CONFIG_FUSION_FC=m
-CONFIG_FUSION_SAS=m
-CONFIG_FUSION_MAX_SGE=40
-CONFIG_FUSION_CTL=m
-CONFIG_FUSION_LAN=m
-CONFIG_FUSION_LOGGING=y
-
-#
-# IEEE 1394 (FireWire) support
-#
-CONFIG_FIREWIRE=m
-CONFIG_FIREWIRE_OHCI=m
-CONFIG_FIREWIRE_SBP2=m
-CONFIG_FIREWIRE_NET=m
-CONFIG_FIREWIRE_NOSY=m
-CONFIG_MACINTOSH_DRIVERS=y
-CONFIG_MAC_EMUMOUSEBTN=y
-CONFIG_NETDEVICES=y
-CONFIG_MII=m
-CONFIG_NET_CORE=y
-CONFIG_BONDING=m
-CONFIG_DUMMY=m
-CONFIG_EQUALIZER=m
-CONFIG_NET_FC=y
-CONFIG_IFB=m
-CONFIG_NET_TEAM=m
-CONFIG_NET_TEAM_MODE_BROADCAST=m
-CONFIG_NET_TEAM_MODE_ROUNDROBIN=m
-CONFIG_NET_TEAM_MODE_RANDOM=m
-CONFIG_NET_TEAM_MODE_ACTIVEBACKUP=m
-CONFIG_NET_TEAM_MODE_LOADBALANCE=m
-CONFIG_MACVLAN=m
-CONFIG_MACVTAP=m
-CONFIG_IPVLAN=m
-CONFIG_VXLAN=m
-CONFIG_GENEVE=m
-CONFIG_NETCONSOLE=m
-CONFIG_NETCONSOLE_DYNAMIC=y
-CONFIG_NETPOLL=y
-CONFIG_NET_POLL_CONTROLLER=y
-CONFIG_NTB_NETDEV=m
-CONFIG_TUN=m
-# CONFIG_TUN_VNET_CROSS_LE is not set
-CONFIG_VETH=m
-CONFIG_VIRTIO_NET=m
-CONFIG_NLMON=m
-CONFIG_SUNGEM_PHY=m
-# CONFIG_ARCNET is not set
-CONFIG_ATM_DRIVERS=y
-# CONFIG_ATM_DUMMY is not set
-CONFIG_ATM_TCP=m
-# CONFIG_ATM_LANAI is not set
-CONFIG_ATM_ENI=m
-# CONFIG_ATM_ENI_DEBUG is not set
-# CONFIG_ATM_ENI_TUNE_BURST is not set
-CONFIG_ATM_FIRESTREAM=m
-# CONFIG_ATM_ZATM is not set
-CONFIG_ATM_NICSTAR=m
-# CONFIG_ATM_NICSTAR_USE_SUNI is not set
-# CONFIG_ATM_NICSTAR_USE_IDT77105 is not set
-# CONFIG_ATM_IDT77252 is not set
-# CONFIG_ATM_AMBASSADOR is not set
-# CONFIG_ATM_HORIZON is not set
-# CONFIG_ATM_IA is not set
-# CONFIG_ATM_FORE200E is not set
-CONFIG_ATM_HE=m
-# CONFIG_ATM_HE_USE_SUNI is not set
-CONFIG_ATM_SOLOS=m
-
-#
-# CAIF transport drivers
-#
-CONFIG_VHOST_NET=m
-CONFIG_VHOST_SCSI=m
-CONFIG_VHOST_RING=m
-CONFIG_VHOST=m
-# CONFIG_VHOST_CROSS_ENDIAN_LEGACY is not set
-
-#
-# Distributed Switch Architecture drivers
-#
-# CONFIG_NET_DSA_MV88E6XXX is not set
-CONFIG_NET_DSA_MV88E6060=m
-# CONFIG_NET_DSA_MV88E6XXX_NEED_PPU is not set
-# CONFIG_NET_DSA_MV88E6131 is not set
-# CONFIG_NET_DSA_MV88E6123_61_65 is not set
-# CONFIG_NET_DSA_MV88E6171 is not set
-# CONFIG_NET_DSA_MV88E6352 is not set
-CONFIG_NET_DSA_BCM_SF2=m
-CONFIG_ETHERNET=y
-CONFIG_MDIO=m
-CONFIG_NET_VENDOR_3COM=y
-CONFIG_PCMCIA_3C574=m
-CONFIG_PCMCIA_3C589=m
-CONFIG_VORTEX=m
-CONFIG_TYPHOON=m
-CONFIG_NET_VENDOR_ADAPTEC=y
-CONFIG_ADAPTEC_STARFIRE=m
-CONFIG_NET_VENDOR_AGERE=y
-CONFIG_ET131X=m
-CONFIG_NET_VENDOR_ALTEON=y
-CONFIG_ACENIC=m
-# CONFIG_ACENIC_OMIT_TIGON_I is not set
-CONFIG_ALTERA_TSE=m
-CONFIG_NET_VENDOR_AMD=y
-CONFIG_AMD8111_ETH=m
-CONFIG_PCNET32=m
-CONFIG_PCMCIA_NMCLAN=m
-# CONFIG_AMD_XGBE is not set
-# CONFIG_NET_XGENE is not set
-CONFIG_NET_VENDOR_ARC=y
-CONFIG_NET_VENDOR_ATHEROS=y
-CONFIG_ATL2=m
-CONFIG_ATL1=m
-CONFIG_ATL1E=m
-CONFIG_ATL1C=m
-CONFIG_ALX=m
-# CONFIG_NET_VENDOR_AURORA is not set
-CONFIG_NET_CADENCE=y
-CONFIG_MACB=m
-CONFIG_NET_VENDOR_BROADCOM=y
-CONFIG_B44=m
-CONFIG_B44_PCI_AUTOSELECT=y
-CONFIG_B44_PCICORE_AUTOSELECT=y
-CONFIG_B44_PCI=y
-CONFIG_BCMGENET=m
-CONFIG_BNX2=m
-CONFIG_CNIC=m
-CONFIG_TIGON3=m
-CONFIG_BNX2X=m
-CONFIG_BNX2X_SRIOV=y
-CONFIG_BNX2X_VXLAN=y
-CONFIG_BNXT=m
-CONFIG_BNXT_SRIOV=y
-CONFIG_NET_VENDOR_BROCADE=y
-CONFIG_BNA=m
-# CONFIG_NET_CALXEDA_XGMAC is not set
-# CONFIG_NET_VENDOR_CAVIUM is not set
-CONFIG_NET_VENDOR_CHELSIO=y
-CONFIG_CHELSIO_T1=m
-CONFIG_CHELSIO_T1_1G=y
-CONFIG_CHELSIO_T3=m
-CONFIG_CHELSIO_T4=m
-CONFIG_CHELSIO_T4_DCB=y
-# CONFIG_CHELSIO_T4_FCOE is not set
-CONFIG_CHELSIO_T4VF=m
-CONFIG_NET_VENDOR_CISCO=y
-CONFIG_ENIC=m
-# CONFIG_CX_ECAT is not set
-CONFIG_DNET=m
-CONFIG_NET_VENDOR_DEC=y
-CONFIG_NET_TULIP=y
-CONFIG_DE2104X=m
-CONFIG_DE2104X_DSL=0
-CONFIG_TULIP=m
-# CONFIG_TULIP_MWI is not set
-CONFIG_TULIP_MMIO=y
-# CONFIG_TULIP_NAPI is not set
-CONFIG_DE4X5=m
-CONFIG_WINBOND_840=m
-CONFIG_DM9102=m
-CONFIG_ULI526X=m
-CONFIG_PCMCIA_XIRCOM=m
-CONFIG_NET_VENDOR_DLINK=y
-CONFIG_DL2K=m
-CONFIG_SUNDANCE=m
-# CONFIG_SUNDANCE_MMIO is not set
-CONFIG_NET_VENDOR_EMULEX=y
-CONFIG_BE2NET=m
-# CONFIG_BE2NET_HWMON is not set
-CONFIG_BE2NET_VXLAN=y
-# CONFIG_NET_VENDOR_EZCHIP is not set
-CONFIG_NET_VENDOR_EXAR=y
-CONFIG_S2IO=m
-CONFIG_VXGE=m
-# CONFIG_VXGE_DEBUG_TRACE_ALL is not set
-# CONFIG_NET_VENDOR_FUJITSU is not set
-# CONFIG_NET_VENDOR_HP is not set
-CONFIG_NET_VENDOR_INTEL=y
-CONFIG_E100=m
-CONFIG_E1000=m
-CONFIG_E1000E=m
-CONFIG_IGB=m
-CONFIG_IGB_HWMON=y
-CONFIG_IGB_DCA=y
-CONFIG_IGBVF=m
-CONFIG_IXGB=m
-CONFIG_IXGBE=m
-CONFIG_IXGBE_VXLAN=y
-CONFIG_IXGBE_HWMON=y
-CONFIG_IXGBE_DCA=y
-CONFIG_IXGBE_DCB=y
-CONFIG_IXGBEVF=m
-CONFIG_I40E=m
-CONFIG_I40E_VXLAN=y
-# CONFIG_I40E_DCB is not set
-# CONFIG_I40E_FCOE is not set
-CONFIG_I40EVF=m
-CONFIG_FM10K=m
-# CONFIG_FM10K_VXLAN is not set
-# CONFIG_NET_VENDOR_I825XX is not set
-CONFIG_JME=m
-CONFIG_NET_VENDOR_MARVELL=y
-CONFIG_MVMDIO=m
-# CONFIG_PXA168_ETH is not set
-CONFIG_SKGE=m
-# CONFIG_SKGE_DEBUG is not set
-CONFIG_SKGE_GENESIS=y
-CONFIG_SKY2=m
-# CONFIG_SKY2_DEBUG is not set
-CONFIG_NET_VENDOR_MELLANOX=y
-CONFIG_MLX4_EN=m
-CONFIG_MLX4_EN_DCB=y
-CONFIG_MLX4_EN_VXLAN=y
-CONFIG_MLX4_CORE=m
-CONFIG_MLX4_DEBUG=y
-CONFIG_MLX5_CORE=m
-CONFIG_MLX5_CORE_EN=y
-CONFIG_MLXSW_CORE=m
-CONFIG_MLXSW_PCI=m
-CONFIG_MLXSW_SWITCHX2=m
-CONFIG_MLXSW_SPECTRUM=m
-CONFIG_NET_VENDOR_MICREL=y
-# CONFIG_KS8842 is not set
-# CONFIG_KS8851 is not set
-# CONFIG_KS8851_MLL is not set
-CONFIG_KSZ884X_PCI=m
-# CONFIG_NET_VENDOR_MICROCHIP is not set
-CONFIG_NET_VENDOR_MYRI=y
-CONFIG_MYRI10GE=m
-CONFIG_MYRI10GE_DCA=y
-CONFIG_FEALNX=m
-CONFIG_NET_VENDOR_NATSEMI=y
-CONFIG_NATSEMI=m
-CONFIG_NS83820=m
-CONFIG_NET_VENDOR_8390=y
-CONFIG_PCMCIA_AXNET=m
-CONFIG_NE2K_PCI=m
-CONFIG_PCMCIA_PCNET=m
-CONFIG_NET_VENDOR_NVIDIA=y
-CONFIG_FORCEDETH=m
-CONFIG_NET_VENDOR_OKI=y
-# CONFIG_PCH_GBE is not set
-CONFIG_ETHOC=m
-CONFIG_NET_PACKET_ENGINE=y
-CONFIG_HAMACHI=m
-CONFIG_YELLOWFIN=m
-CONFIG_NET_VENDOR_QLOGIC=y
-CONFIG_QLA3XXX=m
-CONFIG_QLCNIC=m
-CONFIG_QLCNIC_SRIOV=y
-CONFIG_QLCNIC_DCB=y
-CONFIG_QLCNIC_VXLAN=y
-CONFIG_QLCNIC_HWMON=y
-CONFIG_QLGE=m
-CONFIG_NETXEN_NIC=m
-CONFIG_QED=m
-CONFIG_QEDE=m
-# CONFIG_NET_VENDOR_QUALCOMM is not set
-CONFIG_NET_VENDOR_REALTEK=y
-CONFIG_ATP=m
-CONFIG_8139CP=m
-CONFIG_8139TOO=m
-# CONFIG_8139TOO_PIO is not set
-# CONFIG_8139TOO_TUNE_TWISTER is not set
-CONFIG_8139TOO_8129=y
-# CONFIG_8139_OLD_RX_RESET is not set
-CONFIG_R8169=m
-# CONFIG_NET_VENDOR_RENESAS is not set
-CONFIG_NET_VENDOR_RDC=y
-CONFIG_R6040=m
-CONFIG_NET_VENDOR_ROCKER=y
-CONFIG_ROCKER=m
-# CONFIG_NET_VENDOR_SAMSUNG is not set
-# CONFIG_NET_VENDOR_SEEQ is not set
-CONFIG_NET_VENDOR_SILAN=y
-CONFIG_SC92031=m
-CONFIG_NET_VENDOR_SIS=y
-CONFIG_SIS900=m
-CONFIG_SIS190=m
-CONFIG_SFC=m
-CONFIG_SFC_MTD=y
-CONFIG_SFC_MCDI_MON=y
-CONFIG_SFC_SRIOV=y
-# CONFIG_SFC_MCDI_LOGGING is not set
-CONFIG_NET_VENDOR_SMSC=y
-CONFIG_PCMCIA_SMC91C92=m
-CONFIG_EPIC100=m
-CONFIG_SMSC911X=m
-# CONFIG_SMSC911X_ARCH_HOOKS is not set
-CONFIG_SMSC9420=m
-CONFIG_NET_VENDOR_STMICRO=y
-CONFIG_STMMAC_ETH=m
-# CONFIG_STMMAC_PLATFORM is not set
-# CONFIG_STMMAC_PCI is not set
-CONFIG_NET_VENDOR_SUN=y
-CONFIG_HAPPYMEAL=m
-CONFIG_SUNGEM=m
-CONFIG_CASSINI=m
-CONFIG_NIU=m
-# CONFIG_NET_VENDOR_SYNOPSYS is not set
-CONFIG_NET_VENDOR_TEHUTI=y
-CONFIG_TEHUTI=m
-CONFIG_NET_VENDOR_TI=y
-# CONFIG_TI_CPSW_ALE is not set
-CONFIG_TLAN=m
-CONFIG_NET_VENDOR_VIA=y
-CONFIG_VIA_RHINE=m
-CONFIG_VIA_RHINE_MMIO=y
-CONFIG_VIA_VELOCITY=m
-CONFIG_NET_VENDOR_WIZNET=y
-CONFIG_WIZNET_W5100=m
-CONFIG_WIZNET_W5300=m
-# CONFIG_WIZNET_BUS_DIRECT is not set
-# CONFIG_WIZNET_BUS_INDIRECT is not set
-CONFIG_WIZNET_BUS_ANY=y
-CONFIG_NET_VENDOR_XIRCOM=y
-CONFIG_PCMCIA_XIRC2PS=m
-# CONFIG_FDDI is not set
-# CONFIG_HIPPI is not set
-# CONFIG_NET_SB1000 is not set
-CONFIG_PHYLIB=y
-
-#
-# MII PHY device drivers
-#
-CONFIG_AQUANTIA_PHY=m
-CONFIG_AT803X_PHY=m
-CONFIG_AMD_PHY=m
-CONFIG_MARVELL_PHY=m
-CONFIG_DAVICOM_PHY=m
-CONFIG_QSEMI_PHY=m
-CONFIG_LXT_PHY=m
-CONFIG_CICADA_PHY=m
-CONFIG_VITESSE_PHY=m
-CONFIG_TERANETICS_PHY=m
-CONFIG_SMSC_PHY=m
-CONFIG_BCM_NET_PHYLIB=m
-CONFIG_BROADCOM_PHY=m
-CONFIG_BCM7XXX_PHY=m
-CONFIG_BCM87XX_PHY=m
-CONFIG_ICPLUS_PHY=m
-CONFIG_REALTEK_PHY=m
-CONFIG_NATIONAL_PHY=m
-CONFIG_STE10XP=m
-CONFIG_LSI_ET1011C_PHY=m
-CONFIG_MICREL_PHY=m
-CONFIG_DP83848_PHY=m
-CONFIG_DP83867_PHY=m
-CONFIG_MICROCHIP_PHY=m
-CONFIG_FIXED_PHY=y
-CONFIG_MDIO_BITBANG=m
-# CONFIG_MDIO_GPIO is not set
-CONFIG_MDIO_OCTEON=m
-CONFIG_MDIO_BCM_UNIMAC=m
-# CONFIG_MICREL_KS8995MA is not set
-# CONFIG_PLIP is not set
-CONFIG_PPP=m
-CONFIG_PPP_BSDCOMP=m
-CONFIG_PPP_DEFLATE=m
-CONFIG_PPP_FILTER=y
-CONFIG_PPP_MPPE=m
-CONFIG_PPP_MULTILINK=y
-CONFIG_PPPOATM=m
-CONFIG_PPPOE=m
-CONFIG_PPTP=m
-CONFIG_PPPOL2TP=m
-CONFIG_PPP_ASYNC=m
-CONFIG_PPP_SYNC_TTY=m
-CONFIG_SLIP=m
-CONFIG_SLHC=m
-CONFIG_SLIP_COMPRESSED=y
-CONFIG_SLIP_SMART=y
-# CONFIG_SLIP_MODE_SLIP6 is not set
-CONFIG_USB_NET_DRIVERS=y
-CONFIG_USB_CATC=m
-CONFIG_USB_KAWETH=m
-CONFIG_USB_PEGASUS=m
-CONFIG_USB_RTL8150=m
-CONFIG_USB_RTL8152=m
-CONFIG_USB_LAN78XX=m
-CONFIG_USB_USBNET=m
-CONFIG_USB_NET_AX8817X=m
-CONFIG_USB_NET_AX88179_178A=m
-CONFIG_USB_NET_CDCETHER=m
-CONFIG_USB_NET_CDC_EEM=m
-CONFIG_USB_NET_CDC_NCM=m
-CONFIG_USB_NET_HUAWEI_CDC_NCM=m
-CONFIG_USB_NET_CDC_MBIM=m
-CONFIG_USB_NET_DM9601=m
-CONFIG_USB_NET_SR9700=m
-# CONFIG_USB_NET_SR9800 is not set
-CONFIG_USB_NET_SMSC75XX=m
-CONFIG_USB_NET_SMSC95XX=m
-CONFIG_USB_NET_GL620A=m
-CONFIG_USB_NET_NET1080=m
-CONFIG_USB_NET_PLUSB=m
-CONFIG_USB_NET_MCS7830=m
-CONFIG_USB_NET_RNDIS_HOST=m
-CONFIG_USB_NET_CDC_SUBSET=m
-CONFIG_USB_ALI_M5632=y
-CONFIG_USB_AN2720=y
-CONFIG_USB_BELKIN=y
-CONFIG_USB_ARMLINUX=y
-CONFIG_USB_EPSON2888=y
-CONFIG_USB_KC2190=y
-CONFIG_USB_NET_ZAURUS=m
-CONFIG_USB_NET_CX82310_ETH=m
-CONFIG_USB_NET_KALMIA=m
-CONFIG_USB_NET_QMI_WWAN=m
-CONFIG_USB_HSO=m
-CONFIG_USB_NET_INT51X1=m
-CONFIG_USB_IPHETH=m
-CONFIG_USB_SIERRA_NET=m
-CONFIG_USB_VL600=m
-CONFIG_USB_NET_CH9200=m
-CONFIG_WLAN=y
-# CONFIG_PCMCIA_RAYCS is not set
-# CONFIG_LIBERTAS_THINFIRM is not set
-# CONFIG_AIRO is not set
-# CONFIG_ATMEL is not set
-# CONFIG_AT76C50X_USB is not set
-# CONFIG_AIRO_CS is not set
-# CONFIG_PCMCIA_WL3501 is not set
-# CONFIG_PRISM54 is not set
-# CONFIG_USB_ZD1201 is not set
-CONFIG_USB_NET_RNDIS_WLAN=m
-# CONFIG_ADM8211 is not set
-CONFIG_RTL8180=m
-CONFIG_RTL8187=m
-CONFIG_RTL8187_LEDS=y
-CONFIG_MAC80211_HWSIM=m
-CONFIG_MWL8K=m
-# CONFIG_ATH_CARDS is not set
-CONFIG_B43=m
-CONFIG_B43_BCMA=y
-CONFIG_B43_SSB=y
-CONFIG_B43_BUSES_BCMA_AND_SSB=y
-# CONFIG_B43_BUSES_BCMA is not set
-# CONFIG_B43_BUSES_SSB is not set
-CONFIG_B43_PCI_AUTOSELECT=y
-CONFIG_B43_PCICORE_AUTOSELECT=y
-CONFIG_B43_SDIO=y
-CONFIG_B43_BCMA_PIO=y
-CONFIG_B43_PIO=y
-CONFIG_B43_PHY_G=y
-CONFIG_B43_PHY_N=y
-CONFIG_B43_PHY_LP=y
-CONFIG_B43_PHY_HT=y
-CONFIG_B43_LEDS=y
-CONFIG_B43_HWRNG=y
-# CONFIG_B43_DEBUG is not set
-CONFIG_B43LEGACY=m
-CONFIG_B43LEGACY_PCI_AUTOSELECT=y
-CONFIG_B43LEGACY_PCICORE_AUTOSELECT=y
-CONFIG_B43LEGACY_LEDS=y
-CONFIG_B43LEGACY_HWRNG=y
-# CONFIG_B43LEGACY_DEBUG is not set
-CONFIG_B43LEGACY_DMA=y
-CONFIG_B43LEGACY_PIO=y
-CONFIG_B43LEGACY_DMA_AND_PIO_MODE=y
-# CONFIG_B43LEGACY_DMA_MODE is not set
-# CONFIG_B43LEGACY_PIO_MODE is not set
-CONFIG_BRCMUTIL=m
-CONFIG_BRCMSMAC=m
-CONFIG_BRCMFMAC=m
-CONFIG_BRCMFMAC_PROTO_BCDC=y
-CONFIG_BRCMFMAC_PROTO_MSGBUF=y
-CONFIG_BRCMFMAC_SDIO=y
-CONFIG_BRCMFMAC_USB=y
-CONFIG_BRCMFMAC_PCIE=y
-# CONFIG_BRCM_TRACING is not set
-# CONFIG_BRCMDBG is not set
-# CONFIG_HOSTAP is not set
-CONFIG_IPW2100=m
-CONFIG_IPW2100_MONITOR=y
-# CONFIG_IPW2100_DEBUG is not set
-CONFIG_IPW2200=m
-CONFIG_IPW2200_MONITOR=y
-CONFIG_IPW2200_RADIOTAP=y
-CONFIG_IPW2200_PROMISCUOUS=y
-CONFIG_IPW2200_QOS=y
-# CONFIG_IPW2200_DEBUG is not set
-CONFIG_LIBIPW=m
-# CONFIG_LIBIPW_DEBUG is not set
-CONFIG_IWLWIFI=m
-CONFIG_IWLWIFI_LEDS=y
-CONFIG_IWLDVM=m
-CONFIG_IWLMVM=m
-CONFIG_IWLWIFI_OPMODE_MODULAR=y
-# CONFIG_IWLWIFI_BCAST_FILTERING is not set
-# CONFIG_IWLWIFI_UAPSD is not set
-
-#
-# Debugging Options
-#
-CONFIG_IWLWIFI_DEBUG=y
-CONFIG_IWLWIFI_DEBUGFS=y
-# CONFIG_IWLWIFI_DEBUG_EXPERIMENTAL_UCODE is not set
-# CONFIG_IWLWIFI_DEVICE_TRACING is not set
-CONFIG_IWLEGACY=m
-CONFIG_IWL4965=m
-CONFIG_IWL3945=m
-
-#
-# iwl3945 / iwl4965 Debugging Options
-#
-CONFIG_IWLEGACY_DEBUG=y
-CONFIG_IWLEGACY_DEBUGFS=y
-CONFIG_LIBERTAS=m
-CONFIG_LIBERTAS_USB=m
-CONFIG_LIBERTAS_CS=m
-CONFIG_LIBERTAS_SDIO=m
-# CONFIG_LIBERTAS_SPI is not set
-# CONFIG_LIBERTAS_DEBUG is not set
-CONFIG_LIBERTAS_MESH=y
-CONFIG_HERMES=m
-CONFIG_HERMES_PRISM=y
-CONFIG_HERMES_CACHE_FW_ON_INIT=y
-CONFIG_PLX_HERMES=m
-# CONFIG_TMD_HERMES is not set
-CONFIG_NORTEL_HERMES=m
-CONFIG_PCI_HERMES=m
-CONFIG_PCMCIA_HERMES=m
-# CONFIG_PCMCIA_SPECTRUM is not set
-CONFIG_ORINOCO_USB=m
-CONFIG_P54_COMMON=m
-CONFIG_P54_USB=m
-CONFIG_P54_PCI=m
-# CONFIG_P54_SPI is not set
-CONFIG_P54_LEDS=y
-CONFIG_RT2X00=m
-CONFIG_RT2400PCI=m
-CONFIG_RT2500PCI=m
-CONFIG_RT61PCI=m
-CONFIG_RT2800PCI=m
-CONFIG_RT2800PCI_RT33XX=y
-CONFIG_RT2800PCI_RT35XX=y
-CONFIG_RT2800PCI_RT53XX=y
-CONFIG_RT2800PCI_RT3290=y
-CONFIG_RT2500USB=m
-CONFIG_RT73USB=m
-CONFIG_RT2800USB=m
-CONFIG_RT2800USB_RT33XX=y
-CONFIG_RT2800USB_RT35XX=y
-CONFIG_RT2800USB_RT3573=y
-CONFIG_RT2800USB_RT53XX=y
-CONFIG_RT2800USB_RT55XX=y
-CONFIG_RT2800USB_UNKNOWN=y
-CONFIG_RT2800_LIB=m
-CONFIG_RT2800_LIB_MMIO=m
-CONFIG_RT2X00_LIB_MMIO=m
-CONFIG_RT2X00_LIB_PCI=m
-CONFIG_RT2X00_LIB_USB=m
-CONFIG_RT2X00_LIB=m
-CONFIG_RT2X00_LIB_FIRMWARE=y
-CONFIG_RT2X00_LIB_CRYPTO=y
-CONFIG_RT2X00_LIB_LEDS=y
-CONFIG_RT2X00_LIB_DEBUGFS=y
-# CONFIG_RT2X00_DEBUG is not set
-# CONFIG_WL_MEDIATEK is not set
-CONFIG_RTL_CARDS=m
-CONFIG_RTL8192CE=m
-CONFIG_RTL8192SE=m
-CONFIG_RTL8192DE=m
-CONFIG_RTL8723AE=m
-CONFIG_RTL8723BE=m
-CONFIG_RTL8188EE=m
-CONFIG_RTL8192EE=m
-CONFIG_RTL8821AE=m
-CONFIG_RTL8192CU=m
-CONFIG_RTLWIFI=m
-CONFIG_RTLWIFI_PCI=m
-CONFIG_RTLWIFI_USB=m
-# CONFIG_RTLWIFI_DEBUG is not set
-CONFIG_RTL8192C_COMMON=m
-CONFIG_RTL8723_COMMON=m
-CONFIG_RTLBTCOEXIST=m
-CONFIG_RTL8XXXU=m
-CONFIG_RTL8XXXU_UNTESTED=y
-# CONFIG_WL_TI is not set
-CONFIG_ZD1211RW=m
-# CONFIG_ZD1211RW_DEBUG is not set
-CONFIG_MWIFIEX=m
-CONFIG_MWIFIEX_SDIO=m
-CONFIG_MWIFIEX_PCIE=m
-CONFIG_MWIFIEX_USB=m
-CONFIG_CW1200=m
-CONFIG_CW1200_WLAN_SDIO=m
-CONFIG_CW1200_WLAN_SPI=m
-CONFIG_RSI_91X=m
-CONFIG_RSI_DEBUGFS=y
-CONFIG_RSI_SDIO=m
-CONFIG_RSI_USB=m
-
-#
-# Enable WiMAX (Networking options) to see the WiMAX drivers
-#
-# CONFIG_WAN is not set
-CONFIG_IEEE802154_DRIVERS=m
-CONFIG_IEEE802154_FAKELB=m
-CONFIG_IEEE802154_AT86RF230=m
-# CONFIG_IEEE802154_AT86RF230_DEBUGFS is not set
-# CONFIG_IEEE802154_MRF24J40 is not set
-CONFIG_IEEE802154_CC2520=m
-CONFIG_IEEE802154_ATUSB=m
-CONFIG_XEN_NETDEV_FRONTEND=m
-CONFIG_XEN_NETDEV_BACKEND=m
-CONFIG_VMXNET3=m
-CONFIG_FUJITSU_ES=m
-CONFIG_HYPERV_NET=m
-CONFIG_ISDN=y
-CONFIG_ISDN_I4L=m
-CONFIG_ISDN_PPP=y
-CONFIG_ISDN_PPP_VJ=y
-CONFIG_ISDN_MPP=y
-CONFIG_IPPP_FILTER=y
-# CONFIG_ISDN_PPP_BSDCOMP is not set
-CONFIG_ISDN_AUDIO=y
-CONFIG_ISDN_TTY_FAX=y
-
-#
-# ISDN feature submodules
-#
-CONFIG_ISDN_DIVERSION=m
-
-#
-# ISDN4Linux hardware drivers
-#
-
-#
-# Passive cards
-#
-CONFIG_ISDN_DRV_HISAX=m
-
-#
-# D-channel protocol features
-#
-CONFIG_HISAX_EURO=y
-CONFIG_DE_AOC=y
-CONFIG_HISAX_NO_SENDCOMPLETE=y
-CONFIG_HISAX_NO_LLC=y
-CONFIG_HISAX_NO_KEYPAD=y
-CONFIG_HISAX_1TR6=y
-CONFIG_HISAX_NI1=y
-CONFIG_HISAX_MAX_CARDS=8
-
-#
-# HiSax supported cards
-#
-CONFIG_HISAX_16_3=y
-CONFIG_HISAX_TELESPCI=y
-CONFIG_HISAX_S0BOX=y
-CONFIG_HISAX_FRITZPCI=y
-CONFIG_HISAX_AVM_A1_PCMCIA=y
-CONFIG_HISAX_ELSA=y
-CONFIG_HISAX_DIEHLDIVA=y
-CONFIG_HISAX_SEDLBAUER=y
-CONFIG_HISAX_NETJET=y
-CONFIG_HISAX_NETJET_U=y
-CONFIG_HISAX_NICCY=y
-CONFIG_HISAX_BKM_A4T=y
-CONFIG_HISAX_SCT_QUADRO=y
-CONFIG_HISAX_GAZEL=y
-CONFIG_HISAX_HFC_PCI=y
-CONFIG_HISAX_W6692=y
-CONFIG_HISAX_HFC_SX=y
-CONFIG_HISAX_ENTERNOW_PCI=y
-# CONFIG_HISAX_DEBUG is not set
-
-#
-# HiSax PCMCIA card service modules
-#
-CONFIG_HISAX_SEDLBAUER_CS=m
-CONFIG_HISAX_ELSA_CS=m
-CONFIG_HISAX_AVM_A1_CS=m
-CONFIG_HISAX_TELES_CS=m
-
-#
-# HiSax sub driver modules
-#
-CONFIG_HISAX_ST5481=m
-# CONFIG_HISAX_HFCUSB is not set
-CONFIG_HISAX_HFC4S8S=m
-CONFIG_HISAX_FRITZ_PCIPNP=m
-
-#
-# Active cards
-#
-CONFIG_ISDN_CAPI=m
-# CONFIG_CAPI_TRACE is not set
-CONFIG_ISDN_CAPI_CAPI20=m
-CONFIG_ISDN_CAPI_MIDDLEWARE=y
-CONFIG_ISDN_CAPI_CAPIDRV=m
-# CONFIG_ISDN_CAPI_CAPIDRV_VERBOSE is not set
-
-#
-# CAPI hardware drivers
-#
-CONFIG_CAPI_AVM=y
-CONFIG_ISDN_DRV_AVMB1_B1PCI=m
-CONFIG_ISDN_DRV_AVMB1_B1PCIV4=y
-CONFIG_ISDN_DRV_AVMB1_B1PCMCIA=m
-CONFIG_ISDN_DRV_AVMB1_AVM_CS=m
-CONFIG_ISDN_DRV_AVMB1_T1PCI=m
-CONFIG_ISDN_DRV_AVMB1_C4=m
-CONFIG_CAPI_EICON=y
-CONFIG_ISDN_DIVAS=m
-CONFIG_ISDN_DIVAS_BRIPCI=y
-CONFIG_ISDN_DIVAS_PRIPCI=y
-CONFIG_ISDN_DIVAS_DIVACAPI=m
-CONFIG_ISDN_DIVAS_USERIDI=m
-CONFIG_ISDN_DIVAS_MAINT=m
-CONFIG_ISDN_DRV_GIGASET=m
-CONFIG_GIGASET_CAPI=y
-# CONFIG_GIGASET_I4L is not set
-# CONFIG_GIGASET_DUMMYLL is not set
-CONFIG_GIGASET_BASE=m
-CONFIG_GIGASET_M105=m
-CONFIG_GIGASET_M101=m
-# CONFIG_GIGASET_DEBUG is not set
-CONFIG_HYSDN=m
-CONFIG_HYSDN_CAPI=y
-CONFIG_MISDN=m
-CONFIG_MISDN_DSP=m
-CONFIG_MISDN_L1OIP=m
-
-#
-# mISDN hardware drivers
-#
-CONFIG_MISDN_HFCPCI=m
-CONFIG_MISDN_HFCMULTI=m
-CONFIG_MISDN_HFCUSB=m
-CONFIG_MISDN_AVMFRITZ=m
-CONFIG_MISDN_SPEEDFAX=m
-CONFIG_MISDN_INFINEON=m
-CONFIG_MISDN_W6692=m
-CONFIG_MISDN_NETJET=m
-CONFIG_MISDN_IPAC=m
-CONFIG_MISDN_ISAR=m
-CONFIG_ISDN_HDLC=m
-# CONFIG_NVM is not set
-
-#
-# Input device support
-#
-CONFIG_INPUT=y
-CONFIG_INPUT_LEDS=y
-CONFIG_INPUT_FF_MEMLESS=m
-CONFIG_INPUT_POLLDEV=m
-CONFIG_INPUT_SPARSEKMAP=m
-# CONFIG_INPUT_MATRIXKMAP is not set
-
-#
-# Userland interfaces
-#
-CONFIG_INPUT_MOUSEDEV=y
-# CONFIG_INPUT_MOUSEDEV_PSAUX is not set
-CONFIG_INPUT_MOUSEDEV_SCREEN_X=1024
-CONFIG_INPUT_MOUSEDEV_SCREEN_Y=768
-CONFIG_INPUT_JOYDEV=m
-CONFIG_INPUT_EVDEV=y
-# CONFIG_INPUT_EVBUG is not set
-
-#
-# Input Device Drivers
-#
-CONFIG_INPUT_KEYBOARD=y
-# CONFIG_KEYBOARD_ADP5588 is not set
-# CONFIG_KEYBOARD_ADP5589 is not set
-CONFIG_KEYBOARD_ATKBD=y
-# CONFIG_KEYBOARD_QT1070 is not set
-# CONFIG_KEYBOARD_QT2160 is not set
-# CONFIG_KEYBOARD_LKKBD is not set
-CONFIG_KEYBOARD_GPIO=m
-# CONFIG_KEYBOARD_GPIO_POLLED is not set
-# CONFIG_KEYBOARD_TCA6416 is not set
-# CONFIG_KEYBOARD_TCA8418 is not set
-# CONFIG_KEYBOARD_MATRIX is not set
-# CONFIG_KEYBOARD_LM8323 is not set
-# CONFIG_KEYBOARD_LM8333 is not set
-# CONFIG_KEYBOARD_MAX7359 is not set
-# CONFIG_KEYBOARD_MCS is not set
-# CONFIG_KEYBOARD_MPR121 is not set
-# CONFIG_KEYBOARD_NEWTON is not set
-# CONFIG_KEYBOARD_OPENCORES is not set
-# CONFIG_KEYBOARD_SAMSUNG is not set
-# CONFIG_KEYBOARD_GOLDFISH_EVENTS is not set
-# CONFIG_KEYBOARD_STOWAWAY is not set
-# CONFIG_KEYBOARD_ST_KEYSCAN is not set
-# CONFIG_KEYBOARD_SUNKBD is not set
-# CONFIG_KEYBOARD_SH_KEYSC is not set
-# CONFIG_KEYBOARD_XTKBD is not set
-CONFIG_INPUT_MOUSE=y
-CONFIG_MOUSE_PS2=y
-CONFIG_MOUSE_PS2_ALPS=y
-CONFIG_MOUSE_PS2_LOGIPS2PP=y
-CONFIG_MOUSE_PS2_SYNAPTICS=y
-CONFIG_MOUSE_PS2_CYPRESS=y
-CONFIG_MOUSE_PS2_LIFEBOOK=y
-CONFIG_MOUSE_PS2_TRACKPOINT=y
-CONFIG_MOUSE_PS2_ELANTECH=y
-CONFIG_MOUSE_PS2_SENTELIC=y
-# CONFIG_MOUSE_PS2_TOUCHKIT is not set
-CONFIG_MOUSE_PS2_FOCALTECH=y
-CONFIG_MOUSE_PS2_VMMOUSE=y
-CONFIG_MOUSE_SERIAL=m
-CONFIG_MOUSE_APPLETOUCH=m
-CONFIG_MOUSE_BCM5974=m
-CONFIG_MOUSE_CYAPA=m
-CONFIG_MOUSE_ELAN_I2C=m
-CONFIG_MOUSE_ELAN_I2C_I2C=y
-CONFIG_MOUSE_ELAN_I2C_SMBUS=y
-CONFIG_MOUSE_VSXXXAA=m
-# CONFIG_MOUSE_GPIO is not set
-CONFIG_MOUSE_SYNAPTICS_I2C=m
-CONFIG_MOUSE_SYNAPTICS_USB=m
-CONFIG_INPUT_JOYSTICK=y
-CONFIG_JOYSTICK_ANALOG=m
-CONFIG_JOYSTICK_A3D=m
-CONFIG_JOYSTICK_ADI=m
-CONFIG_JOYSTICK_COBRA=m
-CONFIG_JOYSTICK_GF2K=m
-CONFIG_JOYSTICK_GRIP=m
-CONFIG_JOYSTICK_GRIP_MP=m
-CONFIG_JOYSTICK_GUILLEMOT=m
-CONFIG_JOYSTICK_INTERACT=m
-CONFIG_JOYSTICK_SIDEWINDER=m
-CONFIG_JOYSTICK_TMDC=m
-CONFIG_JOYSTICK_IFORCE=m
-CONFIG_JOYSTICK_IFORCE_USB=y
-CONFIG_JOYSTICK_IFORCE_232=y
-CONFIG_JOYSTICK_WARRIOR=m
-CONFIG_JOYSTICK_MAGELLAN=m
-CONFIG_JOYSTICK_SPACEORB=m
-CONFIG_JOYSTICK_SPACEBALL=m
-CONFIG_JOYSTICK_STINGER=m
-CONFIG_JOYSTICK_TWIDJOY=m
-CONFIG_JOYSTICK_ZHENHUA=m
-CONFIG_JOYSTICK_DB9=m
-CONFIG_JOYSTICK_GAMECON=m
-CONFIG_JOYSTICK_TURBOGRAFX=m
-# CONFIG_JOYSTICK_AS5011 is not set
-CONFIG_JOYSTICK_JOYDUMP=m
-CONFIG_JOYSTICK_XPAD=m
-CONFIG_JOYSTICK_XPAD_FF=y
-CONFIG_JOYSTICK_XPAD_LEDS=y
-CONFIG_JOYSTICK_WALKERA0701=m
-CONFIG_INPUT_TABLET=y
-CONFIG_TABLET_USB_ACECAD=m
-CONFIG_TABLET_USB_AIPTEK=m
-CONFIG_TABLET_USB_GTCO=m
-CONFIG_TABLET_USB_HANWANG=m
-CONFIG_TABLET_USB_KBTAB=m
-CONFIG_TABLET_SERIAL_WACOM4=m
-CONFIG_INPUT_TOUCHSCREEN=y
-CONFIG_TOUCHSCREEN_PROPERTIES=y
-# CONFIG_TOUCHSCREEN_ADS7846 is not set
-# CONFIG_TOUCHSCREEN_AD7877 is not set
-# CONFIG_TOUCHSCREEN_AD7879 is not set
-CONFIG_TOUCHSCREEN_ATMEL_MXT=m
-CONFIG_TOUCHSCREEN_AUO_PIXCIR=m
-# CONFIG_TOUCHSCREEN_BU21013 is not set
-# CONFIG_TOUCHSCREEN_CY8CTMG110 is not set
-# CONFIG_TOUCHSCREEN_CYTTSP_CORE is not set
-# CONFIG_TOUCHSCREEN_CYTTSP4_CORE is not set
-CONFIG_TOUCHSCREEN_DYNAPRO=m
-# CONFIG_TOUCHSCREEN_HAMPSHIRE is not set
-CONFIG_TOUCHSCREEN_EETI=m
-# CONFIG_TOUCHSCREEN_FT6236 is not set
-CONFIG_TOUCHSCREEN_FUJITSU=m
-CONFIG_TOUCHSCREEN_GOODIX=m
-CONFIG_TOUCHSCREEN_ILI210X=m
-# CONFIG_TOUCHSCREEN_IPROC is not set
-CONFIG_TOUCHSCREEN_GUNZE=m
-CONFIG_TOUCHSCREEN_ELAN=m
-CONFIG_TOUCHSCREEN_ELO=m
-CONFIG_TOUCHSCREEN_WACOM_W8001=m
-CONFIG_TOUCHSCREEN_WACOM_I2C=m
-# CONFIG_TOUCHSCREEN_MAX11801 is not set
-CONFIG_TOUCHSCREEN_MCS5000=m
-CONFIG_TOUCHSCREEN_MMS114=m
-CONFIG_TOUCHSCREEN_MTOUCH=m
-# CONFIG_TOUCHSCREEN_IMX6UL_TSC is not set
-CONFIG_TOUCHSCREEN_INEXIO=m
-CONFIG_TOUCHSCREEN_MK712=m
-CONFIG_TOUCHSCREEN_PENMOUNT=m
-CONFIG_TOUCHSCREEN_EDT_FT5X06=m
-CONFIG_TOUCHSCREEN_TOUCHRIGHT=m
-CONFIG_TOUCHSCREEN_TOUCHWIN=m
-CONFIG_TOUCHSCREEN_PIXCIR=m
-# CONFIG_TOUCHSCREEN_WDT87XX_I2C is not set
-# CONFIG_TOUCHSCREEN_WM97XX is not set
-CONFIG_TOUCHSCREEN_USB_COMPOSITE=m
-CONFIG_TOUCHSCREEN_USB_EGALAX=y
-CONFIG_TOUCHSCREEN_USB_PANJIT=y
-CONFIG_TOUCHSCREEN_USB_3M=y
-CONFIG_TOUCHSCREEN_USB_ITM=y
-CONFIG_TOUCHSCREEN_USB_ETURBO=y
-CONFIG_TOUCHSCREEN_USB_GUNZE=y
-CONFIG_TOUCHSCREEN_USB_DMC_TSC10=y
-CONFIG_TOUCHSCREEN_USB_IRTOUCH=y
-CONFIG_TOUCHSCREEN_USB_IDEALTEK=y
-CONFIG_TOUCHSCREEN_USB_GENERAL_TOUCH=y
-CONFIG_TOUCHSCREEN_USB_GOTOP=y
-CONFIG_TOUCHSCREEN_USB_JASTEC=y
-CONFIG_TOUCHSCREEN_USB_ELO=y
-CONFIG_TOUCHSCREEN_USB_E2I=y
-CONFIG_TOUCHSCREEN_USB_ZYTRONIC=y
-CONFIG_TOUCHSCREEN_USB_ETT_TC45USB=y
-CONFIG_TOUCHSCREEN_USB_NEXIO=y
-CONFIG_TOUCHSCREEN_USB_EASYTOUCH=y
-CONFIG_TOUCHSCREEN_TOUCHIT213=m
-CONFIG_TOUCHSCREEN_TSC_SERIO=m
-# CONFIG_TOUCHSCREEN_TSC2004 is not set
-# CONFIG_TOUCHSCREEN_TSC2005 is not set
-CONFIG_TOUCHSCREEN_TSC2007=m
-CONFIG_TOUCHSCREEN_ST1232=m
-# CONFIG_TOUCHSCREEN_SUN4I is not set
-# CONFIG_TOUCHSCREEN_SUR40 is not set
-# CONFIG_TOUCHSCREEN_SX8654 is not set
-# CONFIG_TOUCHSCREEN_TPS6507X is not set
-CONFIG_TOUCHSCREEN_ZFORCE=m
-# CONFIG_TOUCHSCREEN_ROHM_BU21023 is not set
-CONFIG_INPUT_MISC=y
-# CONFIG_INPUT_AD714X is not set
-# CONFIG_INPUT_BMA150 is not set
-CONFIG_INPUT_E3X0_BUTTON=m
-CONFIG_INPUT_PCSPKR=m
-CONFIG_INPUT_MMA8450=m
-CONFIG_INPUT_MPU3050=m
-CONFIG_INPUT_APANEL=m
-CONFIG_INPUT_GP2A=m
-# CONFIG_INPUT_GPIO_BEEPER is not set
-# CONFIG_INPUT_GPIO_TILT_POLLED is not set
-CONFIG_INPUT_ATLAS_BTNS=m
-CONFIG_INPUT_ATI_REMOTE2=m
-CONFIG_INPUT_KEYSPAN_REMOTE=m
-CONFIG_INPUT_KXTJ9=m
-# CONFIG_INPUT_KXTJ9_POLLED_MODE is not set
-CONFIG_INPUT_POWERMATE=m
-CONFIG_INPUT_YEALINK=m
-CONFIG_INPUT_CM109=m
-CONFIG_INPUT_UINPUT=m
-# CONFIG_INPUT_PCF8574 is not set
-# CONFIG_INPUT_PWM_BEEPER is not set
-CONFIG_INPUT_GPIO_ROTARY_ENCODER=m
-# CONFIG_INPUT_ADXL34X is not set
-# CONFIG_INPUT_IMS_PCU is not set
-CONFIG_INPUT_CMA3000=m
-CONFIG_INPUT_CMA3000_I2C=m
-CONFIG_INPUT_XEN_KBDDEV_FRONTEND=m
-CONFIG_INPUT_IDEAPAD_SLIDEBAR=m
-CONFIG_INPUT_SOC_BUTTON_ARRAY=m
-# CONFIG_INPUT_DRV260X_HAPTICS is not set
-# CONFIG_INPUT_DRV2665_HAPTICS is not set
-# CONFIG_INPUT_DRV2667_HAPTICS is not set
-
-#
-# Hardware I/O ports
-#
-CONFIG_SERIO=y
-CONFIG_ARCH_MIGHT_HAVE_PC_SERIO=y
-CONFIG_SERIO_I8042=y
-CONFIG_SERIO_SERPORT=y
-# CONFIG_SERIO_CT82C710 is not set
-# CONFIG_SERIO_PARKBD is not set
-# CONFIG_SERIO_PCIPS2 is not set
-CONFIG_SERIO_LIBPS2=y
-CONFIG_SERIO_RAW=m
-CONFIG_SERIO_ALTERA_PS2=m
-# CONFIG_SERIO_PS2MULT is not set
-CONFIG_SERIO_ARC_PS2=m
-# CONFIG_SERIO_OLPC_APSP is not set
-CONFIG_HYPERV_KEYBOARD=m
-# CONFIG_SERIO_SUN4I_PS2 is not set
-# CONFIG_USERIO is not set
-CONFIG_GAMEPORT=m
-CONFIG_GAMEPORT_NS558=m
-CONFIG_GAMEPORT_L4=m
-CONFIG_GAMEPORT_EMU10K1=m
-CONFIG_GAMEPORT_FM801=m
-
-#
-# Character devices
-#
-CONFIG_TTY=y
-CONFIG_VT=y
-CONFIG_CONSOLE_TRANSLATIONS=y
-CONFIG_VT_CONSOLE=y
-CONFIG_VT_CONSOLE_SLEEP=y
-CONFIG_HW_CONSOLE=y
-CONFIG_VT_HW_CONSOLE_BINDING=y
-CONFIG_UNIX98_PTYS=y
-# CONFIG_DEVPTS_MULTIPLE_INSTANCES is not set
-# CONFIG_LEGACY_PTYS is not set
-CONFIG_SERIAL_NONSTANDARD=y
-CONFIG_ROCKETPORT=m
-CONFIG_CYCLADES=m
-# CONFIG_CYZ_INTR is not set
-# CONFIG_MOXA_INTELLIO is not set
-# CONFIG_MOXA_SMARTIO is not set
-CONFIG_SYNCLINK=m
-CONFIG_SYNCLINKMP=m
-CONFIG_SYNCLINK_GT=m
-CONFIG_NOZOMI=m
-# CONFIG_ISI is not set
-CONFIG_N_HDLC=m
-CONFIG_N_GSM=m
-# CONFIG_TRACE_SINK is not set
-CONFIG_DEVMEM=y
-# CONFIG_DEVKMEM is not set
-
-#
-# Serial drivers
-#
-CONFIG_SERIAL_EARLYCON=y
-CONFIG_SERIAL_8250=y
-# CONFIG_SERIAL_8250_DEPRECATED_OPTIONS is not set
-CONFIG_SERIAL_8250_PNP=y
-CONFIG_SERIAL_8250_CONSOLE=y
-CONFIG_SERIAL_8250_DMA=y
-CONFIG_SERIAL_8250_PCI=y
-CONFIG_SERIAL_8250_CS=m
-CONFIG_SERIAL_8250_NR_UARTS=32
-CONFIG_SERIAL_8250_RUNTIME_UARTS=32
-CONFIG_SERIAL_8250_EXTENDED=y
-CONFIG_SERIAL_8250_MANY_PORTS=y
-CONFIG_SERIAL_8250_SHARE_IRQ=y
-# CONFIG_SERIAL_8250_DETECT_IRQ is not set
-CONFIG_SERIAL_8250_RSA=y
-# CONFIG_SERIAL_8250_FSL is not set
-CONFIG_SERIAL_8250_DW=y
-CONFIG_SERIAL_8250_RT288X=y
-# CONFIG_SERIAL_8250_FINTEK is not set
-CONFIG_SERIAL_8250_MID=y
-
-#
-# Non-8250 serial port support
-#
-# CONFIG_SERIAL_ATMEL is not set
-# CONFIG_SERIAL_KGDB_NMI is not set
-# CONFIG_SERIAL_CLPS711X is not set
-# CONFIG_SERIAL_MAX3100 is not set
-# CONFIG_SERIAL_MAX310X is not set
-# CONFIG_SERIAL_IMX is not set
-# CONFIG_SERIAL_UARTLITE is not set
-# CONFIG_SERIAL_SH_SCI is not set
-CONFIG_SERIAL_CORE=y
-CONFIG_SERIAL_CORE_CONSOLE=y
-CONFIG_CONSOLE_POLL=y
-CONFIG_SERIAL_JSM=m
-# CONFIG_SERIAL_SCCNXP is not set
-# CONFIG_SERIAL_SC16IS7XX is not set
-# CONFIG_SERIAL_TIMBERDALE is not set
-# CONFIG_SERIAL_BCM63XX is not set
-# CONFIG_SERIAL_ALTERA_JTAGUART is not set
-# CONFIG_SERIAL_ALTERA_UART is not set
-# CONFIG_SERIAL_IFX6X60 is not set
-# CONFIG_SERIAL_PCH_UART is not set
-# CONFIG_SERIAL_MXS_AUART is not set
-CONFIG_SERIAL_ARC=m
-CONFIG_SERIAL_ARC_NR_PORTS=1
-# CONFIG_SERIAL_RP2 is not set
-# CONFIG_SERIAL_FSL_LPUART is not set
-# CONFIG_SERIAL_ST_ASC is not set
-# CONFIG_SERIAL_STM32 is not set
-CONFIG_PRINTER=m
-CONFIG_LP_CONSOLE=y
-CONFIG_PPDEV=m
-CONFIG_HVC_DRIVER=y
-CONFIG_HVC_IRQ=y
-CONFIG_HVC_XEN=y
-CONFIG_HVC_XEN_FRONTEND=y
-CONFIG_VIRTIO_CONSOLE=m
-CONFIG_IPMI_HANDLER=m
-# CONFIG_IPMI_PANIC_EVENT is not set
-CONFIG_IPMI_DEVICE_INTERFACE=m
-CONFIG_IPMI_SI=m
-# CONFIG_IPMI_SI_PROBE_DEFAULTS is not set
-CONFIG_IPMI_SSIF=m
-CONFIG_IPMI_WATCHDOG=m
-CONFIG_IPMI_POWEROFF=m
-CONFIG_HW_RANDOM=y
-CONFIG_HW_RANDOM_TIMERIOMEM=m
-CONFIG_HW_RANDOM_INTEL=m
-CONFIG_HW_RANDOM_AMD=m
-CONFIG_HW_RANDOM_VIA=m
-CONFIG_HW_RANDOM_VIRTIO=m
-CONFIG_HW_RANDOM_TPM=m
-# CONFIG_HW_RANDOM_STM32 is not set
-CONFIG_NVRAM=y
-CONFIG_R3964=m
-# CONFIG_APPLICOM is not set
-
-#
-# PCMCIA character devices
-#
-# CONFIG_SYNCLINK_CS is not set
-CONFIG_CARDMAN_4000=m
-CONFIG_CARDMAN_4040=m
-# CONFIG_IPWIRELESS is not set
-CONFIG_MWAVE=m
-CONFIG_RAW_DRIVER=y
-CONFIG_MAX_RAW_DEVS=8192
-CONFIG_HPET=y
-# CONFIG_HPET_MMAP is not set
-CONFIG_HANGCHECK_TIMER=m
-CONFIG_UV_MMTIMER=m
-CONFIG_TCG_TPM=m
-CONFIG_TCG_TIS=m
-# CONFIG_TCG_TIS_I2C_ATMEL is not set
-# CONFIG_TCG_TIS_I2C_INFINEON is not set
-# CONFIG_TCG_TIS_I2C_NUVOTON is not set
-CONFIG_TCG_NSC=m
-CONFIG_TCG_ATMEL=m
-CONFIG_TCG_INFINEON=m
-# CONFIG_TCG_XEN is not set
-CONFIG_TCG_CRB=m
-# CONFIG_TCG_TIS_ST33ZP24 is not set
-CONFIG_TELCLOCK=m
-CONFIG_DEVPORT=y
-CONFIG_XILLYBUS=m
-CONFIG_XILLYBUS_PCIE=m
-
-#
-# I2C support
-#
-CONFIG_I2C=y
-CONFIG_ACPI_I2C_OPREGION=y
-CONFIG_I2C_BOARDINFO=y
-CONFIG_I2C_COMPAT=y
-CONFIG_I2C_CHARDEV=m
-CONFIG_I2C_MUX=m
-
-#
-# Multiplexer I2C Chip support
-#
-# CONFIG_I2C_MUX_GPIO is not set
-# CONFIG_I2C_MUX_PCA9541 is not set
-# CONFIG_I2C_MUX_PCA954x is not set
-# CONFIG_I2C_MUX_PINCTRL is not set
-# CONFIG_I2C_MUX_REG is not set
-CONFIG_I2C_HELPER_AUTO=y
-CONFIG_I2C_SMBUS=m
-CONFIG_I2C_ALGOBIT=m
-CONFIG_I2C_ALGOPCA=m
-
-#
-# I2C Hardware Bus support
-#
-
-#
-# PC SMBus host controller drivers
-#
-# CONFIG_I2C_ALI1535 is not set
-# CONFIG_I2C_ALI1563 is not set
-# CONFIG_I2C_ALI15X3 is not set
-CONFIG_I2C_AMD756=m
-CONFIG_I2C_AMD756_S4882=m
-CONFIG_I2C_AMD8111=m
-# CONFIG_I2C_HIX5HD2 is not set
-CONFIG_I2C_I801=m
-CONFIG_I2C_ISCH=m
-CONFIG_I2C_ISMT=m
-CONFIG_I2C_PIIX4=m
-CONFIG_I2C_NFORCE2=m
-CONFIG_I2C_NFORCE2_S4985=m
-# CONFIG_I2C_SIS5595 is not set
-# CONFIG_I2C_SIS630 is not set
-CONFIG_I2C_SIS96X=m
-CONFIG_I2C_VIA=m
-CONFIG_I2C_VIAPRO=m
-
-#
-# ACPI drivers
-#
-CONFIG_I2C_SCMI=m
-
-#
-# I2C system bus drivers (mostly embedded / system-on-chip)
-#
-# CONFIG_I2C_AXXIA is not set
-# CONFIG_I2C_BCM_IPROC is not set
-CONFIG_I2C_BRCMSTB=y
-# CONFIG_I2C_CBUS_GPIO is not set
-CONFIG_I2C_DESIGNWARE_CORE=m
-CONFIG_I2C_DESIGNWARE_PLATFORM=m
-CONFIG_I2C_DESIGNWARE_PCI=m
-CONFIG_I2C_DESIGNWARE_BAYTRAIL=y
-# CONFIG_I2C_EFM32 is not set
-# CONFIG_I2C_EG20T is not set
-# CONFIG_I2C_EMEV2 is not set
-# CONFIG_I2C_GPIO is not set
-# CONFIG_I2C_IMG is not set
-# CONFIG_I2C_JZ4780 is not set
-# CONFIG_I2C_MT65XX is not set
-# CONFIG_I2C_OCORES is not set
-CONFIG_I2C_PCA_PLATFORM=m
-# CONFIG_I2C_PXA_PCI is not set
-# CONFIG_I2C_RIIC is not set
-# CONFIG_I2C_SH_MOBILE is not set
-CONFIG_I2C_SIMTEC=m
-# CONFIG_I2C_SUN6I_P2WI is not set
-# CONFIG_I2C_XILINX is not set
-# CONFIG_I2C_XLP9XX is not set
-# CONFIG_I2C_RCAR is not set
-
-#
-# External I2C/SMBus adapter drivers
-#
-CONFIG_I2C_DIOLAN_U2C=m
-CONFIG_I2C_PARPORT=m
-CONFIG_I2C_PARPORT_LIGHT=m
-# CONFIG_I2C_ROBOTFUZZ_OSIF is not set
-# CONFIG_I2C_TAOS_EVM is not set
-CONFIG_I2C_TINY_USB=m
-CONFIG_I2C_VIPERBOARD=m
-
-#
-# Other I2C/SMBus bus drivers
-#
-CONFIG_I2C_STUB=m
-CONFIG_I2C_SLAVE=y
-CONFIG_I2C_SLAVE_EEPROM=m
-# CONFIG_I2C_DEBUG_CORE is not set
-# CONFIG_I2C_DEBUG_ALGO is not set
-# CONFIG_I2C_DEBUG_BUS is not set
-CONFIG_SPI=y
-# CONFIG_SPI_DEBUG is not set
-CONFIG_SPI_MASTER=y
-
-#
-# SPI Master Controller Drivers
-#
-# CONFIG_SPI_ALTERA is not set
-# CONFIG_SPI_ATMEL is not set
-# CONFIG_SPI_BCM2835 is not set
-# CONFIG_SPI_BCM2835AUX is not set
-# CONFIG_SPI_BCM63XX is not set
-# CONFIG_SPI_BCM63XX_HSSPI is not set
-# CONFIG_SPI_BITBANG is not set
-# CONFIG_SPI_BUTTERFLY is not set
-# CONFIG_SPI_CADENCE is not set
-# CONFIG_SPI_CLPS711X is not set
-# CONFIG_SPI_EP93XX is not set
-# CONFIG_SPI_GPIO is not set
-# CONFIG_SPI_IMG_SPFI is not set
-# CONFIG_SPI_IMX is not set
-# CONFIG_SPI_LM70_LLP is not set
-# CONFIG_SPI_FSL_DSPI is not set
-# CONFIG_SPI_MESON_SPIFC is not set
-# CONFIG_SPI_MT65XX is not set
-# CONFIG_SPI_OC_TINY is not set
-# CONFIG_SPI_TI_QSPI is not set
-# CONFIG_SPI_OMAP_100K is not set
-# CONFIG_SPI_ORION is not set
-CONFIG_SPI_PXA2XX_DMA=y
-CONFIG_SPI_PXA2XX=m
-CONFIG_SPI_PXA2XX_PCI=m
-# CONFIG_SPI_RSPI is not set
-# CONFIG_SPI_SC18IS602 is not set
-# CONFIG_SPI_SH_MSIOF is not set
-# CONFIG_SPI_SH is not set
-# CONFIG_SPI_SH_HSPI is not set
-# CONFIG_SPI_SUN4I is not set
-# CONFIG_SPI_SUN6I is not set
-# CONFIG_SPI_TEGRA114 is not set
-# CONFIG_SPI_TEGRA20_SFLASH is not set
-# CONFIG_SPI_TEGRA20_SLINK is not set
-# CONFIG_SPI_TOPCLIFF_PCH is not set
-# CONFIG_SPI_TXX9 is not set
-# CONFIG_SPI_XCOMM is not set
-# CONFIG_SPI_XILINX is not set
-# CONFIG_SPI_XLP is not set
-# CONFIG_SPI_XTENSA_XTFPGA is not set
-# CONFIG_SPI_ZYNQMP_GQSPI is not set
-# CONFIG_SPI_DESIGNWARE is not set
-
-#
-# SPI Protocol Masters
-#
-# CONFIG_SPI_SPIDEV is not set
-# CONFIG_SPI_TLE62X0 is not set
-# CONFIG_SPMI is not set
-# CONFIG_HSI is not set
-
-#
-# PPS support
-#
-CONFIG_PPS=m
-# CONFIG_PPS_DEBUG is not set
-
-#
-# PPS clients support
-#
-# CONFIG_PPS_CLIENT_KTIMER is not set
-CONFIG_PPS_CLIENT_LDISC=m
-CONFIG_PPS_CLIENT_PARPORT=m
-CONFIG_PPS_CLIENT_GPIO=m
-
-#
-# PPS generators support
-#
-
-#
-# PTP clock support
-#
-CONFIG_PTP_1588_CLOCK=m
-CONFIG_DP83640_PHY=m
-# CONFIG_PTP_1588_CLOCK_PCH is not set
-CONFIG_PINCTRL=y
-
-#
-# Pin controllers
-#
-CONFIG_PINMUX=y
-CONFIG_PINCONF=y
-CONFIG_GENERIC_PINCONF=y
-# CONFIG_DEBUG_PINCTRL is not set
-# CONFIG_PINCTRL_AMD is not set
-# CONFIG_PINCTRL_CYGNUS_MUX is not set
-CONFIG_PINCTRL_BAYTRAIL=y
-CONFIG_PINCTRL_CHERRYVIEW=y
-CONFIG_PINCTRL_INTEL=m
-CONFIG_PINCTRL_BROXTON=m
-CONFIG_PINCTRL_SUNRISEPOINT=m
-# CONFIG_PINCTRL_QDF2XXX is not set
-CONFIG_ARCH_WANT_OPTIONAL_GPIOLIB=y
-CONFIG_GPIOLIB=y
-CONFIG_GPIO_DEVRES=y
-CONFIG_GPIO_ACPI=y
-CONFIG_GPIOLIB_IRQCHIP=y
-# CONFIG_DEBUG_GPIO is not set
-CONFIG_GPIO_SYSFS=y
-
-#
-# Memory mapped GPIO drivers
-#
-# CONFIG_GPIO_AMDPT is not set
-# CONFIG_GPIO_CLPS711X is not set
-# CONFIG_GPIO_DWAPB is not set
-# CONFIG_GPIO_GENERIC_PLATFORM is not set
-CONFIG_GPIO_ICH=m
-# CONFIG_GPIO_LYNXPOINT is not set
-# CONFIG_GPIO_RCAR is not set
-# CONFIG_GPIO_VX855 is not set
-# CONFIG_GPIO_ZX is not set
-
-#
-# Port-mapped I/O GPIO drivers
-#
-# CONFIG_GPIO_104_IDIO_16 is not set
-# CONFIG_GPIO_F7188X is not set
-# CONFIG_GPIO_IT87 is not set
-# CONFIG_GPIO_SCH is not set
-# CONFIG_GPIO_SCH311X is not set
-# CONFIG_GPIO_TS5500 is not set
-
-#
-# I2C GPIO expanders
-#
-# CONFIG_GPIO_ADP5588 is not set
-# CONFIG_GPIO_MAX7300 is not set
-# CONFIG_GPIO_MAX732X is not set
-# CONFIG_GPIO_PCA953X is not set
-# CONFIG_GPIO_PCF857X is not set
-# CONFIG_GPIO_SX150X is not set
-
-#
-# MFD GPIO expanders
-#
-CONFIG_GPIO_CRYSTAL_COVE=y
-
-#
-# PCI GPIO expanders
-#
-# CONFIG_GPIO_AMD8111 is not set
-# CONFIG_GPIO_INTEL_MID is not set
-# CONFIG_GPIO_ML_IOH is not set
-# CONFIG_GPIO_PCH is not set
-# CONFIG_GPIO_RDC321X is not set
-
-#
-# SPI GPIO expanders
-#
-# CONFIG_GPIO_MAX7301 is not set
-# CONFIG_GPIO_MC33880 is not set
-
-#
-# SPI or I2C GPIO expanders
-#
-# CONFIG_GPIO_MCP23S08 is not set
-
-#
-# USB GPIO expanders
-#
-CONFIG_GPIO_VIPERBOARD=m
-CONFIG_W1=m
-CONFIG_W1_CON=y
-
-#
-# 1-wire Bus Masters
-#
-# CONFIG_W1_MASTER_MATROX is not set
-CONFIG_W1_MASTER_DS2490=m
-CONFIG_W1_MASTER_DS2482=m
-# CONFIG_W1_MASTER_MXC is not set
-CONFIG_W1_MASTER_DS1WM=m
-# CONFIG_W1_MASTER_GPIO is not set
-
-#
-# 1-wire Slaves
-#
-CONFIG_W1_SLAVE_THERM=m
-CONFIG_W1_SLAVE_SMEM=m
-CONFIG_W1_SLAVE_DS2408=m
-# CONFIG_W1_SLAVE_DS2408_READBACK is not set
-CONFIG_W1_SLAVE_DS2413=m
-CONFIG_W1_SLAVE_DS2406=m
-CONFIG_W1_SLAVE_DS2423=m
-CONFIG_W1_SLAVE_DS2431=m
-CONFIG_W1_SLAVE_DS2433=m
-CONFIG_W1_SLAVE_DS2433_CRC=y
-CONFIG_W1_SLAVE_DS2760=m
-CONFIG_W1_SLAVE_DS2780=m
-CONFIG_W1_SLAVE_DS2781=m
-CONFIG_W1_SLAVE_DS28E04=m
-CONFIG_W1_SLAVE_BQ27000=m
-CONFIG_POWER_SUPPLY=y
-# CONFIG_POWER_SUPPLY_DEBUG is not set
-# CONFIG_PDA_POWER is not set
-# CONFIG_GENERIC_ADC_BATTERY is not set
-# CONFIG_TEST_POWER is not set
-# CONFIG_BATTERY_DS2760 is not set
-# CONFIG_BATTERY_DS2780 is not set
-# CONFIG_BATTERY_DS2781 is not set
-# CONFIG_BATTERY_DS2782 is not set
-# CONFIG_BATTERY_SBS is not set
-# CONFIG_BATTERY_BQ27XXX is not set
-# CONFIG_BATTERY_MAX17040 is not set
-# CONFIG_BATTERY_MAX17042 is not set
-# CONFIG_CHARGER_ISP1704 is not set
-# CONFIG_CHARGER_MAX8903 is not set
-# CONFIG_CHARGER_LP8727 is not set
-# CONFIG_CHARGER_GPIO is not set
-# CONFIG_CHARGER_BQ2415X is not set
-# CONFIG_CHARGER_BQ24190 is not set
-# CONFIG_CHARGER_BQ24257 is not set
-# CONFIG_CHARGER_BQ24735 is not set
-# CONFIG_CHARGER_BQ25890 is not set
-CONFIG_CHARGER_SMB347=m
-# CONFIG_BATTERY_GAUGE_LTC2941 is not set
-# CONFIG_BATTERY_GOLDFISH is not set
-# CONFIG_CHARGER_RT9455 is not set
-CONFIG_POWER_RESET=y
-# CONFIG_POWER_RESET_RESTART is not set
-# CONFIG_POWER_RESET_RMOBILE is not set
-# CONFIG_POWER_RESET_ZX is not set
-# CONFIG_POWER_AVS is not set
-CONFIG_HWMON=y
-CONFIG_HWMON_VID=m
-# CONFIG_HWMON_DEBUG_CHIP is not set
-
-#
-# Native drivers
-#
-CONFIG_SENSORS_ABITUGURU=m
-CONFIG_SENSORS_ABITUGURU3=m
-CONFIG_SENSORS_AD7314=m
-CONFIG_SENSORS_AD7414=m
-CONFIG_SENSORS_AD7418=m
-CONFIG_SENSORS_ADM1021=m
-CONFIG_SENSORS_ADM1025=m
-CONFIG_SENSORS_ADM1026=m
-CONFIG_SENSORS_ADM1029=m
-CONFIG_SENSORS_ADM1031=m
-CONFIG_SENSORS_ADM9240=m
-CONFIG_SENSORS_ADT7X10=m
-CONFIG_SENSORS_ADT7310=m
-CONFIG_SENSORS_ADT7410=m
-CONFIG_SENSORS_ADT7411=m
-CONFIG_SENSORS_ADT7462=m
-CONFIG_SENSORS_ADT7470=m
-CONFIG_SENSORS_ADT7475=m
-CONFIG_SENSORS_ASC7621=m
-CONFIG_SENSORS_K8TEMP=m
-CONFIG_SENSORS_K10TEMP=m
-CONFIG_SENSORS_FAM15H_POWER=m
-CONFIG_SENSORS_APPLESMC=m
-CONFIG_SENSORS_ASB100=m
-CONFIG_SENSORS_ATXP1=m
-CONFIG_SENSORS_DS620=m
-CONFIG_SENSORS_DS1621=m
-CONFIG_SENSORS_DELL_SMM=m
-CONFIG_SENSORS_I5K_AMB=m
-CONFIG_SENSORS_F71805F=m
-CONFIG_SENSORS_F71882FG=m
-CONFIG_SENSORS_F75375S=m
-CONFIG_SENSORS_FSCHMD=m
-CONFIG_SENSORS_GL518SM=m
-CONFIG_SENSORS_GL520SM=m
-CONFIG_SENSORS_G760A=m
-CONFIG_SENSORS_G762=m
-# CONFIG_SENSORS_GPIO_FAN is not set
-# CONFIG_SENSORS_HIH6130 is not set
-CONFIG_SENSORS_IBMAEM=m
-CONFIG_SENSORS_IBMPEX=m
-# CONFIG_SENSORS_IIO_HWMON is not set
-CONFIG_SENSORS_I5500=m
-CONFIG_SENSORS_CORETEMP=m
-CONFIG_SENSORS_IT87=m
-# CONFIG_SENSORS_JC42 is not set
-CONFIG_SENSORS_POWR1220=m
-CONFIG_SENSORS_LINEAGE=m
-CONFIG_SENSORS_LTC2945=m
-CONFIG_SENSORS_LTC4151=m
-CONFIG_SENSORS_LTC4215=m
-CONFIG_SENSORS_LTC4222=m
-CONFIG_SENSORS_LTC4245=m
-CONFIG_SENSORS_LTC4260=m
-CONFIG_SENSORS_LTC4261=m
-CONFIG_SENSORS_MAX1111=m
-CONFIG_SENSORS_MAX16065=m
-CONFIG_SENSORS_MAX1619=m
-CONFIG_SENSORS_MAX1668=m
-CONFIG_SENSORS_MAX197=m
-CONFIG_SENSORS_MAX6639=m
-CONFIG_SENSORS_MAX6642=m
-CONFIG_SENSORS_MAX6650=m
-CONFIG_SENSORS_MAX6697=m
-CONFIG_SENSORS_MAX31790=m
-# CONFIG_SENSORS_HTU21 is not set
-CONFIG_SENSORS_MCP3021=m
-CONFIG_SENSORS_ADCXX=m
-CONFIG_SENSORS_LM63=m
-CONFIG_SENSORS_LM70=m
-CONFIG_SENSORS_LM73=m
-CONFIG_SENSORS_LM75=m
-CONFIG_SENSORS_LM77=m
-CONFIG_SENSORS_LM78=m
-CONFIG_SENSORS_LM80=m
-CONFIG_SENSORS_LM83=m
-CONFIG_SENSORS_LM85=m
-CONFIG_SENSORS_LM87=m
-CONFIG_SENSORS_LM90=m
-CONFIG_SENSORS_LM92=m
-CONFIG_SENSORS_LM93=m
-CONFIG_SENSORS_LM95234=m
-CONFIG_SENSORS_LM95241=m
-CONFIG_SENSORS_LM95245=m
-CONFIG_SENSORS_PC87360=m
-CONFIG_SENSORS_PC87427=m
-CONFIG_SENSORS_NTC_THERMISTOR=m
-CONFIG_SENSORS_NCT6683=m
-CONFIG_SENSORS_NCT6775=m
-CONFIG_SENSORS_NCT7802=m
-CONFIG_SENSORS_NCT7904=m
-CONFIG_SENSORS_PCF8591=m
-CONFIG_PMBUS=m
-CONFIG_SENSORS_PMBUS=m
-CONFIG_SENSORS_ADM1275=m
-CONFIG_SENSORS_LM25066=m
-CONFIG_SENSORS_LTC2978=m
-CONFIG_SENSORS_MAX16064=m
-CONFIG_SENSORS_MAX20751=m
-CONFIG_SENSORS_MAX34440=m
-CONFIG_SENSORS_MAX8688=m
-CONFIG_SENSORS_TPS40422=m
-CONFIG_SENSORS_UCD9000=m
-CONFIG_SENSORS_UCD9200=m
-CONFIG_SENSORS_ZL6100=m
-# CONFIG_SENSORS_PWM_FAN is not set
-CONFIG_SENSORS_SHT15=m
-CONFIG_SENSORS_SHT21=m
-CONFIG_SENSORS_SHTC1=m
-CONFIG_SENSORS_SIS5595=m
-CONFIG_SENSORS_DME1737=m
-CONFIG_SENSORS_EMC1403=m
-# CONFIG_SENSORS_EMC2103 is not set
-CONFIG_SENSORS_EMC6W201=m
-CONFIG_SENSORS_SMSC47M1=m
-CONFIG_SENSORS_SMSC47M192=m
-CONFIG_SENSORS_SMSC47B397=m
-CONFIG_SENSORS_SCH56XX_COMMON=m
-CONFIG_SENSORS_SCH5627=m
-CONFIG_SENSORS_SCH5636=m
-# CONFIG_SENSORS_SMM665 is not set
-CONFIG_SENSORS_ADC128D818=m
-CONFIG_SENSORS_ADS1015=m
-CONFIG_SENSORS_ADS7828=m
-CONFIG_SENSORS_ADS7871=m
-CONFIG_SENSORS_AMC6821=m
-CONFIG_SENSORS_INA209=m
-CONFIG_SENSORS_INA2XX=m
-CONFIG_SENSORS_TC74=m
-CONFIG_SENSORS_THMC50=m
-CONFIG_SENSORS_TMP102=m
-CONFIG_SENSORS_TMP103=m
-CONFIG_SENSORS_TMP401=m
-CONFIG_SENSORS_TMP421=m
-CONFIG_SENSORS_VIA_CPUTEMP=m
-CONFIG_SENSORS_VIA686A=m
-CONFIG_SENSORS_VT1211=m
-CONFIG_SENSORS_VT8231=m
-CONFIG_SENSORS_W83781D=m
-CONFIG_SENSORS_W83791D=m
-CONFIG_SENSORS_W83792D=m
-CONFIG_SENSORS_W83793=m
-CONFIG_SENSORS_W83795=m
-# CONFIG_SENSORS_W83795_FANCTRL is not set
-CONFIG_SENSORS_W83L785TS=m
-CONFIG_SENSORS_W83L786NG=m
-CONFIG_SENSORS_W83627HF=m
-CONFIG_SENSORS_W83627EHF=m
-
-#
-# ACPI drivers
-#
-CONFIG_SENSORS_ACPI_POWER=m
-CONFIG_SENSORS_ATK0110=m
-CONFIG_THERMAL=y
-CONFIG_THERMAL_HWMON=y
-CONFIG_THERMAL_WRITABLE_TRIPS=y
-CONFIG_THERMAL_DEFAULT_GOV_STEP_WISE=y
-# CONFIG_THERMAL_DEFAULT_GOV_FAIR_SHARE is not set
-# CONFIG_THERMAL_DEFAULT_GOV_USER_SPACE is not set
-# CONFIG_THERMAL_DEFAULT_GOV_POWER_ALLOCATOR is not set
-CONFIG_THERMAL_GOV_FAIR_SHARE=y
-CONFIG_THERMAL_GOV_STEP_WISE=y
-CONFIG_THERMAL_GOV_BANG_BANG=y
-CONFIG_THERMAL_GOV_USER_SPACE=y
-# CONFIG_THERMAL_GOV_POWER_ALLOCATOR is not set
-# CONFIG_THERMAL_EMULATION is not set
-# CONFIG_HISI_THERMAL is not set
-# CONFIG_ROCKCHIP_THERMAL is not set
-# CONFIG_RCAR_THERMAL is not set
-CONFIG_INTEL_POWERCLAMP=m
-CONFIG_X86_PKG_TEMP_THERMAL=m
-CONFIG_INTEL_SOC_DTS_IOSF_CORE=m
-CONFIG_INTEL_SOC_DTS_THERMAL=m
-CONFIG_INT340X_THERMAL=m
-CONFIG_ACPI_THERMAL_REL=m
-CONFIG_INTEL_PCH_THERMAL=m
-
-#
-# Texas Instruments thermal drivers
-#
-# CONFIG_TI_SOC_THERMAL is not set
-
-#
-# Samsung thermal drivers
-#
-CONFIG_WATCHDOG=y
-CONFIG_WATCHDOG_CORE=y
-# CONFIG_WATCHDOG_NOWAYOUT is not set
-
-#
-# Watchdog Device Drivers
-#
-CONFIG_SOFT_WATCHDOG=m
-# CONFIG_XILINX_WATCHDOG is not set
-# CONFIG_CADENCE_WATCHDOG is not set
-# CONFIG_DW_WATCHDOG is not set
-# CONFIG_MAX63XX_WATCHDOG is not set
-# CONFIG_TEGRA_WATCHDOG is not set
-# CONFIG_LPC18XX_WATCHDOG is not set
-# CONFIG_ACQUIRE_WDT is not set
-# CONFIG_ADVANTECH_WDT is not set
-CONFIG_ALIM1535_WDT=m
-CONFIG_ALIM7101_WDT=m
-CONFIG_F71808E_WDT=m
-CONFIG_SP5100_TCO=m
-CONFIG_SBC_FITPC2_WATCHDOG=m
-# CONFIG_EUROTECH_WDT is not set
-CONFIG_IB700_WDT=m
-CONFIG_IBMASR=m
-# CONFIG_WAFER_WDT is not set
-CONFIG_I6300ESB_WDT=m
-CONFIG_IE6XX_WDT=m
-CONFIG_ITCO_WDT=m
-CONFIG_ITCO_VENDOR_SUPPORT=y
-CONFIG_IT8712F_WDT=m
-CONFIG_IT87_WDT=m
-CONFIG_HP_WATCHDOG=m
-CONFIG_HPWDT_NMI_DECODING=y
-# CONFIG_SC1200_WDT is not set
-# CONFIG_PC87413_WDT is not set
-CONFIG_NV_TCO=m
-# CONFIG_60XX_WDT is not set
-# CONFIG_CPU5_WDT is not set
-CONFIG_SMSC_SCH311X_WDT=m
-# CONFIG_SMSC37B787_WDT is not set
-CONFIG_VIA_WDT=m
-CONFIG_W83627HF_WDT=m
-CONFIG_W83877F_WDT=m
-CONFIG_W83977F_WDT=m
-CONFIG_MACHZ_WDT=m
-# CONFIG_SBC_EPX_C3_WATCHDOG is not set
-# CONFIG_BCM7038_WDT is not set
-# CONFIG_IMGPDC_WDT is not set
-# CONFIG_MEN_A21_WDT is not set
-CONFIG_XEN_WDT=m
-
-#
-# PCI-based Watchdog Cards
-#
-CONFIG_PCIPCWATCHDOG=m
-CONFIG_WDTPCI=m
-
-#
-# USB-based Watchdog Cards
-#
-CONFIG_USBPCWATCHDOG=m
-CONFIG_SSB_POSSIBLE=y
-
-#
-# Sonics Silicon Backplane
-#
-CONFIG_SSB=m
-CONFIG_SSB_SPROM=y
-CONFIG_SSB_BLOCKIO=y
-CONFIG_SSB_PCIHOST_POSSIBLE=y
-CONFIG_SSB_PCIHOST=y
-CONFIG_SSB_B43_PCI_BRIDGE=y
-CONFIG_SSB_PCMCIAHOST_POSSIBLE=y
-CONFIG_SSB_PCMCIAHOST=y
-CONFIG_SSB_SDIOHOST_POSSIBLE=y
-CONFIG_SSB_SDIOHOST=y
-# CONFIG_SSB_HOST_SOC is not set
-# CONFIG_SSB_DEBUG is not set
-CONFIG_SSB_DRIVER_PCICORE_POSSIBLE=y
-CONFIG_SSB_DRIVER_PCICORE=y
-CONFIG_SSB_DRIVER_GPIO=y
-CONFIG_BCMA_POSSIBLE=y
-
-#
-# Broadcom specific AMBA
-#
-CONFIG_BCMA=m
-CONFIG_BCMA_BLOCKIO=y
-CONFIG_BCMA_HOST_PCI_POSSIBLE=y
-CONFIG_BCMA_HOST_PCI=y
-# CONFIG_BCMA_HOST_SOC is not set
-CONFIG_BCMA_DRIVER_PCI=y
-CONFIG_BCMA_DRIVER_GMAC_CMN=y
-CONFIG_BCMA_DRIVER_GPIO=y
-# CONFIG_BCMA_DEBUG is not set
-
-#
-# Multifunction device drivers
-#
-CONFIG_MFD_CORE=y
-# CONFIG_MFD_CS5535 is not set
-# CONFIG_MFD_AS3711 is not set
-# CONFIG_PMIC_ADP5520 is not set
-# CONFIG_MFD_AAT2870_CORE is not set
-# CONFIG_MFD_BCM590XX is not set
-# CONFIG_MFD_AXP20X is not set
-# CONFIG_MFD_CROS_EC is not set
-# CONFIG_PMIC_DA903X is not set
-# CONFIG_MFD_DA9052_SPI is not set
-# CONFIG_MFD_DA9052_I2C is not set
-# CONFIG_MFD_DA9055 is not set
-# CONFIG_MFD_DA9062 is not set
-# CONFIG_MFD_DA9063 is not set
-# CONFIG_MFD_DA9150 is not set
-# CONFIG_MFD_DLN2 is not set
-# CONFIG_MFD_MC13XXX_SPI is not set
-# CONFIG_MFD_MC13XXX_I2C is not set
-# CONFIG_HTC_PASIC3 is not set
-# CONFIG_HTC_I2CPLD is not set
-# CONFIG_MFD_INTEL_QUARK_I2C_GPIO is not set
-CONFIG_LPC_ICH=m
-CONFIG_LPC_SCH=m
-CONFIG_INTEL_SOC_PMIC=y
-CONFIG_MFD_INTEL_LPSS=m
-CONFIG_MFD_INTEL_LPSS_ACPI=m
-CONFIG_MFD_INTEL_LPSS_PCI=m
-# CONFIG_MFD_JANZ_CMODIO is not set
-# CONFIG_MFD_KEMPLD is not set
-# CONFIG_MFD_88PM800 is not set
-# CONFIG_MFD_88PM805 is not set
-# CONFIG_MFD_88PM860X is not set
-# CONFIG_MFD_MAX14577 is not set
-# CONFIG_MFD_MAX77693 is not set
-# CONFIG_MFD_MAX77843 is not set
-# CONFIG_MFD_MAX8907 is not set
-# CONFIG_MFD_MAX8925 is not set
-# CONFIG_MFD_MAX8997 is not set
-# CONFIG_MFD_MAX8998 is not set
-# CONFIG_MFD_MT6397 is not set
-# CONFIG_MFD_MENF21BMC is not set
-# CONFIG_EZX_PCAP is not set
-CONFIG_MFD_VIPERBOARD=m
-# CONFIG_MFD_RETU is not set
-# CONFIG_MFD_PCF50633 is not set
-# CONFIG_UCB1400_CORE is not set
-# CONFIG_MFD_RDC321X is not set
-CONFIG_MFD_RTSX_PCI=m
-# CONFIG_MFD_RT5033 is not set
-CONFIG_MFD_RTSX_USB=m
-# CONFIG_MFD_RC5T583 is not set
-# CONFIG_MFD_RN5T618 is not set
-# CONFIG_MFD_SEC_CORE is not set
-# CONFIG_MFD_SI476X_CORE is not set
-CONFIG_MFD_SM501=m
-CONFIG_MFD_SM501_GPIO=y
-# CONFIG_MFD_SKY81452 is not set
-# CONFIG_MFD_SMSC is not set
-# CONFIG_ABX500_CORE is not set
-# CONFIG_MFD_SYSCON is not set
-# CONFIG_MFD_TI_AM335X_TSCADC is not set
-# CONFIG_MFD_LP3943 is not set
-# CONFIG_MFD_LP8788 is not set
-# CONFIG_MFD_PALMAS is not set
-# CONFIG_TPS6105X is not set
-# CONFIG_TPS65010 is not set
-# CONFIG_TPS6507X is not set
-# CONFIG_MFD_TPS65090 is not set
-# CONFIG_MFD_TPS65217 is not set
-# CONFIG_MFD_TPS65218 is not set
-# CONFIG_MFD_TPS6586X is not set
-# CONFIG_MFD_TPS65910 is not set
-# CONFIG_MFD_TPS65912 is not set
-# CONFIG_MFD_TPS65912_I2C is not set
-# CONFIG_MFD_TPS65912_SPI is not set
-# CONFIG_MFD_TPS80031 is not set
-# CONFIG_TWL4030_CORE is not set
-# CONFIG_TWL6040_CORE is not set
-CONFIG_MFD_WL1273_CORE=m
-# CONFIG_MFD_LM3533 is not set
-# CONFIG_MFD_TIMBERDALE is not set
-# CONFIG_MFD_TMIO is not set
-CONFIG_MFD_VX855=m
-# CONFIG_MFD_ARIZONA_I2C is not set
-# CONFIG_MFD_ARIZONA_SPI is not set
-# CONFIG_MFD_WM8400 is not set
-# CONFIG_MFD_WM831X_I2C is not set
-# CONFIG_MFD_WM831X_SPI is not set
-# CONFIG_MFD_WM8350_I2C is not set
-# CONFIG_MFD_WM8994 is not set
-# CONFIG_MFD_STW481X is not set
-# CONFIG_REGULATOR is not set
-CONFIG_MEDIA_SUPPORT=m
-
-#
-# Multimedia core support
-#
-CONFIG_MEDIA_CAMERA_SUPPORT=y
-CONFIG_MEDIA_ANALOG_TV_SUPPORT=y
-CONFIG_MEDIA_DIGITAL_TV_SUPPORT=y
-CONFIG_MEDIA_RADIO_SUPPORT=y
-# CONFIG_MEDIA_SDR_SUPPORT is not set
-CONFIG_MEDIA_RC_SUPPORT=y
-CONFIG_MEDIA_CONTROLLER=y
-CONFIG_VIDEO_DEV=m
-CONFIG_VIDEO_V4L2_SUBDEV_API=y
-CONFIG_VIDEO_V4L2=m
-# CONFIG_VIDEO_ADV_DEBUG is not set
-# CONFIG_VIDEO_FIXED_MINOR_RANGES is not set
-# CONFIG_VIDEO_PCI_SKELETON is not set
-CONFIG_VIDEO_TUNER=m
-# CONFIG_V4L2_FLASH_LED_CLASS is not set
-CONFIG_VIDEOBUF_GEN=m
-CONFIG_VIDEOBUF_DMA_SG=m
-CONFIG_VIDEOBUF_VMALLOC=m
-CONFIG_VIDEOBUF_DVB=m
-CONFIG_VIDEOBUF2_CORE=m
-CONFIG_VIDEOBUF2_MEMOPS=m
-CONFIG_VIDEOBUF2_DMA_CONTIG=m
-CONFIG_VIDEOBUF2_VMALLOC=m
-CONFIG_VIDEOBUF2_DMA_SG=m
-CONFIG_VIDEOBUF2_DVB=m
-CONFIG_DVB_CORE=m
-CONFIG_DVB_NET=y
-CONFIG_TTPCI_EEPROM=m
-CONFIG_DVB_MAX_ADAPTERS=8
-CONFIG_DVB_DYNAMIC_MINORS=y
-
-#
-# Media drivers
-#
-CONFIG_RC_CORE=m
-CONFIG_RC_MAP=m
-CONFIG_RC_DECODERS=y
-CONFIG_LIRC=m
-CONFIG_IR_LIRC_CODEC=m
-CONFIG_IR_NEC_DECODER=m
-CONFIG_IR_RC5_DECODER=m
-CONFIG_IR_RC6_DECODER=m
-CONFIG_IR_JVC_DECODER=m
-CONFIG_IR_SONY_DECODER=m
-CONFIG_IR_SANYO_DECODER=m
-CONFIG_IR_SHARP_DECODER=m
-CONFIG_IR_MCE_KBD_DECODER=m
-CONFIG_IR_XMP_DECODER=m
-CONFIG_RC_DEVICES=y
-CONFIG_RC_ATI_REMOTE=m
-CONFIG_IR_ENE=m
-CONFIG_IR_HIX5HD2=m
-CONFIG_IR_IMON=m
-CONFIG_IR_MCEUSB=m
-CONFIG_IR_ITE_CIR=m
-CONFIG_IR_FINTEK=m
-# CONFIG_IR_MESON is not set
-CONFIG_IR_NUVOTON=m
-CONFIG_IR_REDRAT3=m
-CONFIG_IR_STREAMZAP=m
-CONFIG_IR_WINBOND_CIR=m
-CONFIG_IR_IGORPLUGUSB=m
-CONFIG_IR_IGUANA=m
-CONFIG_IR_TTUSBIR=m
-# CONFIG_IR_IMG is not set
-CONFIG_RC_LOOPBACK=m
-CONFIG_IR_GPIO_CIR=m
-# CONFIG_RC_ST is not set
-# CONFIG_IR_SUNXI is not set
-CONFIG_MEDIA_USB_SUPPORT=y
-
-#
-# Webcam devices
-#
-CONFIG_USB_VIDEO_CLASS=m
-CONFIG_USB_VIDEO_CLASS_INPUT_EVDEV=y
-CONFIG_USB_GSPCA=m
-CONFIG_USB_M5602=m
-CONFIG_USB_STV06XX=m
-CONFIG_USB_GL860=m
-CONFIG_USB_GSPCA_BENQ=m
-CONFIG_USB_GSPCA_CONEX=m
-CONFIG_USB_GSPCA_CPIA1=m
-CONFIG_USB_GSPCA_DTCS033=m
-CONFIG_USB_GSPCA_ETOMS=m
-CONFIG_USB_GSPCA_FINEPIX=m
-CONFIG_USB_GSPCA_JEILINJ=m
-CONFIG_USB_GSPCA_JL2005BCD=m
-CONFIG_USB_GSPCA_KINECT=m
-CONFIG_USB_GSPCA_KONICA=m
-CONFIG_USB_GSPCA_MARS=m
-CONFIG_USB_GSPCA_MR97310A=m
-CONFIG_USB_GSPCA_NW80X=m
-CONFIG_USB_GSPCA_OV519=m
-CONFIG_USB_GSPCA_OV534=m
-CONFIG_USB_GSPCA_OV534_9=m
-CONFIG_USB_GSPCA_PAC207=m
-CONFIG_USB_GSPCA_PAC7302=m
-CONFIG_USB_GSPCA_PAC7311=m
-CONFIG_USB_GSPCA_SE401=m
-CONFIG_USB_GSPCA_SN9C2028=m
-CONFIG_USB_GSPCA_SN9C20X=m
-CONFIG_USB_GSPCA_SONIXB=m
-CONFIG_USB_GSPCA_SONIXJ=m
-CONFIG_USB_GSPCA_SPCA500=m
-CONFIG_USB_GSPCA_SPCA501=m
-CONFIG_USB_GSPCA_SPCA505=m
-CONFIG_USB_GSPCA_SPCA506=m
-CONFIG_USB_GSPCA_SPCA508=m
-CONFIG_USB_GSPCA_SPCA561=m
-CONFIG_USB_GSPCA_SPCA1528=m
-CONFIG_USB_GSPCA_SQ905=m
-CONFIG_USB_GSPCA_SQ905C=m
-CONFIG_USB_GSPCA_SQ930X=m
-CONFIG_USB_GSPCA_STK014=m
-CONFIG_USB_GSPCA_STK1135=m
-CONFIG_USB_GSPCA_STV0680=m
-CONFIG_USB_GSPCA_SUNPLUS=m
-CONFIG_USB_GSPCA_T613=m
-CONFIG_USB_GSPCA_TOPRO=m
-CONFIG_USB_GSPCA_TOUPTEK=m
-CONFIG_USB_GSPCA_TV8532=m
-CONFIG_USB_GSPCA_VC032X=m
-CONFIG_USB_GSPCA_VICAM=m
-CONFIG_USB_GSPCA_XIRLINK_CIT=m
-CONFIG_USB_GSPCA_ZC3XX=m
-CONFIG_USB_PWC=m
-# CONFIG_USB_PWC_DEBUG is not set
-CONFIG_USB_PWC_INPUT_EVDEV=y
-CONFIG_VIDEO_CPIA2=m
-CONFIG_USB_ZR364XX=m
-CONFIG_USB_STKWEBCAM=m
-CONFIG_USB_S2255=m
-CONFIG_VIDEO_USBTV=m
-
-#
-# Analog TV USB devices
-#
-CONFIG_VIDEO_PVRUSB2=m
-CONFIG_VIDEO_PVRUSB2_SYSFS=y
-CONFIG_VIDEO_PVRUSB2_DVB=y
-# CONFIG_VIDEO_PVRUSB2_DEBUGIFC is not set
-CONFIG_VIDEO_HDPVR=m
-CONFIG_VIDEO_USBVISION=m
-CONFIG_VIDEO_STK1160_COMMON=m
-CONFIG_VIDEO_STK1160_AC97=y
-CONFIG_VIDEO_STK1160=m
-CONFIG_VIDEO_GO7007=m
-CONFIG_VIDEO_GO7007_USB=m
-CONFIG_VIDEO_GO7007_LOADER=m
-CONFIG_VIDEO_GO7007_USB_S2250_BOARD=m
-
-#
-# Analog/digital TV USB devices
-#
-CONFIG_VIDEO_AU0828=m
-CONFIG_VIDEO_AU0828_V4L2=y
-# CONFIG_VIDEO_AU0828_RC is not set
-CONFIG_VIDEO_CX231XX=m
-CONFIG_VIDEO_CX231XX_RC=y
-CONFIG_VIDEO_CX231XX_ALSA=m
-CONFIG_VIDEO_CX231XX_DVB=m
-CONFIG_VIDEO_TM6000=m
-CONFIG_VIDEO_TM6000_ALSA=m
-CONFIG_VIDEO_TM6000_DVB=m
-
-#
-# Digital TV USB devices
-#
-CONFIG_DVB_USB=m
-# CONFIG_DVB_USB_DEBUG is not set
-CONFIG_DVB_USB_A800=m
-CONFIG_DVB_USB_DIBUSB_MB=m
-# CONFIG_DVB_USB_DIBUSB_MB_FAULTY is not set
-CONFIG_DVB_USB_DIBUSB_MC=m
-CONFIG_DVB_USB_DIB0700=m
-CONFIG_DVB_USB_UMT_010=m
-CONFIG_DVB_USB_CXUSB=m
-CONFIG_DVB_USB_M920X=m
-CONFIG_DVB_USB_DIGITV=m
-CONFIG_DVB_USB_VP7045=m
-CONFIG_DVB_USB_VP702X=m
-CONFIG_DVB_USB_GP8PSK=m
-CONFIG_DVB_USB_NOVA_T_USB2=m
-CONFIG_DVB_USB_TTUSB2=m
-CONFIG_DVB_USB_DTT200U=m
-CONFIG_DVB_USB_OPERA1=m
-CONFIG_DVB_USB_AF9005=m
-CONFIG_DVB_USB_AF9005_REMOTE=m
-CONFIG_DVB_USB_PCTV452E=m
-CONFIG_DVB_USB_DW2102=m
-CONFIG_DVB_USB_CINERGY_T2=m
-CONFIG_DVB_USB_DTV5100=m
-CONFIG_DVB_USB_FRIIO=m
-CONFIG_DVB_USB_AZ6027=m
-CONFIG_DVB_USB_TECHNISAT_USB2=m
-CONFIG_DVB_USB_V2=m
-CONFIG_DVB_USB_AF9015=m
-CONFIG_DVB_USB_AF9035=m
-CONFIG_DVB_USB_ANYSEE=m
-CONFIG_DVB_USB_AU6610=m
-CONFIG_DVB_USB_AZ6007=m
-CONFIG_DVB_USB_CE6230=m
-CONFIG_DVB_USB_EC168=m
-CONFIG_DVB_USB_GL861=m
-CONFIG_DVB_USB_LME2510=m
-CONFIG_DVB_USB_MXL111SF=m
-CONFIG_DVB_USB_RTL28XXU=m
-CONFIG_DVB_USB_DVBSKY=m
-CONFIG_DVB_TTUSB_BUDGET=m
-CONFIG_DVB_TTUSB_DEC=m
-CONFIG_SMS_USB_DRV=m
-CONFIG_DVB_B2C2_FLEXCOP_USB=m
-# CONFIG_DVB_B2C2_FLEXCOP_USB_DEBUG is not set
-# CONFIG_DVB_AS102 is not set
-
-#
-# Webcam, TV (analog/digital) USB devices
-#
-CONFIG_VIDEO_EM28XX=m
-CONFIG_VIDEO_EM28XX_V4L2=m
-CONFIG_VIDEO_EM28XX_ALSA=m
-CONFIG_VIDEO_EM28XX_DVB=m
-CONFIG_VIDEO_EM28XX_RC=m
-CONFIG_MEDIA_PCI_SUPPORT=y
-
-#
-# Media capture support
-#
-CONFIG_VIDEO_MEYE=m
-CONFIG_VIDEO_SOLO6X10=m
-# CONFIG_VIDEO_TW68 is not set
-CONFIG_VIDEO_ZORAN=m
-CONFIG_VIDEO_ZORAN_DC30=m
-CONFIG_VIDEO_ZORAN_ZR36060=m
-CONFIG_VIDEO_ZORAN_BUZ=m
-CONFIG_VIDEO_ZORAN_DC10=m
-CONFIG_VIDEO_ZORAN_LML33=m
-CONFIG_VIDEO_ZORAN_LML33R10=m
-CONFIG_VIDEO_ZORAN_AVS6EYES=m
-
-#
-# Media capture/analog TV support
-#
-CONFIG_VIDEO_IVTV=m
-# CONFIG_VIDEO_IVTV_ALSA is not set
-CONFIG_VIDEO_FB_IVTV=m
-CONFIG_VIDEO_HEXIUM_GEMINI=m
-CONFIG_VIDEO_HEXIUM_ORION=m
-CONFIG_VIDEO_MXB=m
-# CONFIG_VIDEO_DT3155 is not set
-
-#
-# Media capture/analog/hybrid TV support
-#
-CONFIG_VIDEO_CX18=m
-CONFIG_VIDEO_CX18_ALSA=m
-CONFIG_VIDEO_CX23885=m
-CONFIG_MEDIA_ALTERA_CI=m
-# CONFIG_VIDEO_CX25821 is not set
-CONFIG_VIDEO_CX88=m
-CONFIG_VIDEO_CX88_ALSA=m
-CONFIG_VIDEO_CX88_BLACKBIRD=m
-CONFIG_VIDEO_CX88_DVB=m
-CONFIG_VIDEO_CX88_ENABLE_VP3054=y
-CONFIG_VIDEO_CX88_VP3054=m
-CONFIG_VIDEO_CX88_MPEG=m
-CONFIG_VIDEO_BT848=m
-CONFIG_DVB_BT8XX=m
-CONFIG_VIDEO_SAA7134=m
-CONFIG_VIDEO_SAA7134_ALSA=m
-CONFIG_VIDEO_SAA7134_RC=y
-CONFIG_VIDEO_SAA7134_DVB=m
-CONFIG_VIDEO_SAA7134_GO7007=m
-CONFIG_VIDEO_SAA7164=m
-
-#
-# Media digital TV PCI Adapters
-#
-CONFIG_DVB_AV7110_IR=y
-CONFIG_DVB_AV7110=m
-CONFIG_DVB_AV7110_OSD=y
-CONFIG_DVB_BUDGET_CORE=m
-CONFIG_DVB_BUDGET=m
-CONFIG_DVB_BUDGET_CI=m
-CONFIG_DVB_BUDGET_AV=m
-CONFIG_DVB_BUDGET_PATCH=m
-CONFIG_DVB_B2C2_FLEXCOP_PCI=m
-# CONFIG_DVB_B2C2_FLEXCOP_PCI_DEBUG is not set
-CONFIG_DVB_PLUTO2=m
-CONFIG_DVB_DM1105=m
-CONFIG_DVB_PT1=m
-# CONFIG_DVB_PT3 is not set
-CONFIG_MANTIS_CORE=m
-CONFIG_DVB_MANTIS=m
-CONFIG_DVB_HOPPER=m
-CONFIG_DVB_NGENE=m
-CONFIG_DVB_DDBRIDGE=m
-CONFIG_DVB_SMIPCIE=m
-CONFIG_DVB_NETUP_UNIDVB=m
-# CONFIG_V4L_PLATFORM_DRIVERS is not set
-CONFIG_V4L_MEM2MEM_DRIVERS=y
-# CONFIG_VIDEO_MEM2MEM_DEINTERLACE is not set
-# CONFIG_VIDEO_SAMSUNG_S5P_G2D is not set
-# CONFIG_VIDEO_SAMSUNG_S5P_JPEG is not set
-# CONFIG_VIDEO_SAMSUNG_S5P_MFC is not set
-# CONFIG_VIDEO_MX2_EMMAPRP is not set
-# CONFIG_VIDEO_SAMSUNG_EXYNOS_GSC is not set
-# CONFIG_VIDEO_STI_BDISP is not set
-# CONFIG_VIDEO_SH_VEU is not set
-# CONFIG_VIDEO_RENESAS_JPU is not set
-# CONFIG_VIDEO_RENESAS_VSP1 is not set
-# CONFIG_VIDEO_TI_VPE is not set
-# CONFIG_V4L_TEST_DRIVERS is not set
-# CONFIG_DVB_PLATFORM_DRIVERS is not set
-
-#
-# Supported MMC/SDIO adapters
-#
-CONFIG_SMS_SDIO_DRV=m
-CONFIG_RADIO_ADAPTERS=y
-CONFIG_RADIO_TEA575X=m
-CONFIG_RADIO_SI470X=y
-CONFIG_USB_SI470X=m
-CONFIG_I2C_SI470X=m
-CONFIG_RADIO_SI4713=m
-# CONFIG_USB_SI4713 is not set
-# CONFIG_PLATFORM_SI4713 is not set
-# CONFIG_I2C_SI4713 is not set
-CONFIG_USB_MR800=m
-CONFIG_USB_DSBR=m
-CONFIG_RADIO_MAXIRADIO=m
-CONFIG_RADIO_SHARK=m
-CONFIG_RADIO_SHARK2=m
-CONFIG_USB_KEENE=m
-# CONFIG_USB_RAREMONO is not set
-CONFIG_USB_MA901=m
-CONFIG_RADIO_TEA5764=m
-CONFIG_RADIO_SAA7706H=m
-# CONFIG_RADIO_TEF6862 is not set
-CONFIG_RADIO_WL1273=m
-
-#
-# Texas Instruments WL128x FM driver (ST based)
-#
-
-#
-# Supported FireWire (IEEE 1394) Adapters
-#
-CONFIG_DVB_FIREDTV=m
-CONFIG_DVB_FIREDTV_INPUT=y
-CONFIG_MEDIA_COMMON_OPTIONS=y
-
-#
-# common driver options
-#
-CONFIG_VIDEO_CX2341X=m
-CONFIG_VIDEO_TVEEPROM=m
-CONFIG_CYPRESS_FIRMWARE=m
-CONFIG_DVB_B2C2_FLEXCOP=m
-CONFIG_VIDEO_SAA7146=m
-CONFIG_VIDEO_SAA7146_VV=m
-CONFIG_SMS_SIANO_MDTV=m
-CONFIG_SMS_SIANO_RC=y
-# CONFIG_SMS_SIANO_DEBUGFS is not set
-
-#
-# Media ancillary drivers (tuners, sensors, i2c, frontends)
-#
-CONFIG_MEDIA_SUBDRV_AUTOSELECT=y
-CONFIG_MEDIA_ATTACH=y
-CONFIG_VIDEO_IR_I2C=m
-
-#
-# Encoders, decoders, sensors and other helper chips
-#
-
-#
-# Audio decoders, processors and mixers
-#
-CONFIG_VIDEO_TVAUDIO=m
-CONFIG_VIDEO_TDA7432=m
-CONFIG_VIDEO_TDA9840=m
-CONFIG_VIDEO_TEA6415C=m
-CONFIG_VIDEO_TEA6420=m
-CONFIG_VIDEO_MSP3400=m
-CONFIG_VIDEO_CS5345=m
-CONFIG_VIDEO_CS53L32A=m
-# CONFIG_VIDEO_TLV320AIC23B is not set
-CONFIG_VIDEO_UDA1342=m
-CONFIG_VIDEO_WM8775=m
-CONFIG_VIDEO_WM8739=m
-CONFIG_VIDEO_VP27SMPX=m
-CONFIG_VIDEO_SONY_BTF_MPX=m
-
-#
-# RDS decoders
-#
-CONFIG_VIDEO_SAA6588=m
-
-#
-# Video decoders
-#
-# CONFIG_VIDEO_ADV7180 is not set
-# CONFIG_VIDEO_ADV7183 is not set
-# CONFIG_VIDEO_ADV7604 is not set
-# CONFIG_VIDEO_ADV7842 is not set
-CONFIG_VIDEO_BT819=m
-CONFIG_VIDEO_BT856=m
-CONFIG_VIDEO_BT866=m
-CONFIG_VIDEO_KS0127=m
-# CONFIG_VIDEO_ML86V7667 is not set
-CONFIG_VIDEO_SAA7110=m
-CONFIG_VIDEO_SAA711X=m
-# CONFIG_VIDEO_TC358743 is not set
-# CONFIG_VIDEO_TVP514X is not set
-CONFIG_VIDEO_TVP5150=m
-# CONFIG_VIDEO_TVP7002 is not set
-CONFIG_VIDEO_TW2804=m
-CONFIG_VIDEO_TW9903=m
-CONFIG_VIDEO_TW9906=m
-CONFIG_VIDEO_VPX3220=m
-
-#
-# Video and audio decoders
-#
-CONFIG_VIDEO_SAA717X=m
-CONFIG_VIDEO_CX25840=m
-
-#
-# Video encoders
-#
-CONFIG_VIDEO_SAA7127=m
-CONFIG_VIDEO_SAA7185=m
-CONFIG_VIDEO_ADV7170=m
-CONFIG_VIDEO_ADV7175=m
-# CONFIG_VIDEO_ADV7343 is not set
-# CONFIG_VIDEO_ADV7393 is not set
-# CONFIG_VIDEO_ADV7511 is not set
-# CONFIG_VIDEO_AD9389B is not set
-# CONFIG_VIDEO_AK881X is not set
-# CONFIG_VIDEO_THS8200 is not set
-
-#
-# Camera sensor devices
-#
-# CONFIG_VIDEO_OV2659 is not set
-CONFIG_VIDEO_OV7640=m
-# CONFIG_VIDEO_OV7670 is not set
-# CONFIG_VIDEO_OV9650 is not set
-# CONFIG_VIDEO_VS6624 is not set
-# CONFIG_VIDEO_MT9M032 is not set
-# CONFIG_VIDEO_MT9P031 is not set
-# CONFIG_VIDEO_MT9T001 is not set
-CONFIG_VIDEO_MT9V011=m
-# CONFIG_VIDEO_MT9V032 is not set
-# CONFIG_VIDEO_SR030PC30 is not set
-# CONFIG_VIDEO_NOON010PC30 is not set
-# CONFIG_VIDEO_M5MOLS is not set
-# CONFIG_VIDEO_S5K6AA is not set
-# CONFIG_VIDEO_S5K6A3 is not set
-# CONFIG_VIDEO_S5K4ECGX is not set
-# CONFIG_VIDEO_S5K5BAF is not set
-# CONFIG_VIDEO_SMIAPP is not set
-# CONFIG_VIDEO_S5C73M3 is not set
-
-#
-# Flash devices
-#
-# CONFIG_VIDEO_ADP1653 is not set
-# CONFIG_VIDEO_AS3645A is not set
-# CONFIG_VIDEO_LM3560 is not set
-# CONFIG_VIDEO_LM3646 is not set
-
-#
-# Video improvement chips
-#
-CONFIG_VIDEO_UPD64031A=m
-CONFIG_VIDEO_UPD64083=m
-
-#
-# Audio/Video compression chips
-#
-CONFIG_VIDEO_SAA6752HS=m
-
-#
-# Miscellaneous helper chips
-#
-# CONFIG_VIDEO_THS7303 is not set
-CONFIG_VIDEO_M52790=m
-
-#
-# Sensors used on soc_camera driver
-#
-CONFIG_MEDIA_TUNER=m
-
-#
-# Customize TV tuners
-#
-CONFIG_MEDIA_TUNER_SIMPLE=m
-CONFIG_MEDIA_TUNER_TDA8290=m
-CONFIG_MEDIA_TUNER_TDA827X=m
-CONFIG_MEDIA_TUNER_TDA18271=m
-CONFIG_MEDIA_TUNER_TDA9887=m
-CONFIG_MEDIA_TUNER_TEA5761=m
-CONFIG_MEDIA_TUNER_TEA5767=m
-# CONFIG_MEDIA_TUNER_MSI001 is not set
-CONFIG_MEDIA_TUNER_MT20XX=m
-CONFIG_MEDIA_TUNER_MT2060=m
-CONFIG_MEDIA_TUNER_MT2063=m
-CONFIG_MEDIA_TUNER_MT2266=m
-CONFIG_MEDIA_TUNER_MT2131=m
-CONFIG_MEDIA_TUNER_QT1010=m
-CONFIG_MEDIA_TUNER_XC2028=m
-CONFIG_MEDIA_TUNER_XC5000=m
-CONFIG_MEDIA_TUNER_XC4000=m
-CONFIG_MEDIA_TUNER_MXL5005S=m
-CONFIG_MEDIA_TUNER_MXL5007T=m
-CONFIG_MEDIA_TUNER_MC44S803=m
-CONFIG_MEDIA_TUNER_MAX2165=m
-CONFIG_MEDIA_TUNER_TDA18218=m
-CONFIG_MEDIA_TUNER_FC0011=m
-CONFIG_MEDIA_TUNER_FC0012=m
-CONFIG_MEDIA_TUNER_FC0013=m
-CONFIG_MEDIA_TUNER_TDA18212=m
-CONFIG_MEDIA_TUNER_E4000=m
-CONFIG_MEDIA_TUNER_FC2580=m
-CONFIG_MEDIA_TUNER_M88RS6000T=m
-CONFIG_MEDIA_TUNER_TUA9001=m
-CONFIG_MEDIA_TUNER_SI2157=m
-CONFIG_MEDIA_TUNER_IT913X=m
-CONFIG_MEDIA_TUNER_R820T=m
-# CONFIG_MEDIA_TUNER_MXL301RF is not set
-CONFIG_MEDIA_TUNER_QM1D1C0042=m
-
-#
-# Customise DVB Frontends
-#
-
-#
-# Multistandard (satellite) frontends
-#
-CONFIG_DVB_STB0899=m
-CONFIG_DVB_STB6100=m
-CONFIG_DVB_STV090x=m
-CONFIG_DVB_STV6110x=m
-CONFIG_DVB_M88DS3103=m
-
-#
-# Multistandard (cable + terrestrial) frontends
-#
-CONFIG_DVB_DRXK=m
-CONFIG_DVB_TDA18271C2DD=m
-CONFIG_DVB_SI2165=m
-
-#
-# DVB-S (satellite) frontends
-#
-CONFIG_DVB_CX24110=m
-CONFIG_DVB_CX24123=m
-CONFIG_DVB_MT312=m
-CONFIG_DVB_ZL10036=m
-CONFIG_DVB_ZL10039=m
-CONFIG_DVB_S5H1420=m
-CONFIG_DVB_STV0288=m
-CONFIG_DVB_STB6000=m
-CONFIG_DVB_STV0299=m
-CONFIG_DVB_STV6110=m
-CONFIG_DVB_STV0900=m
-CONFIG_DVB_TDA8083=m
-CONFIG_DVB_TDA10086=m
-CONFIG_DVB_TDA8261=m
-CONFIG_DVB_VES1X93=m
-CONFIG_DVB_TUNER_ITD1000=m
-CONFIG_DVB_TUNER_CX24113=m
-CONFIG_DVB_TDA826X=m
-CONFIG_DVB_TUA6100=m
-CONFIG_DVB_CX24116=m
-CONFIG_DVB_CX24117=m
-CONFIG_DVB_CX24120=m
-CONFIG_DVB_SI21XX=m
-CONFIG_DVB_TS2020=m
-CONFIG_DVB_DS3000=m
-CONFIG_DVB_MB86A16=m
-CONFIG_DVB_TDA10071=m
-
-#
-# DVB-T (terrestrial) frontends
-#
-CONFIG_DVB_SP8870=m
-CONFIG_DVB_SP887X=m
-CONFIG_DVB_CX22700=m
-CONFIG_DVB_CX22702=m
-# CONFIG_DVB_S5H1432 is not set
-CONFIG_DVB_DRXD=m
-CONFIG_DVB_L64781=m
-CONFIG_DVB_TDA1004X=m
-CONFIG_DVB_NXT6000=m
-CONFIG_DVB_MT352=m
-CONFIG_DVB_ZL10353=m
-CONFIG_DVB_DIB3000MB=m
-CONFIG_DVB_DIB3000MC=m
-CONFIG_DVB_DIB7000M=m
-CONFIG_DVB_DIB7000P=m
-# CONFIG_DVB_DIB9000 is not set
-CONFIG_DVB_TDA10048=m
-CONFIG_DVB_AF9013=m
-CONFIG_DVB_EC100=m
-# CONFIG_DVB_HD29L2 is not set
-CONFIG_DVB_STV0367=m
-CONFIG_DVB_CXD2820R=m
-CONFIG_DVB_CXD2841ER=m
-CONFIG_DVB_RTL2830=m
-CONFIG_DVB_RTL2832=m
-CONFIG_DVB_SI2168=m
-# CONFIG_DVB_AS102_FE is not set
-
-#
-# DVB-C (cable) frontends
-#
-CONFIG_DVB_VES1820=m
-CONFIG_DVB_TDA10021=m
-CONFIG_DVB_TDA10023=m
-CONFIG_DVB_STV0297=m
-
-#
-# ATSC (North American/Korean Terrestrial/Cable DTV) frontends
-#
-CONFIG_DVB_NXT200X=m
-CONFIG_DVB_OR51211=m
-CONFIG_DVB_OR51132=m
-CONFIG_DVB_BCM3510=m
-CONFIG_DVB_LGDT330X=m
-CONFIG_DVB_LGDT3305=m
-CONFIG_DVB_LGDT3306A=m
-CONFIG_DVB_LG2160=m
-CONFIG_DVB_S5H1409=m
-CONFIG_DVB_AU8522=m
-CONFIG_DVB_AU8522_DTV=m
-CONFIG_DVB_AU8522_V4L=m
-CONFIG_DVB_S5H1411=m
-
-#
-# ISDB-T (terrestrial) frontends
-#
-CONFIG_DVB_S921=m
-CONFIG_DVB_DIB8000=m
-CONFIG_DVB_MB86A20S=m
-
-#
-# ISDB-S (satellite) & ISDB-T (terrestrial) frontends
-#
-CONFIG_DVB_TC90522=m
-
-#
-# Digital terrestrial only tuners/PLL
-#
-CONFIG_DVB_PLL=m
-CONFIG_DVB_TUNER_DIB0070=m
-CONFIG_DVB_TUNER_DIB0090=m
-
-#
-# SEC control devices for DVB-S
-#
-CONFIG_DVB_DRX39XYJ=m
-CONFIG_DVB_LNBH25=m
-CONFIG_DVB_LNBP21=m
-CONFIG_DVB_LNBP22=m
-CONFIG_DVB_ISL6405=m
-CONFIG_DVB_ISL6421=m
-CONFIG_DVB_ISL6423=m
-CONFIG_DVB_A8293=m
-CONFIG_DVB_SP2=m
-# CONFIG_DVB_LGS8GL5 is not set
-CONFIG_DVB_LGS8GXX=m
-CONFIG_DVB_ATBM8830=m
-CONFIG_DVB_TDA665x=m
-CONFIG_DVB_IX2505V=m
-CONFIG_DVB_M88RS2000=m
-CONFIG_DVB_AF9033=m
-CONFIG_DVB_HORUS3A=m
-CONFIG_DVB_ASCOT2E=m
-
-#
-# Tools to develop new frontends
-#
-# CONFIG_DVB_DUMMY_FE is not set
-
-#
-# Graphics support
-#
-CONFIG_AGP=y
-CONFIG_AGP_AMD64=y
-CONFIG_AGP_INTEL=y
-CONFIG_AGP_SIS=y
-CONFIG_AGP_VIA=y
-CONFIG_INTEL_GTT=y
-CONFIG_VGA_ARB=y
-CONFIG_VGA_ARB_MAX_GPUS=16
-CONFIG_VGA_SWITCHEROO=y
-CONFIG_DRM=m
-CONFIG_DRM_MIPI_DSI=y
-CONFIG_DRM_KMS_HELPER=m
-CONFIG_DRM_KMS_FB_HELPER=y
-CONFIG_DRM_FBDEV_EMULATION=y
-CONFIG_DRM_LOAD_EDID_FIRMWARE=y
-CONFIG_DRM_TTM=m
-
-#
-# I2C encoder or helper chips
-#
-CONFIG_DRM_I2C_ADV7511=m
-CONFIG_DRM_I2C_CH7006=m
-CONFIG_DRM_I2C_SIL164=m
-CONFIG_DRM_I2C_NXP_TDA998X=m
-# CONFIG_DRM_TDFX is not set
-# CONFIG_DRM_R128 is not set
-CONFIG_DRM_RADEON=m
-CONFIG_DRM_RADEON_USERPTR=y
-# CONFIG_DRM_RADEON_UMS is not set
-CONFIG_DRM_AMDGPU=m
-# CONFIG_DRM_AMDGPU_CIK is not set
-CONFIG_DRM_AMDGPU_USERPTR=y
-CONFIG_DRM_NOUVEAU=m
-CONFIG_NOUVEAU_DEBUG=5
-CONFIG_NOUVEAU_DEBUG_DEFAULT=3
-CONFIG_DRM_NOUVEAU_BACKLIGHT=y
-# CONFIG_DRM_I810 is not set
-CONFIG_DRM_I915=m
-# CONFIG_DRM_I915_PRELIMINARY_HW_SUPPORT is not set
-# CONFIG_DRM_MGA is not set
-# CONFIG_DRM_SIS is not set
-CONFIG_DRM_VIA=m
-# CONFIG_DRM_SAVAGE is not set
-CONFIG_DRM_VGEM=m
-CONFIG_DRM_VMWGFX=m
-CONFIG_DRM_VMWGFX_FBCON=y
-CONFIG_DRM_GMA500=m
-# CONFIG_DRM_GMA600 is not set
-CONFIG_DRM_GMA3600=y
-CONFIG_DRM_UDL=m
-CONFIG_DRM_AST=m
-CONFIG_DRM_MGAG200=m
-CONFIG_DRM_CIRRUS_QEMU=m
-CONFIG_DRM_QXL=m
-CONFIG_DRM_BOCHS=m
-CONFIG_DRM_VIRTIO_GPU=m
-CONFIG_DRM_PANEL=y
-
-#
-# Display Panels
-#
-CONFIG_DRM_BRIDGE=y
-
-#
-# Display Interface Bridges
-#
-CONFIG_HSA_AMD=m
-# CONFIG_DRM_VC4 is not set
-
-#
-# Frame buffer Devices
-#
-CONFIG_FB=y
-# CONFIG_FIRMWARE_EDID is not set
-CONFIG_FB_CMDLINE=y
-# CONFIG_FB_DDC is not set
-CONFIG_FB_BOOT_VESA_SUPPORT=y
-CONFIG_FB_CFB_FILLRECT=y
-CONFIG_FB_CFB_COPYAREA=y
-CONFIG_FB_CFB_IMAGEBLIT=y
-# CONFIG_FB_CFB_REV_PIXELS_IN_BYTE is not set
-CONFIG_FB_SYS_FILLRECT=y
-CONFIG_FB_SYS_COPYAREA=y
-CONFIG_FB_SYS_IMAGEBLIT=y
-# CONFIG_FB_FOREIGN_ENDIAN is not set
-CONFIG_FB_SYS_FOPS=y
-CONFIG_FB_DEFERRED_IO=y
-# CONFIG_FB_SVGALIB is not set
-# CONFIG_FB_MACMODES is not set
-CONFIG_FB_BACKLIGHT=y
-# CONFIG_FB_MODE_HELPERS is not set
-CONFIG_FB_TILEBLITTING=y
-
-#
-# Frame buffer hardware drivers
-#
-# CONFIG_FB_CIRRUS is not set
-# CONFIG_FB_PM2 is not set
-# CONFIG_FB_CLPS711X is not set
-# CONFIG_FB_CYBER2000 is not set
-# CONFIG_FB_ARC is not set
-# CONFIG_FB_ASILIANT is not set
-# CONFIG_FB_IMSTT is not set
-CONFIG_FB_VGA16=m
-# CONFIG_FB_UVESA is not set
-CONFIG_FB_VESA=y
-CONFIG_FB_EFI=y
-# CONFIG_FB_N411 is not set
-# CONFIG_FB_HGA is not set
-# CONFIG_FB_OPENCORES is not set
-# CONFIG_FB_S1D13XXX is not set
-# CONFIG_FB_NVIDIA is not set
-# CONFIG_FB_RIVA is not set
-# CONFIG_FB_I740 is not set
-# CONFIG_FB_LE80578 is not set
-# CONFIG_FB_MATROX is not set
-# CONFIG_FB_RADEON is not set
-# CONFIG_FB_ATY128 is not set
-# CONFIG_FB_ATY is not set
-# CONFIG_FB_S3 is not set
-# CONFIG_FB_SAVAGE is not set
-# CONFIG_FB_SIS is not set
-# CONFIG_FB_VIA is not set
-# CONFIG_FB_NEOMAGIC is not set
-# CONFIG_FB_KYRO is not set
-# CONFIG_FB_3DFX is not set
-# CONFIG_FB_VOODOO1 is not set
-# CONFIG_FB_VT8623 is not set
-# CONFIG_FB_TRIDENT is not set
-# CONFIG_FB_ARK is not set
-# CONFIG_FB_PM3 is not set
-# CONFIG_FB_CARMINE is not set
-# CONFIG_FB_GEODE is not set
-# CONFIG_FB_TMIO is not set
-# CONFIG_FB_SM501 is not set
-# CONFIG_FB_SMSCUFX is not set
-# CONFIG_FB_UDL is not set
-# CONFIG_FB_IBM_GXT4500 is not set
-# CONFIG_FB_GOLDFISH is not set
-CONFIG_FB_VIRTUAL=m
-CONFIG_XEN_FBDEV_FRONTEND=y
-# CONFIG_FB_METRONOME is not set
-# CONFIG_FB_MB862XX is not set
-# CONFIG_FB_BROADSHEET is not set
-# CONFIG_FB_AUO_K190X is not set
-CONFIG_FB_HYPERV=m
-# CONFIG_FB_SIMPLE is not set
-# CONFIG_FB_SM712 is not set
-CONFIG_BACKLIGHT_LCD_SUPPORT=y
-CONFIG_LCD_CLASS_DEVICE=m
-# CONFIG_LCD_L4F00242T03 is not set
-# CONFIG_LCD_LMS283GF05 is not set
-# CONFIG_LCD_LTV350QV is not set
-# CONFIG_LCD_ILI922X is not set
-# CONFIG_LCD_ILI9320 is not set
-# CONFIG_LCD_TDO24M is not set
-# CONFIG_LCD_VGG2432A4 is not set
-CONFIG_LCD_PLATFORM=m
-# CONFIG_LCD_S6E63M0 is not set
-# CONFIG_LCD_LD9040 is not set
-# CONFIG_LCD_AMS369FG06 is not set
-# CONFIG_LCD_LMS501KF03 is not set
-# CONFIG_LCD_HX8357 is not set
-CONFIG_BACKLIGHT_CLASS_DEVICE=y
-# CONFIG_BACKLIGHT_GENERIC is not set
-# CONFIG_BACKLIGHT_PWM is not set
-CONFIG_BACKLIGHT_APPLE=m
-# CONFIG_BACKLIGHT_PM8941_WLED is not set
-# CONFIG_BACKLIGHT_SAHARA is not set
-# CONFIG_BACKLIGHT_ADP8860 is not set
-# CONFIG_BACKLIGHT_ADP8870 is not set
-# CONFIG_BACKLIGHT_LM3630A is not set
-# CONFIG_BACKLIGHT_LM3639 is not set
-CONFIG_BACKLIGHT_LP855X=m
-# CONFIG_BACKLIGHT_GPIO is not set
-# CONFIG_BACKLIGHT_LV5207LP is not set
-# CONFIG_BACKLIGHT_BD6107 is not set
-CONFIG_VGASTATE=m
-CONFIG_HDMI=y
-
-#
-# Console display driver support
-#
-CONFIG_VGA_CONSOLE=y
-CONFIG_VGACON_SOFT_SCROLLBACK=y
-CONFIG_VGACON_SOFT_SCROLLBACK_SIZE=64
-CONFIG_DUMMY_CONSOLE=y
-CONFIG_DUMMY_CONSOLE_COLUMNS=80
-CONFIG_DUMMY_CONSOLE_ROWS=25
-CONFIG_FRAMEBUFFER_CONSOLE=y
-CONFIG_FRAMEBUFFER_CONSOLE_DETECT_PRIMARY=y
-CONFIG_FRAMEBUFFER_CONSOLE_ROTATION=y
-CONFIG_LOGO=y
-# CONFIG_LOGO_LINUX_MONO is not set
-# CONFIG_LOGO_LINUX_VGA16 is not set
-CONFIG_LOGO_LINUX_CLUT224=y
-CONFIG_SOUND=m
-CONFIG_SOUND_OSS_CORE=y
-CONFIG_SOUND_OSS_CORE_PRECLAIM=y
-CONFIG_SND=m
-CONFIG_SND_TIMER=m
-CONFIG_SND_PCM=m
-CONFIG_SND_DMAENGINE_PCM=m
-CONFIG_SND_HWDEP=m
-CONFIG_SND_RAWMIDI=m
-CONFIG_SND_COMPRESS_OFFLOAD=m
-CONFIG_SND_JACK=y
-CONFIG_SND_SEQUENCER=m
-CONFIG_SND_SEQ_DUMMY=m
-CONFIG_SND_OSSEMUL=y
-CONFIG_SND_MIXER_OSS=m
-CONFIG_SND_PCM_OSS=m
-CONFIG_SND_PCM_OSS_PLUGINS=y
-CONFIG_SND_PCM_TIMER=y
-CONFIG_SND_SEQUENCER_OSS=y
-CONFIG_SND_HRTIMER=m
-CONFIG_SND_SEQ_HRTIMER_DEFAULT=y
-CONFIG_SND_DYNAMIC_MINORS=y
-CONFIG_SND_MAX_CARDS=32
-# CONFIG_SND_SUPPORT_OLD_API is not set
-CONFIG_SND_PROC_FS=y
-CONFIG_SND_VERBOSE_PROCFS=y
-CONFIG_SND_VERBOSE_PRINTK=y
-CONFIG_SND_DEBUG=y
-# CONFIG_SND_DEBUG_VERBOSE is not set
-CONFIG_SND_PCM_XRUN_DEBUG=y
-CONFIG_SND_VMASTER=y
-CONFIG_SND_DMA_SGBUF=y
-CONFIG_SND_RAWMIDI_SEQ=m
-CONFIG_SND_OPL3_LIB_SEQ=m
-# CONFIG_SND_OPL4_LIB_SEQ is not set
-# CONFIG_SND_SBAWE_SEQ is not set
-CONFIG_SND_EMU10K1_SEQ=m
-CONFIG_SND_MPU401_UART=m
-CONFIG_SND_OPL3_LIB=m
-CONFIG_SND_VX_LIB=m
-CONFIG_SND_AC97_CODEC=m
-CONFIG_SND_DRIVERS=y
-CONFIG_SND_PCSP=m
-CONFIG_SND_DUMMY=m
-CONFIG_SND_ALOOP=m
-CONFIG_SND_VIRMIDI=m
-CONFIG_SND_MTPAV=m
-CONFIG_SND_MTS64=m
-CONFIG_SND_SERIAL_U16550=m
-CONFIG_SND_MPU401=m
-CONFIG_SND_PORTMAN2X4=m
-CONFIG_SND_AC97_POWER_SAVE=y
-CONFIG_SND_AC97_POWER_SAVE_DEFAULT=0
-CONFIG_SND_SB_COMMON=m
-CONFIG_SND_PCI=y
-CONFIG_SND_AD1889=m
-CONFIG_SND_ALS300=m
-CONFIG_SND_ALS4000=m
-CONFIG_SND_ALI5451=m
-CONFIG_SND_ASIHPI=m
-CONFIG_SND_ATIIXP=m
-CONFIG_SND_ATIIXP_MODEM=m
-CONFIG_SND_AU8810=m
-CONFIG_SND_AU8820=m
-CONFIG_SND_AU8830=m
-# CONFIG_SND_AW2 is not set
-CONFIG_SND_AZT3328=m
-CONFIG_SND_BT87X=m
-# CONFIG_SND_BT87X_OVERCLOCK is not set
-CONFIG_SND_CA0106=m
-CONFIG_SND_CMIPCI=m
-CONFIG_SND_OXYGEN_LIB=m
-CONFIG_SND_OXYGEN=m
-CONFIG_SND_CS4281=m
-CONFIG_SND_CS46XX=m
-CONFIG_SND_CS46XX_NEW_DSP=y
-# CONFIG_SND_CS5530 is not set
-# CONFIG_SND_CS5535AUDIO is not set
-CONFIG_SND_CTXFI=m
-CONFIG_SND_DARLA20=m
-CONFIG_SND_GINA20=m
-CONFIG_SND_LAYLA20=m
-CONFIG_SND_DARLA24=m
-CONFIG_SND_GINA24=m
-CONFIG_SND_LAYLA24=m
-CONFIG_SND_MONA=m
-CONFIG_SND_MIA=m
-CONFIG_SND_ECHO3G=m
-CONFIG_SND_INDIGO=m
-CONFIG_SND_INDIGOIO=m
-CONFIG_SND_INDIGODJ=m
-CONFIG_SND_INDIGOIOX=m
-CONFIG_SND_INDIGODJX=m
-CONFIG_SND_EMU10K1=m
-CONFIG_SND_EMU10K1X=m
-CONFIG_SND_ENS1370=m
-CONFIG_SND_ENS1371=m
-CONFIG_SND_ES1938=m
-CONFIG_SND_ES1968=m
-CONFIG_SND_ES1968_INPUT=y
-CONFIG_SND_ES1968_RADIO=y
-CONFIG_SND_FM801=m
-CONFIG_SND_FM801_TEA575X_BOOL=y
-CONFIG_SND_HDSP=m
-CONFIG_SND_HDSPM=m
-CONFIG_SND_ICE1712=m
-CONFIG_SND_ICE1724=m
-CONFIG_SND_INTEL8X0=m
-CONFIG_SND_INTEL8X0M=m
-CONFIG_SND_KORG1212=m
-CONFIG_SND_LOLA=m
-CONFIG_SND_LX6464ES=m
-CONFIG_SND_MAESTRO3=m
-CONFIG_SND_MAESTRO3_INPUT=y
-CONFIG_SND_MIXART=m
-CONFIG_SND_NM256=m
-CONFIG_SND_PCXHR=m
-CONFIG_SND_RIPTIDE=m
-CONFIG_SND_RME32=m
-CONFIG_SND_RME96=m
-CONFIG_SND_RME9652=m
-CONFIG_SND_SONICVIBES=m
-CONFIG_SND_TRIDENT=m
-CONFIG_SND_VIA82XX=m
-CONFIG_SND_VIA82XX_MODEM=m
-CONFIG_SND_VIRTUOSO=m
-CONFIG_SND_VX222=m
-CONFIG_SND_YMFPCI=m
-
-#
-# HD-Audio
-#
-CONFIG_SND_HDA=m
-CONFIG_SND_HDA_INTEL=m
-CONFIG_SND_HDA_HWDEP=y
-CONFIG_SND_HDA_RECONFIG=y
-CONFIG_SND_HDA_INPUT_BEEP=y
-CONFIG_SND_HDA_INPUT_BEEP_MODE=0
-CONFIG_SND_HDA_PATCH_LOADER=y
-CONFIG_SND_HDA_CODEC_REALTEK=m
-CONFIG_SND_HDA_CODEC_ANALOG=m
-CONFIG_SND_HDA_CODEC_SIGMATEL=m
-CONFIG_SND_HDA_CODEC_VIA=m
-CONFIG_SND_HDA_CODEC_HDMI=m
-CONFIG_SND_HDA_CODEC_CIRRUS=m
-CONFIG_SND_HDA_CODEC_CONEXANT=m
-CONFIG_SND_HDA_CODEC_CA0110=m
-CONFIG_SND_HDA_CODEC_CA0132=m
-CONFIG_SND_HDA_CODEC_CA0132_DSP=y
-CONFIG_SND_HDA_CODEC_CMEDIA=m
-CONFIG_SND_HDA_CODEC_SI3054=m
-CONFIG_SND_HDA_GENERIC=m
-CONFIG_SND_HDA_POWER_SAVE_DEFAULT=0
-CONFIG_SND_HDA_CORE=m
-CONFIG_SND_HDA_DSP_LOADER=y
-CONFIG_SND_HDA_I915=y
-CONFIG_SND_HDA_EXT_CORE=m
-CONFIG_SND_HDA_PREALLOC_SIZE=4096
-# CONFIG_SND_SPI is not set
-CONFIG_SND_USB=y
-CONFIG_SND_USB_AUDIO=m
-CONFIG_SND_USB_UA101=m
-CONFIG_SND_USB_USX2Y=m
-CONFIG_SND_USB_CAIAQ=m
-CONFIG_SND_USB_CAIAQ_INPUT=y
-CONFIG_SND_USB_US122L=m
-CONFIG_SND_USB_6FIRE=m
-CONFIG_SND_USB_HIFACE=m
-# CONFIG_SND_BCD2000 is not set
-CONFIG_SND_USB_LINE6=m
-CONFIG_SND_USB_POD=m
-CONFIG_SND_USB_PODHD=m
-CONFIG_SND_USB_TONEPORT=m
-CONFIG_SND_USB_VARIAX=m
-CONFIG_SND_FIREWIRE=y
-CONFIG_SND_FIREWIRE_LIB=m
-CONFIG_SND_DICE=m
-CONFIG_SND_OXFW=m
-CONFIG_SND_ISIGHT=m
-# CONFIG_SND_SCS1X is not set
-CONFIG_SND_FIREWORKS=m
-CONFIG_SND_BEBOB=m
-CONFIG_SND_FIREWIRE_DIGI00X=m
-CONFIG_SND_FIREWIRE_TASCAM=m
-# CONFIG_SND_PCMCIA is not set
-CONFIG_SND_SOC=m
-CONFIG_SND_SOC_AC97_BUS=y
-CONFIG_SND_SOC_GENERIC_DMAENGINE_PCM=y
-CONFIG_SND_SOC_COMPRESS=y
-CONFIG_SND_SOC_TOPOLOGY=y
-# CONFIG_SND_SOC_ADI is not set
-# CONFIG_SND_ATMEL_SOC is not set
-# CONFIG_SND_BCM2835_SOC_I2S is not set
-# CONFIG_SND_EP93XX_SOC is not set
-CONFIG_SND_DESIGNWARE_I2S=m
-
-#
-# SoC Audio for Freescale CPUs
-#
-
-#
-# Common SoC Audio options for Freescale CPUs:
-#
-# CONFIG_SND_SOC_FSL_ASRC is not set
-# CONFIG_SND_SOC_FSL_SAI is not set
-# CONFIG_SND_SOC_FSL_SSI is not set
-# CONFIG_SND_SOC_FSL_SPDIF is not set
-# CONFIG_SND_SOC_FSL_ESAI is not set
-# CONFIG_SND_SOC_IMX_AUDMUX is not set
-# CONFIG_SND_IMX_SOC is not set
-# CONFIG_SND_JZ4740_SOC is not set
-# CONFIG_SND_KIRKWOOD_SOC is not set
-CONFIG_SND_SST_MFLD_PLATFORM=m
-CONFIG_SND_SST_IPC=m
-CONFIG_SND_SST_IPC_ACPI=m
-CONFIG_SND_SOC_INTEL_SST=m
-CONFIG_SND_SOC_INTEL_SST_ACPI=m
-CONFIG_SND_SOC_INTEL_HASWELL=m
-CONFIG_SND_SOC_INTEL_HASWELL_MACH=m
-# CONFIG_SND_SOC_INTEL_BYT_RT5640_MACH is not set
-# CONFIG_SND_SOC_INTEL_BYT_MAX98090_MACH is not set
-CONFIG_SND_SOC_INTEL_BROADWELL_MACH=m
-CONFIG_SND_SOC_INTEL_BYTCR_RT5640_MACH=m
-CONFIG_SND_SOC_INTEL_CHT_BSW_RT5672_MACH=m
-CONFIG_SND_SOC_INTEL_CHT_BSW_RT5645_MACH=m
-CONFIG_SND_SOC_INTEL_CHT_BSW_MAX98090_TI_MACH=m
-CONFIG_SND_SOC_INTEL_SKYLAKE=m
-CONFIG_SND_SOC_INTEL_SKL_RT286_MACH=m
-# CONFIG_SND_MXS_SOC is not set
-# CONFIG_SND_SOC_QCOM is not set
-# CONFIG_SND_SOC_ROCKCHIP is not set
-# CONFIG_SND_SOC_SIRF is not set
-# CONFIG_SND_SOC_STI is not set
-
-#
-# Allwinner SoC Audio support
-#
-# CONFIG_SND_SUN4I_CODEC is not set
-# CONFIG_SND_SOC_TEGRA is not set
-# CONFIG_SND_SOC_XTFPGA_I2S is not set
-# CONFIG_ZX296702_SPDIF is not set
-# CONFIG_ZX296702_I2S is not set
-CONFIG_SND_SOC_I2C_AND_SPI=m
-
-#
-# CODEC drivers
-#
-# CONFIG_SND_SOC_ALL_CODECS is not set
-CONFIG_SND_SOC_AC97_CODEC=m
-# CONFIG_SND_SOC_ADAU1701 is not set
-# CONFIG_SND_SOC_AK4104 is not set
-# CONFIG_SND_SOC_AK4554 is not set
-# CONFIG_SND_SOC_AK4613 is not set
-# CONFIG_SND_SOC_AK4642 is not set
-# CONFIG_SND_SOC_AK5386 is not set
-# CONFIG_SND_SOC_ALC5623 is not set
-# CONFIG_SND_SOC_CS35L32 is not set
-# CONFIG_SND_SOC_CS42L51_I2C is not set
-# CONFIG_SND_SOC_CS42L52 is not set
-# CONFIG_SND_SOC_CS42L56 is not set
-# CONFIG_SND_SOC_CS42L73 is not set
-# CONFIG_SND_SOC_CS4265 is not set
-# CONFIG_SND_SOC_CS4270 is not set
-# CONFIG_SND_SOC_CS4271_I2C is not set
-# CONFIG_SND_SOC_CS4271_SPI is not set
-# CONFIG_SND_SOC_CS42XX8_I2C is not set
-# CONFIG_SND_SOC_CS4349 is not set
-CONFIG_SND_SOC_DMIC=m
-# CONFIG_SND_SOC_ES8328 is not set
-# CONFIG_SND_SOC_GTM601 is not set
-CONFIG_SND_SOC_MAX98090=m
-# CONFIG_SND_SOC_PCM1681 is not set
-# CONFIG_SND_SOC_PCM1792A is not set
-# CONFIG_SND_SOC_PCM512x_I2C is not set
-# CONFIG_SND_SOC_PCM512x_SPI is not set
-CONFIG_SND_SOC_RL6231=m
-CONFIG_SND_SOC_RL6347A=m
-CONFIG_SND_SOC_RT286=m
-# CONFIG_SND_SOC_RT5631 is not set
-CONFIG_SND_SOC_RT5640=m
-CONFIG_SND_SOC_RT5645=m
-CONFIG_SND_SOC_RT5670=m
-# CONFIG_SND_SOC_RT5677_SPI is not set
-# CONFIG_SND_SOC_SGTL5000 is not set
-# CONFIG_SND_SOC_SIRF_AUDIO_CODEC is not set
-CONFIG_SND_SOC_SPDIF=m
-# CONFIG_SND_SOC_SSM2602_SPI is not set
-# CONFIG_SND_SOC_SSM2602_I2C is not set
-CONFIG_SND_SOC_SSM4567=m
-# CONFIG_SND_SOC_STA32X is not set
-# CONFIG_SND_SOC_STA350 is not set
-# CONFIG_SND_SOC_STI_SAS is not set
-# CONFIG_SND_SOC_TAS2552 is not set
-# CONFIG_SND_SOC_TAS5086 is not set
-# CONFIG_SND_SOC_TAS571X is not set
-# CONFIG_SND_SOC_TFA9879 is not set
-# CONFIG_SND_SOC_TLV320AIC23_I2C is not set
-# CONFIG_SND_SOC_TLV320AIC23_SPI is not set
-# CONFIG_SND_SOC_TLV320AIC31XX is not set
-# CONFIG_SND_SOC_TLV320AIC3X is not set
-CONFIG_SND_SOC_TS3A227E=m
-# CONFIG_SND_SOC_WM8510 is not set
-# CONFIG_SND_SOC_WM8523 is not set
-# CONFIG_SND_SOC_WM8580 is not set
-# CONFIG_SND_SOC_WM8711 is not set
-# CONFIG_SND_SOC_WM8728 is not set
-# CONFIG_SND_SOC_WM8731 is not set
-# CONFIG_SND_SOC_WM8737 is not set
-# CONFIG_SND_SOC_WM8741 is not set
-# CONFIG_SND_SOC_WM8750 is not set
-# CONFIG_SND_SOC_WM8753 is not set
-# CONFIG_SND_SOC_WM8770 is not set
-# CONFIG_SND_SOC_WM8776 is not set
-# CONFIG_SND_SOC_WM8804_I2C is not set
-# CONFIG_SND_SOC_WM8804_SPI is not set
-# CONFIG_SND_SOC_WM8903 is not set
-# CONFIG_SND_SOC_WM8962 is not set
-# CONFIG_SND_SOC_WM8978 is not set
-# CONFIG_SND_SOC_TPA6130A2 is not set
-CONFIG_SND_SIMPLE_CARD=m
-# CONFIG_SOUND_PRIME is not set
-CONFIG_AC97_BUS=m
-
-#
-# HID support
-#
-CONFIG_HID=y
-CONFIG_HID_BATTERY_STRENGTH=y
-CONFIG_HIDRAW=y
-CONFIG_UHID=m
-CONFIG_HID_GENERIC=y
-
-#
-# Special HID drivers
-#
-CONFIG_HID_A4TECH=m
-CONFIG_HID_ACRUX=m
-CONFIG_HID_ACRUX_FF=y
-CONFIG_HID_APPLE=m
-CONFIG_HID_APPLEIR=m
-CONFIG_HID_AUREAL=m
-CONFIG_HID_BELKIN=m
-CONFIG_HID_BETOP_FF=m
-CONFIG_HID_CHERRY=m
-CONFIG_HID_CHICONY=m
-CONFIG_HID_CORSAIR=m
-CONFIG_HID_PRODIKEYS=m
-# CONFIG_HID_CP2112 is not set
-CONFIG_HID_CYPRESS=m
-CONFIG_HID_DRAGONRISE=m
-CONFIG_DRAGONRISE_FF=y
-CONFIG_HID_EMS_FF=m
-CONFIG_HID_ELECOM=m
-CONFIG_HID_ELO=m
-CONFIG_HID_EZKEY=m
-CONFIG_HID_GEMBIRD=m
-CONFIG_HID_GFRM=m
-CONFIG_HID_HOLTEK=m
-CONFIG_HOLTEK_FF=y
-CONFIG_HID_GT683R=m
-CONFIG_HID_KEYTOUCH=m
-CONFIG_HID_KYE=m
-CONFIG_HID_UCLOGIC=m
-CONFIG_HID_WALTOP=m
-CONFIG_HID_GYRATION=m
-CONFIG_HID_ICADE=m
-CONFIG_HID_TWINHAN=m
-CONFIG_HID_KENSINGTON=m
-CONFIG_HID_LCPOWER=m
-CONFIG_HID_LENOVO=m
-CONFIG_HID_LOGITECH=m
-CONFIG_HID_LOGITECH_DJ=m
-CONFIG_HID_LOGITECH_HIDPP=m
-CONFIG_LOGITECH_FF=y
-CONFIG_LOGIRUMBLEPAD2_FF=y
-CONFIG_LOGIG940_FF=y
-CONFIG_LOGIWHEELS_FF=y
-CONFIG_HID_MAGICMOUSE=y
-CONFIG_HID_MICROSOFT=m
-CONFIG_HID_MONTEREY=m
-CONFIG_HID_MULTITOUCH=m
-CONFIG_HID_NTRIG=y
-CONFIG_HID_ORTEK=m
-CONFIG_HID_PANTHERLORD=m
-CONFIG_PANTHERLORD_FF=y
-CONFIG_HID_PENMOUNT=m
-CONFIG_HID_PETALYNX=m
-CONFIG_HID_PICOLCD=m
-CONFIG_HID_PICOLCD_FB=y
-CONFIG_HID_PICOLCD_BACKLIGHT=y
-CONFIG_HID_PICOLCD_LCD=y
-CONFIG_HID_PICOLCD_LEDS=y
-CONFIG_HID_PICOLCD_CIR=y
-CONFIG_HID_PLANTRONICS=m
-CONFIG_HID_PRIMAX=m
-CONFIG_HID_ROCCAT=m
-CONFIG_HID_SAITEK=m
-CONFIG_HID_SAMSUNG=m
-CONFIG_HID_SONY=m
-CONFIG_SONY_FF=y
-CONFIG_HID_SPEEDLINK=m
-CONFIG_HID_STEELSERIES=m
-CONFIG_HID_SUNPLUS=m
-CONFIG_HID_RMI=m
-CONFIG_HID_GREENASIA=m
-CONFIG_GREENASIA_FF=y
-CONFIG_HID_HYPERV_MOUSE=m
-CONFIG_HID_SMARTJOYPLUS=m
-CONFIG_SMARTJOYPLUS_FF=y
-CONFIG_HID_TIVO=m
-CONFIG_HID_TOPSEED=m
-CONFIG_HID_THINGM=m
-CONFIG_HID_THRUSTMASTER=m
-CONFIG_THRUSTMASTER_FF=y
-CONFIG_HID_WACOM=m
-CONFIG_HID_WIIMOTE=m
-CONFIG_HID_XINMO=m
-CONFIG_HID_ZEROPLUS=m
-CONFIG_ZEROPLUS_FF=y
-CONFIG_HID_ZYDACRON=m
-CONFIG_HID_SENSOR_HUB=m
-# CONFIG_HID_SENSOR_CUSTOM_SENSOR is not set
-
-#
-# USB HID support
-#
-CONFIG_USB_HID=y
-CONFIG_HID_PID=y
-CONFIG_USB_HIDDEV=y
-
-#
-# I2C HID support
-#
-CONFIG_I2C_HID=m
-CONFIG_USB_OHCI_LITTLE_ENDIAN=y
-CONFIG_USB_SUPPORT=y
-CONFIG_USB_COMMON=y
-CONFIG_USB_ARCH_HAS_HCD=y
-CONFIG_USB=y
-CONFIG_USB_ANNOUNCE_NEW_DEVICES=y
-
-#
-# Miscellaneous USB options
-#
-CONFIG_USB_DEFAULT_PERSIST=y
-# CONFIG_USB_DYNAMIC_MINORS is not set
-# CONFIG_USB_OTG is not set
-# CONFIG_USB_OTG_WHITELIST is not set
-CONFIG_USB_ULPI_BUS=m
-CONFIG_USB_MON=y
-CONFIG_USB_WUSB=m
-CONFIG_USB_WUSB_CBAF=m
-# CONFIG_USB_WUSB_CBAF_DEBUG is not set
-
-#
-# USB Host Controller Drivers
-#
-# CONFIG_USB_C67X00_HCD is not set
-CONFIG_USB_XHCI_HCD=y
-CONFIG_USB_XHCI_PCI=y
-# CONFIG_USB_XHCI_PLATFORM is not set
-# CONFIG_USB_XHCI_MVEBU is not set
-# CONFIG_USB_XHCI_RCAR is not set
-CONFIG_USB_EHCI_HCD=y
-CONFIG_USB_EHCI_ROOT_HUB_TT=y
-CONFIG_USB_EHCI_TT_NEWSCHED=y
-CONFIG_USB_EHCI_PCI=y
-# CONFIG_USB_EHCI_HCD_PLATFORM is not set
-# CONFIG_USB_OXU210HP_HCD is not set
-# CONFIG_USB_ISP116X_HCD is not set
-CONFIG_USB_ISP1362_HCD=m
-# CONFIG_USB_FOTG210_HCD is not set
-# CONFIG_USB_MAX3421_HCD is not set
-CONFIG_USB_OHCI_HCD=y
-CONFIG_USB_OHCI_HCD_PCI=y
-# CONFIG_USB_OHCI_HCD_PLATFORM is not set
-CONFIG_USB_UHCI_HCD=y
-# CONFIG_USB_U132_HCD is not set
-CONFIG_USB_SL811_HCD=m
-CONFIG_USB_SL811_HCD_ISO=y
-# CONFIG_USB_SL811_CS is not set
-# CONFIG_USB_R8A66597_HCD is not set
-# CONFIG_USB_WHCI_HCD is not set
-CONFIG_USB_HWA_HCD=m
-# CONFIG_USB_HCD_BCMA is not set
-# CONFIG_USB_HCD_SSB is not set
-# CONFIG_USB_HCD_TEST_MODE is not set
-
-#
-# USB Device Class drivers
-#
-CONFIG_USB_ACM=m
-CONFIG_USB_PRINTER=m
-CONFIG_USB_WDM=m
-CONFIG_USB_TMC=m
-
-#
-# NOTE: USB_STORAGE depends on SCSI but BLK_DEV_SD may
-#
-
-#
-# also be needed; see USB_STORAGE Help for more info
-#
-CONFIG_USB_STORAGE=m
-# CONFIG_USB_STORAGE_DEBUG is not set
-CONFIG_USB_STORAGE_REALTEK=m
-CONFIG_REALTEK_AUTOPM=y
-CONFIG_USB_STORAGE_DATAFAB=m
-CONFIG_USB_STORAGE_FREECOM=m
-CONFIG_USB_STORAGE_ISD200=m
-CONFIG_USB_STORAGE_USBAT=m
-CONFIG_USB_STORAGE_SDDR09=m
-CONFIG_USB_STORAGE_SDDR55=m
-CONFIG_USB_STORAGE_JUMPSHOT=m
-CONFIG_USB_STORAGE_ALAUDA=m
-CONFIG_USB_STORAGE_ONETOUCH=m
-CONFIG_USB_STORAGE_KARMA=m
-CONFIG_USB_STORAGE_CYPRESS_ATACB=m
-CONFIG_USB_STORAGE_ENE_UB6250=m
-CONFIG_USB_UAS=m
-
-#
-# USB Imaging devices
-#
-CONFIG_USB_MDC800=m
-CONFIG_USB_MICROTEK=m
-CONFIG_USBIP_CORE=m
-CONFIG_USBIP_VHCI_HCD=m
-CONFIG_USBIP_HOST=m
-# CONFIG_USBIP_DEBUG is not set
-# CONFIG_USB_MUSB_HDRC is not set
-# CONFIG_USB_DWC3 is not set
-# CONFIG_USB_DWC2 is not set
-# CONFIG_USB_CHIPIDEA is not set
-# CONFIG_USB_ISP1760 is not set
-
-#
-# USB port drivers
-#
-CONFIG_USB_USS720=m
-CONFIG_USB_SERIAL=y
-CONFIG_USB_SERIAL_CONSOLE=y
-CONFIG_USB_SERIAL_GENERIC=y
-CONFIG_USB_SERIAL_SIMPLE=m
-CONFIG_USB_SERIAL_AIRCABLE=m
-CONFIG_USB_SERIAL_ARK3116=m
-CONFIG_USB_SERIAL_BELKIN=m
-CONFIG_USB_SERIAL_CH341=m
-CONFIG_USB_SERIAL_WHITEHEAT=m
-CONFIG_USB_SERIAL_DIGI_ACCELEPORT=m
-CONFIG_USB_SERIAL_CP210X=m
-CONFIG_USB_SERIAL_CYPRESS_M8=m
-CONFIG_USB_SERIAL_EMPEG=m
-CONFIG_USB_SERIAL_FTDI_SIO=m
-CONFIG_USB_SERIAL_VISOR=m
-CONFIG_USB_SERIAL_IPAQ=m
-CONFIG_USB_SERIAL_IR=m
-CONFIG_USB_SERIAL_EDGEPORT=m
-CONFIG_USB_SERIAL_EDGEPORT_TI=m
-# CONFIG_USB_SERIAL_F81232 is not set
-CONFIG_USB_SERIAL_GARMIN=m
-CONFIG_USB_SERIAL_IPW=m
-CONFIG_USB_SERIAL_IUU=m
-CONFIG_USB_SERIAL_KEYSPAN_PDA=m
-CONFIG_USB_SERIAL_KEYSPAN=m
-CONFIG_USB_SERIAL_KLSI=m
-CONFIG_USB_SERIAL_KOBIL_SCT=m
-CONFIG_USB_SERIAL_MCT_U232=m
-# CONFIG_USB_SERIAL_METRO is not set
-CONFIG_USB_SERIAL_MOS7720=m
-CONFIG_USB_SERIAL_MOS7715_PARPORT=y
-CONFIG_USB_SERIAL_MOS7840=m
-# CONFIG_USB_SERIAL_MXUPORT is not set
-CONFIG_USB_SERIAL_NAVMAN=m
-CONFIG_USB_SERIAL_PL2303=m
-CONFIG_USB_SERIAL_OTI6858=m
-CONFIG_USB_SERIAL_QCAUX=m
-CONFIG_USB_SERIAL_QUALCOMM=m
-CONFIG_USB_SERIAL_SPCP8X5=m
-CONFIG_USB_SERIAL_SAFE=m
-CONFIG_USB_SERIAL_SAFE_PADDED=y
-CONFIG_USB_SERIAL_SIERRAWIRELESS=m
-CONFIG_USB_SERIAL_SYMBOL=m
-CONFIG_USB_SERIAL_TI=m
-CONFIG_USB_SERIAL_CYBERJACK=m
-CONFIG_USB_SERIAL_XIRCOM=m
-CONFIG_USB_SERIAL_WWAN=m
-CONFIG_USB_SERIAL_OPTION=m
-CONFIG_USB_SERIAL_OMNINET=m
-CONFIG_USB_SERIAL_OPTICON=m
-CONFIG_USB_SERIAL_XSENS_MT=m
-# CONFIG_USB_SERIAL_WISHBONE is not set
-CONFIG_USB_SERIAL_SSU100=m
-CONFIG_USB_SERIAL_QT2=m
-CONFIG_USB_SERIAL_DEBUG=m
-
-#
-# USB Miscellaneous drivers
-#
-CONFIG_USB_EMI62=m
-CONFIG_USB_EMI26=m
-CONFIG_USB_ADUTUX=m
-CONFIG_USB_SEVSEG=m
-# CONFIG_USB_RIO500 is not set
-CONFIG_USB_LEGOTOWER=m
-CONFIG_USB_LCD=m
-CONFIG_USB_LED=m
-# CONFIG_USB_CYPRESS_CY7C63 is not set
-# CONFIG_USB_CYTHERM is not set
-CONFIG_USB_IDMOUSE=m
-CONFIG_USB_FTDI_ELAN=m
-CONFIG_USB_APPLEDISPLAY=m
-CONFIG_USB_SISUSBVGA=m
-CONFIG_USB_SISUSBVGA_CON=y
-CONFIG_USB_LD=m
-CONFIG_USB_TRANCEVIBRATOR=m
-CONFIG_USB_IOWARRIOR=m
-# CONFIG_USB_TEST is not set
-# CONFIG_USB_EHSET_TEST_FIXTURE is not set
-CONFIG_USB_ISIGHTFW=m
-CONFIG_USB_YUREX=m
-CONFIG_USB_EZUSB_FX2=m
-CONFIG_USB_HSIC_USB3503=m
-# CONFIG_USB_LINK_LAYER_TEST is not set
-CONFIG_USB_CHAOSKEY=m
-CONFIG_USB_ATM=m
-CONFIG_USB_SPEEDTOUCH=m
-CONFIG_USB_CXACRU=m
-CONFIG_USB_UEAGLEATM=m
-CONFIG_USB_XUSBATM=m
-
-#
-# USB Physical Layer drivers
-#
-CONFIG_USB_PHY=y
-# CONFIG_KEYSTONE_USB_PHY is not set
-CONFIG_NOP_USB_XCEIV=m
-# CONFIG_AM335X_PHY_USB is not set
-# CONFIG_USB_GPIO_VBUS is not set
-# CONFIG_USB_ISP1301 is not set
-# CONFIG_USB_RCAR_PHY is not set
-# CONFIG_USB_GADGET is not set
-CONFIG_USB_LED_TRIG=y
-CONFIG_UWB=m
-CONFIG_UWB_HWA=m
-CONFIG_UWB_WHCI=m
-CONFIG_UWB_I1480U=m
-CONFIG_MMC=m
-# CONFIG_MMC_DEBUG is not set
-
-#
-# MMC/SD/SDIO Card Drivers
-#
-CONFIG_MMC_BLOCK=m
-CONFIG_MMC_BLOCK_MINORS=8
-CONFIG_MMC_BLOCK_BOUNCE=y
-CONFIG_SDIO_UART=m
-# CONFIG_MMC_TEST is not set
-
-#
-# MMC/SD/SDIO Host Controller Drivers
-#
-CONFIG_MMC_SDHCI=m
-CONFIG_MMC_SDHCI_PCI=m
-CONFIG_MMC_RICOH_MMC=y
-CONFIG_MMC_SDHCI_ACPI=m
-CONFIG_MMC_SDHCI_PLTFM=m
-# CONFIG_MMC_SDHCI_PXAV3 is not set
-# CONFIG_MMC_SDHCI_PXAV2 is not set
-# CONFIG_MMC_SDHCI_IPROC is not set
-# CONFIG_MMC_OMAP_HS is not set
-CONFIG_MMC_WBSD=m
-CONFIG_MMC_TIFM_SD=m
-# CONFIG_MMC_GOLDFISH is not set
-# CONFIG_MMC_SPI is not set
-CONFIG_MMC_SDRICOH_CS=m
-CONFIG_MMC_CB710=m
-CONFIG_MMC_VIA_SDMMC=m
-# CONFIG_MMC_DW is not set
-# CONFIG_MMC_SH_MMCIF is not set
-CONFIG_MMC_VUB300=m
-CONFIG_MMC_USHC=m
-# CONFIG_MMC_USDHI6ROL0 is not set
-CONFIG_MMC_REALTEK_PCI=m
-CONFIG_MMC_REALTEK_USB=m
-CONFIG_MMC_TOSHIBA_PCI=m
-CONFIG_MMC_MTK=m
-CONFIG_MEMSTICK=m
-# CONFIG_MEMSTICK_DEBUG is not set
-
-#
-# MemoryStick drivers
-#
-# CONFIG_MEMSTICK_UNSAFE_RESUME is not set
-CONFIG_MSPRO_BLOCK=m
-# CONFIG_MS_BLOCK is not set
-
-#
-# MemoryStick Host Controller Drivers
-#
-CONFIG_MEMSTICK_TIFM_MS=m
-CONFIG_MEMSTICK_JMICRON_38X=m
-CONFIG_MEMSTICK_R592=m
-CONFIG_MEMSTICK_REALTEK_PCI=m
-CONFIG_MEMSTICK_REALTEK_USB=m
-CONFIG_NEW_LEDS=y
-CONFIG_LEDS_CLASS=y
-CONFIG_LEDS_CLASS_FLASH=m
-
-#
-# LED drivers
-#
-CONFIG_LEDS_LM3530=m
-# CONFIG_LEDS_LM3642 is not set
-# CONFIG_LEDS_PCA9532 is not set
-# CONFIG_LEDS_GPIO is not set
-CONFIG_LEDS_LP3944=m
-# CONFIG_LEDS_LP5521 is not set
-# CONFIG_LEDS_LP5523 is not set
-# CONFIG_LEDS_LP5562 is not set
-# CONFIG_LEDS_LP8501 is not set
-# CONFIG_LEDS_LP8860 is not set
-CONFIG_LEDS_CLEVO_MAIL=m
-# CONFIG_LEDS_PCA955X is not set
-# CONFIG_LEDS_PCA963X is not set
-# CONFIG_LEDS_DAC124S085 is not set
-# CONFIG_LEDS_PWM is not set
-# CONFIG_LEDS_BD2802 is not set
-CONFIG_LEDS_INTEL_SS4200=m
-CONFIG_LEDS_LT3593=m
-CONFIG_LEDS_DELL_NETBOOKS=m
-# CONFIG_LEDS_TCA6507 is not set
-# CONFIG_LEDS_TLC591XX is not set
-# CONFIG_LEDS_LM355x is not set
-# CONFIG_LEDS_OT200 is not set
-
-#
-# LED driver for blink(1) USB RGB LED is under Special HID drivers (HID_THINGM)
-#
-CONFIG_LEDS_BLINKM=m
-
-#
-# LED Triggers
-#
-CONFIG_LEDS_TRIGGERS=y
-CONFIG_LEDS_TRIGGER_TIMER=m
-CONFIG_LEDS_TRIGGER_ONESHOT=m
-CONFIG_LEDS_TRIGGER_HEARTBEAT=m
-CONFIG_LEDS_TRIGGER_BACKLIGHT=m
-# CONFIG_LEDS_TRIGGER_CPU is not set
-CONFIG_LEDS_TRIGGER_GPIO=m
-CONFIG_LEDS_TRIGGER_DEFAULT_ON=m
-
-#
-# iptables trigger is under Netfilter config (LED target)
-#
-CONFIG_LEDS_TRIGGER_TRANSIENT=m
-CONFIG_LEDS_TRIGGER_CAMERA=m
-CONFIG_ACCESSIBILITY=y
-CONFIG_A11Y_BRAILLE_CONSOLE=y
-CONFIG_INFINIBAND=m
-CONFIG_INFINIBAND_USER_MAD=m
-CONFIG_INFINIBAND_USER_ACCESS=m
-CONFIG_INFINIBAND_USER_MEM=y
-CONFIG_INFINIBAND_ON_DEMAND_PAGING=y
-CONFIG_INFINIBAND_ADDR_TRANS=y
-CONFIG_INFINIBAND_MTHCA=m
-CONFIG_INFINIBAND_MTHCA_DEBUG=y
-CONFIG_INFINIBAND_QIB=m
-CONFIG_INFINIBAND_QIB_DCA=y
-CONFIG_INFINIBAND_CXGB3=m
-# CONFIG_INFINIBAND_CXGB3_DEBUG is not set
-CONFIG_INFINIBAND_CXGB4=m
-CONFIG_MLX4_INFINIBAND=m
-CONFIG_MLX5_INFINIBAND=m
-CONFIG_INFINIBAND_NES=m
-# CONFIG_INFINIBAND_NES_DEBUG is not set
-CONFIG_INFINIBAND_OCRDMA=m
-CONFIG_INFINIBAND_USNIC=m
-CONFIG_INFINIBAND_IPOIB=m
-CONFIG_INFINIBAND_IPOIB_CM=y
-CONFIG_INFINIBAND_IPOIB_DEBUG=y
-CONFIG_INFINIBAND_IPOIB_DEBUG_DATA=y
-CONFIG_INFINIBAND_SRP=m
-CONFIG_INFINIBAND_SRPT=m
-CONFIG_INFINIBAND_ISER=m
-CONFIG_INFINIBAND_ISERT=m
-CONFIG_EDAC_ATOMIC_SCRUB=y
-CONFIG_EDAC_SUPPORT=y
-CONFIG_EDAC=y
-CONFIG_EDAC_LEGACY_SYSFS=y
-# CONFIG_EDAC_DEBUG is not set
-CONFIG_EDAC_DECODE_MCE=m
-CONFIG_EDAC_MM_EDAC=m
-CONFIG_EDAC_AMD64=m
-# CONFIG_EDAC_AMD64_ERROR_INJECTION is not set
-CONFIG_EDAC_E752X=m
-CONFIG_EDAC_I82975X=m
-CONFIG_EDAC_I3000=m
-CONFIG_EDAC_I3200=m
-CONFIG_EDAC_IE31200=m
-CONFIG_EDAC_X38=m
-CONFIG_EDAC_I5400=m
-CONFIG_EDAC_I7CORE=m
-CONFIG_EDAC_I5000=m
-CONFIG_EDAC_I5100=m
-CONFIG_EDAC_I7300=m
-CONFIG_EDAC_SBRIDGE=m
-# CONFIG_EDAC_XGENE is not set
-CONFIG_RTC_LIB=y
-CONFIG_RTC_CLASS=y
-CONFIG_RTC_HCTOSYS=y
-CONFIG_RTC_HCTOSYS_DEVICE="rtc0"
-# CONFIG_RTC_SYSTOHC is not set
-# CONFIG_RTC_DEBUG is not set
-
-#
-# RTC interfaces
-#
-CONFIG_RTC_INTF_SYSFS=y
-CONFIG_RTC_INTF_PROC=y
-CONFIG_RTC_INTF_DEV=y
-# CONFIG_RTC_INTF_DEV_UIE_EMUL is not set
-# CONFIG_RTC_DRV_TEST is not set
-
-#
-# I2C RTC drivers
-#
-# CONFIG_RTC_DRV_ABB5ZES3 is not set
-CONFIG_RTC_DRV_ABX80X=m
-CONFIG_RTC_DRV_DS1307=m
-CONFIG_RTC_DRV_DS1374=m
-CONFIG_RTC_DRV_DS1374_WDT=y
-CONFIG_RTC_DRV_DS1672=m
-CONFIG_RTC_DRV_DS3232=m
-CONFIG_RTC_DRV_MAX6900=m
-CONFIG_RTC_DRV_RS5C372=m
-CONFIG_RTC_DRV_ISL1208=m
-CONFIG_RTC_DRV_ISL12022=m
-# CONFIG_RTC_DRV_ISL12057 is not set
-CONFIG_RTC_DRV_X1205=m
-CONFIG_RTC_DRV_PCF2127=m
-CONFIG_RTC_DRV_PCF8523=m
-CONFIG_RTC_DRV_PCF8563=m
-CONFIG_RTC_DRV_PCF85063=m
-CONFIG_RTC_DRV_PCF8583=m
-CONFIG_RTC_DRV_M41T80=m
-CONFIG_RTC_DRV_M41T80_WDT=y
-CONFIG_RTC_DRV_BQ32K=m
-# CONFIG_RTC_DRV_S35390A is not set
-CONFIG_RTC_DRV_FM3130=m
-CONFIG_RTC_DRV_RX8581=m
-CONFIG_RTC_DRV_RX8025=m
-CONFIG_RTC_DRV_EM3027=m
-CONFIG_RTC_DRV_RV3029C2=m
-# CONFIG_RTC_DRV_RV8803 is not set
-
-#
-# SPI RTC drivers
-#
-CONFIG_RTC_DRV_M41T93=m
-CONFIG_RTC_DRV_M41T94=m
-CONFIG_RTC_DRV_DS1305=m
-CONFIG_RTC_DRV_DS1343=m
-CONFIG_RTC_DRV_DS1347=m
-CONFIG_RTC_DRV_DS1390=m
-CONFIG_RTC_DRV_MAX6902=m
-CONFIG_RTC_DRV_R9701=m
-CONFIG_RTC_DRV_RS5C348=m
-# CONFIG_RTC_DRV_DS3234 is not set
-CONFIG_RTC_DRV_PCF2123=m
-CONFIG_RTC_DRV_RX4581=m
-CONFIG_RTC_DRV_MCP795=m
-
-#
-# Platform RTC drivers
-#
-CONFIG_RTC_DRV_CMOS=y
-CONFIG_RTC_DRV_DS1286=m
-CONFIG_RTC_DRV_DS1511=m
-CONFIG_RTC_DRV_DS1553=m
-CONFIG_RTC_DRV_DS1685_FAMILY=m
-CONFIG_RTC_DRV_DS1685=y
-# CONFIG_RTC_DRV_DS1689 is not set
-# CONFIG_RTC_DRV_DS17285 is not set
-# CONFIG_RTC_DRV_DS17485 is not set
-# CONFIG_RTC_DRV_DS17885 is not set
-# CONFIG_RTC_DS1685_PROC_REGS is not set
-CONFIG_RTC_DS1685_SYSFS_REGS=y
-CONFIG_RTC_DRV_DS1742=m
-CONFIG_RTC_DRV_DS2404=m
-CONFIG_RTC_DRV_STK17TA8=m
-# CONFIG_RTC_DRV_M48T86 is not set
-CONFIG_RTC_DRV_M48T35=m
-CONFIG_RTC_DRV_M48T59=m
-CONFIG_RTC_DRV_MSM6242=m
-CONFIG_RTC_DRV_BQ4802=m
-CONFIG_RTC_DRV_RP5C01=m
-CONFIG_RTC_DRV_V3020=m
-
-#
-# on-CPU RTC drivers
-#
-# CONFIG_RTC_DRV_GEMINI is not set
-# CONFIG_RTC_DRV_MOXART is not set
-# CONFIG_RTC_DRV_MT6397 is not set
-# CONFIG_RTC_DRV_XGENE is not set
-
-#
-# HID Sensor RTC drivers
-#
-# CONFIG_RTC_DRV_HID_SENSOR_TIME is not set
-CONFIG_DMADEVICES=y
-# CONFIG_DMADEVICES_DEBUG is not set
-
-#
-# DMA Devices
-#
-CONFIG_DMA_ENGINE=y
-CONFIG_DMA_VIRTUAL_CHANNELS=y
-CONFIG_DMA_ACPI=y
-# CONFIG_AXI_DMAC is not set
-# CONFIG_DMA_SUN6I is not set
-CONFIG_INTEL_IDMA64=m
-CONFIG_INTEL_IOATDMA=m
-CONFIG_INTEL_MIC_X100_DMA=m
-# CONFIG_NBPFAXI_DMA is not set
-# CONFIG_PCH_DMA is not set
-# CONFIG_XGENE_DMA is not set
-CONFIG_DW_DMAC_CORE=y
-CONFIG_DW_DMAC=y
-CONFIG_DW_DMAC_PCI=m
-CONFIG_HSU_DMA=y
-CONFIG_RENESAS_DMA=y
-CONFIG_SH_DMAE_BASE=y
-# CONFIG_SH_DMAE is not set
-# CONFIG_RCAR_DMAC is not set
-# CONFIG_RCAR_HPB_DMAE is not set
-# CONFIG_RENESAS_USB_DMAC is not set
-# CONFIG_SUDMAC is not set
-
-#
-# DMA Clients
-#
-CONFIG_ASYNC_TX_DMA=y
-# CONFIG_DMATEST is not set
-CONFIG_DMA_ENGINE_RAID=y
-CONFIG_DCA=m
-CONFIG_AUXDISPLAY=y
-CONFIG_KS0108=m
-CONFIG_KS0108_PORT=0x378
-CONFIG_KS0108_DELAY=2
-CONFIG_CFAG12864B=m
-CONFIG_CFAG12864B_RATE=20
-CONFIG_UIO=m
-CONFIG_UIO_CIF=m
-# CONFIG_UIO_PDRV_GENIRQ is not set
-# CONFIG_UIO_DMEM_GENIRQ is not set
-CONFIG_UIO_AEC=m
-CONFIG_UIO_SERCOS3=m
-CONFIG_UIO_PCI_GENERIC=m
-# CONFIG_UIO_NETX is not set
-# CONFIG_UIO_PRUSS is not set
-# CONFIG_UIO_MF624 is not set
-CONFIG_VFIO_IOMMU_TYPE1=m
-CONFIG_VFIO_VIRQFD=m
-CONFIG_VFIO=m
-CONFIG_VFIO_PCI=m
-CONFIG_VFIO_PCI_VGA=y
-CONFIG_VFIO_PCI_MMAP=y
-CONFIG_VFIO_PCI_INTX=y
-CONFIG_IRQ_BYPASS_MANAGER=m
-CONFIG_VIRT_DRIVERS=y
-CONFIG_VIRTIO=y
-
-#
-# Virtio drivers
-#
-CONFIG_VIRTIO_PCI=y
-CONFIG_VIRTIO_PCI_LEGACY=y
-CONFIG_VIRTIO_BALLOON=y
-CONFIG_VIRTIO_INPUT=y
-CONFIG_VIRTIO_MMIO=y
-# CONFIG_VIRTIO_MMIO_CMDLINE_DEVICES is not set
-
-#
-# Microsoft Hyper-V guest support
-#
-CONFIG_HYPERV=y
-CONFIG_HYPERV_UTILS=y
-CONFIG_HYPERV_BALLOON=y
-
-#
-# Xen driver support
-#
-CONFIG_XEN_BALLOON=y
-CONFIG_XEN_SELFBALLOONING=y
-# CONFIG_XEN_BALLOON_MEMORY_HOTPLUG is not set
-CONFIG_XEN_SCRUB_PAGES=y
-CONFIG_XEN_DEV_EVTCHN=m
-CONFIG_XEN_BACKEND=y
-CONFIG_XENFS=m
-CONFIG_XEN_COMPAT_XENFS=y
-CONFIG_XEN_SYS_HYPERVISOR=y
-CONFIG_XEN_XENBUS_FRONTEND=y
-CONFIG_XEN_GNTDEV=m
-CONFIG_XEN_GRANT_DEV_ALLOC=m
-CONFIG_SWIOTLB_XEN=y
-CONFIG_XEN_TMEM=m
-CONFIG_XEN_PCIDEV_BACKEND=m
-CONFIG_XEN_SCSI_BACKEND=m
-CONFIG_XEN_PRIVCMD=m
-CONFIG_XEN_ACPI_PROCESSOR=m
-# CONFIG_XEN_MCE_LOG is not set
-CONFIG_XEN_HAVE_PVMMU=y
-CONFIG_XEN_EFI=y
-CONFIG_XEN_AUTO_XLATE=y
-CONFIG_XEN_ACPI=y
-CONFIG_XEN_SYMS=y
-CONFIG_XEN_HAVE_VPMU=y
-CONFIG_STAGING=y
-# CONFIG_SLICOSS is not set
-# CONFIG_PRISM2_USB is not set
-# CONFIG_COMEDI is not set
-# CONFIG_PANEL is not set
-# CONFIG_RTL8192U is not set
-CONFIG_RTLLIB=m
-CONFIG_RTLLIB_CRYPTO_CCMP=m
-CONFIG_RTLLIB_CRYPTO_TKIP=m
-CONFIG_RTLLIB_CRYPTO_WEP=m
-CONFIG_RTL8192E=m
-CONFIG_R8712U=m
-# CONFIG_R8188EU is not set
-CONFIG_R8723AU=m
-# CONFIG_8723AU_AP_MODE is not set
-# CONFIG_8723AU_BT_COEXIST is not set
-# CONFIG_RTS5208 is not set
-# CONFIG_VT6655 is not set
-# CONFIG_VT6656 is not set
-
-#
-# IIO staging drivers
-#
-
-#
-# Accelerometers
-#
-# CONFIG_ADIS16201 is not set
-# CONFIG_ADIS16203 is not set
-# CONFIG_ADIS16204 is not set
-# CONFIG_ADIS16209 is not set
-# CONFIG_ADIS16220 is not set
-# CONFIG_ADIS16240 is not set
-# CONFIG_LIS3L02DQ is not set
-# CONFIG_SCA3000 is not set
-
-#
-# Analog to digital converters
-#
-# CONFIG_AD7606 is not set
-# CONFIG_AD7780 is not set
-# CONFIG_AD7816 is not set
-# CONFIG_AD7192 is not set
-# CONFIG_AD7280 is not set
-# CONFIG_LPC32XX_ADC is not set
-# CONFIG_MXS_LRADC is not set
-# CONFIG_SPEAR_ADC is not set
-
-#
-# Analog digital bi-direction converters
-#
-# CONFIG_ADT7316 is not set
-
-#
-# Capacitance to digital converters
-#
-# CONFIG_AD7150 is not set
-# CONFIG_AD7152 is not set
-# CONFIG_AD7746 is not set
-
-#
-# Direct Digital Synthesis
-#
-# CONFIG_AD9832 is not set
-# CONFIG_AD9834 is not set
-
-#
-# Digital gyroscope sensors
-#
-# CONFIG_ADIS16060 is not set
-
-#
-# Network Analyzer, Impedance Converters
-#
-# CONFIG_AD5933 is not set
-
-#
-# Light sensors
-#
-# CONFIG_SENSORS_ISL29018 is not set
-# CONFIG_SENSORS_ISL29028 is not set
-# CONFIG_TSL2583 is not set
-# CONFIG_TSL2x7x is not set
-
-#
-# Magnetometer sensors
-#
-# CONFIG_SENSORS_HMC5843_I2C is not set
-# CONFIG_SENSORS_HMC5843_SPI is not set
-
-#
-# Active energy metering IC
-#
-# CONFIG_ADE7753 is not set
-# CONFIG_ADE7754 is not set
-# CONFIG_ADE7758 is not set
-# CONFIG_ADE7759 is not set
-# CONFIG_ADE7854 is not set
-
-#
-# Resolver to digital converters
-#
-# CONFIG_AD2S90 is not set
-# CONFIG_AD2S1200 is not set
-# CONFIG_AD2S1210 is not set
-
-#
-# Triggers - standalone
-#
-# CONFIG_IIO_PERIODIC_RTC_TRIGGER is not set
-# CONFIG_IIO_SIMPLE_DUMMY is not set
-# CONFIG_FB_SM750 is not set
-# CONFIG_FB_XGI is not set
-
-#
-# Speakup console speech
-#
-# CONFIG_SPEAKUP is not set
-# CONFIG_TOUCHSCREEN_SYNAPTICS_I2C_RMI4 is not set
-CONFIG_STAGING_MEDIA=y
-# CONFIG_I2C_BCM2048 is not set
-# CONFIG_DVB_CXD2099 is not set
-# CONFIG_DVB_MN88472 is not set
-# CONFIG_DVB_MN88473 is not set
-CONFIG_LIRC_STAGING=y
-CONFIG_LIRC_BT829=m
-CONFIG_LIRC_IMON=m
-CONFIG_LIRC_PARALLEL=m
-CONFIG_LIRC_SASEM=m
-CONFIG_LIRC_SERIAL=m
-CONFIG_LIRC_SERIAL_TRANSMITTER=y
-CONFIG_LIRC_SIR=m
-CONFIG_LIRC_ZILOG=m
-# CONFIG_STAGING_RDMA is not set
-
-#
-# Android
-#
-# CONFIG_WIMAX_GDM72XX is not set
-# CONFIG_LTE_GDM724X is not set
-# CONFIG_FIREWIRE_SERIAL is not set
-# CONFIG_LUSTRE_FS is not set
-# CONFIG_DGNC is not set
-# CONFIG_DGAP is not set
-# CONFIG_GS_FPGABOOT is not set
-# CONFIG_CRYPTO_SKEIN is not set
-# CONFIG_UNISYSSPAR is not set
-# CONFIG_FB_TFT is not set
-# CONFIG_WILC1000_DRIVER is not set
-# CONFIG_MOST is not set
-CONFIG_X86_PLATFORM_DEVICES=y
-CONFIG_ACER_WMI=m
-CONFIG_ACERHDF=m
-CONFIG_ALIENWARE_WMI=m
-CONFIG_ASUS_LAPTOP=m
-CONFIG_DELL_LAPTOP=m
-CONFIG_DELL_WMI=m
-CONFIG_DELL_WMI_AIO=m
-CONFIG_DELL_SMO8800=m
-CONFIG_DELL_RBTN=m
-CONFIG_FUJITSU_LAPTOP=m
-# CONFIG_FUJITSU_LAPTOP_DEBUG is not set
-CONFIG_FUJITSU_TABLET=m
-CONFIG_AMILO_RFKILL=m
-CONFIG_HP_ACCEL=m
-CONFIG_HP_WIRELESS=m
-CONFIG_HP_WMI=m
-CONFIG_MSI_LAPTOP=m
-CONFIG_PANASONIC_LAPTOP=m
-CONFIG_COMPAL_LAPTOP=m
-CONFIG_SONY_LAPTOP=m
-CONFIG_SONYPI_COMPAT=y
-CONFIG_IDEAPAD_LAPTOP=m
-CONFIG_THINKPAD_ACPI=m
-CONFIG_THINKPAD_ACPI_ALSA_SUPPORT=y
-# CONFIG_THINKPAD_ACPI_DEBUGFACILITIES is not set
-# CONFIG_THINKPAD_ACPI_DEBUG is not set
-# CONFIG_THINKPAD_ACPI_UNSAFE_LEDS is not set
-CONFIG_THINKPAD_ACPI_VIDEO=y
-CONFIG_THINKPAD_ACPI_HOTKEY_POLL=y
-CONFIG_SENSORS_HDAPS=m
-# CONFIG_INTEL_MENLOW is not set
-CONFIG_EEEPC_LAPTOP=m
-CONFIG_ASUS_WMI=m
-CONFIG_ASUS_NB_WMI=m
-CONFIG_EEEPC_WMI=m
-CONFIG_ACPI_WMI=m
-CONFIG_MSI_WMI=m
-CONFIG_TOPSTAR_LAPTOP=m
-CONFIG_ACPI_TOSHIBA=m
-CONFIG_TOSHIBA_BT_RFKILL=m
-CONFIG_TOSHIBA_HAPS=m
-CONFIG_TOSHIBA_WMI=m
-CONFIG_ACPI_CMPC=m
-CONFIG_INTEL_IPS=m
-# CONFIG_IBM_RTL is not set
-# CONFIG_XO1_RFKILL is not set
-# CONFIG_XO15_EBOOK is not set
-CONFIG_SAMSUNG_LAPTOP=m
-CONFIG_MXM_WMI=m
-CONFIG_INTEL_OAKTRAIL=m
-CONFIG_SAMSUNG_Q10=m
-CONFIG_APPLE_GMUX=m
-CONFIG_INTEL_RST=m
-CONFIG_INTEL_SMARTCONNECT=y
-CONFIG_PVPANIC=m
-# CONFIG_INTEL_PMC_IPC is not set
-CONFIG_SURFACE_PRO3_BUTTON=m
-CONFIG_CHROME_PLATFORMS=y
-CONFIG_CHROMEOS_LAPTOP=m
-CONFIG_CHROMEOS_PSTORE=m
-CONFIG_CLKDEV_LOOKUP=y
-CONFIG_HAVE_CLK_PREPARE=y
-CONFIG_COMMON_CLK=y
-
-#
-# Common Clock Framework
-#
-# CONFIG_COMMON_CLK_VERSATILE is not set
-# CONFIG_COMMON_CLK_SCPI is not set
-# CONFIG_COMMON_CLK_SI5351 is not set
-# CONFIG_COMMON_CLK_AXI_CLKGEN is not set
-CONFIG_COMMON_CLK_XGENE=y
-# CONFIG_COMMON_CLK_PWM is not set
-# CONFIG_COMMON_CLK_PXA is not set
-# CONFIG_COMMON_CLK_CDCE706 is not set
-CONFIG_CLK_BCM_KONA=y
-# CONFIG_COMMON_CLK_HI6220 is not set
-
-#
-# Hardware Spinlock drivers
-#
-
-#
-# Clock Source drivers
-#
-CONFIG_CLKEVT_I8253=y
-CONFIG_I8253_LOCK=y
-CONFIG_CLKBLD_I8253=y
-# CONFIG_ATMEL_PIT is not set
-# CONFIG_SH_TIMER_CMT is not set
-# CONFIG_SH_TIMER_MTU2 is not set
-# CONFIG_SH_TIMER_TMU is not set
-# CONFIG_EM_TIMER_STI is not set
-# CONFIG_MAILBOX is not set
-CONFIG_IOMMU_API=y
-CONFIG_IOMMU_SUPPORT=y
-
-#
-# Generic IOMMU Pagetable Support
-#
-# CONFIG_IOMMU_IO_PGTABLE_LPAE is not set
-CONFIG_IOMMU_IOVA=y
-CONFIG_AMD_IOMMU=y
-# CONFIG_AMD_IOMMU_STATS is not set
-CONFIG_AMD_IOMMU_V2=m
-CONFIG_DMAR_TABLE=y
-CONFIG_INTEL_IOMMU=y
-CONFIG_INTEL_IOMMU_SVM=y
-# CONFIG_INTEL_IOMMU_DEFAULT_ON is not set
-CONFIG_INTEL_IOMMU_FLOPPY_WA=y
-CONFIG_IRQ_REMAP=y
-
-#
-# Remoteproc drivers
-#
-# CONFIG_STE_MODEM_RPROC is not set
-
-#
-# Rpmsg drivers
-#
-
-#
-# SOC (System On Chip) specific Drivers
-#
-# CONFIG_MTK_INFRACFG is not set
-# CONFIG_MTK_SCPSYS is not set
-# CONFIG_ROCKCHIP_PM_DOMAINS is not set
-# CONFIG_SUNXI_SRAM is not set
-# CONFIG_SOC_TI is not set
-CONFIG_PM_DEVFREQ=y
-
-#
-# DEVFREQ Governors
-#
-CONFIG_DEVFREQ_GOV_SIMPLE_ONDEMAND=m
-# CONFIG_DEVFREQ_GOV_PERFORMANCE is not set
-# CONFIG_DEVFREQ_GOV_POWERSAVE is not set
-# CONFIG_DEVFREQ_GOV_USERSPACE is not set
-
-#
-# DEVFREQ Drivers
-#
-# CONFIG_PM_DEVFREQ_EVENT is not set
-# CONFIG_EXTCON is not set
-# CONFIG_MEMORY is not set
-CONFIG_IIO=m
-CONFIG_IIO_BUFFER=y
-CONFIG_IIO_BUFFER_CB=m
-CONFIG_IIO_KFIFO_BUF=m
-CONFIG_IIO_TRIGGERED_BUFFER=m
-CONFIG_IIO_TRIGGER=y
-CONFIG_IIO_CONSUMERS_PER_TRIGGER=2
-
-#
-# Accelerometers
-#
-# CONFIG_BMA180 is not set
-CONFIG_BMC150_ACCEL=m
-CONFIG_BMC150_ACCEL_I2C=m
-CONFIG_BMC150_ACCEL_SPI=m
-CONFIG_HID_SENSOR_ACCEL_3D=m
-CONFIG_IIO_ST_ACCEL_3AXIS=m
-CONFIG_IIO_ST_ACCEL_I2C_3AXIS=m
-CONFIG_IIO_ST_ACCEL_SPI_3AXIS=m
-# CONFIG_KXSD9 is not set
-CONFIG_KXCJK1013=m
-# CONFIG_MMA8452 is not set
-# CONFIG_MMA9551 is not set
-# CONFIG_MMA9553 is not set
-# CONFIG_MXC4005 is not set
-# CONFIG_STK8312 is not set
-# CONFIG_STK8BA50 is not set
-
-#
-# Analog to digital converters
-#
-# CONFIG_AD7266 is not set
-# CONFIG_AD7291 is not set
-# CONFIG_AD7298 is not set
-# CONFIG_AD7476 is not set
-# CONFIG_AD7791 is not set
-# CONFIG_AD7793 is not set
-# CONFIG_AD7887 is not set
-# CONFIG_AD7923 is not set
-# CONFIG_AD799X is not set
-# CONFIG_HI8435 is not set
-# CONFIG_MAX1027 is not set
-# CONFIG_MAX1363 is not set
-# CONFIG_MCP320X is not set
-# CONFIG_MCP3422 is not set
-# CONFIG_NAU7802 is not set
-# CONFIG_TI_ADC081C is not set
-# CONFIG_TI_ADC128S052 is not set
-# CONFIG_VIPERBOARD_ADC is not set
-# CONFIG_XILINX_XADC is not set
-
-#
-# Amplifiers
-#
-# CONFIG_AD8366 is not set
-
-#
-# Chemical Sensors
-#
-# CONFIG_VZ89X is not set
-
-#
-# Hid Sensor IIO Common
-#
-CONFIG_HID_SENSOR_IIO_COMMON=m
-CONFIG_HID_SENSOR_IIO_TRIGGER=m
-
-#
-# SSP Sensor Common
-#
-# CONFIG_IIO_SSP_SENSORHUB is not set
-CONFIG_IIO_ST_SENSORS_I2C=m
-CONFIG_IIO_ST_SENSORS_SPI=m
-CONFIG_IIO_ST_SENSORS_CORE=m
-
-#
-# Digital to analog converters
-#
-# CONFIG_AD5064 is not set
-# CONFIG_AD5360 is not set
-# CONFIG_AD5380 is not set
-# CONFIG_AD5421 is not set
-# CONFIG_AD5446 is not set
-# CONFIG_AD5449 is not set
-# CONFIG_AD5504 is not set
-# CONFIG_AD5624R_SPI is not set
-# CONFIG_AD5686 is not set
-# CONFIG_AD5755 is not set
-# CONFIG_AD5764 is not set
-# CONFIG_AD5791 is not set
-# CONFIG_AD7303 is not set
-# CONFIG_M62332 is not set
-# CONFIG_MAX517 is not set
-# CONFIG_MCP4725 is not set
-# CONFIG_MCP4922 is not set
-
-#
-# Frequency Synthesizers DDS/PLL
-#
-
-#
-# Clock Generator/Distribution
-#
-# CONFIG_AD9523 is not set
-
-#
-# Phase-Locked Loop (PLL) frequency synthesizers
-#
-# CONFIG_ADF4350 is not set
-
-#
-# Digital gyroscope sensors
-#
-# CONFIG_ADIS16080 is not set
-# CONFIG_ADIS16130 is not set
-# CONFIG_ADIS16136 is not set
-# CONFIG_ADIS16260 is not set
-# CONFIG_ADXRS450 is not set
-# CONFIG_BMG160 is not set
-CONFIG_HID_SENSOR_GYRO_3D=m
-CONFIG_IIO_ST_GYRO_3AXIS=m
-CONFIG_IIO_ST_GYRO_I2C_3AXIS=m
-CONFIG_IIO_ST_GYRO_SPI_3AXIS=m
-# CONFIG_ITG3200 is not set
-
-#
-# Humidity sensors
-#
-CONFIG_DHT11=m
-# CONFIG_HDC100X is not set
-# CONFIG_HTU21 is not set
-# CONFIG_SI7005 is not set
-# CONFIG_SI7020 is not set
-
-#
-# Inertial measurement units
-#
-# CONFIG_ADIS16400 is not set
-# CONFIG_ADIS16480 is not set
-# CONFIG_KMX61 is not set
-# CONFIG_INV_MPU6050_IIO is not set
-
-#
-# Light sensors
-#
-CONFIG_ACPI_ALS=m
-# CONFIG_ADJD_S311 is not set
-# CONFIG_AL3320A is not set
-# CONFIG_APDS9300 is not set
-# CONFIG_APDS9960 is not set
-CONFIG_BH1750=m
-CONFIG_CM32181=m
-# CONFIG_CM3232 is not set
-# CONFIG_CM3323 is not set
-# CONFIG_CM36651 is not set
-# CONFIG_GP2AP020A00F is not set
-# CONFIG_ISL29125 is not set
-CONFIG_HID_SENSOR_ALS=m
-# CONFIG_HID_SENSOR_PROX is not set
-# CONFIG_JSA1212 is not set
-CONFIG_RPR0521=m
-# CONFIG_LTR501 is not set
-CONFIG_OPT3001=m
-CONFIG_PA12203001=m
-CONFIG_STK3310=m
-# CONFIG_TCS3414 is not set
-# CONFIG_TCS3472 is not set
-# CONFIG_SENSORS_TSL2563 is not set
-# CONFIG_TSL4531 is not set
-# CONFIG_US5182D is not set
-# CONFIG_VCNL4000 is not set
-
-#
-# Magnetometer sensors
-#
-# CONFIG_AK8975 is not set
-# CONFIG_AK09911 is not set
-# CONFIG_BMC150_MAGN is not set
-# CONFIG_MAG3110 is not set
-CONFIG_HID_SENSOR_MAGNETOMETER_3D=m
-# CONFIG_MMC35240 is not set
-CONFIG_IIO_ST_MAGN_3AXIS=m
-CONFIG_IIO_ST_MAGN_I2C_3AXIS=m
-CONFIG_IIO_ST_MAGN_SPI_3AXIS=m
-
-#
-# Inclinometer sensors
-#
-CONFIG_HID_SENSOR_INCLINOMETER_3D=m
-CONFIG_HID_SENSOR_DEVICE_ROTATION=m
-
-#
-# Triggers - standalone
-#
-CONFIG_IIO_INTERRUPT_TRIGGER=m
-# CONFIG_IIO_SYSFS_TRIGGER is not set
-
-#
-# Digital potentiometers
-#
-# CONFIG_MCP4531 is not set
-
-#
-# Pressure sensors
-#
-# CONFIG_BMP280 is not set
-# CONFIG_HID_SENSOR_PRESS is not set
-# CONFIG_MPL115 is not set
-# CONFIG_MPL3115 is not set
-# CONFIG_MS5611 is not set
-# CONFIG_MS5637 is not set
-# CONFIG_IIO_ST_PRESS is not set
-# CONFIG_T5403 is not set
-
-#
-# Lightning sensors
-#
-# CONFIG_AS3935 is not set
-
-#
-# Proximity sensors
-#
-# CONFIG_LIDAR_LITE_V2 is not set
-# CONFIG_SX9500 is not set
-
-#
-# Temperature sensors
-#
-# CONFIG_MLX90614 is not set
-# CONFIG_TMP006 is not set
-# CONFIG_TSYS01 is not set
-# CONFIG_TSYS02D is not set
-CONFIG_NTB=m
-CONFIG_NTB_INTEL=m
-CONFIG_NTB_PINGPONG=m
-CONFIG_NTB_TOOL=m
-CONFIG_NTB_TRANSPORT=m
-# CONFIG_VME_BUS is not set
-CONFIG_PWM=y
-CONFIG_PWM_SYSFS=y
-# CONFIG_PWM_CLPS711X is not set
-# CONFIG_PWM_CRC is not set
-CONFIG_PWM_LPSS=m
-CONFIG_PWM_LPSS_PCI=m
-CONFIG_PWM_LPSS_PLATFORM=m
-# CONFIG_PWM_MTK_DISP is not set
-# CONFIG_PWM_PCA9685 is not set
-# CONFIG_PWM_RCAR is not set
-# CONFIG_PWM_RENESAS_TPU is not set
-# CONFIG_PWM_SUN4I is not set
-# CONFIG_IPACK_BUS is not set
-CONFIG_RESET_CONTROLLER=y
-CONFIG_FMC=m
-CONFIG_FMC_FAKEDEV=m
-CONFIG_FMC_TRIVIAL=m
-CONFIG_FMC_WRITE_EEPROM=m
-CONFIG_FMC_CHARDEV=m
-
-#
-# PHY Subsystem
-#
-CONFIG_GENERIC_PHY=y
-# CONFIG_PHY_EXYNOS_MIPI_VIDEO is not set
-# CONFIG_PHY_PXA_28NM_HSIC is not set
-# CONFIG_PHY_PXA_28NM_USB2 is not set
-# CONFIG_OMAP_CONTROL_PHY is not set
-# CONFIG_BCM_KONA_USB2_PHY is not set
-# CONFIG_PHY_ST_SPEAR1310_MIPHY is not set
-# CONFIG_PHY_ST_SPEAR1340_MIPHY is not set
-# CONFIG_PHY_STIH407_USB is not set
-# CONFIG_PHY_TUSB1210 is not set
-CONFIG_POWERCAP=y
-CONFIG_INTEL_RAPL=m
-# CONFIG_MCB is not set
-
-#
-# Performance monitor support
-#
-CONFIG_RAS=y
-# CONFIG_AMD_MCE_INJ is not set
-CONFIG_THUNDERBOLT=m
-
-#
-# Android
-#
-# CONFIG_ANDROID is not set
-CONFIG_LIBNVDIMM=y
-CONFIG_BLK_DEV_PMEM=m
-CONFIG_ND_BLK=m
-CONFIG_ND_CLAIM=y
-CONFIG_ND_BTT=m
-CONFIG_BTT=y
-CONFIG_NVMEM=m
-# CONFIG_NVMEM_MXS_OCOTP is not set
-# CONFIG_QCOM_QFPROM is not set
-# CONFIG_ROCKCHIP_EFUSE is not set
-# CONFIG_NVMEM_VF610_OCOTP is not set
-# CONFIG_STM is not set
-# CONFIG_STM_DUMMY is not set
-# CONFIG_STM_SOURCE_CONSOLE is not set
-# CONFIG_INTEL_TH is not set
-
-#
-# FPGA Configuration Support
-#
-# CONFIG_FPGA is not set
-
-#
-# Firmware Drivers
-#
-CONFIG_EDD=m
-# CONFIG_EDD_OFF is not set
-CONFIG_FIRMWARE_MEMMAP=y
-# CONFIG_DELL_RBU is not set
-CONFIG_DCDBAS=m
-CONFIG_DMIID=y
-CONFIG_DMI_SYSFS=y
-CONFIG_DMI_SCAN_MACHINE_NON_EFI_FALLBACK=y
-CONFIG_ISCSI_IBFT_FIND=y
-CONFIG_ISCSI_IBFT=m
-# CONFIG_GOOGLE_FIRMWARE is not set
-
-#
-# EFI (Extensible Firmware Interface) Support
-#
-# CONFIG_EFI_VARS is not set
-CONFIG_EFI_ESRT=y
-CONFIG_EFI_RUNTIME_MAP=y
-# CONFIG_EFI_FAKE_MEMMAP is not set
-CONFIG_EFI_RUNTIME_WRAPPERS=y
-CONFIG_UEFI_CPER=y
-
-#
-# File systems
-#
-CONFIG_DCACHE_WORD_ACCESS=y
-# CONFIG_EXT2_FS is not set
-# CONFIG_EXT3_FS is not set
-CONFIG_EXT4_FS=y
-CONFIG_EXT4_USE_FOR_EXT2=y
-CONFIG_EXT4_FS_POSIX_ACL=y
-CONFIG_EXT4_FS_SECURITY=y
-# CONFIG_EXT4_ENCRYPTION is not set
-# CONFIG_EXT4_DEBUG is not set
-CONFIG_JBD2=y
-# CONFIG_JBD2_DEBUG is not set
-CONFIG_FS_MBCACHE=y
-CONFIG_REISERFS_FS=m
-# CONFIG_REISERFS_CHECK is not set
-CONFIG_REISERFS_PROC_INFO=y
-CONFIG_REISERFS_FS_XATTR=y
-CONFIG_REISERFS_FS_POSIX_ACL=y
-CONFIG_REISERFS_FS_SECURITY=y
-CONFIG_JFS_FS=m
-CONFIG_JFS_POSIX_ACL=y
-CONFIG_JFS_SECURITY=y
-# CONFIG_JFS_DEBUG is not set
-# CONFIG_JFS_STATISTICS is not set
-CONFIG_XFS_FS=m
-CONFIG_XFS_QUOTA=y
-CONFIG_XFS_POSIX_ACL=y
-# CONFIG_XFS_RT is not set
-# CONFIG_XFS_WARN is not set
-# CONFIG_XFS_DEBUG is not set
-CONFIG_GFS2_FS=m
-CONFIG_GFS2_FS_LOCKING_DLM=y
-CONFIG_OCFS2_FS=m
-CONFIG_OCFS2_FS_O2CB=m
-CONFIG_OCFS2_FS_USERSPACE_CLUSTER=m
-# CONFIG_OCFS2_FS_STATS is not set
-# CONFIG_OCFS2_DEBUG_MASKLOG is not set
-# CONFIG_OCFS2_DEBUG_FS is not set
-CONFIG_BTRFS_FS=m
-CONFIG_BTRFS_FS_POSIX_ACL=y
-# CONFIG_BTRFS_FS_CHECK_INTEGRITY is not set
-# CONFIG_BTRFS_FS_RUN_SANITY_TESTS is not set
-# CONFIG_BTRFS_DEBUG is not set
-# CONFIG_BTRFS_ASSERT is not set
-CONFIG_NILFS2_FS=m
-CONFIG_F2FS_FS=m
-CONFIG_F2FS_STAT_FS=y
-CONFIG_F2FS_FS_XATTR=y
-CONFIG_F2FS_FS_POSIX_ACL=y
-CONFIG_F2FS_FS_SECURITY=y
-# CONFIG_F2FS_CHECK_FS is not set
-# CONFIG_F2FS_FS_ENCRYPTION is not set
-# CONFIG_F2FS_IO_TRACE is not set
-CONFIG_FS_DAX=y
-CONFIG_FS_POSIX_ACL=y
-CONFIG_EXPORTFS=y
-CONFIG_FILE_LOCKING=y
-CONFIG_FSNOTIFY=y
-CONFIG_DNOTIFY=y
-CONFIG_INOTIFY_USER=y
-CONFIG_FANOTIFY=y
-CONFIG_FANOTIFY_ACCESS_PERMISSIONS=y
-CONFIG_QUOTA=y
-CONFIG_QUOTA_NETLINK_INTERFACE=y
-# CONFIG_PRINT_QUOTA_WARNING is not set
-# CONFIG_QUOTA_DEBUG is not set
-CONFIG_QUOTA_TREE=y
-# CONFIG_QFMT_V1 is not set
-CONFIG_QFMT_V2=y
-CONFIG_QUOTACTL=y
-CONFIG_QUOTACTL_COMPAT=y
-CONFIG_AUTOFS4_FS=y
-CONFIG_FUSE_FS=m
-CONFIG_CUSE=m
-CONFIG_OVERLAY_FS=m
-
-#
-# Caches
-#
-CONFIG_FSCACHE=m
-CONFIG_FSCACHE_STATS=y
-# CONFIG_FSCACHE_HISTOGRAM is not set
-# CONFIG_FSCACHE_DEBUG is not set
-CONFIG_FSCACHE_OBJECT_LIST=y
-CONFIG_CACHEFILES=m
-# CONFIG_CACHEFILES_DEBUG is not set
-# CONFIG_CACHEFILES_HISTOGRAM is not set
-
-#
-# CD-ROM/DVD Filesystems
-#
-CONFIG_ISO9660_FS=m
-CONFIG_JOLIET=y
-CONFIG_ZISOFS=y
-CONFIG_UDF_FS=m
-CONFIG_UDF_NLS=y
-
-#
-# DOS/FAT/NT Filesystems
-#
-CONFIG_FAT_FS=m
-CONFIG_MSDOS_FS=m
-CONFIG_VFAT_FS=m
-CONFIG_FAT_DEFAULT_CODEPAGE=437
-CONFIG_FAT_DEFAULT_IOCHARSET="ascii"
-# CONFIG_NTFS_FS is not set
-
-#
-# Pseudo filesystems
-#
-CONFIG_PROC_FS=y
-CONFIG_PROC_KCORE=y
-CONFIG_PROC_VMCORE=y
-CONFIG_PROC_SYSCTL=y
-CONFIG_PROC_PAGE_MONITOR=y
-CONFIG_PROC_CHILDREN=y
-CONFIG_KERNFS=y
-CONFIG_SYSFS=y
-CONFIG_TMPFS=y
-CONFIG_TMPFS_POSIX_ACL=y
-CONFIG_TMPFS_XATTR=y
-CONFIG_HUGETLBFS=y
-CONFIG_HUGETLB_PAGE=y
-CONFIG_CONFIGFS_FS=y
-CONFIG_EFIVAR_FS=y
-CONFIG_MISC_FILESYSTEMS=y
-# CONFIG_ADFS_FS is not set
-CONFIG_AFFS_FS=m
-CONFIG_ECRYPT_FS=m
-# CONFIG_ECRYPT_FS_MESSAGING is not set
-CONFIG_HFS_FS=m
-CONFIG_HFSPLUS_FS=m
-# CONFIG_HFSPLUS_FS_POSIX_ACL is not set
-CONFIG_BEFS_FS=m
-# CONFIG_BEFS_DEBUG is not set
-# CONFIG_BFS_FS is not set
-# CONFIG_EFS_FS is not set
-# CONFIG_JFFS2_FS is not set
-CONFIG_UBIFS_FS=m
-# CONFIG_UBIFS_FS_ADVANCED_COMPR is not set
-CONFIG_UBIFS_FS_LZO=y
-CONFIG_UBIFS_FS_ZLIB=y
-CONFIG_UBIFS_ATIME_SUPPORT=y
-# CONFIG_LOGFS is not set
-CONFIG_CRAMFS=m
-CONFIG_SQUASHFS=m
-CONFIG_SQUASHFS_FILE_CACHE=y
-# CONFIG_SQUASHFS_FILE_DIRECT is not set
-CONFIG_SQUASHFS_DECOMP_SINGLE=y
-# CONFIG_SQUASHFS_DECOMP_MULTI is not set
-# CONFIG_SQUASHFS_DECOMP_MULTI_PERCPU is not set
-CONFIG_SQUASHFS_XATTR=y
-CONFIG_SQUASHFS_ZLIB=y
-CONFIG_SQUASHFS_LZ4=y
-CONFIG_SQUASHFS_LZO=y
-CONFIG_SQUASHFS_XZ=y
-# CONFIG_SQUASHFS_4K_DEVBLK_SIZE is not set
-# CONFIG_SQUASHFS_EMBEDDED is not set
-CONFIG_SQUASHFS_FRAGMENT_CACHE_SIZE=3
-# CONFIG_VXFS_FS is not set
-CONFIG_MINIX_FS=m
-# CONFIG_OMFS_FS is not set
-# CONFIG_HPFS_FS is not set
-# CONFIG_QNX4FS_FS is not set
-# CONFIG_QNX6FS_FS is not set
-CONFIG_ROMFS_FS=m
-CONFIG_ROMFS_BACKED_BY_BLOCK=y
-# CONFIG_ROMFS_BACKED_BY_MTD is not set
-# CONFIG_ROMFS_BACKED_BY_BOTH is not set
-CONFIG_ROMFS_ON_BLOCK=y
-CONFIG_PSTORE=y
-# CONFIG_PSTORE_CONSOLE is not set
-# CONFIG_PSTORE_PMSG is not set
-# CONFIG_PSTORE_FTRACE is not set
-CONFIG_PSTORE_RAM=m
-CONFIG_SYSV_FS=m
-CONFIG_UFS_FS=m
-# CONFIG_UFS_FS_WRITE is not set
-# CONFIG_UFS_DEBUG is not set
-# CONFIG_EXOFS_FS is not set
-CONFIG_ORE=m
-CONFIG_NETWORK_FILESYSTEMS=y
-CONFIG_NFS_FS=m
-# CONFIG_NFS_V2 is not set
-CONFIG_NFS_V3=m
-CONFIG_NFS_V3_ACL=y
-CONFIG_NFS_V4=m
-CONFIG_NFS_SWAP=y
-CONFIG_NFS_V4_1=y
-CONFIG_NFS_V4_2=y
-CONFIG_PNFS_FILE_LAYOUT=m
-CONFIG_PNFS_BLOCK=m
-CONFIG_PNFS_OBJLAYOUT=m
-CONFIG_PNFS_FLEXFILE_LAYOUT=m
-CONFIG_NFS_V4_1_IMPLEMENTATION_ID_DOMAIN="kernel.org"
-# CONFIG_NFS_V4_1_MIGRATION is not set
-CONFIG_NFS_V4_SECURITY_LABEL=y
-CONFIG_NFS_FSCACHE=y
-# CONFIG_NFS_USE_LEGACY_DNS is not set
-CONFIG_NFS_USE_KERNEL_DNS=y
-CONFIG_NFS_DEBUG=y
-CONFIG_NFSD=m
-CONFIG_NFSD_V2_ACL=y
-CONFIG_NFSD_V3=y
-CONFIG_NFSD_V3_ACL=y
-CONFIG_NFSD_V4=y
-CONFIG_NFSD_PNFS=y
-CONFIG_NFSD_V4_SECURITY_LABEL=y
-# CONFIG_NFSD_FAULT_INJECTION is not set
-CONFIG_GRACE_PERIOD=m
-CONFIG_LOCKD=m
-CONFIG_LOCKD_V4=y
-CONFIG_NFS_ACL_SUPPORT=m
-CONFIG_NFS_COMMON=y
-CONFIG_SUNRPC=m
-CONFIG_SUNRPC_GSS=m
-CONFIG_SUNRPC_BACKCHANNEL=y
-CONFIG_SUNRPC_SWAP=y
-CONFIG_RPCSEC_GSS_KRB5=m
-CONFIG_SUNRPC_DEBUG=y
-CONFIG_SUNRPC_XPRT_RDMA=m
-CONFIG_CEPH_FS=m
-CONFIG_CEPH_FSCACHE=y
-CONFIG_CEPH_FS_POSIX_ACL=y
-CONFIG_CIFS=m
-CONFIG_CIFS_STATS=y
-# CONFIG_CIFS_STATS2 is not set
-CONFIG_CIFS_WEAK_PW_HASH=y
-CONFIG_CIFS_UPCALL=y
-CONFIG_CIFS_XATTR=y
-CONFIG_CIFS_POSIX=y
-CONFIG_CIFS_ACL=y
-CONFIG_CIFS_DEBUG=y
-# CONFIG_CIFS_DEBUG2 is not set
-CONFIG_CIFS_DFS_UPCALL=y
-CONFIG_CIFS_SMB2=y
-# CONFIG_CIFS_SMB311 is not set
-CONFIG_CIFS_FSCACHE=y
-CONFIG_NCP_FS=m
-CONFIG_NCPFS_PACKET_SIGNING=y
-CONFIG_NCPFS_IOCTL_LOCKING=y
-CONFIG_NCPFS_STRONG=y
-CONFIG_NCPFS_NFS_NS=y
-CONFIG_NCPFS_OS2_NS=y
-CONFIG_NCPFS_SMALLDOS=y
-CONFIG_NCPFS_NLS=y
-CONFIG_NCPFS_EXTRAS=y
-CONFIG_CODA_FS=m
-# CONFIG_AFS_FS is not set
-CONFIG_9P_FS=m
-CONFIG_9P_FSCACHE=y
-CONFIG_9P_FS_POSIX_ACL=y
-CONFIG_9P_FS_SECURITY=y
-CONFIG_NLS=y
-CONFIG_NLS_DEFAULT="utf8"
-CONFIG_NLS_CODEPAGE_437=y
-CONFIG_NLS_CODEPAGE_737=m
-CONFIG_NLS_CODEPAGE_775=m
-CONFIG_NLS_CODEPAGE_850=m
-CONFIG_NLS_CODEPAGE_852=m
-CONFIG_NLS_CODEPAGE_855=m
-CONFIG_NLS_CODEPAGE_857=m
-CONFIG_NLS_CODEPAGE_860=m
-CONFIG_NLS_CODEPAGE_861=m
-CONFIG_NLS_CODEPAGE_862=m
-CONFIG_NLS_CODEPAGE_863=m
-CONFIG_NLS_CODEPAGE_864=m
-CONFIG_NLS_CODEPAGE_865=m
-CONFIG_NLS_CODEPAGE_866=m
-CONFIG_NLS_CODEPAGE_869=m
-CONFIG_NLS_CODEPAGE_936=m
-CONFIG_NLS_CODEPAGE_950=m
-CONFIG_NLS_CODEPAGE_932=m
-CONFIG_NLS_CODEPAGE_949=m
-CONFIG_NLS_CODEPAGE_874=m
-CONFIG_NLS_ISO8859_8=m
-CONFIG_NLS_CODEPAGE_1250=m
-CONFIG_NLS_CODEPAGE_1251=m
-CONFIG_NLS_ASCII=y
-CONFIG_NLS_ISO8859_1=m
-CONFIG_NLS_ISO8859_2=m
-CONFIG_NLS_ISO8859_3=m
-CONFIG_NLS_ISO8859_4=m
-CONFIG_NLS_ISO8859_5=m
-CONFIG_NLS_ISO8859_6=m
-CONFIG_NLS_ISO8859_7=m
-CONFIG_NLS_ISO8859_9=m
-CONFIG_NLS_ISO8859_13=m
-CONFIG_NLS_ISO8859_14=m
-CONFIG_NLS_ISO8859_15=m
-CONFIG_NLS_KOI8_R=m
-CONFIG_NLS_KOI8_U=m
-CONFIG_NLS_MAC_ROMAN=m
-CONFIG_NLS_MAC_CELTIC=m
-CONFIG_NLS_MAC_CENTEURO=m
-CONFIG_NLS_MAC_CROATIAN=m
-CONFIG_NLS_MAC_CYRILLIC=m
-CONFIG_NLS_MAC_GAELIC=m
-CONFIG_NLS_MAC_GREEK=m
-CONFIG_NLS_MAC_ICELAND=m
-CONFIG_NLS_MAC_INUIT=m
-CONFIG_NLS_MAC_ROMANIAN=m
-CONFIG_NLS_MAC_TURKISH=m
-CONFIG_NLS_UTF8=m
-CONFIG_DLM=m
-CONFIG_DLM_DEBUG=y
-
-#
-# Kernel hacking
-#
-CONFIG_TRACE_IRQFLAGS_SUPPORT=y
-
-#
-# printk and dmesg options
-#
-CONFIG_PRINTK_TIME=y
-CONFIG_MESSAGE_LOGLEVEL_DEFAULT=4
-CONFIG_BOOT_PRINTK_DELAY=y
-CONFIG_DYNAMIC_DEBUG=y
-
-#
-# Compile-time checks and compiler options
-#
-# CONFIG_ENABLE_WARN_DEPRECATED is not set
-CONFIG_ENABLE_MUST_CHECK=y
-CONFIG_FRAME_WARN=2048
-CONFIG_STRIP_ASM_SYMS=y
-# CONFIG_READABLE_ASM is not set
-CONFIG_UNUSED_SYMBOLS=y
-# CONFIG_PAGE_OWNER is not set
-CONFIG_DEBUG_FS=y
-CONFIG_HEADERS_CHECK=y
-# CONFIG_DEBUG_SECTION_MISMATCH is not set
-CONFIG_SECTION_MISMATCH_WARN_ONLY=y
-CONFIG_ARCH_WANT_FRAME_POINTERS=y
-CONFIG_FRAME_POINTER=y
-# CONFIG_DEBUG_FORCE_WEAK_PER_CPU is not set
-CONFIG_MAGIC_SYSRQ=y
-CONFIG_MAGIC_SYSRQ_DEFAULT_ENABLE=0x0
-CONFIG_DEBUG_KERNEL=y
-
-#
-# Memory Debugging
-#
-# CONFIG_PAGE_EXTENSION is not set
-# CONFIG_DEBUG_PAGEALLOC is not set
-# CONFIG_DEBUG_OBJECTS is not set
-# CONFIG_SLUB_DEBUG_ON is not set
-# CONFIG_SLUB_STATS is not set
-CONFIG_HAVE_DEBUG_KMEMLEAK=y
-# CONFIG_DEBUG_KMEMLEAK is not set
-# CONFIG_DEBUG_STACK_USAGE is not set
-CONFIG_DEBUG_VM=y
-# CONFIG_DEBUG_VM_VMACACHE is not set
-# CONFIG_DEBUG_VM_RB is not set
-# CONFIG_DEBUG_VIRTUAL is not set
-CONFIG_DEBUG_MEMORY_INIT=y
-# CONFIG_DEBUG_PER_CPU_MAPS is not set
-CONFIG_HAVE_DEBUG_STACKOVERFLOW=y
-CONFIG_DEBUG_STACKOVERFLOW=y
-CONFIG_HAVE_ARCH_KMEMCHECK=y
-CONFIG_HAVE_ARCH_KASAN=y
-# CONFIG_KASAN is not set
-CONFIG_DEBUG_SHIRQ=y
-
-#
-# Debug Lockups and Hangs
-#
-CONFIG_LOCKUP_DETECTOR=y
-CONFIG_HARDLOCKUP_DETECTOR=y
-# CONFIG_BOOTPARAM_HARDLOCKUP_PANIC is not set
-CONFIG_BOOTPARAM_HARDLOCKUP_PANIC_VALUE=0
-# CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC is not set
-CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC_VALUE=0
-# CONFIG_DETECT_HUNG_TASK is not set
-# CONFIG_PANIC_ON_OOPS is not set
-CONFIG_PANIC_ON_OOPS_VALUE=0
-CONFIG_PANIC_TIMEOUT=0
-CONFIG_SCHED_DEBUG=y
-CONFIG_SCHED_INFO=y
-CONFIG_SCHEDSTATS=y
-# CONFIG_SCHED_STACK_END_CHECK is not set
-# CONFIG_DEBUG_TIMEKEEPING is not set
-CONFIG_TIMER_STATS=y
-
-#
-# Lock Debugging (spinlocks, mutexes, etc...)
-#
-# CONFIG_DEBUG_RT_MUTEXES is not set
-# CONFIG_DEBUG_SPINLOCK is not set
-# CONFIG_DEBUG_MUTEXES is not set
-# CONFIG_DEBUG_WW_MUTEX_SLOWPATH is not set
-# CONFIG_DEBUG_LOCK_ALLOC is not set
-# CONFIG_PROVE_LOCKING is not set
-# CONFIG_LOCK_STAT is not set
-# CONFIG_DEBUG_ATOMIC_SLEEP is not set
-# CONFIG_DEBUG_LOCKING_API_SELFTESTS is not set
-# CONFIG_LOCK_TORTURE_TEST is not set
-CONFIG_STACKTRACE=y
-# CONFIG_DEBUG_KOBJECT is not set
-CONFIG_DEBUG_BUGVERBOSE=y
-CONFIG_DEBUG_LIST=y
-# CONFIG_DEBUG_PI_LIST is not set
-# CONFIG_DEBUG_SG is not set
-# CONFIG_DEBUG_NOTIFIERS is not set
-# CONFIG_DEBUG_CREDENTIALS is not set
-
-#
-# RCU Debugging
-#
-# CONFIG_PROVE_RCU is not set
-CONFIG_SPARSE_RCU_POINTER=y
-CONFIG_TORTURE_TEST=m
-CONFIG_RCU_TORTURE_TEST=m
-# CONFIG_RCU_TORTURE_TEST_SLOW_PREINIT is not set
-# CONFIG_RCU_TORTURE_TEST_SLOW_INIT is not set
-# CONFIG_RCU_TORTURE_TEST_SLOW_CLEANUP is not set
-CONFIG_RCU_CPU_STALL_TIMEOUT=60
-# CONFIG_RCU_TRACE is not set
-# CONFIG_RCU_EQS_DEBUG is not set
-# CONFIG_DEBUG_BLOCK_EXT_DEVT is not set
-# CONFIG_NOTIFIER_ERROR_INJECTION is not set
-# CONFIG_FAULT_INJECTION is not set
-CONFIG_LATENCYTOP=y
-CONFIG_ARCH_HAS_DEBUG_STRICT_USER_COPY_CHECKS=y
-# CONFIG_DEBUG_STRICT_USER_COPY_CHECKS is not set
-CONFIG_USER_STACKTRACE_SUPPORT=y
-CONFIG_NOP_TRACER=y
-CONFIG_HAVE_FUNCTION_TRACER=y
-CONFIG_HAVE_FUNCTION_GRAPH_TRACER=y
-CONFIG_HAVE_FUNCTION_GRAPH_FP_TEST=y
-CONFIG_HAVE_DYNAMIC_FTRACE=y
-CONFIG_HAVE_DYNAMIC_FTRACE_WITH_REGS=y
-CONFIG_HAVE_FTRACE_MCOUNT_RECORD=y
-CONFIG_HAVE_SYSCALL_TRACEPOINTS=y
-CONFIG_HAVE_FENTRY=y
-CONFIG_HAVE_C_RECORDMCOUNT=y
-CONFIG_TRACER_MAX_TRACE=y
-CONFIG_TRACE_CLOCK=y
-CONFIG_RING_BUFFER=y
-CONFIG_EVENT_TRACING=y
-CONFIG_CONTEXT_SWITCH_TRACER=y
-CONFIG_TRACING=y
-CONFIG_GENERIC_TRACER=y
-CONFIG_TRACING_SUPPORT=y
-CONFIG_FTRACE=y
-CONFIG_FUNCTION_TRACER=y
-CONFIG_FUNCTION_GRAPH_TRACER=y
-# CONFIG_IRQSOFF_TRACER is not set
-CONFIG_SCHED_TRACER=y
-CONFIG_FTRACE_SYSCALLS=y
-CONFIG_TRACER_SNAPSHOT=y
-# CONFIG_TRACER_SNAPSHOT_PER_CPU_SWAP is not set
-CONFIG_BRANCH_PROFILE_NONE=y
-# CONFIG_PROFILE_ANNOTATED_BRANCHES is not set
-# CONFIG_PROFILE_ALL_BRANCHES is not set
-CONFIG_STACK_TRACER=y
-CONFIG_BLK_DEV_IO_TRACE=y
-CONFIG_KPROBE_EVENT=y
-CONFIG_UPROBE_EVENT=y
-CONFIG_BPF_EVENTS=y
-CONFIG_PROBE_EVENTS=y
-CONFIG_DYNAMIC_FTRACE=y
-CONFIG_DYNAMIC_FTRACE_WITH_REGS=y
-CONFIG_FUNCTION_PROFILER=y
-CONFIG_FTRACE_MCOUNT_RECORD=y
-# CONFIG_FTRACE_STARTUP_TEST is not set
-CONFIG_MMIOTRACE=y
-# CONFIG_MMIOTRACE_TEST is not set
-# CONFIG_TRACEPOINT_BENCHMARK is not set
-CONFIG_RING_BUFFER_BENCHMARK=m
-# CONFIG_RING_BUFFER_STARTUP_TEST is not set
-# CONFIG_TRACE_ENUM_MAP_FILE is not set
-# CONFIG_TRACING_EVENTS_GPIO is not set
-
-#
-# Runtime Testing
-#
-# CONFIG_LKDTM is not set
-# CONFIG_TEST_LIST_SORT is not set
-# CONFIG_KPROBES_SANITY_TEST is not set
-# CONFIG_BACKTRACE_SELF_TEST is not set
-# CONFIG_RBTREE_TEST is not set
-# CONFIG_INTERVAL_TREE_TEST is not set
-# CONFIG_PERCPU_TEST is not set
-CONFIG_ATOMIC64_SELFTEST=y
-CONFIG_ASYNC_RAID6_TEST=m
-# CONFIG_TEST_HEXDUMP is not set
-# CONFIG_TEST_STRING_HELPERS is not set
-CONFIG_TEST_KSTRTOX=y
-# CONFIG_TEST_PRINTF is not set
-# CONFIG_TEST_RHASHTABLE is not set
-CONFIG_PROVIDE_OHCI1394_DMA_INIT=y
-CONFIG_BUILD_DOCSRC=y
-# CONFIG_DMA_API_DEBUG is not set
-# CONFIG_TEST_LKM is not set
-# CONFIG_TEST_USER_COPY is not set
-# CONFIG_TEST_BPF is not set
-# CONFIG_TEST_FIRMWARE is not set
-# CONFIG_TEST_UDELAY is not set
-# CONFIG_MEMTEST is not set
-# CONFIG_TEST_STATIC_KEYS is not set
-# CONFIG_SAMPLES is not set
-CONFIG_HAVE_ARCH_KGDB=y
-CONFIG_KGDB=y
-CONFIG_KGDB_SERIAL_CONSOLE=y
-CONFIG_KGDB_TESTS=y
-# CONFIG_KGDB_TESTS_ON_BOOT is not set
-CONFIG_KGDB_LOW_LEVEL_TRAP=y
-CONFIG_KGDB_KDB=y
-CONFIG_KDB_DEFAULT_ENABLE=0x0
-CONFIG_KDB_KEYBOARD=y
-CONFIG_KDB_CONTINUE_CATASTROPHIC=0
-CONFIG_STRICT_DEVMEM=y
-# CONFIG_X86_VERBOSE_BOOTUP is not set
-CONFIG_EARLY_PRINTK=y
-CONFIG_EARLY_PRINTK_DBGP=y
-CONFIG_EARLY_PRINTK_EFI=y
-CONFIG_X86_PTDUMP_CORE=y
-CONFIG_X86_PTDUMP=y
-# CONFIG_EFI_PGT_DUMP is not set
-CONFIG_DEBUG_RODATA=y
-CONFIG_DEBUG_RODATA_TEST=y
-# CONFIG_DEBUG_WX is not set
-CONFIG_DEBUG_SET_MODULE_RONX=y
-CONFIG_DEBUG_NX_TEST=m
-CONFIG_DOUBLEFAULT=y
-# CONFIG_DEBUG_TLBFLUSH is not set
-# CONFIG_IOMMU_STRESS is not set
-CONFIG_HAVE_MMIOTRACE_SUPPORT=y
-CONFIG_IO_DELAY_TYPE_0X80=0
-CONFIG_IO_DELAY_TYPE_0XED=1
-CONFIG_IO_DELAY_TYPE_UDELAY=2
-CONFIG_IO_DELAY_TYPE_NONE=3
-CONFIG_IO_DELAY_0X80=y
-# CONFIG_IO_DELAY_0XED is not set
-# CONFIG_IO_DELAY_UDELAY is not set
-# CONFIG_IO_DELAY_NONE is not set
-CONFIG_DEFAULT_IO_DELAY_TYPE=0
-CONFIG_DEBUG_BOOT_PARAMS=y
-# CONFIG_CPA_DEBUG is not set
-CONFIG_OPTIMIZE_INLINING=y
-# CONFIG_DEBUG_ENTRY is not set
-# CONFIG_DEBUG_NMI_SELFTEST is not set
-# CONFIG_X86_DEBUG_STATIC_CPU_HAS is not set
-# CONFIG_X86_DEBUG_FPU is not set
-# CONFIG_PUNIT_ATOM_DEBUG is not set
-
-#
-# Security options
-#
-CONFIG_KEYS=y
-CONFIG_PERSISTENT_KEYRINGS=y
-CONFIG_BIG_KEYS=y
-CONFIG_TRUSTED_KEYS=m
-CONFIG_ENCRYPTED_KEYS=m
-# CONFIG_SECURITY_DMESG_RESTRICT is not set
-CONFIG_SECURITY=y
-CONFIG_SECURITYFS=y
-CONFIG_SECURITY_NETWORK=y
-CONFIG_SECURITY_NETWORK_XFRM=y
-# CONFIG_SECURITY_PATH is not set
-CONFIG_INTEL_TXT=y
-CONFIG_LSM_MMAP_MIN_ADDR=65536
-CONFIG_SECURITY_SELINUX=y
-CONFIG_SECURITY_SELINUX_BOOTPARAM=y
-CONFIG_SECURITY_SELINUX_BOOTPARAM_VALUE=1
-CONFIG_SECURITY_SELINUX_DISABLE=y
-CONFIG_SECURITY_SELINUX_DEVELOP=y
-CONFIG_SECURITY_SELINUX_AVC_STATS=y
-CONFIG_SECURITY_SELINUX_CHECKREQPROT_VALUE=1
-# CONFIG_SECURITY_SELINUX_POLICYDB_VERSION_MAX is not set
-# CONFIG_SECURITY_SMACK is not set
-# CONFIG_SECURITY_TOMOYO is not set
-# CONFIG_SECURITY_APPARMOR is not set
-CONFIG_SECURITY_YAMA=y
-# CONFIG_INTEGRITY is not set
-CONFIG_DEFAULT_SECURITY_SELINUX=y
-# CONFIG_DEFAULT_SECURITY_DAC is not set
-CONFIG_DEFAULT_SECURITY="selinux"
-CONFIG_XOR_BLOCKS=m
-CONFIG_ASYNC_CORE=m
-CONFIG_ASYNC_MEMCPY=m
-CONFIG_ASYNC_XOR=m
-CONFIG_ASYNC_PQ=m
-CONFIG_ASYNC_RAID6_RECOV=m
-CONFIG_CRYPTO=y
-
-#
-# Crypto core or helper
-#
-CONFIG_CRYPTO_FIPS=y
-CONFIG_CRYPTO_ALGAPI=y
-CONFIG_CRYPTO_ALGAPI2=y
-CONFIG_CRYPTO_AEAD=y
-CONFIG_CRYPTO_AEAD2=y
-CONFIG_CRYPTO_BLKCIPHER=y
-CONFIG_CRYPTO_BLKCIPHER2=y
-CONFIG_CRYPTO_HASH=y
-CONFIG_CRYPTO_HASH2=y
-CONFIG_CRYPTO_RNG=y
-CONFIG_CRYPTO_RNG2=y
-CONFIG_CRYPTO_RNG_DEFAULT=y
-CONFIG_CRYPTO_PCOMP2=y
-CONFIG_CRYPTO_AKCIPHER2=y
-CONFIG_CRYPTO_AKCIPHER=y
-CONFIG_CRYPTO_RSA=y
-CONFIG_CRYPTO_MANAGER=y
-CONFIG_CRYPTO_MANAGER2=y
-CONFIG_CRYPTO_USER=m
-# CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is not set
-CONFIG_CRYPTO_GF128MUL=y
-CONFIG_CRYPTO_NULL=y
-CONFIG_CRYPTO_NULL2=y
-CONFIG_CRYPTO_PCRYPT=m
-CONFIG_CRYPTO_WORKQUEUE=y
-CONFIG_CRYPTO_CRYPTD=y
-# CONFIG_CRYPTO_MCRYPTD is not set
-CONFIG_CRYPTO_AUTHENC=m
-CONFIG_CRYPTO_TEST=m
-CONFIG_CRYPTO_ABLK_HELPER=y
-CONFIG_CRYPTO_GLUE_HELPER_X86=y
-
-#
-# Authenticated Encryption with Associated Data
-#
-CONFIG_CRYPTO_CCM=m
-CONFIG_CRYPTO_GCM=m
-CONFIG_CRYPTO_CHACHA20POLY1305=m
-CONFIG_CRYPTO_SEQIV=y
-CONFIG_CRYPTO_ECHAINIV=m
-
-#
-# Block modes
-#
-CONFIG_CRYPTO_CBC=y
-CONFIG_CRYPTO_CTR=y
-CONFIG_CRYPTO_CTS=m
-CONFIG_CRYPTO_ECB=y
-CONFIG_CRYPTO_LRW=y
-CONFIG_CRYPTO_PCBC=m
-CONFIG_CRYPTO_XTS=y
-CONFIG_CRYPTO_KEYWRAP=m
-
-#
-# Hash modes
-#
-CONFIG_CRYPTO_CMAC=m
-CONFIG_CRYPTO_HMAC=y
-CONFIG_CRYPTO_XCBC=m
-CONFIG_CRYPTO_VMAC=m
-
-#
-# Digest
-#
-CONFIG_CRYPTO_CRC32C=y
-CONFIG_CRYPTO_CRC32C_INTEL=m
-CONFIG_CRYPTO_CRC32=m
-CONFIG_CRYPTO_CRC32_PCLMUL=m
-CONFIG_CRYPTO_CRCT10DIF=y
-CONFIG_CRYPTO_CRCT10DIF_PCLMUL=m
-CONFIG_CRYPTO_GHASH=m
-CONFIG_CRYPTO_POLY1305=m
-CONFIG_CRYPTO_POLY1305_X86_64=m
-CONFIG_CRYPTO_MD4=m
-CONFIG_CRYPTO_MD5=y
-CONFIG_CRYPTO_MICHAEL_MIC=m
-CONFIG_CRYPTO_RMD128=m
-CONFIG_CRYPTO_RMD160=m
-CONFIG_CRYPTO_RMD256=m
-CONFIG_CRYPTO_RMD320=m
-CONFIG_CRYPTO_SHA1=y
-CONFIG_CRYPTO_SHA1_SSSE3=m
-CONFIG_CRYPTO_SHA256_SSSE3=m
-CONFIG_CRYPTO_SHA512_SSSE3=m
-# CONFIG_CRYPTO_SHA1_MB is not set
-CONFIG_CRYPTO_SHA256=y
-CONFIG_CRYPTO_SHA512=m
-CONFIG_CRYPTO_TGR192=m
-CONFIG_CRYPTO_WP512=m
-CONFIG_CRYPTO_GHASH_CLMUL_NI_INTEL=m
-
-#
-# Ciphers
-#
-CONFIG_CRYPTO_AES=y
-CONFIG_CRYPTO_AES_X86_64=y
-CONFIG_CRYPTO_AES_NI_INTEL=y
-CONFIG_CRYPTO_ANUBIS=m
-CONFIG_CRYPTO_ARC4=m
-CONFIG_CRYPTO_BLOWFISH=m
-CONFIG_CRYPTO_BLOWFISH_COMMON=m
-CONFIG_CRYPTO_BLOWFISH_X86_64=m
-CONFIG_CRYPTO_CAMELLIA=m
-CONFIG_CRYPTO_CAMELLIA_X86_64=m
-CONFIG_CRYPTO_CAMELLIA_AESNI_AVX_X86_64=m
-CONFIG_CRYPTO_CAMELLIA_AESNI_AVX2_X86_64=m
-CONFIG_CRYPTO_CAST_COMMON=m
-CONFIG_CRYPTO_CAST5=m
-CONFIG_CRYPTO_CAST5_AVX_X86_64=m
-CONFIG_CRYPTO_CAST6=m
-CONFIG_CRYPTO_CAST6_AVX_X86_64=m
-CONFIG_CRYPTO_DES=m
-CONFIG_CRYPTO_DES3_EDE_X86_64=m
-CONFIG_CRYPTO_FCRYPT=m
-CONFIG_CRYPTO_KHAZAD=m
-CONFIG_CRYPTO_SALSA20=m
-CONFIG_CRYPTO_SALSA20_X86_64=m
-CONFIG_CRYPTO_CHACHA20=m
-CONFIG_CRYPTO_CHACHA20_X86_64=m
-CONFIG_CRYPTO_SEED=m
-CONFIG_CRYPTO_SERPENT=m
-CONFIG_CRYPTO_SERPENT_SSE2_X86_64=m
-CONFIG_CRYPTO_SERPENT_AVX_X86_64=m
-CONFIG_CRYPTO_SERPENT_AVX2_X86_64=m
-CONFIG_CRYPTO_TEA=m
-CONFIG_CRYPTO_TWOFISH=m
-CONFIG_CRYPTO_TWOFISH_COMMON=m
-CONFIG_CRYPTO_TWOFISH_X86_64=m
-CONFIG_CRYPTO_TWOFISH_X86_64_3WAY=m
-CONFIG_CRYPTO_TWOFISH_AVX_X86_64=m
-
-#
-# Compression
-#
-CONFIG_CRYPTO_DEFLATE=m
-# CONFIG_CRYPTO_ZLIB is not set
-CONFIG_CRYPTO_LZO=y
-CONFIG_CRYPTO_842=m
-CONFIG_CRYPTO_LZ4=m
-CONFIG_CRYPTO_LZ4HC=m
-
-#
-# Random Number Generation
-#
-CONFIG_CRYPTO_ANSI_CPRNG=m
-CONFIG_CRYPTO_DRBG_MENU=y
-CONFIG_CRYPTO_DRBG_HMAC=y
-CONFIG_CRYPTO_DRBG_HASH=y
-CONFIG_CRYPTO_DRBG_CTR=y
-CONFIG_CRYPTO_DRBG=y
-CONFIG_CRYPTO_JITTERENTROPY=y
-CONFIG_CRYPTO_USER_API=y
-CONFIG_CRYPTO_USER_API_HASH=y
-CONFIG_CRYPTO_USER_API_SKCIPHER=y
-CONFIG_CRYPTO_USER_API_RNG=y
-CONFIG_CRYPTO_USER_API_AEAD=y
-CONFIG_CRYPTO_HASH_INFO=y
-CONFIG_CRYPTO_HW=y
-CONFIG_CRYPTO_DEV_PADLOCK=m
-CONFIG_CRYPTO_DEV_PADLOCK_AES=m
-CONFIG_CRYPTO_DEV_PADLOCK_SHA=m
-CONFIG_CRYPTO_DEV_CCP=y
-CONFIG_CRYPTO_DEV_CCP_DD=m
-CONFIG_CRYPTO_DEV_CCP_CRYPTO=m
-CONFIG_CRYPTO_DEV_QAT=m
-CONFIG_CRYPTO_DEV_QAT_DH895xCC=m
-CONFIG_CRYPTO_DEV_QAT_DH895xCCVF=m
-# CONFIG_CRYPTO_DEV_QCE is not set
-# CONFIG_CRYPTO_DEV_IMGTEC_HASH is not set
-CONFIG_ASYMMETRIC_KEY_TYPE=y
-CONFIG_ASYMMETRIC_PUBLIC_KEY_SUBTYPE=y
-CONFIG_PUBLIC_KEY_ALGO_RSA=y
-CONFIG_X509_CERTIFICATE_PARSER=y
-CONFIG_PKCS7_MESSAGE_PARSER=y
-# CONFIG_PKCS7_TEST_KEY is not set
-CONFIG_SIGNED_PE_FILE_VERIFICATION=y
-
-#
-# Certificates for signature checking
-#
-CONFIG_MODULE_SIG_KEY="certs/signing_key.pem"
-CONFIG_SYSTEM_TRUSTED_KEYRING=y
-CONFIG_SYSTEM_TRUSTED_KEYS=""
-CONFIG_HAVE_KVM=y
-CONFIG_HAVE_KVM_IRQCHIP=y
-CONFIG_HAVE_KVM_IRQFD=y
-CONFIG_HAVE_KVM_IRQ_ROUTING=y
-CONFIG_HAVE_KVM_EVENTFD=y
-CONFIG_KVM_APIC_ARCHITECTURE=y
-CONFIG_KVM_MMIO=y
-CONFIG_KVM_ASYNC_PF=y
-CONFIG_HAVE_KVM_MSI=y
-CONFIG_HAVE_KVM_CPU_RELAX_INTERCEPT=y
-CONFIG_KVM_VFIO=y
-CONFIG_KVM_GENERIC_DIRTYLOG_READ_PROTECT=y
-CONFIG_KVM_COMPAT=y
-CONFIG_HAVE_KVM_IRQ_BYPASS=y
-CONFIG_VIRTUALIZATION=y
-CONFIG_KVM=m
-CONFIG_KVM_INTEL=m
-CONFIG_KVM_AMD=m
-CONFIG_KVM_MMU_AUDIT=y
-CONFIG_KVM_DEVICE_ASSIGNMENT=y
-CONFIG_BINARY_PRINTF=y
-
-#
-# Library routines
-#
-CONFIG_RAID6_PQ=m
-CONFIG_BITREVERSE=y
-# CONFIG_HAVE_ARCH_BITREVERSE is not set
-CONFIG_RATIONAL=y
-CONFIG_GENERIC_STRNCPY_FROM_USER=y
-CONFIG_GENERIC_STRNLEN_USER=y
-CONFIG_GENERIC_NET_UTILS=y
-CONFIG_GENERIC_FIND_FIRST_BIT=y
-CONFIG_GENERIC_PCI_IOMAP=y
-CONFIG_GENERIC_IOMAP=y
-CONFIG_GENERIC_IO=y
-CONFIG_ARCH_USE_CMPXCHG_LOCKREF=y
-CONFIG_ARCH_HAS_FAST_MULTIPLIER=y
-CONFIG_CRC_CCITT=m
-CONFIG_CRC16=y
-CONFIG_CRC_T10DIF=y
-CONFIG_CRC_ITU_T=m
-CONFIG_CRC32=y
-# CONFIG_CRC32_SELFTEST is not set
-CONFIG_CRC32_SLICEBY8=y
-# CONFIG_CRC32_SLICEBY4 is not set
-# CONFIG_CRC32_SARWATE is not set
-# CONFIG_CRC32_BIT is not set
-CONFIG_CRC7=m
-CONFIG_LIBCRC32C=m
-CONFIG_CRC8=m
-# CONFIG_AUDIT_ARCH_COMPAT_GENERIC is not set
-# CONFIG_RANDOM32_SELFTEST is not set
-CONFIG_842_COMPRESS=m
-CONFIG_842_DECOMPRESS=m
-CONFIG_ZLIB_INFLATE=y
-CONFIG_ZLIB_DEFLATE=y
-CONFIG_LZO_COMPRESS=y
-CONFIG_LZO_DECOMPRESS=y
-CONFIG_LZ4_COMPRESS=m
-CONFIG_LZ4HC_COMPRESS=m
-CONFIG_LZ4_DECOMPRESS=y
-CONFIG_XZ_DEC=y
-CONFIG_XZ_DEC_X86=y
-CONFIG_XZ_DEC_POWERPC=y
-CONFIG_XZ_DEC_IA64=y
-CONFIG_XZ_DEC_ARM=y
-CONFIG_XZ_DEC_ARMTHUMB=y
-CONFIG_XZ_DEC_SPARC=y
-CONFIG_XZ_DEC_BCJ=y
-# CONFIG_XZ_DEC_TEST is not set
-CONFIG_DECOMPRESS_GZIP=y
-CONFIG_DECOMPRESS_BZIP2=y
-CONFIG_DECOMPRESS_LZMA=y
-CONFIG_DECOMPRESS_XZ=y
-CONFIG_DECOMPRESS_LZO=y
-CONFIG_DECOMPRESS_LZ4=y
-CONFIG_GENERIC_ALLOCATOR=y
-CONFIG_REED_SOLOMON=m
-CONFIG_REED_SOLOMON_ENC8=y
-CONFIG_REED_SOLOMON_DEC8=y
-CONFIG_TEXTSEARCH=y
-CONFIG_TEXTSEARCH_KMP=m
-CONFIG_TEXTSEARCH_BM=m
-CONFIG_TEXTSEARCH_FSM=m
-CONFIG_BTREE=y
-CONFIG_INTERVAL_TREE=y
-CONFIG_ASSOCIATIVE_ARRAY=y
-CONFIG_HAS_IOMEM=y
-CONFIG_HAS_IOPORT_MAP=y
-CONFIG_HAS_DMA=y
-CONFIG_CHECK_SIGNATURE=y
-CONFIG_CPU_RMAP=y
-CONFIG_DQL=y
-CONFIG_GLOB=y
-# CONFIG_GLOB_SELFTEST is not set
-CONFIG_NLATTR=y
-CONFIG_ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE=y
-CONFIG_LRU_CACHE=m
-CONFIG_CLZ_TAB=y
-CONFIG_CORDIC=m
-# CONFIG_DDR is not set
-CONFIG_MPILIB=y
-CONFIG_OID_REGISTRY=y
-CONFIG_UCS2_STRING=y
-CONFIG_FONT_SUPPORT=y
-# CONFIG_FONTS is not set
-CONFIG_FONT_8x8=y
-CONFIG_FONT_8x16=y
-# CONFIG_SG_SPLIT is not set
-CONFIG_ARCH_HAS_SG_CHAIN=y
-CONFIG_ARCH_HAS_PMEM_API=y
-CONFIG_ARCH_HAS_MMIO_FLUSH=y
diff --git a/tools/pharos-validator/src/pxe_initrd/vmlinux/vmlinuz_build.sh b/tools/pharos-validator/src/pxe_initrd/vmlinux/vmlinuz_build.sh
deleted file mode 100755
index e69de29b..00000000
--- a/tools/pharos-validator/src/pxe_initrd/vmlinux/vmlinuz_build.sh
+++ /dev/null
diff --git a/tools/pharos-validator/src/validation_tool/.gitignore b/tools/pharos-validator/src/validation_tool/.gitignore
deleted file mode 100644
index 5559a0a2..00000000
--- a/tools/pharos-validator/src/validation_tool/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-tests/*.xml
-build/
diff --git a/tools/pharos-validator/src/validation_tool/LICENSE b/tools/pharos-validator/src/validation_tool/LICENSE
deleted file mode 100644
index 7a7c11af..00000000
--- a/tools/pharos-validator/src/validation_tool/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2016 Todd Gaunt and others
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/tools/pharos-validator/src/validation_tool/bin/pharos-validator-node b/tools/pharos-validator/src/validation_tool/bin/pharos-validator-node
deleted file mode 100755
index e81bc1bf..00000000
--- a/tools/pharos-validator/src/validation_tool/bin/pharos-validator-node
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import argparse
-import os
-import sys
-import logging
-
-from pharosvalidator import node
-
-def main():
- """Run validation tests on machine, then send results back to server
- on jump host"""
- args = parse_args()
-
- logger = configure_root_logger(0)
-
- if args["test"] == "hardware":
- result = node.hardware_test()
- elif args["test"] == "network":
- result = node.network_test()
- else:
- logger.error("Invalid test name chosen, please choose \"hardware\" or \"network\"")
- quit()
-
- logger.debug("TEST RESULTS\n" + "#"*50 + '\n' + result + "#"*50 + '\n')
- logger.info("Sending results to host...")
- node.send_result(args["host"], args["port"], result)
-
-def configure_root_logger(loglevel):
- # Add a file handler to the default logger
- formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
-
- # Configure the root logger
- stdout_handler = logging.StreamHandler(sys.stdout)
- stdout_handler.setLevel(loglevel)
- stdout_handler.setFormatter(formatter)
-
- root_logger = logging.getLogger()
- root_logger.addHandler(stdout_handler)
- root_logger.setLevel(loglevel)
-
- return root_logger
-
-def parse_args():
- """
- parse_args: parse the commandline arguments and configuration file into
- a dictionary that can be easily passed and referenced by other functions
-
- input: None
-
- output: Dictionary of all commandline arguments and configuration file
- settings
- """
- logger = logging.getLogger(__name__)
-
- parser = argparse.ArgumentParser( \
- description='evaluates a system against the pharos specification')
-
- parser.add_argument('--version',
- action="store_true", default=False,
- help='display version then exit')
-
- # Address that the client should connect to
- parser.add_argument('-H', '--host',
- type=str, default="0.0.0.0",
-                        help='Address of the server that results should be \
- uploaded to')
-
- # Port that the client should connect to
- parser.add_argument('-p', '--port',
- type=str, default=0,
-                        help='Port of the server that results will be uploaded to')
-
- # Specify which test to run on the node
- parser.add_argument('test', metavar='test',
- type=str,
- help='Which test should be run ["hardware", "network"]')
-
- args = vars(parser.parse_args())
-
- return args
-
-if __name__ == "__main__":
- main()
diff --git a/tools/pharos-validator/src/validation_tool/bin/pharos-validator-server b/tools/pharos-validator/src/validation_tool/bin/pharos-validator-server
deleted file mode 100755
index ac9e4f88..00000000
--- a/tools/pharos-validator/src/validation_tool/bin/pharos-validator-server
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env python3
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import yaml
-import threading
-import queue
-import argparse
-import logging
-import os
-import sys
-
-from pharosvalidator import receiver
-from pharosvalidator import util
-from pharosvalidator import dhcp
-from pharosvalidator import ipmi
-from pharosvalidator import config
-from pharosvalidator import server
-
-def main():
- """PXE boot each nodes, then start up server to receive results"""
- # The logger instance for this function
- logger = logging.getLogger("pharosvalidator")
- args = parse_args()
-
- # Determine the logging level
- loglevel = logging.INFO
- if args["verbose"]:
- loglevel = logging.DEBUG
- if args["quiet"]:
- loglevel = logging.CRITICAL
-
- configure_root_logger(loglevel, args["logfile"])
-
- # Create a new logger strictly for logging test results to a file
- test_logger = logging.getLogger('test_logger')
- test_logger.setLevel(logging.INFO)
- tl_handler = logging.FileHandler(args["test_log"])
- tl_handler.setFormatter(logging.Formatter("%(message)s"))
- test_logger.addHandler(tl_handler)
-
- # Open up the inventory file
- invconf = config.Inventory(args["inventoryfile"])
-
-    # Open up the network configuration file
- netconf = config.Topology(args["networkfile"])
-
- # Assign yourself an ip
- #bring_up_admin_ip(netconf.networks["admin"].installer_ip)
-
- # Start dhcp server
- dhcp.gen_dhcpd_file(args["dhcpdfile"], invconf.nodes, netconf.networks["admin"])
- if dhcp.start_server() != 0:
- logger.error("Halting, cannot bring up dhcp server")
- quit()
-
-
- # Queue for holding test logs, makes program thread safe
- logs_q = queue.Queue()
-
- # Start a new thread for the server that receives results
- threading.Thread(target=receiver.start, \
- args=(invconf.nodecount(), args["server-port"], logs_q), \
- daemon=True).start()
-
- failed_nodes = ipmi.power_nodes(invconf.nodes, "on")
-
- # If the failed nodes list is not empty, then fail
- if failed_nodes != []:
- logger.error("Halting, {} were unable to be powered on".format(", ".join(failed_nodes)))
- quit()
-
- admin_network = netconf.networks["admin"]
-
- ip_range = util.gen_ip_range(admin_network.cidr, [admin_network.installer_ip], admin_network.usable_ip_range.minimum, \
- admin_network.usable_ip_range.maximum)
-
- logger.info(ip_range)
-
- available_ips = server.ping_network(ip_range_list=ip_range, ipcnt=len(invconf.nodes), passes=20)
-
- logger.info(available_ips)
-
- # Start a thread to run tests on each different node, and setup
- # their NICs
- for ip in available_ips:
- threading.Thread( \
- target=server.ssh_thread, \
- args=(str(ip), str(admin_network.installer_ip), str(args["port"]), 200), \
- daemon=True).start()
-
- while True:
- logger.info("Awaiting test result...")
- test_logger.info(logs_q.get())
- logger.info("Logging test result...")
- if logs_q.empty():
- break
-
- logger.info("Finished test, check {} and {}".format(args["logfile"], args["test_log"]))
-
-
-def configure_root_logger(loglevel, logfile):
- # Add a file handler to the default logger
- formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
-
- # Configure the root logger
- stdout_handler = logging.StreamHandler(sys.stdout)
- stdout_handler.setLevel(loglevel)
- stdout_handler.setFormatter(formatter)
- rl_handler = logging.FileHandler(logfile)
- rl_handler.setFormatter(formatter)
-
- root_logger = logging.getLogger()
- root_logger.addHandler(rl_handler)
- root_logger.addHandler(stdout_handler)
- root_logger.setLevel(loglevel)
-
-def parse_args():
- """
- parse_args: parse the commandline arguments and configuration file into
- a dictionary that can be easily passed and referenced by other functions
-
- input: None
-
- output: Dictionary of all commandline arguments and configuration file
- settings
- """
- logger = logging.getLogger(__name__)
-
- parser = argparse.ArgumentParser( \
- description='evaluates a system against the pharos specification')
-
- parser.add_argument('--version',
- action="store_true", default=False,
- help='display version then exit')
-
- parser.add_argument('-q', '--quiet',
- action="store_true", default=False,
- help='disable console output')
-
- parser.add_argument('-v', '--verbose',
- action="store_true", default=False,
- help='Enable debugging level output')
-
- parser.add_argument('-o', '--output',
- type=str, default="yaml",
- help='Define which machine readable format to output')
-
-    # configuration file to read settings from
- parser.add_argument('-c', '--config',
- type=str, default="/etc/pharosvalidator/config.yaml",
- help='Configuration file to read')
-
- # port that the server should use
- parser.add_argument('-p', '--port',
- type=str, default=12121,
-                        help='port that the results server \
-                              should listen on')
-
- args = vars(parser.parse_args())
-
- # Read the configuration file first to get extra information
- if os.path.isfile(args["config"]):
- with open(args["config"], 'r') as fd:
- conf = yaml.load(fd.read())
- else:
- logger.error("Halting, unable to load configuration file")
- quit(1)
-
- for field in args:
- conf[field] = args[field]
- args = conf
-
- return args
-
-if __name__ == "__main__":
- main()
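
For reference, a minimal self-contained sketch of the queue-based result collection the server's main() relies on: receiver threads put node results on a thread-safe queue and the main thread drains it into the test log. Names and counts below are illustrative only, not part of the deleted module.

    import queue
    import threading
    import time

    def fake_receiver(name, logs_q):
        # Stand-in for the receiver thread; a real node would post its report.
        time.sleep(0.1)
        logs_q.put("results from {}".format(name))

    def collect(nodecount):
        logs_q = queue.Queue()
        for i in range(nodecount):
            threading.Thread(target=fake_receiver,
                             args=("node{}".format(i), logs_q),
                             daemon=True).start()
        # Draining by expected count avoids stopping early if the queue is
        # momentarily empty between results.
        for _ in range(nodecount):
            print(logs_q.get())

    collect(3)
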
diff --git a/tools/pharos-validator/src/validation_tool/doc/config.yaml b/tools/pharos-validator/src/validation_tool/doc/config.yaml
deleted file mode 100644
index f2146ff9..00000000
--- a/tools/pharos-validator/src/validation_tool/doc/config.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
----
-###################
-# Program arguments
-###################
-#
-# Define path of output file
-output: "yaml" # Values are ("yaml", )
-
-#################
-# Server settings
-#################
-
-# Address that the client should attempt to connect to. Has no effect on server
-server-addr: "0.0.0.0"
-# Server port, shouldn't need to change
-server-port: 12121
-
-#############################
-# Filepaths for program files
-#############################
-
-# Define directory for the program to monitor files pushed back by clients
-tmppath: "/tmp/pharosqa"
-# dhcpd configuration output filepath
-confpath: "/etc/pharosqa/"
-# Examplepath
-sharepath: "/usr/share/pharosqa/"
-# Path to inventory file
-inventoryfile: "/etc/pharosvalidator/inventory.yaml"
-# Path to network file
-networkfile: "/etc/pharosvalidator/network.yaml"
-# Path to logfile
-logfile: "/tmp/runtime_logs.txt"
-# Path to test logs
-test_log: "/tmp/results.yaml"
-# Path to dhcpd configuration file
-dhcpdfile: "/etc/dhcp/dhcpd.conf"
diff --git a/tools/pharos-validator/src/validation_tool/doc/example/config.yaml b/tools/pharos-validator/src/validation_tool/doc/example/config.yaml
deleted file mode 100644
index f2146ff9..00000000
--- a/tools/pharos-validator/src/validation_tool/doc/example/config.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
----
-###################
-# Program arguments
-###################
-#
-# Define path of output file
-output: "yaml" # Values are ("yaml", )
-
-#################
-# Server settings
-#################
-
-# Address that the client should attempt to connect to. Has no effect on server
-server-addr: "0.0.0.0"
-# Server port, shouldn't need to change
-server-port: 12121
-
-#############################
-# Filepaths for program files
-#############################
-
-# Define directory for the program to monitor files pushed back by clients
-tmppath: "/tmp/pharosqa"
-# dhcpd configuration output filepath
-confpath: "/etc/pharosqa/"
-# Examplepath
-sharepath: "/usr/share/pharosqa/"
-# Path to inventory file
-inventoryfile: "/etc/pharosvalidator/inventory.yaml"
-# Path to network file
-networkfile: "/etc/pharosvalidator/network.yaml"
-# Path to logfile
-logfile: "/tmp/runtime_logs.txt"
-# Path to test logs
-test_log: "/tmp/results.yaml"
-# Path to dhcpd configuration file
-dhcpdfile: "/etc/dhcp/dhcpd.conf"
diff --git a/tools/pharos-validator/src/validation_tool/doc/example/inventory.yaml b/tools/pharos-validator/src/validation_tool/doc/example/inventory.yaml
deleted file mode 100644
index 2ba768ab..00000000
--- a/tools/pharos-validator/src/validation_tool/doc/example/inventory.yaml
+++ /dev/null
@@ -1,38 +0,0 @@
----
-nodes:
- - name: node0
- tags: control # optional param, other valid value "compute"
- arch: "x86_64"
- mac_address: "de:ad:be:ef:11:11" # pxe boot interface mac
- cpus: 2 # required only for virtual deployments
- memory: 2048 # required only for virtual deployments
- disk: 40 # required only for virtual deployments
- power:
- type: ipmi
- address: 10.4.7.2
- user: root
- pass: root
- - name: node1
- tags: control # optional param, other valid value "compute"
- arch: "x86_64"
- mac_address: "de:ad:be:ef:22:22" # pxe boot interface mac
- cpus: 2 # required only for virtual deployments
- memory: 2048 # required only for virtual deployments
- disk: 40 # required only for virtual deployments
- power:
- type: ipmi
- address: 10.4.7.3
- user: root
- pass: root
- - name: node2
- tags: control # optional param, other valid value "compute"
- arch: "x86_64"
- mac_address: "de:ad:be:ef:33:33" # pxe boot interface mac
- cpus: 2 # required only for virtual deployments
- memory: 2048 # required only for virtual deployments
- disk: 40 # required only for virtual deployments
- power:
- type: ipmi
- address: 10.4.7.4
- user: root
- pass: root
diff --git a/tools/pharos-validator/src/validation_tool/doc/example/network.yaml b/tools/pharos-validator/src/validation_tool/doc/example/network.yaml
deleted file mode 100644
index 91c1be91..00000000
--- a/tools/pharos-validator/src/validation_tool/doc/example/network.yaml
+++ /dev/null
@@ -1,207 +0,0 @@
----
-# This configuration file defines Network Environment for a
-# Baremetal Deployment of OPNFV. It contains default values
-# for 5 following networks:
-#
-# - admin
-# - tenant*
-# - external*
-# - storage*
-# - api*
-# *) optional networks
-#
-# Optional networks will be consolidated with the admin network
-# if not explicitly configured.
-#
-# See short description of the networks in the comments below.
-#
-# "admin" is the short name for Control Plane Network.
-# During OPNFV deployment it is used for node provisioning which will require
-# PXE booting as well as running a DHCP server on this network. Be sure to
-# disable any other DHCP/TFTP server on this network.
-#
-# "tenant" is the network used for tenant traffic.
-#
-# "external" is the network which should have internet or external
-# connectivity. External OpenStack networks will be configured to egress this
-# network. There can be multiple external networks, but only one assigned as
-# "public" which OpenStack public API's will register.
-#
-# "storage" is the network for storage I/O.
-#
-# "api" is an optional network for splitting out OpenStack service API
-# communication. This should be used for IPv6 deployments.
-
-network-config-metadata: # Meta data for the network configuration
- title: LF-POD-1 Network config #
- version: 0.1 #
- created: Mon Dec 28 2015 #
- comment: None #
-
-# yamllint disable rule:line-length
-networks: # Network configurations
- admin: # Admin configuration (pxe and jumpstart),
- enabled: true
- vlan: native # VLAN tag to use for Overcloud hosts on this network
- installer_vm: # Network settings for the Installer VM on admin network
- nic_type: interface # Indicates if this VM will be bridged to an interface, or to a bond
- members:
- - em1 # Member Interface to bridge to for installer VM (use multiple values for bond)
- vlan: 29 # VLAN tag to use for this network on Installer VM, native means none
- ip: 10.10.25.100 # IP to assign to Installer VM on this network
- usable_ip_range: 10.10.25.0,10.10.25.100 # Usable ip range, if empty entire range is usable, ex. 192.168.1.10,192.168.1.20
- gateway: 192.0.2.1 # Gateway (only needed when public_network is disabled)
- cidr: 10.10.25.0/24 # Subnet in CIDR format 192.168.1.0/24
- dhcp_range: 10.10.25.50,10.10.25.99 # DHCP range for the admin network, if empty it will be automatically provisioned
- dns-domain: opnfvlf.org # Admin network dns domain
- dns-search: opnfvlf.org # Admin network dns-search domain
- dns-upstream: # Admin network upstream dns servers
- - 8.8.8.8 #
- - 8.8.4.4 #
- ntp-upstream: # Admin upstream ntp servers
- - 0.se.pool.ntp.org #
- - 1.se.pool.ntp.org #
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: native # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: bond
- vlan: native
- members:
- - eth1
- - eth2
-
- tenant: # Tenant network configuration
- enabled: true
- cidr: 11.0.0.0/24 # Subnet in CIDR format 192.168.1.0/24
- vlan: 10 # VLAN tag to use for Overcloud hosts on this network
- mtu: 64000 # Tenant network MTU
- overlay_id_range: 2,65535 # Tenant network Overlay segmentation ID range: VNI, VLAN-ID, etc.
-
- segmentation_type: vxlan # Tenant network segmentation type:vlan, vxlan, gre
-
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: 10 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: interface
- vlan: 10
- members:
- - eth1 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
-
- external: # Can contain 1 or more external networks
- - public: # "public" network will be the network the installer VM attaches to
- enabled: true
- vlan: native
- mtu: 64000 # Public network MTU
- installer_vm: # Network settings for the Installer VM on admin network (note only valid on 'public' external network)
- nic_type: interface # Indicates if this VM will be bridged to an interface, or to a bond
- members:
- - em1 # Member Interface to bridge to for installer VM (use multiple values for bond)
- ip: 192.168.37.12 # IP to assign to Installer VM on this network
- cidr: 192.168.37.0/24
- gateway: 192.168.37.1
- floating_ip_range: 192.168.37.200,192.168.37.220 # Range to allocate to floating IPs for the public network with Neutron
- usable_ip_range: 192.168.37.10,192.168.37.199 # Usable IP range on the public network, usually this is a shared subnet
- dns_nameservers: # External dns servers
- - 8.8.8.8 #
- - 8.8.4.4 #
- ntp: # External upstream NTP servers
- - 0.se.pool.ntp.org #
- - 1.se.pool.ntp.org #
- syslog: # External Syslog server
- server: 10.128.1.24 #
- transport: 'tcp' #
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: 10 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: interface
- vlan: 10
- members:
- - eth1
- external_overlay: # External network to be created in OpenStack by Services tenant
- name: Public_internet
- type: flat
- gateway: 192.168.37.1
- - private_cloud: # another external network
- enabled: false
- vlan: 101
- mtu: 64000
- cidr: 192.168.38.0/24
- gateway: 192.168.38.1
- floating_ip_range: 192.168.38.200,192.168.38.220 # Range to allocate to floating IPs for the public network with Neutron
- usable_ip_range: 192.168.38.10,192.168.38.199 # Usable IP range on the public network, usually this is a shared subnet
- dns_nameservers: # External dns servers
- - 8.8.8.8 #
- - 8.8.4.4 #
- ntp: # External upstream NTP servers
- - 0.se.pool.ntp.org #
- - 1.se.pool.ntp.org #
- syslog: # External Syslog server
- server: 10.128.1.24 #
- transport: 'tcp' #
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: 101 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: interface
- vlan: 101
- members:
- - eth1
- external_overlay: # External network to be created in OpenStack by Services tenant
- name: private_cloud
- type: vlan
- segmentation_id: 101
- gateway: 192.168.38.1
-
- storage: # Storage network configuration
- enabled: true
- cidr: 12.0.0.0/24 # Subnet in CIDR format
- vlan: 12 # VLAN tag to use for Overcloud hosts on this network
-    mtu: 64000                                      # Storage network MTU
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: bond # Physical interface type (interface or bond)
- vlan: 12 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth3 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- - eth4
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: bond
- vlan: 12
- members:
- - eth3
- - eth4
-
- api: # API network configuration
- enabled: false
- cidr: fd00:fd00:fd00:4000::/64 # Subnet in CIDR format
- vlan: 13 # VLAN tag to use for Overcloud hosts on this network
-    mtu: 64000                                      # API network MTU
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: bond # Physical interface type (interface or bond)
- vlan: 13 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth3 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- - eth4
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: bond
- vlan: 13
- members:
- - eth3
- - eth4
-# yamllint enable rule:line-length
diff --git a/tools/pharos-validator/src/validation_tool/doc/inventory.yaml b/tools/pharos-validator/src/validation_tool/doc/inventory.yaml
deleted file mode 100644
index 2ba768ab..00000000
--- a/tools/pharos-validator/src/validation_tool/doc/inventory.yaml
+++ /dev/null
@@ -1,38 +0,0 @@
----
-nodes:
- - name: node0
- tags: control # optional param, other valid value "compute"
- arch: "x86_64"
- mac_address: "de:ad:be:ef:11:11" # pxe boot interface mac
- cpus: 2 # required only for virtual deployments
- memory: 2048 # required only for virtual deployments
- disk: 40 # required only for virtual deployments
- power:
- type: ipmi
- address: 10.4.7.2
- user: root
- pass: root
- - name: node1
- tags: control # optional param, other valid value "compute"
- arch: "x86_64"
- mac_address: "de:ad:be:ef:22:22" # pxe boot interface mac
- cpus: 2 # required only for virtual deployments
- memory: 2048 # required only for virtual deployments
- disk: 40 # required only for virtual deployments
- power:
- type: ipmi
- address: 10.4.7.3
- user: root
- pass: root
- - name: node2
- tags: control # optional param, other valid value "compute"
- arch: "x86_64"
- mac_address: "de:ad:be:ef:33:33" # pxe boot interface mac
- cpus: 2 # required only for virtual deployments
- memory: 2048 # required only for virtual deployments
- disk: 40 # required only for virtual deployments
- power:
- type: ipmi
- address: 10.4.7.4
- user: root
- pass: root
diff --git a/tools/pharos-validator/src/validation_tool/doc/network.yaml b/tools/pharos-validator/src/validation_tool/doc/network.yaml
deleted file mode 100644
index 705e8486..00000000
--- a/tools/pharos-validator/src/validation_tool/doc/network.yaml
+++ /dev/null
@@ -1,221 +0,0 @@
----
-# This configuration file defines Network Environment for a
-# Baremetal Deployment of OPNFV. It contains default values
-# for 5 following networks:
-#
-# - admin
-# - tenant*
-# - external*
-# - storage*
-# - api*
-# *) optional networks
-#
-# Optional networks will be consolidated with the admin network
-# if not explicitly configured.
-#
-# See short description of the networks in the comments below.
-#
-# "admin" is the short name for Control Plane Network.
-# During OPNFV deployment it is used for node provisioning which will require
-# PXE booting as well as running a DHCP server on this network. Be sure to
-# disable any other DHCP/TFTP server on this network.
-#
-# "tenant" is the network used for tenant traffic.
-#
-# "external" is the network which should have internet or external
-# connectivity. External OpenStack networks will be configured to egress this
-# network. There can be multiple external networks, but only one assigned as
-# "public" which OpenStack public API's will register.
-#
-# "storage" is the network for storage I/O.
-#
-# "api" is an optional network for splitting out OpenStack service API
-# communication. This should be used for IPv6 deployments.
-
-# yamllint disable rule:line-length
-network-config-metadata: # Meta data for the network configuration
- title: LF-POD-1 Network config #
- version: 0.1 #
- created: Mon Dec 28 2015 #
- comment: None #
-
-
-networks: # Network configurations
- admin: # Admin configuration (pxe and jumpstart),
- enabled: true
- vlan: native # VLAN tag to use for Overcloud hosts on this network (Admin network is required to be native / untagged for PXE booting)
- installer_vm: # Network settings for the Installer VM on admin network
- nic_type: interface # Indicates if this VM will be bridged to an interface, or to a bond
- members:
- - em1 # Member Interface to bridge to for installer VM (use multiple values for bond)
- vlan: native # VLAN tag to use for this network on Installer VM, native means none
- ip: 192.0.2.1 # IP to assign to Installer VM on this network
- usable_ip_range: 192.0.2.11,192.0.2.99 # Usable ip range, if empty entire range is usable, ex. 192.168.1.10,192.168.1.20
- gateway: 192.0.2.1 # Gateway (only needed when public_network is disabled)
- cidr: 192.0.2.0/24 # Subnet in CIDR format 192.168.1.0/24
- dhcp_range: 192.0.2.2,192.0.2.10 # DHCP range for the admin network, if empty it will be automatically provisioned
- dns-domain: opnfvlf.org # Admin network dns domain
- dns-search: opnfvlf.org # Admin network dns-search domain
- dns-upstream: # Admin network upstream dns servers
- - 8.8.8.8 #
- - 8.8.4.4 #
- ntp-upstream: # Admin upstream ntp servers
- - 0.se.pool.ntp.org #
- - 1.se.pool.ntp.org #
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: native # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: bond
- vlan: native
- members:
- - eth1
- - eth2
-
- tenant: # Tenant network configuration
- enabled: true
- cidr: 11.0.0.0/24 # Subnet in CIDR format 192.168.1.0/24
- vlan: 10 # VLAN tag to use for Overcloud hosts on this network
- mtu: 64000 # Tenant network MTU
- overlay_id_range: 2,65535 # Tenant network Overlay segmentation ID range: VNI, VLAN-ID, etc.
-
- segmentation_type: vxlan # Tenant network segmentation type: vlan, vxlan, gre
-
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: 10 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: interface
- vlan: 10
- members:
- - eth1 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
-
- external: # Can contain 1 or more external networks
- - public: # "public" network will be the network the installer VM attaches to
- enabled: true
- vlan: native
- mtu: 64000 # Public network MTU
- installer_vm: # Network settings for the Installer VM on admin network (note only valid on 'public' external network)
- nic_type: interface # Indicates if this VM will be bridged to an interface, or to a bond
- members:
- - em1 # Member Interface to bridge to for installer VM (use multiple values for bond)
- ip: 192.168.37.12 # IP to assign to Installer VM on this network
- cidr: 192.168.37.0/24
- gateway: 192.168.37.1
- floating_ip_range: 192.168.37.200,192.168.37.220 # Range to allocate to floating IPs for the public network with Neutron
- usable_ip_range: 192.168.37.10,192.168.37.199 # Usable IP range on the public network, usually this is a shared subnet
- dns_nameservers: # External dns servers
- - 8.8.8.8 #
- - 8.8.4.4 #
- ntp: # External upstream NTP servers
- - 0.se.pool.ntp.org #
- - 1.se.pool.ntp.org #
- syslog: # External Syslog server
- server: 10.128.1.24 #
- transport: 'tcp' #
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: 10 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: interface
- vlan: 10
- members:
- - eth1
- external_overlay: # External network to be created in OpenStack by Services tenant
- name: Public_internet
- type: flat
- gateway: 192.168.37.1
- - private_cloud: # another external network
- enabled: false
- vlan: 101
- mtu: 64000
- cidr: 192.168.38.0/24
- gateway: 192.168.38.1
- floating_ip_range: 192.168.38.200,192.168.38.220 # Range to allocate to floating IPs for the public network with Neutron
- usable_ip_range: 192.168.38.10,192.168.38.199 # Usable IP range on the public network, usually this is a shared subnet
- dns_nameservers: # External dns servers
- - 8.8.8.8 #
- - 8.8.4.4 #
- ntp: # External upstream NTP servers
- - 0.se.pool.ntp.org #
- - 1.se.pool.ntp.org #
- syslog: # External Syslog server
- server: 10.128.1.24 #
- transport: 'tcp' #
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: interface # Physical interface type (interface or bond)
- vlan: 101 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth1 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: interface
- vlan: 101
- members:
- - eth1
- external_overlay: # External network to be created in OpenStack by Services tenant
- name: private_cloud
- type: vlan
- segmentation_id: 101
- gateway: 192.168.38.1
-
- storage: # Storage network configuration
- enabled: true
- cidr: 12.0.0.0/24 # Subnet in CIDR format
- vlan: 12 # VLAN tag to use for Overcloud hosts on this network
-    mtu: 64000                                      # Storage network MTU
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: bond # Physical interface type (interface or bond)
- vlan: 12 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth3 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- - eth4
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: bond
- vlan: 12
- members:
- - eth3
- - eth4
-
- api: # API network configuration
- enabled: false
- cidr: fd00:fd00:fd00:4000::/64 # Subnet in CIDR format
- vlan: 13 # VLAN tag to use for Overcloud hosts on this network
-    mtu: 64000                                      # API network MTU
- nic_mapping: # Mapping of network configuration for Overcloud Nodes
- compute: # Mapping for compute profile (nodes that will be used as Compute nodes)
- phys_type: bond # Physical interface type (interface or bond)
- vlan: 13 # VLAN tag to use with this NIC
- members: # Physical NIC members of this mapping (Single value allowed for interface phys_type)
- - eth3 # Note, for Apex you may also use the logical nic name (found by nic order), such as "nic1"
- - eth4
- controller: # Mapping for controller profile (nodes that will be used as Controller nodes)
- phys_type: bond
- vlan: 13
- members:
- - eth3
- - eth4
-
-# JOID specific settings
-joid:
-
-# Compass specific settings
-compass:
-
-# Apex specific settings
-apex:
- networks:
- admin:
- introspection_range: 192.0.2.100,192.0.2.120 # Range used for introspection phase (examining nodes)
-# Fuel specific settings
-fuel:
diff --git a/tools/pharos-validator/src/validation_tool/doc/pharos-validator.1 b/tools/pharos-validator/src/validation_tool/doc/pharos-validator.1
deleted file mode 100644
index c76176f1..00000000
--- a/tools/pharos-validator/src/validation_tool/doc/pharos-validator.1
+++ /dev/null
@@ -1,54 +0,0 @@
-.TH pharoscmp 1 "2016-06-01" "version 0.1"
-.SH NAME
-pharoscmp - Testing tool for Pharos specification
-
-.SH SYNOPSIS
-.B pharoscmp [ options ]
-
-.SH DESCRIPTION
-This tool reads the Linux virtual filesystems to determine a computer's hardware and evaluates it against the Pharos specification to decide whether the machine is suitable for use as a node in a Pharos pod.
-
-.SH OPTIONS
-.BR \-h ", " \-\-help
-Show a help message and exit
-.PP
-.BR \-\-version
-Show program version
-.PP
-.BR \-c ", " \-\-color
-Enable colored console output
-.PP
-.BR \-v ", " \-\-verbose
-Enable more verbose console output
-.PP
-.BR \-q ", " \-\-quiet
-Disable console output
-.PP
-.BR \-o ", " \-\-output
-Define path to output file for yaml file of results
-.PP
-.BR \-f ", " \-\-force
-Forcefully override file defined by --output
-.PP
-.BR \-P ", " \-\-path
-Path to the directory the test polls for the incoming "nodeinfo.yaml" file; this file is usually sent via ssh to wherever the test is running.
-.PP
-
-.SH BUGS
-Please report bugs to https://tommy.iol.unh.edu/redmine/projects/pharoscmp
-
-.SH AUTHOR
-AUTHOR INFO GOES HERE
-
-.SH LICENSE
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/tools/pharos-validator/src/validation_tool/requirements.txt b/tools/pharos-validator/src/validation_tool/requirements.txt
deleted file mode 100644
index dd5a2e6d..00000000
--- a/tools/pharos-validator/src/validation_tool/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-netifaces
-watchdog
-pytest
diff --git a/tools/pharos-validator/src/validation_tool/setup.py b/tools/pharos-validator/src/validation_tool/setup.py
deleted file mode 100755
index 6b00b388..00000000
--- a/tools/pharos-validator/src/validation_tool/setup.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#! /usr/bin/env python3
-
-from distutils.core import setup
-
-import subprocess
-
-setup(
- name = 'pharos-validator',
- description = 'Testing tool for Pharos spec compliance',
- author = 'Todd Gaunt',
- url = '',
- download_url = '',
- author_email = 'singularik@iol.unh.edu',
- version = '0.1',
- license = 'TBD',
-
- packages = ['pharosvalidator',
- 'pharosvalidator.test'],
-
- package_dir = {'pharosvalidator':'src',
- 'pharosvalidator.test':'src/test'},
-
- # Change these per distribution
- data_files = [('share/man/man1/', ['doc/pharos-validator.1']),
-                  ('share/licenses/pharos-validator/', ['LICENSE']),
- ('share/pharos-validator/', ['doc/config.yaml', 'doc/inventory.yaml', 'doc/network.yaml']),
- ],
-
- scripts = ['bin/pharos-validator-node',
- 'bin/pharos-validator-server']
- )
diff --git a/tools/pharos-validator/src/validation_tool/src/__init__.py b/tools/pharos-validator/src/validation_tool/src/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/tools/pharos-validator/src/validation_tool/src/__init__.py
+++ /dev/null
diff --git a/tools/pharos-validator/src/validation_tool/src/config.py b/tools/pharos-validator/src/validation_tool/src/config.py
deleted file mode 100644
index 443467ee..00000000
--- a/tools/pharos-validator/src/validation_tool/src/config.py
+++ /dev/null
@@ -1,176 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import logging
-import sys
-import os
-import yaml
-import struct
-import socket
-
-from pharosvalidator import util
-from collections import namedtuple
-
-class Topology():
- """
- Topology: Class to store any number of Network classes
- and metadata about them
- """
- def __init__(self, yaml_config):
- # Dictionary of available networks
- self.logger = logging.getLogger(__name__)
- self.networks = {}
- self.external_networks = []
-
- # Fill the above dictionaries
- self.parse_yaml(yaml_config)
-
- def parse_yaml(self, yaml_config):
- """
- parse_yaml: parses the yaml configuration file this program uses
- for all the network and node information
- """
- config = safe_yaml_read(yaml_config)
- for network in config["networks"]:
- self.logger.info("Reading network section {}".format(network))
- if network == "admin":
- self.networks[network] = Network(config["networks"][network])
- #TODO
- elif network == "external":
- for external_network in config["networks"][network]:
- self.external_networks.append(Network(external_network))
-
-class Network():
- """
- Network: Class to store all information on a given network
- """
- def __init__(self, network):
- try:
- self.logger = logging.getLogger(__name__)
-
- # Some generic settings
- self.enabled = network["enabled"]
- self.vlan = network["vlan"]
-
- # VM settings
- self.installer_nic_type = network["installer_vm"]["nic_type"]
- self.installer_members = network["installer_vm"]["members"]
- self.installer_ip = network["installer_vm"]["ip"]
-
- # Tuple containing the minimum and maximum
- self.usable_ip_range = self.parse_ip_range(network["usable_ip_range"])
- self.gateway = network["gateway"]
- self.cidr = network["cidr"]
- self.dhcp_range = network["dhcp_range"]
- self.dns_domain = network["dns-domain"]
- self.dns_search = network["dns-search"]
-
- subnet, netmask = self.split_cidr(network["cidr"])
- self.subnet = subnet
- self.netmask = netmask
-
- # List of all dns servers
- self.dns_upstream = network["dns-upstream"]
-
- self.nic_mapping = {}
- except KeyError as e:
- self.logger.error("Field {} not available in network configuration file".format(e))
-
- def split_cidr(self, cidr):
- """
- split_cidr: Split up cidr notation subnets into a subnet string and a
- netmask string
-
- input: cidr notation of a subnet
-
- output: Subnet string; Netmask string
- """
- split = cidr.split('/')
- host_bits = int(split[1])
- netmask = self.cidr_to_netmask(host_bits)
- subnet = split[0]
-
- return subnet, netmask
-
- def parse_ip_range(self, ip_range_string):
- """
- parse_ip_range: Create a named tuple object that contains the lowest
- ip address and the highest ip address from a configuration file
-
- input: String formatted like so "min, max" where min/max are ip addresses
-
- output: Named tuple object containing a minimum and maximum field
- """
- rp = ip_range_string.split(",")
-        ip_range = namedtuple("ip_range", ['minimum', 'maximum'])(minimum=rp[0].strip(), maximum=rp[1].strip())
- return ip_range
-
- def cidr_to_netmask(self, cidr):
- bits = 0xffffffff ^ (1 << 32 - cidr) - 1
- netmask = socket.inet_ntoa(struct.pack('>I', bits))
- self.logger.debug("Netmask generated from cidr '{}': '{}'".format(cidr, netmask))
- return netmask
-
-class Inventory():
- """
- Inventory: Class to hold configuration file data
- """
- def __init__(self, yaml_config):
- # Create the class logger
- self.logger = logging.getLogger(__name__)
-
- self.nodes = []
-
- # Fill the above list
- self.parse_yaml(yaml_config)
-
- def parse_yaml(self, yaml_config):
- config = safe_yaml_read(yaml_config)
- nodes = []
- for node in config["nodes"]:
- self.nodes.append(Node(node))
-
- def nodecount(self):
- return len(self.nodes)
-
-class Node():
- """
-    Node: Class to hold the inventory data for a single node
- """
- def __init__(self, node):
- self.logger = logging.getLogger(__name__)
- try:
- self.name = node["name"]
- self.tags = node["tags"]
- self.arch = node["arch"]
-            self.mac_address = node["mac_address"] # pxe boot interface mac address
- self.cpus = node["cpus"]
- self.memory = node["memory"]
- self.disk = node["disk"]
- except KeyError as e:
- self.logger.error("Field {} not available in inventory file".format(e))
-
- # Power sub section
- if node["power"]["type"] == "ipmi":
- try:
- self.ipmi_addr = node["power"]["address"]
- self.ipmi_user = node["power"]["user"]
- self.ipmi_pass = node["power"]["pass"]
- except KeyError as e:
- self.logger.error("Field {} not available in inventory file".format(e))
- else:
- pass
-
-def safe_yaml_read(yamlfile):
- logger = logging.getLogger(__name__)
-    if not os.path.isfile(yamlfile):
-        logger.critical("Could not find {}".format(yamlfile))
- quit(1)
- with open(yamlfile, 'r') as fd:
- return yaml.load(fd.read())
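
As a sanity check on the Network.cidr_to_netmask() arithmetic above, here is a standalone sketch (not part of the deleted module) that reproduces the conversion and cross-checks it against the standard library's ipaddress module.

    import ipaddress
    import socket
    import struct

    def cidr_to_netmask(prefix_len):
        # e.g. /24: 0xffffffff ^ 0x000000ff = 0xffffff00 -> "255.255.255.0"
        bits = 0xffffffff ^ ((1 << (32 - prefix_len)) - 1)
        return socket.inet_ntoa(struct.pack('>I', bits))

    for prefix in (8, 16, 24, 30):
        expected = str(ipaddress.ip_network("0.0.0.0/{}".format(prefix)).netmask)
        assert cidr_to_netmask(prefix) == expected

    print(cidr_to_netmask(24))  # 255.255.255.0
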
diff --git a/tools/pharos-validator/src/validation_tool/src/const.py b/tools/pharos-validator/src/validation_tool/src/const.py
deleted file mode 100644
index a204a964..00000000
--- a/tools/pharos-validator/src/validation_tool/src/const.py
+++ /dev/null
@@ -1,48 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-## Various constant strings used throughout program
-HARDWARE_TEST="pharos-validator-node"
-
-## Pharos hardware specification
-# memory
-MIN_MEMSIZE = 32000000 # In Kb
-
-# cpu
-MIN_CPUFREQ = 1800.000 # In Mhz
-MIN_CORECOUNT = 4
-
-# storage
-MIN_DISKCOUNT = 3
-MIN_SSDCOUNT = 1
-MIN_HDDSIZE = 1000 # In Gb
-MIN_SSDSIZE = 100 # In Gb
-# Smallest possible disk size
-MIN_DISKSIZE = min(MIN_HDDSIZE, MIN_SSDSIZE)
-
-# Virtual deployments
-# Requirements are per node
-APEX_REQ = {"cores": 2, \
- "ram": 8000000, \
- "disk": 40}
-
-# Requirements are per node
-COMPASS_REQ = {"cores": 4, \
- "ram": 4000000, \
- "disk": 100}
-
-# Requirements are per node
-JOID_REQ = {"cores": 4, \
- "ram": 4000000, \
- "disk": 100}
-
-# Requirements are per node
-FUEL_REQ = {"cores": 4, \
- "ram": 4000000, \
- "disk": 100}
diff --git a/tools/pharos-validator/src/validation_tool/src/dhcp.py b/tools/pharos-validator/src/validation_tool/src/dhcp.py
deleted file mode 100644
index 26c42f84..00000000
--- a/tools/pharos-validator/src/validation_tool/src/dhcp.py
+++ /dev/null
@@ -1,102 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import yaml
-import netifaces
-import subprocess
-import copy
-import re
-import os
-import logging
-
-from pharosvalidator.specification import *
-from pharosvalidator import util
-
-init_cmd = ["systemctl", "start", "dhcpd.service"]
-
-def gen_dhcpd_file(dhcpdfile, nodes, network):
- """Generates and associates incremental ip addresses to
- MAC addresses according to restrictions placed by network
- configuration file. Writes all of this out in dhcpd.conf format"""
- logger = logging.getLogger(__name__)
- logger.info("Generating dhcpfile...")
-
- header = "default-lease-time 86400;\n\
- max-lease-time 604800;\n\
- max-lease-time 604800;\n\
- \n\
- allow booting;\n\
- authoritative;\n\
- \n"
-
- # Skip this network if it is disabled
-    if not network.enabled:
- logger.info("Admin network is disabled, please change the configuration to \"enabled\" if you would like this test to run")
- quit()
-
-    # Not explicitly in the configuration file
- broadcastaddr = "0.0.0.0"
- next_server = "0.0.0.0"
-
- ip_range = util.gen_ip_range(network.cidr, [network.installer_ip], network.usable_ip_range.minimum, \
- network.usable_ip_range.maximum)
-
- tab = ' '
- subnetconf = "subnet {} netmask {} {{\n".format(network.subnet, network.netmask)\
- + tab+"range {} {};\n".format(network.usable_ip_range.minimum, network.usable_ip_range.maximum)\
- + tab+"option broadcast-address {};\n".format(broadcastaddr)\
- + tab+'filename "pxelinux.0";\n'\
- + tab+"next-server {};\n".format(next_server)
-
- # For now no static addresses are assigned
- """
- static_addrs = []
- for node in nodes:
- # Skip the node if it doesn't have a name or mac address specified
- if not node.name or not node.mac_address:
- continue
-
- if node.ipmi_addr in ip_range:
- ip_range.remove(node.ipmi_addr)
-
- static_line = "host {node} {{ hardware ethernet {ipmi_mac}; fixed-address {ip_addr}; }}\n".format\
- (node=node.name, ipmi_mac=node.mac_address, ip_addr=ip_range[0])
- ip_range = ip_range[1::] # Remove the assigned ip address
- static_addrs.append(static_line)
-
- # Now add all statically assigned ip addresses
- for addr in static_addrs:
- subnetconf += tab+addr
- """
-
- subnetconf += "}\n" # Just the closing bracket
-
- # The final text to be written out to a file
- dhcpdtext = header + subnetconf
-
- with open(dhcpdfile, "w+") as fd:
- logger.info("Writing out dhcpd file to {}".format(dhcpdfile))
- fd.write(dhcpdtext)
-
- return dhcpdtext
-
-def start_server():
- logger = logging.getLogger(__name__)
- global init_cmd
- cmd = init_cmd
- with open(os.devnull, 'w') as fn:
- status = subprocess.Popen(cmd, stdout=fn, stderr=fn).wait()
- if int(status) != 0:
- logger.error("Could not bring up dhcpd server")
- else:
- logger.info("Dhcp server brought up")
- return status
-
-if __name__ == "__main__":
- split("inventory.yaml", "eth0")
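
For orientation, the subnet stanza that gen_dhcpd_file() writes comes out roughly as below for a hypothetical 192.0.2.0/24 admin network; the addresses are examples only, and broadcast-address/next-server are the hard-coded placeholders from the function above.

    sample_stanza = """subnet 192.0.2.0 netmask 255.255.255.0 {
        range 192.0.2.11 192.0.2.99;
        option broadcast-address 0.0.0.0;
        filename "pxelinux.0";
        next-server 0.0.0.0;
    }
    """
    print(sample_stanza)
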
diff --git a/tools/pharos-validator/src/validation_tool/src/ipmi.py b/tools/pharos-validator/src/validation_tool/src/ipmi.py
deleted file mode 100644
index 44be207a..00000000
--- a/tools/pharos-validator/src/validation_tool/src/ipmi.py
+++ /dev/null
@@ -1,63 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import os
-import subprocess
-import logging
-
-def power_nodes(nodes, action):
- """ Attempts to power on all nodes specified in a list, then returns a list
- of the names of all failures. The list will be empty if no failures."""
- failed_nodes = []
- logger = logging.getLogger(__name__)
- if not nodes:
- logger.info("No nodes were provided; nothing to power {}".format(action))
- for node in nodes:
- # -I flag must be 'lanplus', 'lan' by itself doesn't work with
- # the most recent idrac/ipmi version
- if action == "on":
- pass
- elif action == "off":
- pass
- else:
- logger.error("Invalid ipmi power action: {}".format(action))
- continue
-
- cmd = ["ipmitool", \
- "-I", "lanplus", \
- "-H ", "'"+node.ipmi_addr+"'", \
- "-U ", "'"+node.ipmi_user+"'", \
- "-P ", "'"+node.ipmi_pass+"'", \
- "power", action]
-
- logger.debug("Running: \"{}\"".format(' '.join(cmd)))
- try:
- with open(os.devnull, 'w') as fn:
- status = subprocess.check_call(" ".join(cmd), \
- stdout=fn, stderr=fn, shell=True)
- except subprocess.CalledProcessError as e:
- status = e.returncode
- logger.error("{} could not be accessed at {} (exit code {})".format(\
- node.name, node.ipmi_addr, status))
- failed_nodes.append(node.name)
- if int(status) == 0:
- logger.info("{} successfully powered {}".format(node.name, action))
-
- return failed_nodes
-
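-# Illustrative sketch (not part of the original module): assumes node objects
-# carrying name/ipmi_addr/ipmi_user/ipmi_pass attributes, e.g. as parsed from
-# the Pharos inventory; power_nodes() returns the names of nodes that failed.
-def _example_power_cycle(nodes):
- """Power a set of nodes off and back on, returning any failures."""
- failed = power_nodes(nodes, "off")
- failed += power_nodes(nodes, "on")
- return failed
-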
-def status(node, ipaddr, username, passwd):
- # -I flag must be 'lanplus', 'lan' by itself doesn't work with
- # the most recent idrac/ipmi version
- chkcmd = ["ipmitool", \
- "-I", "lanplus", \
- "-H", ipaddr, \
- "-U", username, \
- "-P", passwd, \
- "chassis", "status"]
- logging.getLogger(__name__).debug("Running: \"{}\"".format(' '.join(chkcmd)))
- subprocess.Popen(chkcmd)
diff --git a/tools/pharos-validator/src/validation_tool/src/jenkins.py b/tools/pharos-validator/src/validation_tool/src/jenkins.py
deleted file mode 100644
index 443a6157..00000000
--- a/tools/pharos-validator/src/validation_tool/src/jenkins.py
+++ /dev/null
@@ -1,8 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
diff --git a/tools/pharos-validator/src/validation_tool/src/node.py b/tools/pharos-validator/src/validation_tool/src/node.py
deleted file mode 100644
index 280abb7f..00000000
--- a/tools/pharos-validator/src/validation_tool/src/node.py
+++ /dev/null
@@ -1,85 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import logging
-import socket
-import yaml
-import os
-
-import pharosvalidator.test.probe as probe
-import pharosvalidator.test.evaluate as evaluate
-from pharosvalidator.util import send_msg
-
-def hardware_test():
- """
- hardware_test: Run hardware probing/testing functions
-
- input: None
-
- output: String in YAML format of the tests that were run
- """
- logger = logging.getLogger(__name__)
- logger.info("Beginning hardware test")
-
- # Run test scripts
- results = []
- results.append(testinterpreter("CPU test", evaluate.cpu, probe.cpu()))
- results.append(testinterpreter("Memory test", evaluate.memory, probe.memory()))
- results.append(testinterpreter("Storage test", evaluate.storage, probe.storage()))
-
- # Start generating the yaml file
- yamltext = ""
- for result in results:
- yamltext += yaml.dump(result, default_flow_style=False)
- return yamltext
-
-def network_test(networkfile):
- """Stub: network testing is not implemented; only logs start and end"""
- logger = logging.getLogger(__name__)
- logger.info("Beginning network test")
- logger.info("Ending network test")
-
-def send_result(host, port, result):
- """
- send_result: Send the final test result to the central test server
-
- input: Host address of target; Port of target; String to send to server
-
- output: None
- """
- logger = logging.getLogger(__name__)
- logger.info("Sending test result")
-
- # Prefix the result with its line count so the receiver (util.read_msg)
- # knows how many lines to expect
- linecount = result.count("\n")
-
- result = str(linecount) + "\n" + result
-
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.connect((host, int(port)))
- send_msg(sock, result)
-
-def testinterpreter(name, test, dataset):
- """High level function for test functions within this module to print out
- their results in an ordered function while also writing out logs,
- expects a list of testresults objects"""
-
- # Start the yaml file contents
- data = {name:[]}
-
- # test the dataset
- results = test(dataset)
-
- for result in results:
- data[name].append(result)
-
- return data
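-
-# Illustrative sketch (not part of the original module): shows the mapping
-# shape testinterpreter() returns, using a trivial stand-in evaluator; the
-# real callers pass evaluate.cpu/memory/storage with matching probe output.
-def _example_testinterpreter():
- dummy_eval = lambda dataset: [{"dummy check": {"pass": True, "description": dataset}}]
- return testinterpreter("Dummy test", dummy_eval, "no-op dataset")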
diff --git a/tools/pharos-validator/src/validation_tool/src/receiver.py b/tools/pharos-validator/src/validation_tool/src/receiver.py
deleted file mode 100644
index 07d968e7..00000000
--- a/tools/pharos-validator/src/validation_tool/src/receiver.py
+++ /dev/null
@@ -1,46 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import socket
-import threading
-import logging
-
-from pharosvalidator.util import read_msg
-
-def start(nodecount, port, q):
- """Start a server to retrieve the files from the nodes. Server will run
- indefinitely until the parent process ends"""
- logging.basicConfig(level=0)
- logger = logging.getLogger(__name__)
-
- address = "" # Empty means act as a server on all available interfaces
-
- logger.info("Bringing up receiver server...")
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.bind((address, port))
- sock.listen(nodecount) # Max connections is the amount of nodes
-
- while True:
- # Receive a descriptor for the client socket, cl stands for client
- (clsock, claddress) = sock.accept()
- logger.info("Received client connection...")
- client_thread = threading.Thread(target=_server_accept_thread, \
- args=(clsock, claddress, q), daemon=True)
- # Start a new thread to read the new client socket connection
- client_thread.start()
-
- sock.close()
- logger.info("Bringing down receiver server...")
-
-def _server_accept_thread(clsock, claddress, q):
- """Read from the socket into the queue, then close the connection"""
- logger = logging.getLogger(__name__)
- q.put(read_msg(clsock))
- logger.info("Retreived message from socket")
- clsock.close()
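-
-# Illustrative sketch (not part of the original module): the receiver is meant
-# to run in its own thread with a Queue the caller drains; the port and node
-# count here are placeholders, normally taken from the deployment config.
-def _example_run_receiver(nodecount=2, port=12121):
- import queue
- q = queue.Queue()
- t = threading.Thread(target=start, args=(nodecount, port, q), daemon=True)
- t.start()
- return q # results from the nodes appear on this queue as they arrive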
diff --git a/tools/pharos-validator/src/validation_tool/src/server.py b/tools/pharos-validator/src/validation_tool/src/server.py
deleted file mode 100644
index 91c9a4f2..00000000
--- a/tools/pharos-validator/src/validation_tool/src/server.py
+++ /dev/null
@@ -1,111 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import logging
-import os
-import subprocess
-import time
-
-# Constant definitions
-from pharosvalidator.const import *
-
-def ssh_thread(remoteaddr, returnaddr, port, passes):
- """
- ssh_thread: the main loop of a thread the server spawns to connect to a node
- over ssh.
-
- input: remoteaddr, returnaddr, and port to forward to run_remote_test;
- passes to specify how many attempts should be made
- """
- for i in range(passes):
- status = run_remote_test(remoteaddr, returnaddr, port)
- time.sleep(1)
-
-def run_remote_test(remoteaddr, returnaddr, port):
- """
- run_remote_test: ssh to a given remote address and run a test program
- on the remote machine, specifying the address and port to which the results
- should be sent (usually back to the machine this program was run on)
-
- input: ip address of the ssh target; Address of the test results target;
- Port of the test results target
-
- output: 0 if the test ran over ssh cleanly, non-zero if the test failed
- """
- #TODO add way to keep attempting to ssh until service is up and running aka ping part 2
- logger = logging.getLogger(__name__)
-
- cmd = ["ssh", "root@"+remoteaddr, HARDWARE_TEST, \
- "-p", port, "-H", returnaddr, "hardware"]
-
- logger.debug("Running: {}".format(" ".join(cmd)))
- try:
- with open(os.devnull, 'w') as fn:
- status = subprocess.check_call(" ".join(cmd), stdout=fn, stderr=fn, shell=True)
- except subprocess.CalledProcessError as e:
- status = e.returncode
- logger.error("ssh attempt to '{}' failed".format(remoteaddr))
-
- return status
-
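-# Illustrative sketch (not part of the original module): ssh_thread() is
-# intended to be spawned per node, retrying the remote hardware test; the
-# addresses and port below are placeholders, not real POD values.
-def _example_spawn_ssh_worker():
- import threading
- t = threading.Thread(target=ssh_thread, \
- args=("192.0.2.20", "192.0.2.1", "12121", 3), daemon=True)
- t.start()
- return t
-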
-def ping_network(ip_range_list, ipcnt, passes):
- """
- ping_network: Ping a range of ips until the amount of successful pings
- reaches a number n
-
- input: List of ip addresses to be pinged; Counter for threshold
- of successful pings; Number of iterations to pass
-
- output: List of ip addresses that were found to be up
- """
- logger = logging.getLogger("pharosvalidator")
- assert isinstance(ip_range_list, list)
- ips_found = 0
- alive_ips = []
- for t in range(passes):
- for addr in list(ip_range_list):
- cmd = [ \
- "ping", \
- "-c", "1", \
- "-w", "1", \
- addr]
- logger.debug("Running: \"{}\"".format(' '.join(cmd)))
- try:
- with open(os.devnull, 'w') as fn:
- status = subprocess.check_call(" ".join(cmd), \
- stdout=fn, stderr=fn, shell=True)
- except subprocess.CalledProcessError as e:
- status = e.returncode
- logger.error("Ping at '{}' failed".format(addr))
- # If the ip address was pinged successfully, then remove it from future attempts
- if status == 0:
- ips_found += 1
- logger.info("{} is up, {} total nodes up".format(addr, ips_found))
-
- # Remove the ip that was successfully pinged from being tested again
- ip_range_list.remove(addr)
-
- # Add the successfully pinged node to a list of successful pings
- alive_ips.append(addr)
-
- if ips_found >= ipcnt:
- break
-
- if ips_found >= ipcnt:
- break
-
- return alive_ips
-
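-# Illustrative sketch (not part of the original module): ping_network() takes
-# the candidate list (typically from util.gen_ip_range), the number of nodes
-# expected to answer, and the number of sweeps; the addresses are placeholders.
-def _example_wait_for_two_nodes():
- candidates = ["192.0.2.11", "192.0.2.12", "192.0.2.13"]
- return ping_network(candidates, 2, 10)
-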
-def bring_up_admin_ip(ipaddr):
- """
- Assign the machine this test is running on an address according to the
- configuration file
-
- Note: unimplemented stub; the original body launched an empty command,
- which would fail. No command is run until this is filled in.
- """
- pass
diff --git a/tools/pharos-validator/src/validation_tool/src/test/__init__.py b/tools/pharos-validator/src/validation_tool/src/test/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/tools/pharos-validator/src/validation_tool/src/test/__init__.py
+++ /dev/null
diff --git a/tools/pharos-validator/src/validation_tool/src/test/evaluate.py b/tools/pharos-validator/src/validation_tool/src/test/evaluate.py
deleted file mode 100644
index 81a837d6..00000000
--- a/tools/pharos-validator/src/validation_tool/src/test/evaluate.py
+++ /dev/null
@@ -1,159 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import logging
-
-from pharosvalidator.util import approxsize
-
-# Constant macros
-from pharosvalidator.const import *
-
-def cpu(cpudata):
- """Compares system cpu against the pharos specification"""
- results = []
-
- # Architecture evaluation: a bit length of 63 or greater indicates a 64-bit (or larger) platform
- if cpudata["bitsize"] >= 63:
- val = True
- else:
- val = False
- result = {"architecture": {
- "pass": val,
- "description": str(cpudata["architecture"])}}
- results.append(result)
-
- # Core evaluation
- if cpudata["cores"] < MIN_CORECOUNT:
- val = False
- else:
- val = True
- desc = "Have {0}, Need at least {1}".format(cpudata["cores"], MIN_CORECOUNT)
- result = {"corecount": {
- "pass": val,
- "description": desc}}
- results.append(result)
-
- # Speed evaluation
- i = 0
- for cpufreq in cpudata["frequency"]:
- # A value of -1 means the cpu frequency could not be read by the probe
- if cpufreq == -1:
- val = False
- desc = "(Cpu frequency could not be read)"
- else:
- if approxsize(cpufreq, MIN_CPUFREQ, 5) or cpufreq > MIN_CPUFREQ:
- val = True
- else:
- val = False
- desc = "Have {:.2f}Mhz, Need at least ~= {:.2f}Mhz".format( \
- cpufreq, MIN_CPUFREQ)
- result = {"cpu"+str(i): {
- "pass": val,
- "description": desc}}
- results.append(result)
- i += 1
-
- return results
-
-def memory(memdata):
- """Compares system meminfo object against the pharos specification"""
- logger = logging.getLogger(__name__)
-
- results = []
-
- logger.debug("required memory: {}, detected memory: {}".format(\
- MIN_MEMSIZE, memdata["size"]))
- # Capacity evaluation
- if approxsize(memdata["size"], MIN_MEMSIZE, 5) or memdata["size"] > MIN_MEMSIZE:
- val = True
- else:
- val = False
-
- desc = "Have {:.2f}G, Need at least ~= {:.2f}G".format( \
- memdata["size"], MIN_MEMSIZE/1000000)
-
- result = {"memory capacity": {
- "pass": val,
- "description": desc}}
- results.append(result)
-
- return results
-
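-# Illustrative sketch (not part of the original module): memory() takes the
-# dict produced by probe.memory(), i.e. {"size": <gigabytes>}; whether a given
-# size passes depends on MIN_MEMSIZE from const.py.
-def _example_memory_check():
- results = memory({"size": 64.0})
- return results[0]["memory capacity"]["pass"]
-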
-def storage(diskdata):
- """Compares system storage against the Pharos specification"""
- def sizecmp(a, b, unit):
- if approxsize(a, b, 10) or a > b:
- val = True
- else:
- val = False
- desc = "capacity is {:.2f}{}, Need at least ~= {:.2f}{}".format(a, \
- unit, b, unit)
- return (val,desc)
-
- results = []
- # Disk size evaluation (also counts the disks)
- diskcount = {"ssd":0, "non-ssd":0}
- for disk in diskdata["names"]:
- if diskdata["rotational"][disk]:
- disktype = "non-ssd"
- diskcount["non-ssd"] += 1
- else:
- disktype = "ssd"
- diskcount["ssd"] += 1
- val, desc = sizecmp(diskdata["sizes"][disk], MIN_SSDSIZE, 'G')
- data = diskdata["sizes"][disk]
- result = {disk: {
- "pass": val,
- "description": "Disk type: disktype; " + desc}}
- results.append(result)
-
- # Disk number evaluation
- if sum(diskcount.values()) >= MIN_DISKCOUNT and diskcount["ssd"] >= MIN_SSDCOUNT:
- val = True
- else:
- val = False
- desc = "Have {0} drives, Need at least {1} drives and {3} ssds".format( \
- sum(diskcount.values()), MIN_DISKCOUNT, \
- diskcount["ssd"], MIN_SSDCOUNT)
-
- data = diskcount
- result = {"diskcount": {
- "pass": val,
- "description": desc}}
- results.append(result)
- return results
-
-"""
-def netinterfaces(netfaces):
- results = []
- for netface in netfaces:
- if netface.status <= 0:
- val = False
- state = "down"
- else:
- val = True
- state = "up"
- try:
- MACaddr = netface.MAC[0]["addr"]
- except IndexError:
- MACaddr = "no MAC"
- if len(netface.addrs) > 0:
- addrs = ""
- for addr in netface.addrs:
- if len(addrs) > 0:
- addrs += ", "
- addrs += addr['addr']
- addrs = "addresses: " + addrs
- else:
- addrs = "no address"
- desc = "({0} is {1} with {2})".format(netface.name, state, addrs)
- data = MACaddr
- results.append(gen_yamltext(netface.name, val, desc, data))
- return results
- """
-
diff --git a/tools/pharos-validator/src/validation_tool/src/test/probe.py b/tools/pharos-validator/src/validation_tool/src/test/probe.py
deleted file mode 100644
index daeccbc0..00000000
--- a/tools/pharos-validator/src/validation_tool/src/test/probe.py
+++ /dev/null
@@ -1,137 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import os
-import re
-import sys
-import platform
-import subprocess
-import netifaces
-import logging
-
-from pharosvalidator.util import cd # Context manager for changing the working directory
-
-# Static vars
-mempath="/proc/meminfo"
-cpuinfopath="/proc/cpuinfo"
-cpupath="/sys/devices/system/cpu/"
-diskpath="/sys/block/"
-
-def readproc(path):
- """Reads and parses /proc from [path] argument files
- and returns a hashmap of values"""
- logger = logging.getLogger(__name__)
- # Fail if path does not exist
- try:
- hashmap = {}
- with open(path) as fd:
- logger.debug("Reading {}".format(path))
- for line in fd:
- data = line.split(":")
- if len(data) == 2:
- # Strip trailing characters from hashmap names and entries
- # for less junk
- hashmap[data[0].strip()] = data[1].strip()
- return hashmap
- except IOError:
- logger.error("Path to file does not exist: {}".format(path))
- quit(1)
-
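-# Illustrative sketch (not part of the original module): readproc() turns
-# "key: value" lines into a dict, so /proc/meminfo yields entries such as
-# "MemTotal": "32781120 kB" (exact keys and values vary by kernel).
-def _example_total_memory_kb():
- meminfo = readproc(mempath)
- return int(meminfo["MemTotal"].split(' ')[0])
-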
-def cpu():
- logger = logging.getLogger(__name__)
- cpudata = {}
- cpuinfo = readproc(cpuinfopath)
- cpudata["bitsize"] = sys.maxsize.bit_length()
- cpudata["architecture"] = platform.architecture()[0]
- cpudata["cores"] = int(cpuinfo["cpu cores"])
- cpudata["frequency"] = []
- for i in range(cpudata["cores"]):
- freqpath = "{0}/cpu{1}/cpufreq/cpuinfo_max_freq".format(cpupath, \
- str(i))
- try:
- with open(freqpath) as fd:
- logger.debug("Opening {}".format(freqpath))
- cpufreq = (float(fd.read(-1)))/1000
- except IOError:
- # Less accurate way of getting cpu information as
- # this frequency may change during operation,
- # if dynamic frequency scaling is enabled,
- # however it is better than nothing.
- logger.error("Path to file does not exist: {}".format(freqpath))
- logger.error("Reading cpu frequency from {} instead".format(freqpath))
- cpufreq = float(cpuinfo["cpu MHz"])
-
- if cpufreq < 0:
- cpudata["frequency"].append(0)
- else:
- cpudata["frequency"].append(cpufreq)
-
- return cpudata
-
-def memory():
- logger = logging.getLogger(__name__)
- meminfo=readproc(mempath)
- # Create the memory object to store memory information
- memdata = {}
- memdata["size"] = (int(meminfo["MemTotal"].split(' ')[0]))/1000000
- return memdata
-
-def storage():
- """Gather's disk information"""
- logger = logging.getLogger(__name__)
- diskdata = {"names":[],"rotational":{},"sizes":{}}
- for disk in os.listdir(diskpath):
- #sdX is the naming schema for IDE/SATA interfaces in Linux
- if re.match(r"sd\w",disk):
- logger.debug("Found disk {}".format(disk))
- diskdata["names"].append(disk)
- sizepath = "{0}/{1}/size".format(diskpath, disk)
- try:
- with open(sizepath) as fd:
- size = int(fd.read(-1))
- except IOError:
- size = -1
- # If the read was successful
- if size != -1:
- # Converts the value to Gb
- diskdata["sizes"][disk] = (size * 512)/1000000000
-
- rotationalpath = "{0}/{1}/queue/rotational".format(diskpath, disk)
- try:
- with open(rotationalpath) as fd:
- rotational = int(fd.read(-1))
- except IOError:
- rotational = -1
- if rotational == 0:
- diskdata["rotational"][disk] = False
- else:
- diskdata["rotational"][disk] = True
-
- return diskdata
-
-def netinterfaces(nodeinfo):
- """Uses netifaces to probe the system for network interface information"""
- netfaces = []
- for interface in netifaces.interfaces():
- netface = netdata()
- netface.name = interface
- tmp = netifaces.ifaddresses(interface)
- # If the interface is up and has at least one ip address
- if netifaces.AF_INET in tmp:
- netface.status = 1 # 1 stands for "up"
- netface.addrs = tmp[netifaces.AF_INET]
- # If the interface is down
- else:
- netface.status = 0 # 0 stands for "down"
- # The file /proc/net/arp may also be used to read MAC addresses
- if netifaces.AF_LINK in tmp:
- netface.MAC = tmp[netifaces.AF_LINK]
- netfaces.append(netface)
-
- return netfaces
diff --git a/tools/pharos-validator/src/validation_tool/src/util.py b/tools/pharos-validator/src/validation_tool/src/util.py
deleted file mode 100644
index 67a75a56..00000000
--- a/tools/pharos-validator/src/validation_tool/src/util.py
+++ /dev/null
@@ -1,107 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Todd Gaunt and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import ipaddress
-import logging
-import netifaces
-import os
-
-class cd:
- """Context manager for changing the current working directory"""
- def __init__(self, new_path):
- self.new_path = os.path.expanduser(new_path)
-
- def __enter__(self):
- self.saved_path = os.getcwd()
- os.chdir(self.new_path)
-
- def __exit__(self, etype, value, traceback):
- os.chdir(self.saved_path)
-
-def approxsize(x, y, deviation):
- """Approximately compares 'x' to 'y' with in % of 'deviation'"""
- logger = logging.getLogger(__name__)
-
- dev = (y * .01 * deviation)
-
- if x >= round(y - dev, 0) and x <= round(y + dev, 0):
- logger.debug("{} is approximately {}".format(x, y))
- return True
- else:
- logger.debug("{} is not approximately {}".format(x, y))
- return False
-
-def read_line(sock):
- """Reads from a socket until a \n character or 512 bytes have been read,
- whichever comes first"""
- c = ""
- recvline = ""
- reads = 0
- while (c != "\n" and reads < 512):
- # Decode bytes to str, sockets output bytes which aren't pretty
- c = sock.recv(1).decode("utf-8")
- #print("char: '" + c + "'") # Debugging code
- recvline += c
- reads += 1
- return recvline
-
-def read_msg(sock):
- """Reads a message prefixed with a number and a newline char, eg. "20\n"
- then reads x lines, where x is equal to the number in the first line."""
- # Read the socket once initially for the line count
- buf = read_line(sock)
- buf = buf[:-1] # Cut off the '\n' character
- length = int(buf)
-
- lines = []
- for i in range(length):
- lines.append(read_line(sock))
- return "".join(lines)
-
-def send_msg(sock, msg):
- """Sends a message to a socket"""
- # Encode python str to bytes beforehand, sockets only deal in bytes
- msg = bytes(msg, "utf-8")
- totalsent = 0
- while totalsent < len(msg):
- sent = sock.send(msg[totalsent:])
- if sent == 0:
- return -1
- totalsent = totalsent + sent
- return totalsent
-
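-# Illustrative sketch (not part of the original module): messages are framed
-# by prefixing the payload with its line count, which is what read_msg()
-# expects on the receiving side (see also node.send_result).
-def _example_send_framed(sock, payload):
- framed = str(payload.count("\n")) + "\n" + payload
- return send_msg(sock, framed)
-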
-def get_addr(interface):
- """Get the address of the machine that this program is running on"""
- return netifaces.ifaddresses(interface)[netifaces.AF_INET][0]["addr"]
-
-def gen_ip_range(cidr, excluded, minimum, maximum):
- """Takes a network in CIDR notation plus minimum and maximum bounds, and
- creates the list of ip addresses available on [minimum, maximum]. Also
- removes the "excluded" addresses from the range"""
- logger = logging.getLogger(__name__)
- # Generate a list of available ip addresses for the dhcp server
- ip_range = list(map(lambda x: x.exploded, ipaddress.ip_network(cidr).hosts()))
-
- for addr in excluded:
- # Remove the value from the list; ignore it if it is not present
- try:
- ip_range.remove(addr)
- except ValueError:
- logger.debug("{} not in ip_range, cannot remove".format(addr))
-
- # Remove all values before the minimum usable value
- for i in range(len(ip_range)):
- if ip_range[i] == minimum:
- ip_range = ip_range[i::]
- break
- # Remove all values after the maximum usable value
- for i in range(len(ip_range)):
- if ip_range[i] == maximum:
- ip_range = ip_range[0:i+1]
- break
- return ip_range
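-
-# Illustrative sketch (not part of the original module): the CIDR and bounds
-# below are documentation-range placeholders, not values from a real
-# network.yaml; the installer address is excluded from the returned range.
-def _example_admin_range():
- return gen_ip_range("192.0.2.0/24", ["192.0.2.1"], "192.0.2.10", "192.0.2.20")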
diff --git a/tools/pharos-validator/src/validation_tool/tests/test_node.py b/tools/pharos-validator/src/validation_tool/tests/test_node.py
deleted file mode 100644
index e69de29b..00000000
--- a/tools/pharos-validator/src/validation_tool/tests/test_node.py
+++ /dev/null
diff --git a/tools/pharos-validator/src/validation_tool/tests/test_probe.py b/tools/pharos-validator/src/validation_tool/tests/test_probe.py
deleted file mode 100644
index e69de29b..00000000
--- a/tools/pharos-validator/src/validation_tool/tests/test_probe.py
+++ /dev/null
diff --git a/tools/pharos-validator/src/validation_tool/tests/test_server.py b/tools/pharos-validator/src/validation_tool/tests/test_server.py
deleted file mode 100644
index 35388b6d..00000000
--- a/tools/pharos-validator/src/validation_tool/tests/test_server.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python3
-
-def test_ssh_thread():
- """Test to see if ssh connections are attempted the proper amount of times"""
- from pharosvalidator.server import ssh_thread
- ssh_thread("127.0.0.1", "0.0.0.0", "1", 10)
-
-
-
diff --git a/tools/pharos-validator/src/validation_tool/tests/test_util.py b/tools/pharos-validator/src/validation_tool/tests/test_util.py
deleted file mode 100644
index 9ce939bd..00000000
--- a/tools/pharos-validator/src/validation_tool/tests/test_util.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python3
-
-def test_approxsize():
- from pharosvalidator.util import approxsize
- assert approxsize(100, 95, 5) == True
- assert approxsize(100, 105, 5) == True
-
- assert approxsize(100, 94, 5) == False
- assert approxsize(100, 106, 5) == False
-