forked from uyuni-project/sumaform
-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.tf.libvirt-testsuite.example
156 lines (140 loc) · 4.79 KB
/
main.tf.libvirt-testsuite.example
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
terraform {
  # Terraform release this example configuration was written against.
  required_version = "1.0.10"

  required_providers {
    # Community libvirt provider used to create the test suite VMs.
    libvirt = {
      source  = "dmacvicar/libvirt"
      version = "0.6.3"
    }
  }
}
# Connect to the system-level libvirt daemon on the local host.
provider "libvirt" {
  uri = "qemu:///system"
}
module "cucumber_testsuite" {
  source = "./modules/cucumber_testsuite"

  # See https://github.com/uyuni-project/sumaform/blob/master/README_ADVANCED.md
  # for more information on product_version values.
  # Keep this consistent with git_repo below:
  #   - Uyuni:  e.g. uyuni-master here, with the uyuni repository as git_repo
  #   - SUMA:   e.g. 4.3-nightly, 4.3-released, 4.3-VM-nightly, 4.3-VM-released,
  #             or head, with the spacewalk repository as git_repo
  product_version = "uyuni-master"

  # SUSE SCC credentials
  cc_username = ""
  cc_password = ""

  # Restrict which base images get uploaded to the hypervisor, see:
  # https://github.com/uyuni-project/sumaform/blob/master/backend_modules/libvirt/README.md#only-upload-a-subset-of-available-images
  # Available images include:
  #   "almalinux8o", "almalinux9o",
  #   "amazonlinux2o",
  #   "centos7o", "centos8o", "centos9o",
  #   "libertylinux9o",
  #   "opensuse155o", "opensuse156o",
  #   "oraclelinux9o",
  #   "rocky8o", "rocky9o",
  #   "sles12sp5o", "sles15sp2o", "sles15sp3o", "sles15sp4o", "sles15sp5o", "sles15sp6o",
  #   "ubuntu2004o", "ubuntu2204o"
  # To see which VM uses which image, check the image variable in the
  # cucumber_testsuite module definition:
  # https://github.com/uyuni-project/sumaform/blob/master/modules/cucumber_testsuite/main.tf
  # images = ["rocky9o", "opensuse155o", "sles15sp4o", "ubuntu2204o"]

  use_avahi   = true
  name_prefix = "prefix-"
  domain      = "tf.local"
  from_email  = "email@domain.com"

  # Git credentials and repository checked out on the controller.
  # Use a personal access token as the password, not your actual GitHub
  # password — see https://github.com/settings/tokens
  # A branch can be specified here, e.g. one where you develop a new test.
  # Make sure git_repo targets the same product (Uyuni or SUMA) as the
  # product_version set above.
  git_repo     = "https://github.com/uyuni-project/uyuni.git"
  git_username = "nogit"
  git_password = "nogit"
  branch       = "master"

  # Required so the testsuite selects a container server.
  container_server = true

  # Credentials in case an authentication registry is used.
  auth_registry          = "registry.mgr.suse.de:5000/cucutest"
  auth_registry_username = ""
  auth_registry_password = ""

  git_profiles_repo = "https://github.com/uyuni-project/uyuni.git#:testsuite/features/profiles/internal_prv"

  # Declare which VMs to create and tune their settings; comment out any
  # minion you do not need. Example of a fully customized entry:
  #   suse-minion = {
  #     image = "sles15sp4o"
  #     name = "minion"
  #     provider_settings = {
  #       mac = "aa:bb:cc:dd:ee:ff"
  #       memory = 1024
  #       vcpu = 2
  #     }
  #     additional_repos = {
  #       Test_repo = "http://download.suse.de/ibs/Devel:/Galaxy:/Manager:/TEST/SLE_15_SP4/"
  #     }
  #     additional_packages = [ "vim" ]
  #   }
  # The image matrix used by the CI test suite is documented at:
  # https://github.com/SUSE/susemanager-ci#used-image-versions-in-the-ci-test-suite
  host_settings = {
    controller = {
      name = "controller"
    }
    server_containerized = {
      name = "server"
      # Uncomment to run the container on k3s rather than podman
      # runtime = "k3s"
      # Override where to get the containers from
      # container_repository = "registry.opensuse.org/systemsmanagement/uyuni/master/containers/uyuni"
      # Override where to get the helm chart from
      # helm_chart_url = "oci://registry.opensuse.org/systemsmanagement/uyuni/master/charts/uyuni/server-helm"
    }
    proxy = {
      name = "proxy"
    }
    suse-client = {
      image = "sles15sp4o"
    }
    suse-minion = {
      image = "sles15sp4o"
      name  = "minion"
    }
    suse-sshminion = {
      image = "sles15sp4o"
      name  = "sshminion"
    }
    redhat-minion = {
      image = "centos7o"
      name  = "centos"
    }
    debian-minion = {
      image = "ubuntu2204o"
      name  = "ubuntu"
    }
    build-host = {
      image = "sles15sp4o"
      name  = "build"
    }
    pxeboot-minion = {
      image = "sles15sp4o"
      name  = "pxeboot"
    }
    kvm-host = {
      image = "opensuse156o"
      name  = "kvmhost"
    }
    monitoring-server = {
      image = "sles15sp4o"
      name  = "monitoring"
    }
  }

  # Special settings, e.g. to adjust the pool name or define another IP range:
  # provider_settings = {
  #   uri = "qemu:///system"
  #   pool = "username_disks"
  #   bridge = "br0"
  #   additional_network = "xxx.xxx.xxx.xxx/24"
  # }
}
# Prints the resulting configuration on screen; Terraform also records it
# in the terraform.tfstate file.
output "configuration" {
  value = module.cucumber_testsuite.configuration
}