Merged
Changes from all commits (28 commits)
2e686ef
Redoing layer installs so that if you are importing from an existing …
travisbcotton May 19, 2025
734d1ed
rename installer functions correctly
travisbcotton May 19, 2025
83b49e5
fix error where package manager is put before the container name
travisbcotton May 19, 2025
640bf91
rinse and repeat the previous step...
travisbcotton May 19, 2025
8e510e8
tell GPG add commands to use the container...
travisbcotton May 19, 2025
c0894c9
working through buildah run errors
travisbcotton May 19, 2025
d7a8c53
fixing buildah run command format
travisbcotton May 19, 2025
4ffbfda
add gpgcheck option
travisbcotton May 20, 2025
ede02a0
fix weird buildah formatting issue
travisbcotton May 20, 2025
23b9a2b
handle package group names with spaces properly
travisbcotton May 20, 2025
8f671b0
cosmetic changes...
travisbcotton May 21, 2025
0493506
fix gpg check flags order; set default value to False
travisbcotton May 21, 2025
761ba5b
actually set the default gpg check value to true...
travisbcotton May 21, 2025
a8bcddc
increase subuid/subgid
travisbcotton May 21, 2025
0bf166e
reconfig how container gets built and how the build is run inside the…
travisbcotton Jun 4, 2025
c2d6d91
make entrypoint executable
travisbcotton Jun 4, 2025
be6482b
Attempting additional permission adjustments for uidmap/gidmap
alexlovelltroy Jun 8, 2025
9679b34
Update entrypoint to expect running as normal user
alexlovelltroy Jun 8, 2025
23b8f85
simplify entrypoint for both root and non-root invocation
alexlovelltroy Jun 8, 2025
4840e8f
Revert "make entrypoint executable"
travisbcotton Jul 13, 2025
ddd5959
signoff working?
travisbcotton Jul 13, 2025
9283d72
Revert "Attempting additional permission adjustments for uidmap/gidmap"
travisbcotton Jul 13, 2025
ae92a56
Revert "simplify entrypoint for both root and non-root invocation"
travisbcotton Jul 13, 2025
2d960dc
Update dockerfiles/dnf/Dockerfile
travisbcotton Aug 8, 2025
282388e
update entrypoint to make subuid/gid range dynamic
travisbcotton Aug 28, 2025
8f5fdfb
fix layer calling the correct copyfiles and runmd functions
travisbcotton Aug 28, 2025
982d6e7
removing import of unused Setting python library
travisbcotton Sep 17, 2025
5cabea7
removing unneeded args to install_packages layer.py
travisbcotton Sep 22, 2025
13 changes: 10 additions & 3 deletions README.md
@@ -10,13 +10,20 @@ The recommended and official way to run `image-build` is using the `ghcr.io/openchami/image-build` container.
To build an image using the container, the config file needs to be mapped into the container, as well as the FUSE filesystem device:

```
podman run \
--rm \
podman run --rm \
--device /dev/fuse \
--userns keep-id:uid=1002,gid=1002 \
-v /path/to/config.yaml:/home/builder/config.yaml \
--network host \
--cap-add=SYS_ADMIN \
--cap-add=SETUID \
--cap-add=SETGID \
--security-opt seccomp=unconfined \
--security-opt label=disable \
--userns=keep-id \
Contributor review comment on `--userns=keep-id`:

Suggested change:
-    --userns=keep-id \
+    --userns=keep-id:uid=1000,gid=1000 \

I've been using this to ensure my outside UID is mapped to builder so that builder can access things like 0600 files.

-v /opt/workdir/images/test-rocky-9.5.yaml:/home/builder/config.yaml \
ghcr.io/openchami/image-build:latest \
image-build --config config.yaml
image-build --config config.yaml --log-level DEBUG
```

If you are building EL9 images, use the `ghcr.io/openchami/image-build-el9:latest` image.
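
Combining the new flags with the reviewer's suggestion above, an invocation that maps the calling user onto `builder` might look like the following sketch (the config path and the uid/gid values are illustrative):

```
podman run --rm \
  --device /dev/fuse \
  --network host \
  --cap-add=SYS_ADMIN \
  --cap-add=SETUID \
  --cap-add=SETGID \
  --security-opt seccomp=unconfined \
  --security-opt label=disable \
  --userns=keep-id:uid=1000,gid=1000 \
  -v /path/to/config.yaml:/home/builder/config.yaml \
  ghcr.io/openchami/image-build:latest \
  image-build --config config.yaml --log-level DEBUG
```

`keep-id:uid=1000,gid=1000` maps the UID/GID running podman to 1000/1000 inside the container, which is the `builder` account created in the Dockerfile, so bind-mounted files that are only readable by the caller (e.g. 0600 configs) remain readable by `builder`.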
15 changes: 6 additions & 9 deletions dockerfiles/dnf/Dockerfile
@@ -21,23 +21,20 @@ RUN pip3.11 install -r /requirements.txt
COPY src/ /usr/local/bin/
RUN chmod -R 0755 /usr/local/bin/

COPY entrypoint.sh /entrypoint.sh
RUN chmod 0755 /entrypoint.sh

# Allow non-root to run buildah commands
RUN setcap cap_setuid=ep "$(command -v newuidmap)" && \
setcap cap_setgid=ep "$(command -v newgidmap)" &&\
chmod 0755 "$(command -v newuidmap)" && \
chmod 0755 "$(command -v newgidmap)" && \
rpm --restore shadow-utils && \
echo "builder:2000:50000" > /etc/subuid && \
echo "builder:2000:50000" > /etc/subgid
rpm --restore shadow-utils

# Create local user for rootless image builds
RUN useradd --uid 1002 builder && \
RUN useradd --uid 1000 builder && \
chown -R builder /home/builder

# Make builder the default user when running container
USER builder
WORKDIR /home/builder

ENV BUILDAH_ISOLATION=chroot

ENTRYPOINT ["/usr/bin/buildah", "unshare"]
ENTRYPOINT ["/entrypoint.sh"]
14 changes: 5 additions & 9 deletions dockerfiles/dnf/Dockerfile.minimal
@@ -15,10 +15,10 @@ RUN microdnf install -y \
microdnf clean all

# Create local user for rootless image builds
RUN echo "builder:x:1002:1002::/home/builder:/bin/bash" >> /etc/passwd && \
echo "builder:x:1002:" >> /etc/group && \
RUN echo "builder:x:1000:1000::/home/builder:/bin/bash" >> /etc/passwd && \
echo "builder:x:1000:" >> /etc/group && \
mkdir -p /home/builder && \
chown -R 1002:1002 /home/builder
chown -R 1000:1000 /home/builder

# Add our custom scripts
COPY src/ /usr/local/bin/
@@ -28,20 +28,16 @@ RUN chmod -R 0755 /usr/local/bin/
RUN setcap cap_setuid=ep "$(command -v newuidmap)" && \
setcap cap_setgid=ep "$(command -v newgidmap)" && \
chmod 0755 "$(command -v newuidmap)" && \
chmod 0755 "$(command -v newgidmap)" && \
echo "builder:2000:50000" > /etc/subuid && \
echo "builder:2000:50000" > /etc/subgid
chmod 0755 "$(command -v newgidmap)"

# Set up environment variables
ENV BUILDAH_ISOLATION=chroot

# Switch to non-root user
USER builder
# Verify Python functionality
RUN python3.11 -m pip install --no-cache-dir --upgrade pip && \
python3.11 -m pip install --no-cache-dir PyYAML ansible==11.1.0 ansible-base ansible-bender boto3 dnspython requests jinja2_ansible_filters

WORKDIR /home/builder

# Default entrypoint
ENTRYPOINT ["/usr/bin/buildah", "unshare"]
ENTRYPOINT ["/entrypoint.sh"]
13 changes: 13 additions & 0 deletions entrypoint.sh
@@ -0,0 +1,13 @@
#!/bin/bash
set -euo pipefail

if [[ -w /etc/subuid && -w /etc/subgid ]]; then
echo "builder:1001:${USERNS_RANGE:-65536}" > /etc/subuid
echo "builder:1001:${USERNS_RANGE:-65536}" > /etc/subgid
fi

# Make sure builder owns its homedir (optional if baked into image)
chown -R builder /home/builder || true

# Run buildah directly
exec su builder -c "buildah unshare $*"
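
Everything after the image name is handed to `buildah unshare` through `su builder`, so the container is invoked the same way as before; `USERNS_RANGE` only controls how many subordinate IDs the entrypoint assigns to `builder` (and only when `/etc/subuid` and `/etc/subgid` are writable, e.g. when the container starts as root). A rough usage sketch with illustrative values:

```
# Default range: builder gets 65536 subordinate IDs starting at 1001
podman run --rm --device /dev/fuse \
  -v /path/to/config.yaml:/home/builder/config.yaml \
  ghcr.io/openchami/image-build:latest \
  image-build --config config.yaml

# Same invocation with a larger range
podman run --rm --device /dev/fuse \
  -e USERNS_RANGE=131072 \
  -v /path/to/config.yaml:/home/builder/config.yaml \
  ghcr.io/openchami/image-build:latest \
  image-build --config config.yaml
```

Inside the container the second command ends up as `su builder -c "buildah unshare image-build --config config.yaml"` with `builder:1001:131072` written to `/etc/subuid` and `/etc/subgid`.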
1 change: 1 addition & 0 deletions src/arguments.py
@@ -22,6 +22,7 @@ def process_args(terminal_args, config_options):
processed_args['pkg_man'] = terminal_args.pkg_man or config_options.get('pkg_manager')
if not processed_args['pkg_man']:
raise ValueError("'pkg_man' required when 'layer_type' is base")
processed_args['gpgcheck'] = terminal_args.gpgcheck or config_options.get('gpgcheck', True)
elif processed_args['layer_type'] == "ansible":
processed_args['ansible_groups'] = terminal_args.group_list or config_options.get('groups', [])
processed_args['ansible_pb'] = terminal_args.pb or config_options.get('playbooks', [])
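One behavior worth noting in the gpgcheck fallback as written (a sketch of the existing expression, not a proposed change): because `or` treats `False` and `None` alike, a falsy value arriving from the command line falls through to the config file, whose default is `True`.

```
# Minimal reproduction of the fallback expression in process_args()
config_options = {}                      # hypothetical config with no 'gpgcheck' key

terminal_gpgcheck = None                 # flag not given on the CLI
print(terminal_gpgcheck or config_options.get('gpgcheck', True))   # True

terminal_gpgcheck = False                # CLI asked to disable the check
print(terminal_gpgcheck or config_options.get('gpgcheck', True))   # still True
```

In practice, disabling the check therefore has to come from the YAML config (`gpgcheck: false`).
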
3 changes: 2 additions & 1 deletion src/image-build
@@ -35,6 +35,7 @@ def main():
parser.add_argument('--config', type=str, required=True, help='Configuration file is required')
parser.add_argument('--repo', type=str, required=False)
parser.add_argument('--pkg-manager', dest="pkg_man", type=str, required=False)
parser.add_argument('--gpgcheck', dest="gpgcheck", type=bool, required=False)
parser.add_argument('--groups', dest='group_list', action='store', nargs='+', type=str, default=[], help='List of groups')
parser.add_argument('--vars', dest='vars', action='store', nargs='+', type=str, default=[], help='List of variables')
parser.add_argument('--pb', type=str)
@@ -56,7 +57,7 @@ def main():
level = getattr(logging, args['log_level'].upper(), 10)
logging.basicConfig(format='%(levelname)s - %(message)s',level=level)

print_args(args)
print_args(args)

except argparse.ArgumentError as e:
print(f"Argument error: {e}")
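A related wrinkle on the CLI side, which is standard argparse behavior rather than anything specific to this PR: `type=bool` applies Python's `bool()` to the raw string, so any non-empty value, including the literal string `False`, parses as `True`.

```
import argparse

# Same flag definition as in src/image-build
parser = argparse.ArgumentParser()
parser.add_argument('--gpgcheck', dest='gpgcheck', type=bool, required=False)

print(parser.parse_args(['--gpgcheck', 'False']).gpgcheck)   # True  (bool('False'))
print(parser.parse_args([]).gpgcheck)                        # None  (flag omitted)
```

`argparse.BooleanOptionalAction` or a small string-to-bool converter would be the usual alternatives if that behavior is unwanted.
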
120 changes: 99 additions & 21 deletions src/installer.py
@@ -7,10 +7,11 @@
from utils import cmd

class Installer:
def __init__(self, pkg_man, cname, mname):
def __init__(self, pkg_man, cname, mname, gpgcheck=True):
self.pkg_man = pkg_man
self.cname = cname
self.mname = mname
self.gpgcheck = gpgcheck

# Create temporary directory for logs, cache, etc. for package manager
os.makedirs(os.path.join(mname, "tmp"), exist_ok=True)
@@ -21,7 +22,7 @@ def __init__(self, pkg_man, cname, mname):
# DNF complains if the log directory is not present
os.makedirs(os.path.join(self.tdir, "dnf/log"))

def install_repos(self, repos, repo_dest, proxy):
def install_scratch_repos(self, repos, repo_dest, proxy):
# check if there are repos passed for install
if len(repos) == 0:
logging.info("REPOS: no repos passed to install\n")
@@ -92,7 +93,7 @@ def install_repos(self, repos, repo_dest, proxy):
if rc != 0:
raise Exception("Failed to install gpg key for", r['alias'], "at URL", r['gpg'])

def install_base_packages(self, packages, registry_loc, proxy):
def install_scratch_packages(self, packages, registry_loc, proxy):
# check if there are packages to install
if len(packages) == 0:
logging.warn("PACKAGES: no packages passed to install\n")
@@ -134,19 +135,7 @@ def install_base_packages(self, packages, registry_loc, proxy):
if rc == 107:
logging.warn("one or more RPM postscripts failed to run")

def remove_base_packages(self, remove_packages):
# check if there are packages to remove
if len(remove_packages) == 0:
logging.warn("REMOVE PACKAGES: no package passed to remove\n")
return

logging.info(f"REMOVE PACKAGES: removing these packages from container {self.cname}")
logging.info("\n".join(remove_packages))
for p in remove_packages:
args = [self.cname, '--', 'rpm', '-e', '--nodeps', p]
cmd(["buildah","run"] + args)

def install_base_package_groups(self, package_groups, registry_loc, proxy):
def install_scratch_package_groups(self, package_groups, registry_loc, proxy):
# check if there are packages groups to install
if len(package_groups) == 0:
logging.warn("PACKAGE GROUPS: no package groups passed to install\n")
@@ -175,7 +164,7 @@ def install_base_package_groups(self, package_groups, registry_loc, proxy):
if rc == 104:
raise Exception("Installing base packages failed")

def install_base_modules(self, modules, registry_loc, proxy):
def install_scratch_modules(self, modules, registry_loc, proxy):
# check if there are modules groups to install
if len(modules) == 0:
logging.warn("PACKAGE MODULES: no modules passed to install\n")
@@ -205,8 +194,97 @@ def install_base_modules(self, modules, registry_loc, proxy):
if rc != 0:
raise Exception("Failed to run module cmd", mod_cmd, ' '.join(mod_list))

def install_repos(self, repos, proxy):
# check if there are repos passed for install
if len(repos) == 0:
logging.info("REPOS: no repos passed to install\n")
return

logging.info(f"REPOS: Installing these repos to {self.cname}")
for r in repos:
logging.info(r['alias'] + ': ' + r['url'])
if self.pkg_man == "zypper":
if 'priority' in r:
priority = r['priority']
else:
priority = 99
rargs = ' addrepo -f -p ' + str(priority) + ' ' + r['url'] + ' ' + r['alias']
elif self.pkg_man == "dnf":
rargs = ' config-manager --save --add-repo ' + r['url']

args = [self.cname, '--', 'bash', '-c', self.pkg_man + rargs]
rc = cmd(["buildah","run"] + args)
if rc != 0:
raise Exception("Failed to install repo", r['alias'], r['url'])
# Set Proxy if using DNF
if proxy != "":
if r['url'].endswith('.repo'):
repo_name = r['url'].split('/')[-1].split('.repo')[0] + "*"
elif r['url'].startswith('https'):
repo_name = r['url'].split('https://')[1].replace('/','_')
elif r['url'].startswith('http'):
repo_name = r['url'].split('http://')[1].replace('/','_')
pargs = ' config-manager --save --setopt=*.proxy=' + proxy + ' ' + repo_name

args = [self.cname, '--', 'bash', '-c', self.pkg_man + pargs]
rc = cmd(["buildah","run"] + args)
if rc != 0:
raise Exception("Failed to set proxy for repo", r['alias'], r['url'], proxy)

if "gpg" in r:
# Using rpm apparently works for both Yum- and Zypper-based distros.
gargs = [self.cname, '--', 'bash', '-c', 'rpm --import ' + r['gpg']]
if proxy != "":
arg_env = os.environ.copy()
arg_env['https_proxy'] = proxy
rc = cmd(["buildah","run"] + gargs)
if rc != 0:
raise Exception("Failed to install gpg key for", r['alias'], "at URL", r['gpg'])

def install_packages(self, packages):
if len(packages) == 0:
logging.warn("PACKAGE GROUPS: no package groups passed to install\n")
return
logging.info(f"PACKAGES: Installing these packages to {self.cname}")
logging.info("\n".join(packages))
args = [self.cname, '--', 'bash', '-c']
pkg_cmd = [self.pkg_man]
if self.gpgcheck is not True:
if self.pkg_man == 'dnf':
pkg_cmd.append('--nogpgcheck')
elif self.pkg_man == 'zypper':
pkg_cmd.append('--no-gpg-checks')
args.append(" ".join(pkg_cmd + [ 'install', '-y'] + packages))
cmd(["buildah","run"] + args)

def install_package_groups(self, package_groups):
if len(package_groups) == 0:
logging.warn("PACKAGE GROUPS: no package groups passed to install\n")
return
logging.info(f"PACKAGES: Installing these package groups to {self.cname}")
logging.info("\n".join(package_groups))
args = [self.cname, '--', 'bash', '-c']
pkg_cmd = [self.pkg_man, 'groupinstall', '-y']
if self.pkg_man == "zypper":
logging.warn("zypper does not support package groups")
if self.gpgcheck is not True:
pkg_cmd.append('--nogpgcheck')
args.append(" ".join(pkg_cmd + [f'"{pg}"' for pg in package_groups]))
cmd(["buildah","run"] + args)

def remove_packages(self, remove_packages):
# check if there are packages to remove
if len(remove_packages) == 0:
logging.warn("REMOVE PACKAGES: no package passed to remove\n")
return

logging.info(f"REMOVE PACKAGES: removing these packages from container {self.cname}")
logging.info("\n".join(remove_packages))
for p in remove_packages:
args = [self.cname, '--', 'rpm', '-e', '--nodeps', p]
cmd(["buildah","run"] + args)

def install_base_commands(self, commands):
def install_commands(self, commands):
# check if there are commands to install
if len(commands) == 0:
logging.warn("COMMANDS: no commands passed to run\n")
@@ -228,9 +306,9 @@ def install_base_commands(self, commands):
loglevel = logging.error
else:
loglevel = logging.error
out = cmd(build_cmd + args, stderr_handler=loglevel)
cmd(["buildah","run"] + args, stderr_handler=loglevel)

def install_base_copyfiles(self, copyfiles):
def install_copyfiles(self, copyfiles):
if len(copyfiles) == 0:
logging.warn("COPYFILES: no files to copy\n")
return
@@ -242,4 +320,4 @@ def install_base_copyfiles(self, copyfiles):
args.extend(o.split())
logging.info(f['src'] + ' -> ' + f['dest'])
args += [ self.cname, f['src'], f['dest'] ]
out=cmd(["buildah","copy"] + args)
cmd(["buildah","copy"] + args)