Implemented some operations on disk
MrMatAP committed Jan 2, 2024
1 parent aba245a commit f6ace34
Showing 4 changed files with 119 additions and 22 deletions.
29 changes: 24 additions & 5 deletions src/kaso_mashin/common/applied/base_types.py
@@ -9,10 +9,12 @@


class BinaryScale(enum.StrEnum):
-    KB = 'Kilobytes'
-    MB = 'Megabytes'
-    GB = 'Gigabytes'
-    TB = 'Terabytes'
+    k = 'Kilobytes'
+    M = 'Megabytes'
+    G = 'Gigabytes'
+    T = 'Terabytes'
+    P = 'Petabytes'
+    E = 'Exabytes'


class Base(DeclarativeBase): # pylint: disable=too-few-public-methods
@@ -184,4 +186,21 @@ class BinarySizedValue(ValueObject):
    A sized binary value object
    """
    value: int = dataclasses.field(default=0)
-    scale: BinaryScale = dataclasses.field(default=BinaryScale.GB)
+    scale: BinaryScale = dataclasses.field(default=BinaryScale.G)
+
+    def __str__(self):
+        return f'{self.value}{self.scale.name}'
+
+
+class KMException(Exception):
+
+    def __init__(self, code: int, msg: str) -> None:
+        super().__init__()
+        self._code = code
+        self._msg = msg
+
+    def __str__(self) -> str:
+        return f'[{self._code}] {self._msg}'
+
+    def __repr__(self) -> str:
+        return f'{self.__class__.__name__}(code={self._code}, msg={self._msg})'
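
A quick illustration (mine, not part of the commit): the shortened enum member names mean that str() on a BinarySizedValue now yields compact size strings such as '5G', presumably so they can be passed straight to qemu-img, which understands the k/M/G/T/P/E suffixes. Minimal usage sketch, assuming the imports resolve as in this commit:

from kaso_mashin.common.applied.base_types import BinaryScale, BinarySizedValue, KMException

size = BinarySizedValue(value=5, scale=BinaryScale.G)
print(str(size))         # '5G'  (scale.name is the short suffix)
print(size.scale.value)  # 'Gigabytes'  (the StrEnum value keeps the long form)

try:
    raise KMException(code=400, msg='example')
except KMException as e:
    print(e)             # '[400] example'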
69 changes: 65 additions & 4 deletions src/kaso_mashin/common/applied/disks.py
@@ -7,9 +7,17 @@
from sqlalchemy.orm import Mapped, mapped_column

from .base_types import (
-    UniqueIdentifier, BinaryScale,
+    UniqueIdentifier, BinaryScale, KMException,
    AggregateRoot, BinarySizedValue,
    Base, AggregateRootModel, Repository, T_AggregateRoot, T_AggregateRootModel)
+from .images import ImageEntity
+
+import os
+import subprocess
+
+
+class DiskException(KMException):
+    pass


@dataclasses.dataclass
@@ -20,8 +28,61 @@ class DiskEntity(AggregateRoot):
    name: str
    path: pathlib.Path
    id: UniqueIdentifier = dataclasses.field(default_factory=lambda: str(uuid.uuid4()))
-    size: BinarySizedValue = dataclasses.field(default_factory=lambda: BinarySizedValue(value=5, scale=BinaryScale.GB))
+    size: BinarySizedValue = dataclasses.field(default_factory=lambda: BinarySizedValue(value=5, scale=BinaryScale.G))

+    @staticmethod
+    def create(name: str, path: pathlib.Path, size: BinarySizedValue) -> 'DiskEntity':
+        if path.exists():
+            raise DiskException(code=400, msg=f'Disk at {path} already exists')
+        path.parent.mkdir(parents=True, exist_ok=True)
+        disk = DiskEntity(name=name, path=path, size=size)
+        try:
+            subprocess.run(['/opt/homebrew/bin/qemu-img',
+                            'create',
+                            '-f', 'raw',
+                            disk.path,
+                            str(size)],
+                           check=True)
+        except subprocess.CalledProcessError as e:
+            raise DiskException(code=500, msg=f'Failed to create disk: {e.output}') from e
+        return disk
+
+    @staticmethod
+    def create_from_image(name: str, path: pathlib.Path, size: BinarySizedValue, image: ImageEntity):
+        if path.exists():
+            raise DiskException(code=400, msg=f'Disk at {path} already exists')
+        path.parent.mkdir(parents=True, exist_ok=True)
+        disk = DiskEntity(name=name, path=path, size=size)
+        try:
+            subprocess.run(['/opt/homebrew/bin/qemu-img',
+                            'create',
+                            '-f', 'qcow2',
+                            '-F', 'qcow2',
+                            '-b', image.path,
+                            '-o', 'compat=v3',
+                            disk.path,
+                            str(size)],
+                           check=True)
+        except subprocess.CalledProcessError as e:
+            raise DiskException(code=500, msg=f'Failed to create disk from image: {e.output}') from e
+        return disk
+
+    def resize(self, new_size: BinarySizedValue):
+        try:
+            subprocess.run(['/opt/homebrew/bin/qemu-img',
+                            'resize',
+                            self.path,
+                            str(new_size)],
+                           check=True)
+            self.size = new_size
+        except subprocess.CalledProcessError as e:
+            raise DiskException(code=500, msg=f'Failed to resize disk: {e.output}') from e
+
+    def remove(self):
+        if not self.path.exists():
+            return
+        os.unlink(self.path)


class DiskModel(AggregateRootModel, Base):
    """
@@ -31,7 +92,7 @@ class DiskModel(AggregateRootModel, Base):
    name: Mapped[str] = mapped_column(String(64))
    path: Mapped[str] = mapped_column(String())
    size: Mapped[int] = mapped_column(Integer, default=0)
-    size_scale: Mapped[str] = mapped_column(Enum(BinaryScale), default=BinaryScale.GB)
+    size_scale: Mapped[str] = mapped_column(Enum(BinaryScale), default=BinaryScale.G)


class DiskRepository(Repository[DiskEntity, DiskModel]):
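
A usage sketch of the new disk operations (mine, not from the commit). The paths below are placeholders; note that the qemu-img binary path is hard-coded above to the Homebrew location on Apple Silicon, so qemu-img must be installed there for any of this to work.

import pathlib
from kaso_mashin.common.applied.base_types import BinaryScale, BinarySizedValue
from kaso_mashin.common.applied.disks import DiskEntity
from kaso_mashin.common.applied.images import ImageEntity

# Back a new qcow2 disk by an existing image, then grow and delete it.
image = ImageEntity(name='jammy', path=pathlib.Path('/tmp/images/jammy.qcow2'))   # placeholder path
disk = DiskEntity.create_from_image('demo-disk',
                                    path=pathlib.Path('/tmp/disks/demo.qcow2'),   # placeholder path
                                    size=BinarySizedValue(5, BinaryScale.G),
                                    image=image)
disk.resize(BinarySizedValue(10, BinaryScale.G))  # runs 'qemu-img resize <path> 10G'
disk.remove()                                     # unlinks the file if it exists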
9 changes: 4 additions & 5 deletions src/kaso_mashin/common/applied/images.py
@@ -1,4 +1,3 @@
-import typing
import uuid
import dataclasses
import pathlib
@@ -22,8 +21,8 @@ class ImageEntity(AggregateRoot):
    path: pathlib.Path
    id: UniqueIdentifier = dataclasses.field(default_factory=lambda: str(uuid.uuid4()))
    min_vcpu: int = dataclasses.field(default=0)
-    min_ram: BinarySizedValue = dataclasses.field(default_factory=lambda: BinarySizedValue(0, BinaryScale.GB))
-    min_disk: BinarySizedValue = dataclasses.field(default_factory=lambda: BinarySizedValue(0, BinaryScale.GB))
+    min_ram: BinarySizedValue = dataclasses.field(default_factory=lambda: BinarySizedValue(0, BinaryScale.G))
+    min_disk: BinarySizedValue = dataclasses.field(default_factory=lambda: BinarySizedValue(0, BinaryScale.G))


class ImageModel(AggregateRootModel, Base):
@@ -35,9 +34,9 @@ class ImageModel(AggregateRootModel, Base):
    path: Mapped[str] = mapped_column(String())
    min_vcpu: Mapped[int] = mapped_column(Integer, default=0)
    min_ram: Mapped[int] = mapped_column(Integer, default=0)
-    min_ram_scale: Mapped[str] = mapped_column(Enum(BinaryScale), default=BinaryScale.GB)
+    min_ram_scale: Mapped[str] = mapped_column(Enum(BinaryScale), default=BinaryScale.G)
    min_disk: Mapped[int] = mapped_column(Integer, default=0)
-    min_disk_scale: Mapped[str] = mapped_column(Enum(BinaryScale), default=BinaryScale.GB)
+    min_disk_scale: Mapped[str] = mapped_column(Enum(BinaryScale), default=BinaryScale.G)


class ImageRepository(Repository[ImageEntity, ImageModel]):
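
For completeness, a small example (mine) of constructing an ImageEntity with the updated scale names; the BinarySizedValue fields on the entity appear to correspond to the paired value/scale columns (min_ram/min_ram_scale, min_disk/min_disk_scale) on ImageModel.

import pathlib
from kaso_mashin.common.applied.base_types import BinaryScale, BinarySizedValue
from kaso_mashin.common.applied.images import ImageEntity

image = ImageEntity(name='jammy',
                    path=pathlib.Path('/tmp/images/jammy.qcow2'),  # placeholder path
                    min_vcpu=2,
                    min_ram=BinarySizedValue(2, BinaryScale.G),
                    min_disk=BinarySizedValue(10, BinaryScale.G))
print(image.min_ram, image.min_disk)  # 2G 10G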
34 changes: 26 additions & 8 deletions tests/test_applied.py
@@ -7,38 +7,56 @@

def test_applied_disks(applied_session):
    repo = DiskRepository(DiskModel, applied_session)
-    disk = DiskEntity(name="Test Disk",
-                      path=pathlib.Path(__file__),
-                      size=BinarySizedValue(value=1, scale=BinaryScale.GB))
+    disk = DiskEntity.create('Test Disk',
+                             path=pathlib.Path(__file__).parent / 'build' / 'test.qcow2',
+                             size=BinarySizedValue(1, BinaryScale.G))
    try:
        repo.create(disk)
        loaded = repo.get_by_id(disk.id)
        assert disk == loaded
-        disk.size = BinarySizedValue(2, scale=BinaryScale.GB)
+        disk.size = BinarySizedValue(2, scale=BinaryScale.G)
        updated = repo.modify(disk)
        assert disk == updated
        listed = repo.list()
        assert len(listed) == 1
        assert disk == listed[0]
    finally:
+        disk.remove()
        repo.remove(disk.id)
    assert repo.get_by_id(disk.id) is None
+    assert not disk.path.exists()


+def test_applied_disks_from_image(applied_session):
+    repo = DiskRepository(DiskModel, applied_session)
+    image = ImageEntity(name='jammy', path=pathlib.Path('/Users/imfeldma/var/kaso/images/jammy.qcow2'))
+    disk = DiskEntity.create_from_image('Test Disk',
+                                        path=pathlib.Path(__file__).parent / 'build' / 'test.qcow2',
+                                        size=BinarySizedValue(1, BinaryScale.G),
+                                        image=image)
+    try:
+        repo.create(disk)
+        disk.resize(BinarySizedValue(2, BinaryScale.G))
+        repo.modify(disk)
+    finally:
+        disk.remove()
+    assert not disk.path.exists()


def test_applied_images(applied_session):
    repo = ImageRepository(ImageModel, applied_session)
    image = ImageEntity(name='Test Image',
                        path=pathlib.Path(__file__),
                        min_vcpu=2,
-                        min_ram=BinarySizedValue(2, BinaryScale.GB),
-                        min_disk=BinarySizedValue(1, BinaryScale.GB))
+                        min_ram=BinarySizedValue(2, BinaryScale.G),
+                        min_disk=BinarySizedValue(1, BinaryScale.G))
    try:
        repo.create(image)
        loaded = repo.get_by_id(image.id)
        assert image == loaded
        image.min_vcpu = 99
-        image.min_ram = BinarySizedValue(99, BinaryScale.TB)
-        image.min_disk = BinarySizedValue(99, BinaryScale.TB)
+        image.min_ram = BinarySizedValue(99, BinaryScale.T)
+        image.min_disk = BinarySizedValue(99, BinaryScale.T)
        updated = repo.modify(image)
        assert image == updated
        listed = repo.list()
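
These tests rely on an applied_session pytest fixture that is not part of this commit, plus a locally installed qemu-img and, for the backing-image test, a machine-specific image path. A hypothetical sketch of what such a fixture could look like, assuming an in-memory SQLite database and that Repository accepts a plain SQLAlchemy Session:

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

from kaso_mashin.common.applied.base_types import Base


@pytest.fixture
def applied_session():
    # Hypothetical: create the schema in an in-memory SQLite database per test.
    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        yield session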
