This commit is contained in:
Holger Sielaff
2025-08-27 09:55:55 +02:00
commit 90c0ff61ed
107 changed files with 8535 additions and 0 deletions

24
lib/__init__.py Normal file
View File

@@ -0,0 +1,24 @@
def ip_in_net(ip, network):
    """Return True if dotted-quad *ip* lies inside *network* ('a.b.c.d/bits').

    Uses the stdlib ``ipaddress`` module instead of the previous manual
    hex-string arithmetic, which silently misaligned octets > 255.
    """
    import ipaddress
    # strict=False tolerates host bits set in the network spec,
    # matching the old (netaddr & mask) behaviour.
    return ipaddress.ip_address(ip) in ipaddress.ip_network(network, strict=False)
def human_size(num, suffix=''):
    """Format *num* (bytes) as a human-readable string, e.g. '1.5 KB'.

    Falls through to the 'Yi' prefix when the value exceeds the zetta range.
    """
    value = num
    for prefix in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, prefix, suffix)
        value /= 1024.0
    return "%.1f%s%s" % (value, 'Yi', suffix)
class FactoryMixin:
    """Mixin providing a per-class singleton via ``factory()``."""

    _instance = None

    @classmethod
    def factory(cls, *args, **kwargs):
        """Return this class's singleton, creating it on first call.

        Looks only in the class's own ``__dict__``: with plain attribute
        lookup a subclass would inherit the parent's instance and never
        build its own. Also compares with ``is None`` so an instance whose
        ``__bool__`` is falsy is not re-created.
        """
        if cls.__dict__.get('_instance') is None:
            cls._instance = cls(*args, **kwargs)
        return cls._instance

95
lib/db.py Normal file
View File

@@ -0,0 +1,95 @@
import logging
from django.db import models
from django.db.models.query import QuerySet
from django.forms import model_to_dict
# Choice tuples shared by model fields across the project.

# Real boolean field choices.
BOOLEAN_CHOICES = (
    (False, 'No'),
    (True, 'Yes'),
)
# String-typed booleans ('true'/'false'), e.g. for the Mikrotik-style
# `disabled` CharField in lib/mikrotik.py.
BOOLEAN_CHOICES_CHAR = (
    ('false', 'No'),
    ('true', 'Yes'),
)
# Lifecycle states for background jobs.
JOB_STATUS_CHOICES = (
    ('pending', 'Pending'),
    ('running', 'Running'),
    ('success', 'Success'),
    ('error', 'Error'),
)
class TaskAwareQuerySet(QuerySet):
    """QuerySet that can record bulk deletions on an optional task object."""

    def delete(self, task=None):
        """Delete the queryset; log an entry on *task* first when given.

        Task bookkeeping is best-effort: failures are logged, never raised.
        The actual deletion happens OUTSIDE try/finally - the previous
        ``return`` inside ``finally`` swallowed any in-flight exception
        (including KeyboardInterrupt) raised while logging.
        """
        if task:
            try:
                task.add_entry(f'Deleting {self.model.__name__}s via Queryset')
            except Exception as e:
                logging.error(f'Failed to add task entry for {self.model.__name__}s: {e}')
        return super().delete()
class TaskAwareModelMixin(models.Model):
    """Abstract model mixin that records deletions on an optional task object."""

    class Meta:
        abstract = True

    objects = TaskAwareQuerySet.as_manager()

    def delete(self, task=None, using=None, keep_parents=False):
        """Delete this instance; log an entry on *task* first when given.

        Returns Django's usual (count, per-model dict) tuple - the old code
        dropped it. Task bookkeeping is best-effort and done before the
        delete, outside try/finally, so a ``finally`` can no longer swallow
        in-flight exceptions.
        """
        if task:
            try:
                task.add_entry(f'Deleting {self.__class__.__name__} {self}')
            except Exception as e:
                logging.error(f'Failed to add task entry for {self.__class__.__name__} {self}: {e}')
        return super().delete(using=using, keep_parents=keep_parents)
class DateAwareMixin(models.Model):
    """Abstract mixin adding creation/update timestamps."""

    class Meta:
        abstract = True

    # Set once on INSERT (auto_now_add), never editable in forms/admin.
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    # Refreshed on every save() (auto_now).
    updated_at = models.DateTimeField(auto_now=True, editable=False)
class BaseModel(DateAwareMixin):
    """Abstract base model: big-int PK plus a snapshot of initial values."""

    # Class-level fallback only; __init__ always replaces it with an
    # instance attribute, so the shared dict is never mutated.
    _old_values = {}

    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Snapshot for later change detection.
        self._old_values = self.to_json

    internal_id = models.BigAutoField(primary_key=True)

    @property
    def to_json(self):
        """Current field values as a plain dict."""
        return model_to_dict(self)

    def write(self, throw_on_error=False, **kwargs):
        """Assign **kwargs to matching fields and persist them.

        Unknown field names are logged (or raised when *throw_on_error*).
        Only the fields actually set are passed to save(update_fields=...):
        the old code passed ALL kwargs keys, so a single unknown key made
        save() fail. Returns self for chaining.
        """
        logging.debug(f'Writing {self} to DB with {kwargs}')
        updated_fields = []
        for field, value in kwargs.items():
            if hasattr(self, field):
                logging.debug(f'Setting {field} to {value} for {self}')
                setattr(self, field, value)
                updated_fields.append(field)
            else:
                if throw_on_error:
                    raise AttributeError(f'Could not find {field} in {self.__class__.__name__}')
                logging.warning(f'Could not find {field} in {self.__class__.__name__}')
        if not self._state.adding:
            self.save(update_fields=updated_fields)
        else:
            logging.warning(f'Trying to write {self} to DB - object is not yet saved')
        return self
class SearchableMixin:
    """Mixin adding a simple term-based search API to a model class."""

    @classmethod
    def term_filter(cls, search_string):
        """Return a filter expression for *search_string*; must be overridden.

        The old code did ``raise NotImplemented(...)`` - NotImplemented is
        not callable, so this actually raised TypeError - and the message
        lacked its f-prefix and used ``cls.__class__.__name__`` (the
        metaclass name).
        """
        raise NotImplementedError(f'"{cls.__name__}.term_filter" not implemented')

    @classmethod
    def term_search(cls, search_string):
        """Filter objects by term_filter(); empty term returns all objects."""
        if search_string:
            return cls.objects.filter(cls.term_filter(search_string))
        return cls.objects.all()

83
lib/decorators.py Normal file
View File

@@ -0,0 +1,83 @@
import logging
from functools import wraps
from django.db.models import Model
from django_proxmox_mikrotik.configs import MIKROTIK_READONLY, MikrotikConfig, PROXMOX_READONLY, ProxmoxConfig
def readonly(func):
    """Decorator to temporarily enable READONLY for both Proxmox and Mikrotik."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        pm_initial = ProxmoxConfig.READONLY
        mk_initial = MikrotikConfig.READONLY
        ProxmoxConfig.READONLY = True
        MikrotikConfig.READONLY = True
        logging.debug(f"READONLY: Setting ProxmoxConfig.READONLY from {pm_initial} to {ProxmoxConfig.READONLY} "
                      f"and MikrotikConfig.READONLY from {mk_initial} to {MikrotikConfig.READONLY} for {func.__name__}")
        try:
            return func(*args, **kwargs)
        finally:
            # Restore the values captured on entry - the old code reset to
            # the module constants PROXMOX_READONLY/MIKROTIK_READONLY, which
            # broke nested @readonly/@force_write usage, and its log line ran
            # before the reset so it reported the wrong values.
            ProxmoxConfig.READONLY = pm_initial
            MikrotikConfig.READONLY = mk_initial
            logging.debug(f"READONLY: Reset ProxmoxConfig.READONLY to {pm_initial} "
                          f"and MikrotikConfig.READONLY to {mk_initial} for {func.__name__}")
    return wrapper
def force_write(func):
    """Decorator to temporarily disable READONLY for both Proxmox and Mikrotik."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        pm_initial = ProxmoxConfig.READONLY
        mk_initial = MikrotikConfig.READONLY
        ProxmoxConfig.READONLY = False
        MikrotikConfig.READONLY = False
        logging.debug(f"FORCE WRITE: Setting ProxmoxConfig.READONLY from {pm_initial} to {ProxmoxConfig.READONLY} "
                      f"and MikrotikConfig.READONLY from {mk_initial} to {MikrotikConfig.READONLY} for {func.__name__}")
        try:
            return func(*args, **kwargs)
        finally:
            # Restore the values captured on entry - the old code reset to
            # the module constants, which broke nesting with @readonly, and
            # its log line ran before the reset so it reported wrong values.
            ProxmoxConfig.READONLY = pm_initial
            MikrotikConfig.READONLY = mk_initial
            logging.debug(f"FORCE WRITE: Reset ProxmoxConfig.READONLY to {pm_initial} "
                          f"and MikrotikConfig.READONLY to {mk_initial} for {func.__name__}")
    return wrapper
def skip_signal(signaltype='post_save', **kwargs):
    """Decorator for signal handlers that prevents re-entrant invocation.

    Mostly used for post_save and post_delete: while the handler runs,
    ``instance._skip_signal`` is set, so a save() inside the handler does
    not trigger the handler again. *signaltype* is only used for logging.
    """
    def _decorator(signal_handler):
        @wraps(signal_handler)
        def _wrapper(sender, instance: Model, **kwargs):
            if getattr(instance, '_skip_signal', False):
                logging.debug(
                    f'Skip signal handler for {signaltype} : {signal_handler.__name__} - {sender.__name__} - {instance.__class__.__name__}')
                return
            instance._skip_signal = True
            try:
                return signal_handler(sender, instance, **kwargs)
            finally:
                # Always clear the guard, even when the handler raised.
                try:
                    del instance._skip_signal
                except AttributeError:
                    logging.debug(
                        f'{instance.__class__.__name__} instance has no attribute "_skip_signal" - could not delete it.')
                except Exception:
                    # The old call logging.exception('WTF????', str(e)) passed a
                    # stray argument with no %-placeholder; log the traceback
                    # properly instead.
                    logging.exception('Unexpected error while clearing _skip_signal')
        return _wrapper
    return _decorator

94
lib/ldap.py Normal file
View File

@@ -0,0 +1,94 @@
import logging
import re
import ldap
from django.contrib.auth.models import Group, User
from django_proxmox_mikrotik.configs import AuthLDAPConfig
# Extracts the uid from a member DN like 'uid=jdoe,ou=...' (capture group 1).
groupmember_re = re.compile('^uid=([^,]+),')
class Ldap:
    """Convenience wrapper around python-ldap for the project's directory.

    Usable directly or as a context manager (re-binds on __enter__, unbinds
    on __exit__). Unknown attributes fall through to the ``ldap`` module
    itself (constants like SCOPE_SUBTREE, exception classes, ...).
    """

    # Django group names mirrored from the directory.
    possible_groups = ['root', 'intern', 'extern']

    def __init__(self):
        self.initialize()

    def initialize(self):
        """Connect and bind with the configured service credentials."""
        self.conn = ldap.initialize(AuthLDAPConfig.HOST)
        self.conn.simple_bind_s(AuthLDAPConfig.BIND_DN, AuthLDAPConfig.BIND_PASSWORD)

    def __enter__(self):
        self.initialize()
        return self

    def __exit__(self, *args):
        self.conn.unbind_s()

    def search(self, base, filterstr='(objectClass=*)', attrlist=None):
        """Subtree search under *base*; returns python-ldap (dn, attrs) tuples."""
        logging.debug(f'LDAP to {base} with filter {filterstr} - {attrlist if attrlist else "all attributes"}')
        return self.conn.search_s(base, ldap.SCOPE_SUBTREE, filterstr, attrlist)

    def __getattr__(self, item):
        # Delegate unknown attributes to the ldap module.
        if hasattr(ldap, item):
            return getattr(ldap, item)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{item}'")

    def get_groups(self, filterstr='(objectClass=*)', attrlist=None):
        return self.search(AuthLDAPConfig.GROUP_SEARCH_BASE, filterstr, attrlist)

    def get_users(self, filterstr='(objectClass=inetOrgPerson)', attrlist=None):
        # The previous default filter was missing its closing parenthesis,
        # which is not a valid LDAP filter expression (RFC 4515).
        return self.search(AuthLDAPConfig.USER_BASE, filterstr, attrlist)

    def get_user(self, username):
        """Return the first matching user's attributes, utf-8 decoded, or {}."""
        if userdata := self.get_users(f'(uid={username})'):
            return {k: (v[0].decode('utf-8') if v else None) for k, v in userdata[0][1].items()}
        return {}

    def get_user_groups(self, username, attrlist=None):
        """Return the 'ou' names of all groupOfNames entries containing the user."""
        filterstr = (f'(&'
                     f'(objectClass=groupOfNames)'
                     f'(member=uid={username},{AuthLDAPConfig.USER_BASE})'
                     f')')
        grps = self.get_groups(filterstr, attrlist)
        # Skip entries without an 'ou' attribute instead of crashing on
        # the old unconditional data.get('ou')[0].
        return [data['ou'][0].decode('utf-8') for dn, data in grps if data.get('ou')]

    def get_group_members(self, groupname, attrlist=None):
        """Return the member uids of *groupname*, or [] when not found."""
        if found := self.search(f'ou={groupname},{AuthLDAPConfig.GROUP_SEARCH_BASE}',
                                '(objectClass=groupOfNames)', attrlist=['member']):
            # Default to [] so a group entry without 'member' does not crash.
            return [groupmember_re.sub(r'\1', m.decode('utf-8')) for m in found[0][1].get('member', [])]
        return []

    def create_initial_groups(self):
        """Ensure the Django groups exist; returned in possible_groups order."""
        return [
            Group.objects.get_or_create(name=name)[0] for name in self.possible_groups
        ]

    def set_user_groups(self, userinstance: User, save_instance=True):
        """Mirror directory group membership onto *userinstance*.

        Saves the instance only when *save_instance* is True (the previous
        docstring claimed it never saved). Raises when the user cannot be
        found in LDAP.
        """
        root_group, intern_group, extern_group = self.create_initial_groups()
        try:
            ldap_user = self.get_user(userinstance.username)
            if ldap_user:
                logging.debug(f'LDAP-User found: {ldap_user}')
                groups = self.get_user_groups(userinstance.username)
                if 'root' in groups and (userinstance.is_superuser is False or userinstance.is_staff is False):
                    logging.debug(f'LDAP-User is root: {ldap_user}')
                    userinstance.groups.add(root_group)
                    userinstance.is_superuser = True
                    userinstance.is_staff = True
                elif 'intern' in groups and userinstance.is_staff is False:
                    logging.debug(f'LDAP-User is intern: {ldap_user}')
                    userinstance.groups.add(intern_group)
                    userinstance.is_staff = True
                elif 'extern' in groups:
                    logging.debug(f'LDAP-User is extern: {ldap_user}')
                    userinstance.groups.add(extern_group)
            else:
                raise Exception(f'LDAP-User not found: {userinstance.username}')
        except Exception as e:
            logging.error(f"LDAP-Fehler: {e}")
            raise  # bare raise keeps the original traceback
        if save_instance:
            userinstance.save()

13
lib/messages.py Normal file
View File

@@ -0,0 +1,13 @@
from django_middleware_global_request import get_request
from django.contrib import messages
import logging
def __getattr__(name):
    """PEP 562 module hook: forward any django.contrib.messages function
    (info, warning, error, ...) while injecting the current request and
    mirroring the message text to the logging module."""
    if not hasattr(messages, name):
        raise AttributeError(f"'{__name__}' object has no attribute '{name}'")

    def wrapper(*args, **kwargs):
        request = get_request()
        # Same-named logging function when it exists, logging.info otherwise
        # (e.g. messages.success has no logging counterpart).
        getattr(logging, name, logging.info)(*args)
        return getattr(messages, name)(request, *args, **kwargs)

    return wrapper

379
lib/mikrotik.py Normal file
View File

@@ -0,0 +1,379 @@
import collections
import logging
from functools import cached_property
import routeros_api
from django.db import models
from django.db.utils import IntegrityError
from django.forms import model_to_dict
from django.forms.models import ValidationError
from routeros_api.api_structure import StringField
from routeros_api.exceptions import RouterOsApiCommunicationError
from django_proxmox_mikrotik.settings import MikrotikConfig
from lib import FactoryMixin
from lib.db import BOOLEAN_CHOICES_CHAR
from lib.router_abstract import RoutedModelAbstract, RouterAbstract
# Presumably a collection of 3-character IP prefixes matched by
# is_local_ip() below - TODO confirm the configured format.
ip_8 = MikrotikConfig.IP_8
_logger = logging.getLogger(__name__)
def is_local_ip(ip):
    """Return True when the first three characters of *ip* occur in the
    configured ``MikrotikConfig.IP_8`` prefixes.

    NOTE(review): this is a naive string-prefix check, not a real subnet
    test - '10.1.2.3' yields the prefix '10.' - so it relies on IP_8 being
    stored in exactly that 3-character form.
    """
    prefix = ip[:3]
    return prefix in ip_8
class MikrotikApi(FactoryMixin):
    """Low-level RouterOS API connection built on ``routeros_api``.

    Each instance owns its own connection; callers are expected to call
    ``disconnect()`` when done (see Mikrotik._resource).
    """

    def __init__(self):
        # Create fresh connection for each instance - no shared state
        self.connection = None
        self.api = None
        self._connect()

    def _connect(self):
        """Create a new connection and API instance"""
        try:
            self.connection = routeros_api.RouterOsApiPool(
                MikrotikConfig.HOST,
                username=MikrotikConfig.USER,
                password=MikrotikConfig.PASS,
                port=8728,  # plain (non-TLS) RouterOS API port
                plaintext_login=True,
                use_ssl=False,
                ssl_verify=True,
                ssl_verify_hostname=True,
                ssl_context=None,
            )
            self.api = self.connection.get_api()
        except Exception as e:
            # Leave the instance in a clean "not connected" state and
            # propagate - callers retry (see Mikrotik._resource).
            _logger.error(f"Failed to create Mikrotik connection: {e}")
            self.connection = None
            self.api = None
            raise

    def disconnect(self):
        """Safely disconnect the connection"""
        if self.connection:
            try:
                self.connection.disconnect()
            except (OSError, AttributeError, BrokenPipeError) as e:
                _logger.debug(f"Error during disconnect (expected): {e}")
            finally:
                # Always drop references so the next use reconnects.
                self.connection = None
                self.api = None

    @property
    def _default_structure(self):
        # RouterOS replies are not schema-typed: treat every field as a
        # windows-1250 string via a defaultdict.
        return collections.defaultdict(lambda: StringField(encoding='windows-1250'))

    def resource(self, route):
        # Raises when _connect() failed earlier; callers handle reconnects.
        if not self.api:
            raise ConnectionError("No active Mikrotik connection")
        return self.api.get_resource(route, self._default_structure)
class Mikrotik(RouterAbstract):
    """Router frontend for one RouterOS resource path.

    Instances are pooled per route via ``pool()``. Write operations honour
    MikrotikConfig.READONLY and only simulate success in that mode.
    """

    # One shared instance per route string.
    _instances = {}

    class _resource:
        """Per-call proxy that opens a fresh API connection with retries."""

        def __init__(self, route):
            self._route = route

        def __getattr__(self, item):
            """Dynamic method creation for RouterOS API calls."""
            def method_wrapper(*args, **kwargs):
                api = None
                max_retries = 3
                for attempt in range(max_retries):
                    try:
                        api = MikrotikApi()
                        resource = api.resource(self._route)
                        method = getattr(resource, item)
                        return method(*args, **kwargs)
                    except (OSError, BrokenPipeError, ConnectionError, AttributeError) as e:
                        _logger.warning(f"Connection error in _resource.{item}() attempt {attempt + 1}/{max_retries}: {e}")
                        if attempt == max_retries - 1:
                            raise
                    finally:
                        # Always release the per-attempt connection.
                        if api:
                            api.disconnect()
            return method_wrapper
        # NOTE: a stray string literal containing a commented-out call()
        # implementation was removed here (dead code).

    def __init__(self, route):
        self._route = route

    @staticmethod
    def pool(route):
        """Return the pooled Mikrotik instance for *route* (model or string)."""
        if isinstance(route, MikrotikModelMixin):
            route = route.router_base
        return Mikrotik._instances.setdefault(route, Mikrotik(route))

    def initialize(self, route):
        """Re-point this instance at *route* and register it in the pool."""
        if isinstance(route, MikrotikModelMixin):
            self._route = route.router_base
        else:
            self._route = route
        Mikrotik._instances.setdefault(route, self)

    def get(self, **kwargs):
        """Fetch entries, translating Django-style lookups into RouterOS queries.

        Supported lookups: field__startswith -> ~"^v", field__contains -> ~"v",
        field__endswith -> ~"v$". Unknown lookups are passed through unchanged.
        """
        mikrotik_kwargs = {}
        additional_queries = []
        for k in list(kwargs.keys()):
            if '__' in k:
                v = kwargs.pop(k)
                field, lookup = k.split('__', 1)
                if lookup == 'startswith':
                    # RouterOS uses ~"^pattern" for startswith
                    mikrotik_kwargs[field] = f'^{v}'
                elif lookup == 'contains':
                    # RouterOS uses ~"pattern" for contains
                    mikrotik_kwargs[field] = f'{v}'
                elif lookup == 'endswith':
                    # RouterOS uses ~"pattern$" for endswith
                    mikrotik_kwargs[field] = f'{v}$'
                else:
                    # Unknown lookup type - keep the original key/value.
                    kwargs[k] = v
        _logger.debug(f'Getting {self._route} with transformed kwargs: {mikrotik_kwargs}')
        for field, pattern in mikrotik_kwargs.items():
            additional_queries.append(f'{field}~"{pattern}"')
        # Use the module logger consistently (was the root logger before).
        _logger.info(f'Getting {self._route}/print with kwargs: {kwargs} and additional queries: {additional_queries}')
        return self._resource(self._route).call('print', queries=kwargs, additional_queries=additional_queries)

    def set(self, **kwargs):
        """Update an existing entry; requires 'id' in kwargs."""
        assert 'id' in kwargs, "id must be set"
        if MikrotikConfig.READONLY:
            _logger.warning(f'Trying to set {self._route} to {kwargs} on read-only router')
            return True  # Simulate success in readonly mode
        else:
            return self._resource(self._route).set(**kwargs)

    def add(self, **kwargs):
        """Create a new entry; any 'id' is stripped (the router assigns one)."""
        kwargs.pop('id', None)
        if MikrotikConfig.READONLY:
            _logger.warning(f'Trying to add {self._route} to {kwargs} on read-only router')
            return '*READONLY'  # Simulate success with fake ID in readonly mode
        else:
            return self._resource(self._route).add(**kwargs)

    def remove(self, **kwargs):
        """Remove an entry; requires 'id' in kwargs."""
        assert 'id' in kwargs, "id must be set"
        if MikrotikConfig.READONLY:
            _logger.warning(f'Trying to remove {self._route} with {kwargs} on read-only router')
            return True  # Simulate success in readonly mode
        else:
            return self._resource(self._route).remove(id=kwargs['id'])
class MikrotikModelMixin(RoutedModelAbstract):
    """Django-model mixin that mirrors one row of a RouterOS resource.

    Subclasses must provide ``router_base`` (the resource path) and
    ``unique_on_router`` (field names identifying the row on the router).
    """

    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Snapshot of the initial field values for change tracking.
        self._old_values = self.to_json

    internal_id = models.BigAutoField(primary_key=True)
    # RouterOS encodes booleans as the strings 'true'/'false'.
    disabled = models.CharField(max_length=10, null=True, blank=True, default='false', choices=BOOLEAN_CHOICES_CHAR)
    comment = models.TextField(null=True, blank=True, default='')

    # --- Required configuration (must be provided by subclasses) ---

    @property
    def router_base(self):
        """RouterOS resource path for this model; must be overridden.
        (Was ``raise NotImplemented(...)``, which is a TypeError.)"""
        raise NotImplementedError('Not implemented')

    @property
    def router_object_without_id(self):
        _logger.warning(f'Deprecated - use router_object instead of router_object_without_id for {self}')
        return self.router_object()

    # --- Common behaviour (may be overridden) ---

    @property
    def no_mikrotik_props(self):
        """Props that are not sent to the router."""
        return ['internal_id', 'dynamic']

    @property
    def router(self):
        return Mikrotik.pool(self)

    @classmethod
    def class_props(cls):
        """Model field names that are relevant for router data."""
        return [p for p in model_to_dict(cls()).keys() if p not in ('internal_id', 'dynamic')]

    @classmethod
    def get_all_as_object(cls):
        """Fetch every router row and materialize it as a model instance."""
        all_rows = cls().router_get_all
        # Was a stray print(); use the module logger.
        _logger.debug(f'Fetched rows from router for {cls.__name__}: {all_rows}')
        return [cls.from_dict(**r) for r in all_rows]

    @classmethod
    def translate_keys(cls, **kwargs):
        """RouterOS keys use '-', Django fields use '_'."""
        return {k.replace('-', '_'): v for k, v in kwargs.items()}

    def router_object(self, translate_keys=True):
        """Fetch this row from the router, identified by id or unique fields."""
        if self.id:
            args = {'id': self.id}
        else:
            args = {}
            for k in self.unique_on_router:
                # For grouped alternatives, take the first one that is set.
                if isinstance(k, (tuple, list)):
                    for k2 in k:
                        if v2 := getattr(self, k2, None):
                            args[k2] = v2
                            break
                else:
                    if v := getattr(self, k, None):
                        args[k] = v
        if not args:
            raise ValueError(f"Empty args to get info from router for {self}")
        data = self.router.get(**args)
        if data:
            return self.translate_keys(**data[0]) if translate_keys else data[0]
        return None

    def sync_from_router(self, data=None):
        """Refresh local fields from the router; returns self."""
        if data := data or self.router_object():
            _logger.debug(f'Syncing {self} from router with {data}')
            self.assign(**data)
        else:
            _logger.debug(f'Could not sync {self} from router')
        return self

    def assign(self, **kwargs):
        """Set matching attributes; unknown keys are ignored. Returns self."""
        updatefields = []
        for k, v in kwargs.items():
            if hasattr(self, k):
                if v != getattr(self, k):
                    updatefields.append(k)
                    setattr(self, k, v)
        return self

    def sync_all_from_router(self):
        """Create/refresh DB rows for every row on the router."""
        for obj in self.router_get_all:
            self.from_dict(**obj)

    def delete_from_router(self):
        """Remove this row on the router; no-op (True) when it has no id."""
        if self.id:
            return self.router.remove(id=self.id)
        return True

    @classmethod
    def from_dict(cls, **kwargs):
        """Get-or-create a model instance from raw router data."""
        self_props = cls.class_props()
        args = {}
        for k, v in cls.translate_keys(**kwargs).items():
            if k not in self_props:
                # cls.__name__, not cls.__class__.__name__ (the metaclass).
                _logger.warning(f'Unknown property {k} for {cls.__name__}')
            else:
                args[k] = v
        try:
            obj = cls.objects.get(id=args['id'])
            _logger.debug(f'Found {obj} from {kwargs}')
        except cls.DoesNotExist:
            obj = cls.objects.create(**args)
            _logger.debug(f'Created {obj} from {kwargs}')
        except Exception as e:
            _logger.error(f'Could not create {cls.__name__} from {kwargs} - {e}')
            raise
        return obj

    @property
    def mikrotik_send_params(self):
        """Field values to transmit, minus the local-only props."""
        return {k: v for k, v in self.to_json.items() if k not in self.no_mikrotik_props}

    def sync_to_router(self, created=False):
        """Push this object's state to the router (set if it has an id, else add)."""
        data = self.mikrotik_send_params
        _logger.debug(f'Syncing {self.__dict__}')
        if self.id:
            _logger.debug(f'Syncing {self} to router with {data}')
            return self.router_set(**data)
        _logger.debug(f'Adding {self} to router with {data}')
        return self.router_add(**data)

    @cached_property
    def router_get_all(self):
        # Cached for the lifetime of this instance.
        return self.router.get()

    def router_get(self, **kwargs):
        response = self.router.get(**kwargs)
        _logger.debug(f'Got {self} from router with {response}')
        return response

    def router_set(self, **kwargs):
        kwargs['id'] = self.id
        response = self.router.set(**kwargs)
        _logger.debug(f'Set {self} to router with {response}')
        return response

    def router_add(self, **kwargs):
        """Add this row on the router, then adopt the router-assigned id."""
        if self.id:
            _logger.warning(f'Trying to add {self} to router - already has id {self.id}')
            return True
        kwargs.pop('id', None)
        try:
            response = self.router.add(**kwargs)
        except RouterOsApiCommunicationError as e:
            _logger.error(f'Could not add {self} to router - {e}')
            # The row may already exist on the router - adopt it instead.
            routerdata = self.router_object()
            if routerdata:
                return self.sync_from_router(data=routerdata)
            raise ValidationError(f'Could not add {self} to router - {e}')
        try:
            new_on_router = self.router_object()
            _logger.debug(f'Got {new_on_router} from router')
            self.id = new_on_router['id']
            _logger.debug(f'Added {self} to router with {response}')
            self.save()
        except (IndexError, KeyError, NotImplementedError) as e:
            _logger.info(f'Could not set id for {self} - got no id {e}')
        return response
def sync_from_mikrotik(classname):
    """Pull every row of *classname*'s router resource into the database,
    updating existing rows (matched by id) and creating missing ones."""
    template = classname()
    for row in template.router_get_all:
        # RouterOS keys use '-', Django fields use '_'.
        row = {key.replace('-', '_'): value for key, value in row.items()}
        try:
            existing = classname.objects.get(id=row['id'])
        except classname.DoesNotExist:
            _logger.info(f'Creating {row["id"]}')
            try:
                classname.objects.create(**row)
            except IntegrityError as e:
                _logger.error(f'Could not create {row["id"]}, already exists')
                _logger.exception(e)
        else:
            for key, value in row.items():
                if hasattr(existing, key):
                    _logger.debug(f'Updating {key} to {value} - {existing}')
                    setattr(existing, key, value)
            existing.save()

214
lib/proxmox.py Normal file
View File

@@ -0,0 +1,214 @@
import json
import logging
import time
import proxmoxer
from django_proxmox_mikrotik.settings import ProxmoxConfig
def get_comma_separated_values(value):
    """Parse a 'k=v,k2=v2' string into a dict.

    Chunks without '=' are skipped; only the first '=' splits key from
    value. Falsy input yields an empty dict.
    """
    if not value:
        return {}
    pairs = (chunk.strip().split('=', 1) for chunk in value.split(',') if '=' in chunk)
    return dict(pairs)
class PMDict(dict):
    """Dict whose keys are additionally readable as attributes."""

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails.
        if name not in self:
            raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
        return self[name]
class PMCollection(list):
    """List of PMDict items with vectorized attribute access."""

    def append(self, item):
        """Coerce plain dicts to PMDict on append."""
        assert isinstance(item, dict)
        if not isinstance(item, PMDict):
            item = PMDict(**item)
        super().append(item)

    def __getattr__(self, item):
        """Return a generator over the members' *item* attribute.

        The original was itself a generator function, so EVERY attribute
        access returned a generator object and the AttributeError inside it
        was only raised on iteration - breaking hasattr() and similar
        protocols. Decide eagerly, then hand back a generator expression.
        """
        if self and hasattr(self[0], item):
            return (getattr(member, item) for member in self)
        raise AttributeError(f"'{self.__class__.__name__}' object (or its content[0]) has no attribute '{item}'")
class Proxmox:
    """Thin convenience wrapper around proxmoxer's ProxmoxAPI for one node."""

    def __init__(self, node=ProxmoxConfig.NODE):
        self.initialize(node)

    def initialize(self, node=ProxmoxConfig.NODE):
        """(Re-)create the underlying API client; returns self."""
        self.api = proxmoxer.ProxmoxAPI(
            ProxmoxConfig.HOST,
            user=ProxmoxConfig.USER,
            password=ProxmoxConfig.PASS,
            verify_ssl=False
        )
        self.nodepath = f'nodes/{node}'
        return self

    def __enter__(self):
        return self.initialize()

    def __exit__(self, *args):
        """Actually, this is a no-op, just for the with statement :)"""
        return

    def nodes(self, route=''):
        """Resource under nodes/<node>/..."""
        return self.api(f'{self.nodepath}/{route.lstrip("/")}')

    def lxc(self, route=''):
        return self.nodes(f'lxc/{route.lstrip("/")}')

    def storage(self, route=''):
        return self.nodes(f'storage/{route.lstrip("/")}')

    def qemu(self, route=''):
        return self.nodes(f'qemu/{route.lstrip("/")}')

    def cluster(self, route=''):
        return self.api(f'cluster/{route.lstrip("/")}')

    @property
    def next_vmid(self):
        """Cluster-wide next free VM id."""
        return int(self.cluster_get('nextid'))

    def __getattr__(self, name):
        """This makes a 'magic' trick
        We can call the proxmox api like this:
        * proxmox.lxc_115_config_get()
        * proxmox.lxc_115_get('config')
        * proxmox.lxc_get('115/config')
        * proxmox.lxc('115/config').get()
        * ...
        seems handy at the moment ...
        The first in *args will always be taken as route!
        """
        if "_" in name:
            nameparts = name.split("_")
            action = nameparts.pop()
            if action not in ["get", "post", "put", "delete"]:
                raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}' - 1")
            # First part selects a base resource method; default is nodes().
            if hasattr(self, nameparts[0]):
                base_method = getattr(self, nameparts.pop(0))
            else:
                base_method = self.nodes
            route_part = "/".join(nameparts)

            def wrapper(*args, **kwargs):
                if args:
                    route = str(args[0]).rstrip('/')
                    args = args[1:]
                else:
                    route = ''
                if route_part:
                    route = f'{route_part}/{route}'
                if ProxmoxConfig.READONLY and action != 'get':
                    logging.warning(f'PROXMOX_READONLY is set - not calling {route} '
                                    f'with method {base_method.__name__}.{action}'
                                    f'({args}, {kwargs})')
                    # Return appropriate mock response based on action
                    if action == 'post':
                        return 'UPID:READONLY:00000000:00000000:00000000:vzcreate:readonly:root@pam:'
                    elif action == 'put':
                        return None
                    elif action == 'delete':
                        return None
                    return {}
                logging.debug(f'Calling {base_method.__name__}.{action}({route}, {args}, {kwargs})')
                return getattr(base_method(route), action)(*args, **kwargs)
            return wrapper
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    @classmethod
    def get_task_status(cls, taskhash, sleeptime=10, pm=None):
        """Generator: poll the task's status endpoint, yielding each response.

        Raises ValueError when the task stopped without exitstatus 'OK' and
        TimeoutError after ProxmoxConfig.CREATE_LXC_TIMEOUT seconds. (The old
        code rebound ``cls`` to an instance and issued one redundant status
        request before the loop.)
        """
        client = pm or cls()
        logging.debug(f"Getting task status for {taskhash}")
        maxtime = ProxmoxConfig.CREATE_LXC_TIMEOUT
        while True:
            response = client.nodes_get(f'tasks/{taskhash}/status')
            logging.debug(f"Status response for {taskhash}: {response}")
            yield response
            if response['status'] == 'stopped':
                if not response.get('exitstatus') == 'OK':
                    raise ValueError(f"Exitstatus is {response.get('exitstatus')}")
                break
            time.sleep(sleeptime)
            maxtime -= sleeptime
            if maxtime <= 0:
                raise TimeoutError("Took too long")

    def get_all_lxc(self, *filterargs, as_dict=True, **filterkwargs):
        """List LXC containers (merged with their config), optionally filtered.

        *filterargs* are (prop, comparison, value) triples where comparison
        is one of '=', 'in', 'startswith', 'endswith', optionally prefixed
        with '!' to invert. **filterkwargs** are exact-match filters.

        Fixes over the old version: comparers now test the container's value
        (`x`) instead of `v.startswith(v)`; loop variables are bound into the
        lambdas (the old closures all captured the LAST loop value); the
        intermediate `filter()` object is materialized so emptiness checks
        work; an unknown comparison raises ValueError instead of crashing
        with UnboundLocalError.
        """
        logging.debug(f"Getting all LXC with filter {filterargs} and {filterkwargs}")
        from proxmox.models import Lxc
        raw = self.lxc_get()
        logging.debug(f"All LXC: {raw}")
        if not raw:
            return []
        # Kwarg filters whose key appears in the listing are applied first;
        # entries missing the key are kept (matches the old behaviour).
        comps = {key: filterkwargs.pop(key) for key in list(raw[0].keys()) if key in filterkwargs}
        candidates = [
            lxc for lxc in raw
            if all(key not in lxc or lxc[key] == expected for key, expected in comps.items())
        ]
        if not candidates:
            logging.debug(f"No LXC found with filter {filterargs} and {filterkwargs}")
            return []
        lxc_filter = {}
        for prop, c, v in filterargs:
            invert = c.startswith('!')
            if c.endswith('='):
                comparer = (lambda x, v=v: x != v) if invert else (lambda x, v=v: x == v)
            elif c.endswith('in'):
                comparer = (lambda x, v=v: x not in v) if invert else (lambda x, v=v: x in v)
            elif c.endswith('startswith'):
                comparer = (lambda x, v=v: not x.startswith(v)) if invert else (lambda x, v=v: x.startswith(v))
            elif c.endswith('endswith'):
                comparer = (lambda x, v=v: not x.endswith(v)) if invert else (lambda x, v=v: x.endswith(v))
            else:
                raise ValueError(f"Unknown comparison {c!r} for property {prop!r}")
            lxc_filter[prop] = comparer
        for k, v in filterkwargs.items():
            lxc_filter[k] = lambda x, v=v: x == v

        def filter_out(lxc_):
            # True = keep. Missing properties never exclude a container.
            for prop, comparer in lxc_filter.items():
                if prop not in lxc_:
                    continue
                if not comparer(lxc_.get(prop)):
                    logging.debug(f"Filter out {lxc_} because {prop} is {lxc_.get(prop)}")
                    return False
            return True

        ret = []
        for lxc in filter(filter_out, candidates):
            lxc_config = self.lxc_get(f'{lxc["vmid"]}/config')
            merged = lxc | lxc_config
            ret.append(PMDict(**merged) if as_dict else Lxc().from_proxmox(**merged))
        if not ret:
            logging.warning(f"Found no LXC with filter {filterargs} and {filterkwargs}")
        return ret

342
lib/router_abstract.py Normal file
View File

@@ -0,0 +1,342 @@
import json
import logging
from copy import copy
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.forms import model_to_dict
from lib import FactoryMixin
class RouterObjectCollection(set, FactoryMixin):
    """A set of RouterObjectAbstract instances, homogeneous in member type.

    The element type is fixed by the first member added.
    """

    _objectclass = None

    def _check_member_class(self, member):
        """Validate *member* against the (possibly still unset) element type."""
        if self._objectclass:
            if not isinstance(member, self._objectclass):
                # __name__ of the class itself, not of its metaclass.
                raise ValueError(
                    f'Only {self._objectclass.__name__} can be added to {self.__class__.__name__}')
        else:
            if not isinstance(member, RouterObjectAbstract):
                raise ValueError(
                    f'Only {RouterObjectAbstract.__name__} can be added to {self.__class__.__name__}')
            # First member fixes the collection's element type.
            self._objectclass = member.__class__

    def add(self, *args, **kwargs):
        """Override add() to check if object is of correct type
        we need a __hash__() function for this"""
        self._check_member_class(args[0])
        return super().add(*args, **kwargs)

    def filter(self, *args, **kwargs):
        """Return a NEW collection with the members matching the criteria."""
        _new = self.__class__()
        for obj in self:
            if obj.filtered(*args, **kwargs):
                _new.add(obj)
        return _new

    def all(self):
        """This returns all objects - a copy"""
        return self.copy()

    def first(self, *args, **kwargs):
        """Return an arbitrary ('first') matching member, or None when empty."""
        objects = self.filter(*args, **kwargs) if (args or kwargs) else self.all()
        # Sets are not indexable - the old `objects[0]` raised TypeError.
        return next(iter(objects), None)

    def remove(self, *args, **kwargs):
        """Override remove() to not throw KeyError (like discard())"""
        try:
            super().remove(*args, **kwargs)
        except KeyError:
            pass
        return self

    def values_list(self, keyset: list, flat=False):
        """Project members onto *keyset*.

        flat=False: a new collection of reduced member objects.
        flat=True: a list of (key, [values...]) pairs accumulated over ALL
        members - the old code returned inside the loop after the first one.
        """
        if flat:
            valuesdict = {}
            for obj in self:
                for k, v in obj.items():
                    if k in keyset:
                        valuesdict.setdefault(k, []).append(v)
            return list(valuesdict.items())
        _ret = self.__class__()
        for obj in self:
            _ret.add(obj.__class__(**{k: v for k, v in copy(obj).items() if k in keyset}))
        return _ret
class RoutedModelAbstract(models.Model):
    """Abstract Django model mirroring an object that lives on a router."""

    class Meta:
        abstract = True

    internal_id = models.BigAutoField(primary_key=True)

    @property
    def router_object(self):
        """The RouterObjectAbstract this row was built from (may be None)."""
        return self._router_object

    @router_object.setter
    def router_object(self, value):
        self._router_object = value

    def __init__(self, *args, **kwargs):
        # 'router_object' is not a model field - pop it before Django sees it.
        self._router_object = kwargs.pop('router_object', None)
        super().__init__(*args, **kwargs)
        # Snapshot of initial values for change tracking.
        self._old_values = self.to_json

    @property
    def to_json(self):
        return model_to_dict(self)

    @property
    def unique_on_router(self):
        """Field name(s) identifying this row on the router; must override.
        (Was ``raise NotImplemented(...)``, which is a TypeError.)"""
        raise NotImplementedError(f'{self.__class__.__name__} must implement unique_on_router')
class RouterObjectAbstract(dict, FactoryMixin):
_router_instance = None
_model_class = None
_model_instance = None
def __init__(self, router=None, model=None, **kwargs):
if router:
self.set_router(router)
if model:
self.set_model(model)
super().__init__(**kwargs)
class DoesNotExistsOnRouter(ObjectDoesNotExist):
pass
@property
def model_class(self):
return self._model_class
@property
def get_from_db(self):
raise NotImplemented(f"{self.__class__.__name__} must implement get_from_db")
@model_class.setter
def model_class(self, model):
if isinstance(model, RoutedModelAbstract):
self._model_class = model.__class__
elif issubclass(model, RoutedModelAbstract):
self._model_class = model
else:
raise ValueError(
f"model must be of type {RoutedModelAbstract.__class__.__name__}, not {model.__class__.__name__}")
@property
def model_instance(self):
return self._model_instance
@model_instance.setter
def model_instance(self, model):
if self._model_class and not issubclass(model.__class__, self._model_class):
raise ValueError(
f'Model {model.__class__.__name__} must be of type {self._model_class.__class__.__name__}')
if not isinstance(model, RoutedModelAbstract):
raise ValueError(
f"model must be of type {RoutedModelAbstract.__class__.__name__}, not {model.__class__.__name__}")
self._model_instance = model
self._model_class = model.__class_
model.router_object = self
def set_model(self, model):
if isinstance(model, RoutedModelAbstract):
self.model_instance = model
else:
self.model_class = model.__class__
if not self._model_instance:
logging.debug(f'Creating new unsaved {self._model_class.__name__} from {self}')
self._model_instance = self.model_class(**({'router_object': self} | self))
return self
def set_router(self, router):
assert isinstance(router,
RouterAbstract), f"router must be of type {RouterAbstract.__class__.__name__}, not {router.__class__.__name__}"
self._router_instance = router
return self
def to_db_object(self, raise_on_keyerror=False) -> models.Model:
"""This returns a dict representation of the object"""
if raise_on_keyerror:
_data = {k: self.get(k, '') for k in model_to_dict(self._model_class).keys()}
else:
_errors = []
_data = {}
for k in model_to_dict(self._model_class):
try:
_data[k] = self[k]
except KeyError as e:
_errors.append(str(e))
if _errors:
raise KeyError(f'Could not convert {self.__class__.__name__} to DB object - missing keys: {_errors}')
return self._model_class(**_data)
@classmethod
def from_db_object(cls, db_object):
return cls(**model_to_dict(db_object))
@property
def router(self):
return self._router_instance
def __hash__(self):
return len(self.to_json)
def _filter_or(self, **kwargs):
for k, v in kwargs.items():
if self.get(k) == v:
return True
return False
def _filter_and(self, **kwargs):
for k, v in kwargs.items():
if self.get(k) != v:
return False
return True
def filtered(self, mode='or', raise_on_failure=False, **kwargs):
"""This returns objects filtered by some criteria
Return self if criterias match, else None
"""
assert mode in ('or', 'and'), f"mode must be 'or' or 'and', not {mode}"
if getattr(self, f'_filter_{mode}')(**kwargs):
return self
if raise_on_failure:
raise self.DoesNotExists(f'Object {self} does not match criteria {kwargs}')
return None
def __getattr__(self, name):
"""This makes a 'magic' trick"""
if name in self:
return self[name]
raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
def to_json(self, **dump_params):
"""This returns a dict representation of the object"""
if default_fun := dump_params.pop('default', None):
pass
else:
default_fun = lambda o: str(o)
return json.loads(json.dumps(self, default=default_fun, **dump_params))
@classmethod
def router_get(cls, *args, **kwargs):
    """Fetch objects from the router; subclasses return a RouterObjectCollection."""
    # Bug fix: `raise NotImplemented(...)` raises TypeError at runtime —
    # NotImplemented is a comparison sentinel, not an exception class.
    raise NotImplementedError(f"{cls.__name__} must implement router_get()")
def router_post(self):
    """Create this object on the router (abstract hook)."""
    # Bug fix: NotImplemented is not an exception; use NotImplementedError.
    raise NotImplementedError(f"{self.__class__.__name__} must implement router_post()")
def router_put(self, **kwargs):
    """Update this object on the router (abstract hook)."""
    # Docstring fixed: this is the update hook, not a delete (copy-paste).
    # Bug fix: NotImplemented is not an exception; use NotImplementedError.
    raise NotImplementedError(f"{self.__class__.__name__} must implement router_put()")
def router_delete(self):
    """Remove this object from the router (abstract hook)."""
    # Bug fix: NotImplemented is not an exception; use NotImplementedError.
    raise NotImplementedError(f"{self.__class__.__name__} must implement router_delete()")
class DNSStaticAbstract(RoutedModelAbstract):
    """Abstract interface for a static DNS entry managed on a router.

    Subclasses must provide the four read-only accessors below.
    (All raises fixed from ``NotImplemented`` — a sentinel, not an
    exception — to ``NotImplementedError``.)
    """

    class Meta:
        abstract = True

    @property
    def get_name(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_name')

    @property
    def get_regex(self):
        # Message fixed: it was copy-pasted as 'get_name'.
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_regex')

    @property
    def get_ip4(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_ip4')

    @property
    def get_ip6(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_ip6')
class LeaseAbstract(RoutedModelAbstract):
    """Abstract interface for an IP lease on a router.

    (All raises fixed from ``NotImplemented`` — a sentinel, not an
    exception — to ``NotImplementedError``.)
    """

    class Meta:
        abstract = True

    @property
    def get_mac(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_mac')

    @property
    def get_ip4(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_ip4')

    @property
    def get_ip6(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_ip6')

    @property
    def get_status(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_status')
class IPAddressAbstract(RoutedModelAbstract):
    """Abstract interface for an IP address entry (address pool).

    (Raises fixed from ``NotImplemented`` — a sentinel, not an
    exception — to ``NotImplementedError``.)
    """

    class Meta:
        abstract = True

    @property
    def get_address(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_address')

    @property
    def get_network(self):
        raise NotImplementedError(f'{self.__class__.__name__} must implement get_network')
class RouterAbstract:
    """Abstract base class for router backends: connection setup + CRUD hooks.

    All abstract hooks raised bare ``NotImplemented`` (a comparison
    sentinel, not an exception — raising it is a TypeError); fixed to
    ``NotImplementedError``. The messages in add/set/remove previously
    named the wrong methods (post/put/delete) and are corrected too.
    """

    def __init__(self, *args, **kwargs):
        # All construction arguments are forwarded to the subclass hook.
        self.initialize(*args, **kwargs)

    @property
    def api(self):
        raise NotImplementedError(f"{self.__class__.__name__} must implement api or init with api parameter")

    def initialize(self, *args, **kwargs):
        """This initializes the connection to the router."""
        raise NotImplementedError(f"{self.__class__.__name__} must implement initialize()")

    def get(self, **kwargs):
        """Read objects from the router."""
        raise NotImplementedError(f"{self.__class__.__name__} must implement get()")

    def add(self, **kwargs):
        """Create an object on the router."""
        raise NotImplementedError(f"{self.__class__.__name__} must implement add()")

    def set(self, **kwargs):
        """Update an object on the router."""
        raise NotImplementedError(f"{self.__class__.__name__} must implement set()")

    def remove(self, **kwargs):
        """Delete an object from the router."""
        raise NotImplementedError(f"{self.__class__.__name__} must implement remove()")

111
lib/task_decorator.py Normal file
View File

@@ -0,0 +1,111 @@
"""
Einfache Proxmox Task-Funktionen mit TaskLogger-Integration
"""
import logging
from lib.proxmox import Proxmox
from tasklogger.models import TaskFactory
def start_container_with_task(task_uuid: str, vmid: int, request=None) -> bool:
    """Start an LXC container, recording progress on the TaskLogger task."""
    task = TaskFactory(task_uuid=task_uuid, request=request)
    task.add_entry(f"Starting container {vmid}...")

    def _do_start():
        with Proxmox() as pm:
            return pm.lxc_post(f'{vmid}/status/start')

    # wrap_proxmox_function runs the call and monitors its UPID synchronously.
    task.wrap_proxmox_function(_do_start)
    task.add_entry(f"Container {vmid} started successfully")
    task.unset_as_current()
    return True
def stop_container_with_task(task_uuid: str, vmid: int, request=None) -> bool:
    """Stop an LXC container, recording progress on the TaskLogger task."""
    task = TaskFactory(task_uuid=task_uuid, request=request)
    task.add_entry(f"Stopping container {vmid}...")

    def _do_stop():
        with Proxmox() as pm:
            return pm.lxc_post(f'{vmid}/status/stop')

    # wrap_proxmox_function runs the call and monitors its UPID synchronously.
    task.wrap_proxmox_function(_do_stop)
    task.add_entry(f"Container {vmid} stopped successfully")
    task.unset_as_current()
    return True
def resize_container_disk_with_task(task_uuid: str, vmid: int, disk_size: int, request=None) -> bool:
    """Grow a container's rootfs to disk_size GB, recording task progress."""
    task = TaskFactory(task_uuid=task_uuid, request=request)
    task.add_entry(f"Resizing disk for container {vmid} to {disk_size}GB...")

    def _do_resize():
        with Proxmox() as pm:
            return pm.lxc_put(f'{vmid}/resize', disk='rootfs', size=f'{disk_size}G')

    # wrap_proxmox_function runs the call and monitors its UPID synchronously.
    task.wrap_proxmox_function(_do_resize)
    task.add_entry(f"Container {vmid} disk resized to {disk_size}GB successfully")
    task.unset_as_current()
    return True
def create_container_with_task(task_uuid: str, clone_container, request=None) -> bool:
    """Create a container; CloneContainer drives the tasklogger itself.

    Returns False (after logging) instead of propagating any exception.
    """
    try:
        clone_container.execute(task_uuid_override=task_uuid, request=request)
    except Exception as e:
        logging.exception(f"Container creation failed: {e}")
        return False
    return True
def delete_container_with_task(task_uuid: str, container, request=None) -> bool:
    """Delete a container and its Proxmox LXC, with TaskLogger monitoring.

    Returns True on success. Any failure is recorded on the task, the
    task status is set to 'error', and False is returned (no re-raise).
    The task is always released via unset_as_current().

    :param task_uuid: UUID identifying the TaskLogger task to attach to
    :param container: object exposing .name, .lxc.vmid and .delete(task=)
    :param request: optional HTTP request forwarded to the TaskFactory
    """
    task = TaskFactory(task_uuid=task_uuid, request=request)
    task.add_entry(f"Deleting container {container.name}...")
    try:
        # Delete Proxmox LXC (if needed)
        task.add_entry(f"Deleting Proxmox container {container.lxc.vmid}...")
        # Wrap the proxmox function - this handles UPID monitoring synchronously
        task.wrap_proxmox_function(container.delete, task=task)
        task.add_entry("Container deleted successfully!")
        task.status = 'completed'
        task.save()
        return True
    except Exception as e:
        # Any failure (including on the success-path logging/save above)
        # is captured here rather than propagated to the caller.
        task.add_entry(f"Error deleting container: {str(e)}")
        task.status = 'error'
        task.save()
        logging.exception(f"Container deletion failed: {e}")
        return False
    finally:
        # Always detach the task from the current context, success or not.
        task.unset_as_current()
def update_container_config_sync(vmid: int, task=None, **config_params) -> bool:
    """Apply container config changes (e.g. memory, cores) synchronously.

    No UPID/task monitoring is performed; the optional task only receives
    log entries mirroring the regular logging output.

    :param vmid: Proxmox container id
    :param task: optional TaskLogger task to append log entries to
    :param config_params: keyword config options passed straight to the API
    :returns: True on success, False when the API call failed.
    """
    try:
        with Proxmox() as pm:
            # Return value was previously bound to an unused local; dropped.
            pm.lxc_put(f'{vmid}/config', **config_params)
        logging.info(f"Updated container {vmid} config: {config_params}")
        if task:
            task.add_entry(f"Updated container {vmid} config: {config_params}")
        return True
    except Exception as e:
        logging.error(f"Failed to update container {vmid} config: {e}")
        if task:
            task.add_entry(f"Failed to update container {vmid} config: {e}")
        return False

44
lib/utils.py Normal file
View File

@@ -0,0 +1,44 @@
import time
import logging
import functools
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django_middleware_global_request import get_request
from django_proxmox_mikrotik import settings
def measure_time(func):
    """Decorator that logs a function's wall-clock runtime at DEBUG level."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        started = time.time()
        result = func(*args, **kwargs)
        elapsed = time.time() - started
        logging.debug(f"Funktion '{func.__name__}' wurde in {elapsed:.4f} Sekunden ausgeführt")
        return result
    return wrapper
def paginator(queryset, page: int = 1, per_page: int = 18, request=None):
    """Return one Django Paginator page for the queryset.

    ``per_page`` and ``page`` are looked up in request.GET, then
    request.POST, then the PAGINATOR_PER_PAGE setting / the call
    arguments as fallbacks.

    :param queryset: iterable/queryset to paginate
    :param page: default page number when the request carries none
    :param per_page: default page size when neither request nor settings do
    :param request: optional request; defaults to the middleware's global one
    """
    request = request or get_request()
    raw_per_page = request.GET.get(
        'per_page',
        request.POST.get('per_page', getattr(settings, 'PAGINATOR_PER_PAGE', per_page)))
    try:
        # Request values arrive as strings; a non-numeric ?per_page=... would
        # otherwise raise ValueError inside Paginator and 500 the view.
        per_page = int(raw_per_page)
    except (TypeError, ValueError):
        per_page = int(getattr(settings, 'PAGINATOR_PER_PAGE', per_page))
    pages = Paginator(queryset, per_page)  # renamed: don't shadow this function
    page = request.GET.get('page', request.POST.get('page', page))
    try:
        return pages.page(page)
    except PageNotAnInteger:
        return pages.page(1)
    except EmptyPage:
        return pages.page(pages.num_pages)
class PaginatedModel:
    # Thin convenience wrapper binding a model class to the paginator() helper.

    def __init__(self, model):
        # model: a Django model class whose default manager is queried
        self.model = model

    def paginate(self, domainfilter, page: int = 1, per_page: int = 18, request=None):
        """Filter the model's queryset by domainfilter and paginate the result.

        :param domainfilter: filter argument passed to Model.objects.filter()
            (presumably a Q object — TODO confirm against callers)
        :returns: whatever paginator() returns (a Django Page)
        """
        queryset = self.model.objects.filter(domainfilter)
        return paginator(queryset, page, per_page, request)