diff --git a/commands/clean-asset.py b/commands/clean-asset.py index 212381dcfbbc2e3b1394ec6ee9f3e6d849b45b97..825a35e507e90f35906309d4625cc39ad06f5750 100644 --- a/commands/clean-asset.py +++ b/commands/clean-asset.py @@ -24,7 +24,9 @@ class Command(BaseCommand): clean.add(obj.id) def clean_managed_object(self, object): - for o in Object.objects.filter(data__management__managed_object=object.id): + for o in Object.objects.filter( + data__match={"interface": "management", "attr": "managed_object", "value": object.id} + ): self.clean_obj(o) def clean_obj(self, obj): diff --git a/commands/inventory.py b/commands/inventory.py index 4fb4fc36cf7c5edccbceb34550cbc0a02b986192..9b3f5135dcfa2f8ac78d9bf02e9e06173192623d 100644 --- a/commands/inventory.py +++ b/commands/inventory.py @@ -29,7 +29,9 @@ class Command(BaseCommand): def handle_find_serial(self, serials): connect() for serial in serials: - for obj in Object.objects.filter(data__asset__serial=serial): + for obj in Object.objects.filter( + data__match={"interface": "asset", "attr": "serial", "value": serial} + ): self.print("@@@ Serial %s" % serial) self.dump_object(obj) diff --git a/fixes/fix_object_coordinates.py b/fixes/fix_object_coordinates.py index 6138bcc6557be63016620e136c58b5fb2173034a..fae83577067929368ba8c3d7b312e626103d5f05 100644 --- a/fixes/fix_object_coordinates.py +++ b/fixes/fix_object_coordinates.py @@ -10,6 +10,8 @@ from noc.inv.models.object import Object def fix(): - for d in Object._get_collection().find({"data.geopoint.x": {"$exists": True}}, {"_id": 1}): + for d in Object._get_collection().find( + {"data": {"$elemMatch": {"interface": "geopoint", "attr": "x"}}}, {"_id": 1} + ): o = Object.get_by_id(d["_id"]) o.save() diff --git a/inv/migrations/0020_object_data_scope.py b/inv/migrations/0020_object_data_scope.py new file mode 100644 index 0000000000000000000000000000000000000000..a7faec7a25d0c9fe5325820007307a869a108ac7 --- /dev/null +++ b/inv/migrations/0020_object_data_scope.py @@ -0,0 +1,35 @@ +# ---------------------------------------------------------------------- +# Migrate Object.data +# ---------------------------------------------------------------------- +# Copyright (C) 2007-2020 The NOC Project +# See LICENSE for details +# ---------------------------------------------------------------------- + +# Third-party modules +from pymongo import UpdateOne + +# NOC modules +from noc.core.migration.base import BaseMigration + + +class Migration(BaseMigration): + MAX_BULK_SIZE = 500 + + def migrate(self): + coll = self.mongo_db["noc.objects"] + bulk = [] + for doc in coll.find({}, no_cursor_timeout=True): + data = doc.get("data") + if not data or isinstance(data, list): + continue # No data or already converted + new_data = [] + for mi in data: + for attr, value in data[mi].items(): + new_data += [{"scope": "", "interface": mi, "attr": attr, "value": value}] + bulk += [UpdateOne({"_id": doc["_id"]}, {"$set": {"data": new_data}})] + if len(bulk) >= self.MAX_BULK_SIZE: + coll.bulk_write(bulk) + bulk = [] + # Write rest of data + if bulk: + coll.bulk_write(bulk) diff --git a/inv/models/modelinterface.py b/inv/models/modelinterface.py index 618255a9f64e60b939439666a635bf69281f443a..c92484c8e20c3fa62f74b6cb51f16d2decb920b5 100644 --- a/inv/models/modelinterface.py +++ b/inv/models/modelinterface.py @@ -9,6 +9,7 @@ import os from threading import Lock import operator +from typing import Optional # Third-party modules from mongoengine.document import Document, EmbeddedDocument @@ -114,10 +115,21 @@ class 
ModelInterface(Document): uuid = UUIDField(binary=True) _id_cache = cachetools.TTLCache(1000, 10) + _name_cache = cachetools.TTLCache(1000, 10) def __str__(self): return self.name + @classmethod + @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock) + def get_by_id(cls, id) -> Optional["ModelInterface"]: + return ModelInterface.objects.filter(id=id).first() + + @classmethod + @cachetools.cachedmethod(operator.attrgetter("_name_cache"), lock=lambda _: id_lock) + def get_by_name(cls, name: str) -> Optional["ModelInterface"]: + return ModelInterface.objects.filter(name=name).first() + def get_attr(self, name): for a in self.attrs: if a.name == name: diff --git a/inv/models/object.py b/inv/models/object.py index ea7d948866ec31544cdc425d63977011fdfcf87b..41258cd2a9ae70c6fb84628f89c93822deda79c2 100644 --- a/inv/models/object.py +++ b/inv/models/object.py @@ -10,16 +10,17 @@ import datetime import operator from threading import Lock from collections import namedtuple +from typing import Optional, Any, Dict, Union, List, Set, Tuple, Iterable # Third-party modules from mongoengine.document import Document, EmbeddedDocument from mongoengine.fields import ( StringField, - DictField, ListField, PointField, LongField, EmbeddedDocumentField, + DynamicField, ) from mongoengine import signals import cachetools @@ -59,6 +60,18 @@ class ObjectConnectionData(EmbeddedDocument): return self.name +class ObjectAttr(EmbeddedDocument): + interface = StringField() + attr = StringField() + value = DynamicField() + scope = StringField() + + def __str__(self): + if self.scope: + return "%s.%s@%s = %s" % (self.interface, self.attr, self.scope, self.value) + return "%s.%s = %s" % (self.interface, self.attr, self.value) + + @bi_sync @on_save @datastream @@ -82,14 +95,13 @@ class Object(Document): "data", "container", ("name", "container"), - ("model", "data.asset.serial"), - "data.management.managed_object", + ("data.interface", "data.attr", "data.value"), ], } name = StringField() model = PlainReferenceField(ObjectModel) - data = DictField() + data = ListField(EmbeddedDocumentField(ObjectAttr)) container = PlainReferenceField("self", required=False) comment = GridVCSField("object_comment") # Map @@ -112,12 +124,12 @@ class Object(Document): @classmethod @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock) - def get_by_id(cls, id): + def get_by_id(cls, id) -> Optional["Object"]: return Object.objects.filter(id=id).first() @classmethod @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock) - def get_by_bi_id(cls, id): + def get_by_bi_id(cls, id) -> Optional["Object"]: return Object.objects.filter(bi_id=id).first() def iter_changed_datastream(self, changed_fields=None): @@ -143,37 +155,35 @@ class Object(Document): def set_point(self): from noc.gis.map import map + # Reset previous data self.layer = None self.point = None - geo = self.data.get("geopoint") - if not geo: - return + # Get points + x, y, srid = self.get_data_tuple("geopoint", ("x", "y", "srid")) + if x is None or y is None: + return # No point data + # Get layer layer_code = self.model.get_data("geopoint", "layer") if not layer_code: return layer = Layer.get_by_code(layer_code) if not layer: return - x = geo.get("x") - y = geo.get("y") - srid = geo.get("srid") - if x and y: - self.layer = layer - self.point = map.get_db_point(x, y, srid=srid) + # Update actual data + self.layer = layer + self.point = map.get_db_point(x, y, srid=srid) def on_save(self): def get_coordless_objects(o): r =
{str(o.id)} for co in Object.objects.filter(container=o.id): - g = co.data.get("geopoint") - if g and g.get("x") and g.get("y"): - continue - else: + cx, cy = co.get_data_tuple("geopoint", ("x", "y")) + if cx is None and cy is None: r |= get_coordless_objects(co) return r - geo = self.data.get("geopoint") - if geo and geo.get("x") and geo.get("y"): + x, y = self.get_data_tuple("geopoint", ("x", "y")) + if x is not None and y is not None: # Rebuild connection layers for ct in self.REBUILD_CONNECTIONS: for c, _, _ in self.get_genderless_connections(ct): @@ -184,7 +194,7 @@ class Object(Document): mos = get_coordless_objects(self) if mos: ManagedObject.objects.filter(container__in=mos).update( - x=geo.get("x"), y=geo.get("y"), default_zoom=self.layer.default_zoom + x=x, y=y, default_zoom=self.layer.default_zoom ) if self._created: if self.container: @@ -213,7 +223,7 @@ class Object(Document): new_pop.update_pop_links() @cachetools.cached(_path_cache, key=lambda x: str(x.id), lock=id_lock) - def get_path(self): + def get_path(self) -> List[str]: """ Returns list of parent segment ids :return: @@ -222,36 +232,131 @@ class Object(Document): return self.container.get_path() + [self.id] return [self.id] - def get_data(self, interface, key): + def get_data(self, interface: str, key: str, scope: Optional[str] = None) -> Any: attr = ModelInterface.get_interface_attr(interface, key) if attr.is_const: # Lookup model return self.model.get_data(interface, key) - else: - v = self.data.get(interface, {}) - return v.get(key) + for item in self.data: + if item.interface == interface and item.attr == key: + if not scope or item.scope == scope: + return item.value + return None + + def get_data_dict( + self, interface: str, keys: Iterable, scope: Optional[str] = None + ) -> Dict[str, Any]: + """ + Get multiple keys from single interface. Returns dict with values for every given key. + If key is missed, return None value + + :param interface: + :param keys: Iterable contains key names + :param scope: + :return: + """ + kset = set(keys) + r = {k: None for k in kset} + for item in self.data: + if item.interface == interface and item.attr in kset: + if not scope or item.scope == scope: + r[item.attr] = item.value + return r + + def get_data_tuple( + self, interface: str, keys: Union[List, Tuple], scope: Optional[str] = None + ) -> Tuple[Any, ...]: + """ + Get multiple keys from single interface. Returns tuple with values for every given key. 
+ If key is missed, return None value - def set_data(self, interface, key, value): + :param interface: + :param keys: List or tuple with key names + :param scope: + :return: + """ + r = self.get_data_dict(interface, keys, scope) + return tuple(r.get(k) for k in keys) + + def get_effective_data(self) -> List[ObjectAttr]: + """ + Return effective object data, including the model's defaults + :return: + """ + seen: Set[Tuple[str, str, str]] = set() # (interface, attr, scope) + r: List[ObjectAttr] = [] + # Object attributes + for item in self.data: + k = (item.interface, item.attr, item.scope or "") + if k in seen: + continue + r += [item] + seen.add(k) + # Model attributes + for i in self.model.data: + for a in self.model.data[i]: + k = (i, a, "") + if k in seen: + continue + r += [ObjectAttr(interface=i, attr=a, scope="", value=self.model.data[i][a])] + seen.add(k) + # Sort according to interface + sorting_keys: Dict[str, str] = {} + for ni, i in enumerate(sorted(set(x[0] for x in seen))): + mi = ModelInterface.get_by_name(i) + if not mi: + continue + for na, a in enumerate(mi.attrs): + sorting_keys["%s.%s" % (i, a.name)] = "%06d.%06d" % (ni, na) + # Return sorted result + return list( + sorted( + r, + key=lambda oa: "%s.%s" + % (sorting_keys.get("%s.%s" % (oa.interface, oa.attr), "999999.999999"), oa.scope), + ) + ) + + def set_data(self, interface: str, key: str, value: Any, scope: Optional[str] = None) -> None: attr = ModelInterface.get_interface_attr(interface, key) if attr.is_const: raise ModelDataError("Cannot set read-only value") value = attr._clean(value) - # @todo: Check interface restrictions - if interface not in self.data: - self.data[interface] = {} - self.data[interface][key] = value - - def reset_data(self, interface, key): - attr = ModelInterface.get_interface_attr(interface, key) - if attr.is_const: + for item in self.data: + if item.interface == interface and item.attr == key: + if not scope or item.scope == scope: + item.value = value + break + else: + # Insert new item + self.data += [ + ObjectAttr(interface=interface, attr=attr.name, value=value, scope=scope or "") + ] + + def reset_data( + self, interface: str, key: Union[str, Iterable], scope: Optional[str] = None + ) -> None: + if isinstance(key, str): + kset = {key} + else: + kset = set(key) + v = [ModelInterface.get_interface_attr(interface, k).is_const for k in kset] + if any(v): raise ModelDataError("Cannot reset read-only value") - if interface in self.data and key in self.data[interface]: - del self.data[interface][key] + self.data = [ + item + for item in self.data + if item.interface != interface + or (scope and item.scope != scope) + or item.attr not in kset + ] def has_connection(self, name): return self.model.has_connection(name) - def get_p2p_connection(self, name): + def get_p2p_connection( + self, name: str + ) -> Tuple[Optional["ObjectConnection"], Optional["Object"], Optional[str]]: """ Get neighbor for p2p connection (s and mf types) Returns connection, remote object, remote connection or @@ -267,7 +372,9 @@ # Strange things happen return None, None, None - def get_genderless_connections(self, name): + def get_genderless_connections( + self, name: str + ) -> List[Tuple["ObjectConnection", "Object", str]]: r = [] for c in ObjectConnection.objects.filter( __raw__={"connection": {"$elemMatch": {"object": self.id, "name": name}}} @@ -277,7 +384,7 @@ r += [[c, x.object, x.name]] return r - def disconnect_p2p(self, name): + def disconnect_p2p(self, name:
str): """ Remove connection *name* """ @@ -286,7 +393,14 @@ class Object(Document): self.log("'%s' disconnected" % name, system="CORE", op="DISCONNECT") c.delete() - def connect_p2p(self, name, remote_object, remote_name, data, reconnect=False): + def connect_p2p( + self, + name: str, + remote_object: "Object", + remote_name: str, + data: Dict[str, Any], + reconnect: bool = False, + ) -> Optional["ObjectConnection"]: lc = self.model.get_model_connection(name) if lc is None: raise ConnectionError("Local connection not found: %s" % name) @@ -320,7 +434,7 @@ class Object(Document): if reconnect: if r_object.id == remote_object.id and r_name == remote_name: # Same connection exists - n_data = deep_merge(ec.data, data) + n_data = deep_merge(ec.data, data) # Merge ObjectConnection if n_data != ec.data: # Update data ec.data = n_data @@ -348,7 +462,13 @@ class Object(Document): return c def connect_genderless( - self, name, remote_object, remote_name, data=None, type=None, layer=None + self, + name: str, + remote_object: "Object", + remote_name: str, + data: Dict[str, Any] = None, + type: Optional[str] = None, + layer: Optional[Layer] = None, ): """ Connect two genderless connections @@ -388,7 +508,7 @@ class Object(Document): "%s:%s -> %s:%s" % (self, name, remote_object, remote_name), system="CORE", op="CONNECT" ) - def put_into(self, container): + def put_into(self, container: "Object"): """ Put object into container """ @@ -403,14 +523,12 @@ class Object(Document): # Connect to parent self.container = container.id if container else None # Reset previous rack position - if self.data.get("rackmount"): - for k in ("position", "side", "shift"): - if k in self.data["rackmount"]: - del self.data["rackmount"][k] + self.reset_data("rackmount", ("position", "side", "shift")) + # self.save() self.log("Insert into %s" % (container or "Root"), system="CORE", op="INSERT") - def get_content(self): + def get_content(self) -> "Object": """ Returns all items directly put into container """ @@ -421,7 +539,7 @@ class Object(Document): return ro.get_local_name_path() + [rn] return [] - def get_name_path(self): + def get_name_path(self) -> List[str]: """ Return list of container names """ @@ -458,7 +576,7 @@ class Object(Document): def get_log(self): return ObjectLog.objects.filter(object=self.id).order_by("ts") - def get_lost_and_found(self): + def get_lost_and_found(self) -> Optional["Object"]: m = ObjectModel.get_by_name("Lost&Found") c = self.container while c: @@ -483,7 +601,7 @@ class Object(Document): else: o.put_into(target) - def iter_connections(self, direction): + def iter_connections(self, direction: Optional[str]) -> Iterable[Tuple[str, "Object", str]]: """ Yields connections of specified direction as tuples of (name, remote_object, remote_name) @@ -546,7 +664,7 @@ class Object(Document): c.connection = left c.save() - def get_pop(self): + def get_pop(self) -> Optional["Object"]: """ Find enclosing PoP :returns: PoP instance or None @@ -558,7 +676,7 @@ class Object(Document): c = c.container return None - def get_coordinates_zoom(self): + def get_coordinates_zoom(self) -> Tuple[Optional[float], Optional[float], Optional[int]]: """ Get managed object's coordinates # @todo: Speedup? 
@@ -567,8 +685,7 @@ class Object(Document): c = self while c: if c.point and c.layer: - x = c.get_data("geopoint", "x") - y = c.get_data("geopoint", "y") + x, y = c.get_data_tuple("geopoint", ("x", "y")) zoom = c.layer.default_zoom or 11 return x, y, zoom if c.container: @@ -587,10 +704,12 @@ class Object(Document): """ if hasattr(mo, "id"): mo = mo.id - return cls.objects.filter(data__management__managed_object=mo) + return cls.objects.filter( + data__match={"interface": "management", "attr": "managed_object", "value": mo} + ) @classmethod - def get_by_path(cls, path, hints=None): + def get_by_path(cls, path: List[str], hints=None) -> Optional["Object"]: """ Get object by given path. :param path: List of names following to path @@ -608,7 +727,7 @@ class Object(Document): return Object.get_by_id(h) return current - def update_pop_links(self, delay=20): + def update_pop_links(self, delay: int = 20): call_later("noc.inv.util.pop_links.update_pop_links", delay, pop_id=self.id) @classmethod @@ -620,7 +739,7 @@ class Object(Document): if "container" in values and values["container"]: document._cache_container = values["container"] - def get_address_text(self): + def get_address_text(self) -> Optional[str]: """ Return first found address.text value upwards the path :return: Address text or None @@ -636,7 +755,7 @@ class Object(Document): break return None - def get_object_serials(self, chassis_only=True): + def get_object_serials(self, chassis_only: bool = True) -> List[str]: """ Gettint object serialNumber :param chassis_only: With serial numbers inner objects diff --git a/inv/models/objectmodel.py b/inv/models/objectmodel.py index 579240182fae250cb347896c9e4c37927de1d8ba..033df28f0a4aa570a98f31573ad8da2b665f2b63 100644 --- a/inv/models/objectmodel.py +++ b/inv/models/objectmodel.py @@ -10,6 +10,7 @@ import os from threading import Lock import operator import re +from typing import Optional, List, Tuple, Union # Third-party modules from mongoengine.document import Document, EmbeddedDocument @@ -152,15 +153,15 @@ class ObjectModel(Document): @classmethod @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock) - def get_by_id(cls, id): + def get_by_id(cls, id) -> Optional["ObjectModel"]: return ObjectModel.objects.filter(id=id).first() @classmethod @cachetools.cachedmethod(operator.attrgetter("_name_cache"), lock=lambda _: id_lock) - def get_by_name(cls, name): + def get_by_name(cls, name) -> Optional["ObjectModel"]: return ObjectModel.objects.filter(name=name).first() - def get_data(self, interface, key): + def get_data(self, interface: str, key: str): v = self.data.get(interface, {}) return v.get(key) @@ -170,14 +171,14 @@ class ObjectModel(Document): # Exclude all part numbers from unknown models self.clear_unknown_models() - def has_connection(self, name): + def has_connection(self, name: str) -> bool: if self.get_model_connection(name) is None: # Check twinax virtual connection return self.get_data("twinax", "twinax") and self.get_data("twinax", "alias") == name else: return True - def get_connection_proposals(self, name): + def get_connection_proposals(self, name: str) -> List[Tuple["ObjectModel", str]]: """ Return possible connections for connection name as (model id, connection name) @@ -192,14 +193,14 @@ class ObjectModel(Document): r += [(cc.model, cc.name)] return r - def get_model_connection(self, name): + def get_model_connection(self, name: str) -> Optional["ObjectModelConnection"]: for c in self.connections: if c.name == name or (c.internal_name and 
c.internal_name == name): return c return None @classmethod - def get_model(cls, vendor, part_no): + def get_model(cls, vendor: "Vendor", part_no: Union[List[str], str]) -> Optional["ObjectModel"]: """ Get ObjectModel by part part_no, Search order: @@ -218,7 +219,7 @@ class ObjectModel(Document): @classmethod @cachetools.cachedmethod(operator.attrgetter("_model_cache"), lock=lambda _: id_lock) - def _get_model(cls, vendor, part_no): + def _get_model(cls, vendor: "Vendor", part_no: str) -> Optional["ObjectModel"]: """ Get ObjectModel by part part_no, Search order: @@ -335,7 +336,7 @@ class ModelConnectionsCache(Document): collection.insert(nc) @classmethod - def update_for_model(cls, model): + def update_for_model(cls, model: "ObjectModel"): """ Update connection cache for object model :param model: ObjectModel instance diff --git a/inv/util/pop_links.py b/inv/util/pop_links.py index 901988e30128c4ff7f92d817a0b6efa21d88ee2f..1a87911da659373d6011691fbad26f2e9fc8576d 100644 --- a/inv/util/pop_links.py +++ b/inv/util/pop_links.py @@ -64,7 +64,13 @@ class LinkedPoP(object): {"_id": {"$in": list(r_ifaces)}}, {"_id": 0, "managed_object": 1} ) ) - for o in Object.objects.filter(data__management__managed_object__in=r_mos): + for o in Object.objects.filter( + data__match={ + "interface": "management", + "attr": "managed_object", + "value__in": list(r_mos), + } + ): pop = o.get_pop() if pop: linked.add(pop) diff --git a/sa/models/managedobject.py b/sa/models/managedobject.py index fc4f26869844be512412472628a91ecc3842b521..ecd99b80532e6d977db506bdb7736c65b6c0c2ae 100644 --- a/sa/models/managedobject.py +++ b/sa/models/managedobject.py @@ -90,7 +90,7 @@ from .objectstatus import ObjectStatus from .objectdata import ObjectData # Increase whenever new field added or removed -MANAGEDOBJECT_CACHE_VERSION = 23 +MANAGEDOBJECT_CACHE_VERSION = 24 CREDENTIAL_CACHE_VERSION = 2 Credentials = namedtuple( @@ -942,7 +942,11 @@ class ManagedObject(NOCModel): """ from noc.inv.models.object import Object - return list(Object.objects.filter(data__management__managed_object=self.id)) + return list( + Object.objects.filter( + data__match={"interface": "management", "attr": "managed_object", "value": self.id} + ) + ) def run_discovery(self, delta=0): """ diff --git a/scripts/build-pop-links.py b/scripts/build-pop-links.py deleted file mode 100755 index 8af5ff5c66dc0fbc08883fb70468b4d2dd70ecac..0000000000000000000000000000000000000000 --- a/scripts/build-pop-links.py +++ /dev/null @@ -1,100 +0,0 @@ -#!./bin/python -# ---------------------------------------------------------------------- -# Rebuild inter-pop links -# ---------------------------------------------------------------------- -# Copyright (C) 2007-2019 The NOC Project -# See LICENSE for details -# --------------------------------------------------------------------- - -# NOC modules -from noc.inv.models.object import Object -from noc.inv.models.objectconnection import ObjectConnection -from noc.inv.models.link import Link - -pop_map = {} # object -> PoP -mo_pop = {} # managed object id -> pop - - -def get_pop(o): - """ - Find PoP of the object. 
- Returns pop object or None - """ - pop = pop_map.get(o) - if pop: - return pop - if o.get_data("pop", "level"): - # PoP itself - pop_map[o] = o - return o - if not o.container: - pop_map[o] = None - return None - else: - parent = Object.objects.get(id=o.container) - pop = get_pop(parent) - pop_map[o] = pop - return pop - - -def load_managed_objects(): - print("Load managed objects") - for o in Object.objects.filter(data__management__managed_object__exists=True): - mo_pop[o.get_data("management", "managed_object")] = get_pop(o) - - -def build_links(): - print("Building links") - links = {} - for l in Link.objects.all(): - mos = set() - for i in l.interfaces: - mos.add(i.managed_object.id) - if len(mos) == 2: - o1 = mos.pop() - o2 = mos.pop() - pop1 = mo_pop.get(o1) - pop2 = mo_pop.get(o2) - if pop1 and pop2 and pop1 != pop2: - if pop1.id > pop2.id: - pop1, pop2 = pop2, pop1 - level = min(pop1.get_data("pop", "level"), pop2.get_data("pop", "level")) // 10 - if (pop1, pop2) not in links: - links[pop1, pop2] = {"level": level} - return links - - -def gen_db_links(): - print("Loading DB links") - for oc in ObjectConnection.objects.filter(type="pop_link"): - pops = [c.object for c in oc.connection] - pop1, pop2 = pops - if pop1.id > pop2.id: - pop1, pop2 = pop2, pop1 - yield oc, pop1, pop2, {"level": oc.data["level"]} - - -def update_links(): - links = build_links() - for oc, pop1, pop2, data in gen_db_links(): - level = data["level"] - if (pop1, pop2) in links: - if links[pop1, pop2]["level"] != level: - level = links[pop1, pop2]["level"] - print("Updating %s - %s level to %d" % (pop1, pop2, level)) - oc.data["level"] = level - oc.save() - del links[pop1, pop2] - else: - print("Unlinking %s - %s" % (pop1, pop2)) - oc.delete() - # New links - for pop1, pop2 in links: - level = links[pop1, pop2]["level"] - print("Linking %s - %s (level %d)" % (pop1, pop2, level)) - pop1.connect_genderless("links", pop2, "links", {"level": level}, type="pop_link") - - -if __name__ == "__main__": - load_managed_objects() - update_links() diff --git a/services/card/cards/monmap.py b/services/card/cards/monmap.py index 9e7d15e7d13809da053923191a644b563e726b3c..6699a829d99b44060f74bfe7eacbd50290fd47ed 100644 --- a/services/card/cards/monmap.py +++ b/services/card/cards/monmap.py @@ -1,7 +1,7 @@ # --------------------------------------------------------------------- # MonMap # --------------------------------------------------------------------- -# Copyright (C) 2007-2019 The NOC Project +# Copyright (C) 2007-2020 The NOC Project # See LICENSE for details # --------------------------------------------------------------------- @@ -79,21 +79,7 @@ class MonMapCard(BaseCard): return list(Layer.objects.filter(code__startswith="pop_")) def get_ajax_data(self, **kwargs): - def update_dict(d, s): - for k in s: - if k in d: - d[k] -= s[k] - else: - d[k] = s[k] - object_id = self.handler.get_argument("object_id") - # zoom = int(self.handler.get_argument("z")) - # west = float(self.handler.get_argument("w")) - # east = float(self.handler.get_argument("e")) - # north = float(self.handler.get_argument("n")) - # south = float(self.handler.get_argument("s")) - # ms = int(self.handler.get_argument("maintenance")) - # active_layers = [l for l in self.get_pop_layers() if l.min_zoom <= zoom <= l.max_zoom] if self.current_user.is_superuser: moss = ManagedObject.objects.filter(is_managed=True) else: @@ -103,11 +89,6 @@ class MonMapCard(BaseCard): objects = [] objects_status = {"error": [], "warning": [], "good": [], "maintenance": []} 
sss = {"error": {}, "warning": {}, "good": {}, "maintenance": {}} - # s_def = { - # "service": {}, - # "subscriber": {}, - # "interface": {} - # } services = defaultdict(list) try: object_root = Object.objects.filter(id=object_id).first() @@ -134,10 +115,16 @@ class MonMapCard(BaseCard): services_map = self.get_objects_summary_met(mo_ids) # Getting containers name and coordinates containers = { - str(o["_id"]): (o["name"], o["data"]) - for o in Object.objects.filter(data__geopoint__exists=True, id__in=con,) + str(o["_id"]): ( + o["name"], + { + "%s.%s" % (item["interface"], item["attr"]): item["value"] + for item in o.get("data", []) + }, + ) + for o in Object.objects.filter(data__match={"interface": "geopoint"}, id__in=con) .read_preference(ReadPreference.SECONDARY_PREFERRED) - .fields(id=1, name=1, data__geopoint__x=1, data__geopoint__y=1, data__address__text=1) + .fields(id=1, name=1, data=1) .as_pymongo() } # Main Loop. Get ManagedObject group by container @@ -145,11 +132,9 @@ class MonMapCard(BaseCard): moss.values_list("id", "name", "container").order_by("container"), key=lambda o: o[2] ): name, data = containers.get(container, ("", {"geopoint": {}})) - x = data["geopoint"].get("x") - y = data["geopoint"].get("y") - address = "" - if "address" in data: - address = data["address"].get("text", "") + x = data.get("geopoint.x") + y = data.get("geopoint.y") + address = data.get("address.text", "") ss = {"objects": [], "total": 0, "error": 0, "warning": 0, "good": 0, "maintenance": 0} for mo_id, mo_name, container in mol: # Status by alarm severity @@ -162,7 +147,6 @@ class MonMapCard(BaseCard): elif alarms.get(mo_id, 0) > 2000: status = "error" objects_status[status] += [mo_id] - # update_dict(sss[status], s_service["service"]) ss[status] += 1 ss["total"] += 1 services_ss = [ @@ -199,27 +183,14 @@ class MonMapCard(BaseCard): ) else: m_services, m_subscribers = ServiceSummary.get_direct_summary(objects_status[r]) - # update_dict(s_services["service"], m["serivce"]) - # if not object_root and r == "good": - # for s in s_services["service"]: - # if s in m["service"]: - # s_services["service"][s] -= m["service"][s] - # m = s_services profiles |= set(m_services) sss[r] = m_services - for r in sorted(sss, key=lambda k: ("error", "warning", "good", "maintenance").index(k)): - # for p in sss[r]: for p in profiles: services[p] += [(r, sss[r].get(p, None))] return { "objects": objects, - "summary": self.f_glyph_summary( - { - "service": services - # "subscriber": subscribers - } - ), + "summary": self.f_glyph_summary({"service": services}), } @staticmethod diff --git a/services/datasource/datasources/ch_container.py b/services/datasource/datasources/ch_container.py index 3fdbdf0aef89b5f3733bc256fb9ee450aacc4541..579d5b093e52fe3673d8c3773d3ca0b7720aca0a 100644 --- a/services/datasource/datasources/ch_container.py +++ b/services/datasource/datasources/ch_container.py @@ -26,11 +26,13 @@ class CHContainerDataSource(BaseDataSource): {"_id": 1, "bi_id": 1, "name": 1, "container": 1, "data.address.text": 1}, no_cursor_timeout=True, ): - data = obj.get("data", {}) + address = [ + a for a in obj["data"] if a and a["interface"] == "address" and a["attr"] == "text" + ] yield ( obj["bi_id"], obj["_id"], obj.get("name", ""), bi_hash(obj["container"]) if obj.get("container") else "", - data["address"].get("text", "") if data and "address" in data else "", + address[0] if address else "", ) diff --git a/services/discovery/jobs/box/asset.py b/services/discovery/jobs/box/asset.py index 
8314aae76f0057e4231af5fe80bee0b91c8a782b..34d86bce81a8a359157a7d7e583d9dc70b007f1c 100644 --- a/services/discovery/jobs/box/asset.py +++ b/services/discovery/jobs/box/asset.py @@ -12,14 +12,15 @@ import base64 from threading import Lock import operator import re +from typing import Optional, List, Dict, Set, Tuple, Iterable, Any, Union # Third-party modules import cachetools # NOC modules from noc.services.discovery.jobs.base import DiscoveryCheck -from noc.inv.models.objectmodel import ObjectModel -from noc.inv.models.object import Object +from noc.inv.models.objectmodel import ObjectModel, ConnectionRule +from noc.inv.models.object import Object, ObjectAttr from noc.inv.models.vendor import Vendor from noc.inv.models.unknownmodel import UnknownModel from noc.inv.models.modelmapping import ModelMapping @@ -41,19 +42,23 @@ class AssetCheck(DiscoveryCheck): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.unknown_part_no = {} # part_no -> list of variants - self.pn_description = {} # part_no -> Description - self.vendors = {} # code -> Vendor instance - self.objects = [] # [(type, object, context, serial)] - self.to_disconnect = ( - set() - ) # Save processed connection. [(in_connection, object, out_connection), ... ] - self.rule = defaultdict(list) # Connection rule. type -> [rule1, ruleN] + self.unknown_part_no: Dict[str, Set[str]] = {} # part_no -> list of variants + self.pn_description: Dict[str, str] = {} # part_no -> Description + self.vendors: Dict[str, Vendor] = {} # code -> Vendor instance + self.objects: List[ + Tuple[str, Union[Object, str], Dict[str, Union[int, str]], Optional[str]] + ] = [] # [(type, object, context, serial)] + self.to_disconnect: Set[ + Tuple[Object, str, Object, str] + ] = set() # Save processed connection. [(in_connection, object, out_connection), ... ] + self.rule: Dict[str, List[ConnectionRule]] = defaultdict( + list + ) # Connection rule. type -> [rule1, ruleN] self.rule_context = {} - self.ctx = {} - self.stack_member = {} # object -> stack member numbers - self.managed = set() # Object ids - self.unk_model = {} # name -> model + self.ctx: Dict[str, Union[int, str]] = {} + self.stack_member: Dict["Object", str] = {} # object -> stack member numbers + self.managed: Set[str] = set() # Object ids + self.unk_model: Dict[str, ObjectModel] = {} # name -> model self.lost_and_found = self.get_lost_and_found(self.object) def handler(self): @@ -85,15 +90,15 @@ class AssetCheck(DiscoveryCheck): def submit( self, - type, - part_no, - number=None, - builtin=False, - vendor=None, - revision=None, - serial=None, - mfg_date=None, - description=None, + type: str, + part_no: List[str], + number: Optional[str] = None, + builtin: bool = False, + vendor: Optional[str] = None, + revision: Optional[str] = None, + serial: Optional[str] = None, + mfg_date: Optional[str] = None, + description: Optional[str] = None, ): # Check the vendor and the serial are sane # OEM transceivers return binary trash often @@ -181,15 +186,17 @@ class AssetCheck(DiscoveryCheck): if scope: self.set_context(scope, number) # Find existing object or create new - o = Object.objects.filter(model=m.id, data__asset__serial=serial).first() + o: Optional["Object"] = Object.objects.filter( + model=m.id, data__match={"interface": "asset", "attr": "serial", "value": serial} + ).first() if not o: # Create new object self.logger.info("Creating new object. 
model='%s', serial='%s'", m.name, serial) - data = {"asset": {"serial": serial}} + data = [ObjectAttr(scope="", interface="asset", attr="serial", value=serial)] if revision: - data["asset"]["revision"] = revision + data += [ObjectAttr(scope="", interface="asset", attr="revision", value=revision)] if mfg_date: - data["asset"]["mfg_date"] = mfg_date + data += [ObjectAttr(scope="", interface="asset", attr="mfg_date", value=mfg_date)] if self.object.container: container = self.object.container.id else: @@ -263,7 +270,7 @@ class AssetCheck(DiscoveryCheck): if number and o.get_data("stack", "stackable"): self.stack_member[o] = number - def prepare_context(self, type, number): + def prepare_context(self, type: str, number: Optional[str]): self.set_context("N", number) if type and type in self.rule_context: scope, reset_scopes = self.rule_context[type] @@ -272,7 +279,7 @@ class AssetCheck(DiscoveryCheck): if reset_scopes: self.reset_context(reset_scopes) - def update_name(self, object): + def update_name(self, object: Object): n = self.get_name(object, self.object) if n and n != object.name: object.name = n @@ -285,7 +292,9 @@ class AssetCheck(DiscoveryCheck): op="CHANGE", ) - def iter_object(self, i, scope, value, target_type, fwd): + def iter_object( + self, i: int, scope: str, value: int, target_type: str, fwd: bool + ) -> Iterable[Tuple[str, Union[Object, str], Dict[str, Union[int, str]]]]: # Search backwards if not fwd: for j in range(i - 1, -1, -1): @@ -305,7 +314,7 @@ class AssetCheck(DiscoveryCheck): else: return - def expand_context(self, s, ctx): + def expand_context(self, s: str, ctx: Dict[str, int]) -> str: """ Replace values in context """ @@ -374,7 +383,7 @@ class AssetCheck(DiscoveryCheck): if found: break - def connect_p2p(self, o1, c1, o2, c2): + def connect_p2p(self, o1: Object, c1: str, o2: Object, c2: str): """ Create P2P connection o1:c1 - o2:c2 """ @@ -400,7 +409,7 @@ class AssetCheck(DiscoveryCheck): except ConnectionError as e: self.logger.error("Failed to connect: %s", e) - def connect_twinax(self, o1, c1, o2, c2): + def connect_twinax(self, o1: Object, c1: str, o2: Object, c2: str): """ Connect twinax object o1 and virtual connection c1 to o2:c2 """ @@ -458,7 +467,9 @@ class AssetCheck(DiscoveryCheck): "Unknown part number for %s: %s (%s)", platform, ", ".join(pns), description ) - def register_unknown_part_no(self, vendor, part_no, descripton): + def register_unknown_part_no( + self, vendor: "Vendor", part_no: Union[List[str], str], descripton: Optional[str] + ): """ Register missed part number """ @@ -471,7 +482,7 @@ class AssetCheck(DiscoveryCheck): self.unknown_part_no[p].add(pp) UnknownModel.mark_unknown(vendor.code[0], self.object, p, descripton) - def get_unknown_part_no(self): + def get_unknown_part_no(self) -> List[List[str]]: """ Get list of missed part number variants """ @@ -482,7 +493,7 @@ class AssetCheck(DiscoveryCheck): r += [n] return r - def get_vendor(self, v): + def get_vendor(self, v: Optional[str]) -> Optional["Vendor"]: """ Get vendor instance or None """ @@ -506,7 +517,7 @@ class AssetCheck(DiscoveryCheck): self.vendors[v] = None return None - def set_rule(self, rule): + def set_rule(self, rule: "ConnectionRule"): self.logger.debug("Setting connection rule '%s'", rule.name) # Compile context mappings self.rule_context = {} @@ -517,7 +528,7 @@ class AssetCheck(DiscoveryCheck): for r in rule.rules: self.rule[r.match_type] += [r] - def set_context(self, name, value): + def set_context(self, name: str, value: Optional[str]): self.ctx[name] = value n = 
"N%s" % name if n not in self.ctx: @@ -526,7 +537,7 @@ class AssetCheck(DiscoveryCheck): self.ctx[n] += 1 self.logger.debug("Set context %s = %s -> %s", name, value, str_dict(self.ctx)) - def reset_context(self, names): + def reset_context(self, names: List[str]): for n in names: if n in self.ctx: del self.ctx[n] @@ -540,7 +551,13 @@ class AssetCheck(DiscoveryCheck): Get all objects managed by managed object """ self.managed = set( - Object.objects.filter(data__management__managed_object=self.object.id).values_list("id") + Object.objects.filter( + data__match={ + "interface": "management", + "attr": "managed_object", + "value": self.object.id, + } + ).values_list("id") ) def check_management(self): @@ -560,7 +577,9 @@ class AssetCheck(DiscoveryCheck): op="CHANGE", ) - def resolve_object(self, name, m_c, t_object, t_c, serial): + def resolve_object( + self, name: str, m_c: str, t_object: Object, t_c: str, serial: str + ) -> Optional["Object"]: """ Resolve object type """ @@ -613,7 +632,11 @@ class AssetCheck(DiscoveryCheck): container = self.object.container.id else: container = self.lost_and_found - o = Object(model=model, data={"asset": {"serial": serial}}, container=container) + o = Object( + model=model, + data=[ObjectAttr(scope="", interface="asset", attr="serial", value=serial)], + container=container, + ) o.save() o.log( "Created by asset_discovery", @@ -623,7 +646,9 @@ class AssetCheck(DiscoveryCheck): ) return o - def get_model_map(self, vendor, part_no, serial): + def get_model_map( + self, vendor: str, part_no: Union[List[str], str], serial: Optional[str] + ) -> Optional["ObjectModel"]: """ Try to resolve using model map """ @@ -639,7 +664,7 @@ class AssetCheck(DiscoveryCheck): continue if mm.from_serial and mm.to_serial: if mm.from_serial <= serial and serial <= mm.to_serial: - return True + return mm.model else: self.logger.debug("Mapping %s %s %s to %s", vendor, part_no, serial, mm.model.name) return mm.model @@ -656,7 +681,7 @@ class AssetCheck(DiscoveryCheck): return None return lf.id - def generate_serial(self, model, number): + def generate_serial(self, model: ObjectModel, number: Optional[str]) -> str: """ Generate virtual serial number """ @@ -667,7 +692,7 @@ class AssetCheck(DiscoveryCheck): return "NOC%s" % smart_text(base64.b32encode(h.digest())[:7]) @staticmethod - def get_name(obj, managed_object=None): + def get_name(obj: Object, managed_object: Optional[Any] = None) -> str: """ Generate discovered object's name """ @@ -685,7 +710,7 @@ class AssetCheck(DiscoveryCheck): self.logger.info("Disconnect: %s:%s ->X<- %s:%s", o1, c1, c2, o2) self.disconnect_p2p(o1, c1, c2, o2) - def disconnect_p2p(self, o1, c1, c2, o2): + def disconnect_p2p(self, o1: "Object", c1: str, c2: str, o2: "Object"): """ Disconnect P2P connection o1:c1 - o2:c2 """ @@ -708,7 +733,7 @@ class AssetCheck(DiscoveryCheck): except ConnectionError as e: self.logger.error("Failed to disconnect: %s", e) - def clean_serial(self, model, number, serial): + def clean_serial(self, model: "ObjectModel", number: Optional[str], serial: Optional[str]): # Empty value if not serial or serial == "None": new_serial = self.generate_serial(model, number) diff --git a/services/web/apps/inv/inv/plugins/data.py b/services/web/apps/inv/inv/plugins/data.py index ffb4ab9c1e4dad29d57b72cc42b2097625b2c82b..2545d222513b8a3e03e8cc17050aead2212e0ff0 100644 --- a/services/web/apps/inv/inv/plugins/data.py +++ b/services/web/apps/inv/inv/plugins/data.py @@ -5,11 +5,13 @@ # See LICENSE for details # 
--------------------------------------------------------------------- +# Python modules +from typing import Dict, Tuple, Optional + # NOC modules from noc.inv.models.object import Object from noc.inv.models.objectmodel import ObjectModel from noc.inv.models.modelinterface import ModelInterface -from noc.lib.utils import deep_merge from noc.sa.interfaces.base import StringParameter, UnicodeParameter from .base import InvPlugin @@ -44,7 +46,7 @@ class DataPlugin(InvPlugin): }, ) - def get_data(self, request, o): + def get_data(self, request, o: Object): data = [] for k, v, d, is_const in [ ("Name", " | ".join(o.get_name_path()), "Inventory name", False), @@ -61,6 +63,7 @@ class DataPlugin(InvPlugin): "required": True, "is_const": is_const, "choices": None, + "scope": "", } if k == "Model": for rg in self.RGROUPS: @@ -71,20 +74,26 @@ class DataPlugin(InvPlugin): r["choices"] = [[str(x.id), x.name] for x in g] break data += [r] - d = deep_merge(o.model.data, o.data) - for i in d: - mi = ModelInterface.objects.filter(name=i).first() + # Build result + mi_values: Dict[str, Dict[str, Tuple[Optional[str], str]]] = {} + for item in o.get_effective_data(): + if item.interface not in mi_values: + mi_values[item.interface] = {} + mi_values[item.interface][item.attr] = (item.value, item.scope) + for i in mi_values: + mi = ModelInterface.get_by_name(i) if not mi: continue for a in mi.attrs: - v = d[i].get(a.name) - if v is None and a.is_const: + value, scope = mi_values[i].get(a.name, (None, "")) + if value is None and a.is_const: continue data += [ { "interface": i, "name": a.name, - "value": v, + "scope": scope, + "value": value, "type": a.type, "description": a.description, "required": a.required, diff --git a/services/web/apps/inv/inv/plugins/map.py b/services/web/apps/inv/inv/plugins/map.py index a9859489c6f105def272b2309f32179ef06a190d..584445cf756547823d8bb2257f7f575dee246886 100644 --- a/services/web/apps/inv/inv/plugins/map.py +++ b/services/web/apps/inv/inv/plugins/map.py @@ -11,7 +11,7 @@ from noc.gis.models.layer import Layer from noc.gis.models.layerusersettings import LayerUserSettings from noc.sa.models.managedobject import ManagedObject from noc.inv.models.objectmodel import ObjectModel -from noc.inv.models.object import Object +from noc.inv.models.object import Object, ObjectAttr from noc.sa.interfaces.base import ( StringParameter, FloatParameter, @@ -82,30 +82,26 @@ class MapPlugin(InvPlugin): def get_data(self, request, o): layers = [ { - "name": l.name, - "code": l.code, - "min_zoom": l.min_zoom, - "max_zoom": l.max_zoom, - "stroke_color": "#%06x" % l.stroke_color, - "fill_color": "#%06x" % l.fill_color, - "stroke_width": l.stroke_width, - "point_radius": l.point_radius, - "show_labels": l.show_labels, - "stroke_dashstyle": l.stroke_dashstyle, - "point_graphic": l.point_graphic, - "is_visible": LayerUserSettings.is_visible_by_user(request.user, l), + "name": layer.name, + "code": layer.code, + "min_zoom": layer.min_zoom, + "max_zoom": layer.max_zoom, + "stroke_color": "#%06x" % layer.stroke_color, + "fill_color": "#%06x" % layer.fill_color, + "stroke_width": layer.stroke_width, + "point_radius": layer.point_radius, + "show_labels": layer.show_labels, + "stroke_dashstyle": layer.stroke_dashstyle, + "point_graphic": layer.point_graphic, + "is_visible": LayerUserSettings.is_visible_by_user(request.user, layer), } - for l in Layer.objects.order_by("zindex") + for layer in Layer.objects.order_by("zindex") ] - srid = o.get_data("geopoint", "srid") - x = o.get_data("geopoint", "x") - y = 
o.get_data("geopoint", "y") + srid, x, y = o.get_data_tuple("geopoint", ("srid", "x", "y")) if x is None or y is None or not srid: p = self.get_parent(o) if p: - srid = p.get_data("geopoint", "srid") - x = p.get_data("geopoint", "x") - y = p.get_data("geopoint", "y") + srid, x, y = p.get_data_tuple("geopoint", ("srid", "x", "y")) # @todo: Coordinates transform # Feed result return { @@ -177,7 +173,9 @@ class MapPlugin(InvPlugin): d = {} # All models with geopoint interface - for mt in ObjectModel.objects.filter(data__geopoint__layer__exists=True): + for mt in ObjectModel.objects.filter( + data__match={"interface": "geopoint", "attr": "layer"} + ): parts = mt.name.split(" | ") m = d for p in parts[:-1]: @@ -220,7 +218,11 @@ class MapPlugin(InvPlugin): name=name, model=model, container=container, - data={"geopoint": {"srid": srid, "x": x, "y": y}}, + data=[ + ObjectAttr(scope="", interface="geopoint", attr="srid", value=srid), + ObjectAttr(scope="", interface="geopoint", attr="x", value=x), + ObjectAttr(scope="", interface="geopoint", attr="y", value=y), + ], ) o.save() return {"id": str(o.id)} diff --git a/services/web/apps/inv/reportobjectsserial/views.py b/services/web/apps/inv/reportobjectsserial/views.py index 1794b6d69c9811e037cc02edc9454e0ca4a785e9..d10f07ae008a26e6b516f555318945f3127a74e8 100644 --- a/services/web/apps/inv/reportobjectsserial/views.py +++ b/services/web/apps/inv/reportobjectsserial/views.py @@ -52,7 +52,7 @@ class ReportFilterApplication(SimpleReport): for mo in mos_list: q = Object._get_collection().count_documents( - {"data.management.managed_object": {"$in": [mo.id]}} + {"data": {"$elemMatch": {"attr": "managed_object", "value": {"$in": [mo.id]}}}} ) if q == 0: data += [ @@ -69,7 +69,16 @@ class ReportFilterApplication(SimpleReport): ] else: for x in Object._get_collection().find( - {"data.management.managed_object": {"$in": [mo.id]}} + { + "data": { + "$elemMatch": { + "interface": "management", + "attr": "managed_object", + "value": {"$in": [mo.id]}, + } + } + }, + {"data": {"$elemMatch": {"interface": "asset", "attr": "serial"}}, "name": 1}, ): data += [ [ @@ -79,7 +88,7 @@ class ReportFilterApplication(SimpleReport): mo.platform.full_name if mo.platform else None, mo.get_attr("HW version") or None, mo.version.version if mo.version else None, - x["data"]["asset"]["serial"], + x["data"][0]["value"] if x["data"] else "", ] ] diff --git a/ui/web/inv/inv/plugins/data/DataPanel.js b/ui/web/inv/inv/plugins/data/DataPanel.js index 76a261220b8fc48e4eab3fc885a839efc17e0923..01c374b5693c0e0a2ef12e49995ca933c08c454a 100644 --- a/ui/web/inv/inv/plugins/data/DataPanel.js +++ b/ui/web/inv/inv/plugins/data/DataPanel.js @@ -1,7 +1,7 @@ //--------------------------------------------------------------------- // inv.inv LAG Panel //--------------------------------------------------------------------- -// Copyright (C) 2007-2012 The NOC Project +// Copyright (C) 2007-2020 The NOC Project // See LICENSE for details //--------------------------------------------------------------------- console.debug("Defining NOC.inv.inv.plugins.data.DataPanel"); @@ -44,6 +44,10 @@ Ext.define("NOC.inv.inv.plugins.data.DataPanel", { text: __("Description"), dataIndex: "description" }, + { + scope: __("Scope"), + dataIndex: "scope" + }, { text: __("Type"), dataIndex: "type"