style: apply black and isort

Gabriel Ferreira 2023-11-19 20:07:19 -03:00
parent af98671fbe
commit 8f6a3413a4
68 changed files with 7848 additions and 4299 deletions
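
The reformatting in this commit can be reproduced with the Python APIs that black and isort expose. The sketch below is illustrative rather than the project's actual tooling invocation: the 100-column line length and the black-compatible isort profile are assumptions inferred from where the reformatted lines wrap in this diff, and ns3/__init__.py is just one hypothetical target out of the 68 changed files.

import pathlib

import black
import isort


def reformat(path: str) -> None:
    # Read the file, sort its imports, then let black normalize quoting,
    # wrapping, and trailing commas; settings below are assumed, not taken
    # from the repository configuration.
    source = pathlib.Path(path).read_text(encoding="utf-8")
    sorted_source = isort.code(source, profile="black", line_length=100)
    formatted = black.format_str(sorted_source, mode=black.Mode(line_length=100))
    pathlib.Path(path).write_text(formatted, encoding="utf-8")


if __name__ == "__main__":
    reformat("ns3/__init__.py")  # hypothetical path; the commit touches 68 files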


@ -1,10 +1,10 @@
import builtins
from functools import lru_cache
import glob
import os.path
import re
import sys
import sysconfig
import re
from functools import lru_cache
DEFAULT_INCLUDE_DIR = sysconfig.get_config_var("INCLUDEDIR")
DEFAULT_LIB_DIR = sysconfig.get_config_var("LIBDIR")
@ -14,7 +14,7 @@ def find_ns3_lock() -> str:
# Get the absolute path to this file
path_to_this_init_file = os.path.dirname(os.path.abspath(__file__))
path_to_lock = path_to_this_init_file
lock_file = (".lock-ns3_%s_build" % sys.platform)
lock_file = ".lock-ns3_%s_build" % sys.platform
# Move upwards until we reach the directory with the ns3 script
prev_path = None
@ -32,15 +32,17 @@ def find_ns3_lock() -> str:
return path_to_lock
SYSTEM_LIBRARY_DIRECTORIES = (DEFAULT_LIB_DIR,
os.path.dirname(DEFAULT_LIB_DIR),
"/usr/lib64",
"/usr/lib",
)
DYNAMIC_LIBRARY_EXTENSIONS = {"linux": "so",
"win32": "dll",
"darwin": "dylib",
}
SYSTEM_LIBRARY_DIRECTORIES = (
DEFAULT_LIB_DIR,
os.path.dirname(DEFAULT_LIB_DIR),
"/usr/lib64",
"/usr/lib",
)
DYNAMIC_LIBRARY_EXTENSIONS = {
"linux": "so",
"win32": "dll",
"darwin": "dylib",
}
LIBRARY_EXTENSION = DYNAMIC_LIBRARY_EXTENSIONS[sys.platform]
@ -76,8 +78,9 @@ def _search_libraries() -> dict:
library_search_paths += [os.path.dirname(library_search_paths[-1])]
# Filter unique search paths and those that are not part of system directories
library_search_paths = list(filter(lambda x: x not in SYSTEM_LIBRARY_DIRECTORIES,
set(library_search_paths)))
library_search_paths = list(
filter(lambda x: x not in SYSTEM_LIBRARY_DIRECTORIES, set(library_search_paths))
)
# Search for the core library in the search paths
libraries = []
@ -88,7 +91,9 @@ def _search_libraries() -> dict:
# Search system library directories (too slow for recursive search)
for search_path in SYSTEM_LIBRARY_DIRECTORIES:
if os.path.exists(search_path):
libraries += glob.glob("%s/**/*.%s*" % (search_path, LIBRARY_EXTENSION), recursive=False)
libraries += glob.glob(
"%s/**/*.%s*" % (search_path, LIBRARY_EXTENSION), recursive=False
)
libraries += glob.glob("%s/*.%s*" % (search_path, LIBRARY_EXTENSION), recursive=False)
del search_path, library_search_paths
@ -106,7 +111,7 @@ def _search_libraries() -> dict:
library_map[library_infix].add(library)
# Replace sets with lists
for (key, values) in library_map.items():
for key, values in library_map.items():
library_map[key] = list(values)
return library_map
@ -128,7 +133,7 @@ LIBRARY_AND_DEFINES = {
"libxml2": ["HAVE_LIBXML2"],
"libsqlite3": ["HAVE_SQLITE3"],
"openflow": ["NS3_OPENFLOW", "ENABLE_OPENFLOW"],
"click": ["NS3_CLICK"]
"click": ["NS3_CLICK"],
}
@ -137,11 +142,11 @@ def add_library_defines(library_name: str):
defines = ""
if len(has_defines):
for define in LIBRARY_AND_DEFINES[has_defines[0]]:
defines += (f"""
defines += f"""
#ifndef {define}
#define {define} 1
#endif
""")
"""
return defines
@ -156,7 +161,9 @@ def extract_linked_libraries(library_name: str, prefix: str) -> tuple:
# First discover which 3rd-party libraries are used by the current module
try:
with open(os.path.abspath(library_path), "rb") as f:
linked_libs = re.findall(b"\x00(lib.*?.%b)" % LIBRARY_EXTENSION.encode("utf-8"), f.read())
linked_libs = re.findall(
b"\x00(lib.*?.%b)" % LIBRARY_EXTENSION.encode("utf-8"), f.read()
)
except Exception as e:
print(f"Failed to extract libraries used by {library_path} with exception:{e}")
exit(-1)
@ -181,7 +188,8 @@ def extract_library_include_dirs(library_name: str, prefix: str) -> tuple:
# Raise error in case the library can't be found
if len(linked_library_path) == 0:
raise Exception(
f"Failed to find {linked_library}. Make sure its library directory is in LD_LIBRARY_PATH.")
f"Failed to find {linked_library}. Make sure its library directory is in LD_LIBRARY_PATH."
)
# Get path with the shortest length
linked_library_path = sorted(linked_library_path, key=lambda x: len(x))[0]
@ -199,7 +207,9 @@ def extract_library_include_dirs(library_name: str, prefix: str) -> tuple:
linked_libs_include_dirs.add(system_include_dir)
# If system_include_dir/library_name exists, we add it too
linked_library_name = linked_library.replace("lib", "").replace("." + LIBRARY_EXTENSION, "")
linked_library_name = linked_library.replace("lib", "").replace(
"." + LIBRARY_EXTENSION, ""
)
if os.path.exists(os.path.join(system_include_dir, linked_library_name)):
linked_libs_include_dirs.add(os.path.join(system_include_dir, linked_library_name))
@ -230,19 +240,26 @@ def find_ns3_from_lock_file(lock_file: str) -> (str, list, str):
suffix = "-" + values["BUILD_PROFILE"] if values["BUILD_PROFILE"] != "release" else ""
modules = [module.replace("ns3-", "") for module in values["NS3_ENABLED_MODULES"]]
prefix = values["out_dir"]
libraries = {os.path.splitext(os.path.basename(x))[0]: x for x in os.listdir(os.path.join(prefix, "lib"))}
libraries = {
os.path.splitext(os.path.basename(x))[0]: x for x in os.listdir(os.path.join(prefix, "lib"))
}
version = values["VERSION"]
# Filter out test libraries and incorrect versions
def filter_in_matching_ns3_libraries(libraries_to_filter: dict,
modules_to_filter: list,
version: str,
suffix: str,) -> dict:
def filter_in_matching_ns3_libraries(
libraries_to_filter: dict,
modules_to_filter: list,
version: str,
suffix: str,
) -> dict:
suffix = [suffix[1:]] if len(suffix) > 1 else []
filtered_in_modules = []
for module in modules_to_filter:
filtered_in_modules += list(filter(lambda x: "-".join([version, module, *suffix]) in x,
libraries_to_filter.keys()))
filtered_in_modules += list(
filter(
lambda x: "-".join([version, module, *suffix]) in x, libraries_to_filter.keys()
)
)
for library in list(libraries_to_filter.keys()):
if library not in filtered_in_modules:
libraries_to_filter.pop(library)
@ -255,9 +272,9 @@ def find_ns3_from_lock_file(lock_file: str) -> (str, list, str):
for module in modules:
library_name = f"libns{version}-{module}{suffix}"
if library_name not in libraries:
raise Exception(f"Missing library {library_name}\n",
"Build all modules with './ns3 build'"
)
raise Exception(
f"Missing library {library_name}\n", "Build all modules with './ns3 build'"
)
libraries_to_load.append(libraries[library_name])
return prefix, libraries_to_load, version
@ -276,7 +293,14 @@ def filter_module_name(library: str) -> str:
components.pop(0)
# Drop build profile suffix and test libraries
if components[-1] in ["debug", "default", "optimized", "release", "relwithdebinfo", "minsizerel"]:
if components[-1] in [
"debug",
"default",
"optimized",
"release",
"relwithdebinfo",
"minsizerel",
]:
components.pop(-1)
return "-".join(components)
@ -331,10 +355,14 @@ def find_ns3_from_search() -> (str, list, str):
# Filter out older ns-3 libraries
for module in list(modules_to_filter):
# Filter duplicates of modules, while excluding test libraries
conflicting_libraries = list(filter(lambda x: module == filter_module_name(x), libraries_to_filter))
conflicting_libraries = list(
filter(lambda x: module == filter_module_name(x), libraries_to_filter)
)
# Extract versions from conflicting libraries
conflicting_libraries_versions = list(map(lambda x: extract_version(x, module), conflicting_libraries))
conflicting_libraries_versions = list(
map(lambda x: extract_version(x, module), conflicting_libraries)
)
# Get the newest version found for that library
newest_version = get_newest_version(conflicting_libraries_versions)
@ -345,7 +373,9 @@ def find_ns3_from_search() -> (str, list, str):
else:
newest_version_found = get_newest_version([newest_version, newest_version_found])
if newest_version != newest_version_found:
raise Exception(f"Incompatible versions of the ns-3 module '{module}' were found: {newest_version} != {newest_version_found}.")
raise Exception(
f"Incompatible versions of the ns-3 module '{module}' were found: {newest_version} != {newest_version_found}."
)
for conflicting_library in list(conflicting_libraries):
if "-".join([newest_version, module]) not in conflicting_library:
@ -353,7 +383,9 @@ def find_ns3_from_search() -> (str, list, str):
conflicting_libraries.remove(conflicting_library)
if len(conflicting_libraries) > 1:
raise Exception(f"There are multiple build profiles for module '{module}'.\nDelete one to continue: {', '.join(conflicting_libraries)}")
raise Exception(
f"There are multiple build profiles for module '{module}'.\nDelete one to continue: {', '.join(conflicting_libraries)}"
)
return libraries_to_filter, newest_version_found
@ -379,7 +411,9 @@ def load_modules():
libraries = list(map(lambda x: os.path.basename(x), libraries))
for ns3_library in libraries:
_, _, linked_libraries = extract_linked_libraries(ns3_library, prefix)
linked_libraries = list(filter(lambda x: "libns3" in x and ns3_library not in x, linked_libraries))
linked_libraries = list(
filter(lambda x: "libns3" in x and ns3_library not in x, linked_libraries)
)
linked_libraries = list(map(lambda x: os.path.basename(x), linked_libraries))
module_dependencies[os.path.basename(ns3_library)] = linked_libraries
@ -396,19 +430,27 @@ def load_modules():
modules.append(pending_module)
return modules
def dependency_order(module_dependencies, pending_modules, current_modules, step_number=0, steps={}):
def dependency_order(
module_dependencies, pending_modules, current_modules, step_number=0, steps={}
):
if len(pending_modules) == 0:
return steps
if step_number not in steps:
steps[step_number] = []
for module in modules_that_can_be_loaded(module_dependencies, pending_modules, current_modules):
for module in modules_that_can_be_loaded(
module_dependencies, pending_modules, current_modules
):
steps[step_number].append(module)
pending_modules.remove(module)
current_modules.append(module)
return dependency_order(module_dependencies, pending_modules, current_modules, step_number + 1, steps)
return dependency_order(
module_dependencies, pending_modules, current_modules, step_number + 1, steps
)
sorted_libraries = []
for step in dependency_order(module_dependencies, list(module_dependencies.keys()), [], 0).values():
for step in dependency_order(
module_dependencies, list(module_dependencies.keys()), [], 0
).values():
sorted_libraries.extend(step)
return sorted_libraries
@ -433,7 +475,8 @@ def load_modules():
# Register Ptr<> as a smart pointer
import libcppyy
libcppyy.AddSmartPtrType('Ptr')
libcppyy.AddSmartPtrType("Ptr")
# Import ns-3 libraries
for variant in ["lib", "lib64"]:
@ -471,7 +514,8 @@ def load_modules():
setattr(cppyy.gbl.ns3, module.replace("-", "_"), cppyy.gbl.ns3)
# Set up a few tricks
cppyy.cppdef("""
cppyy.cppdef(
"""
using namespace ns3;
bool Time_ge(Time& a, Time& b){ return a >= b;}
bool Time_eq(Time& a, Time& b){ return a == b;}
@ -479,7 +523,8 @@ def load_modules():
bool Time_le(Time& a, Time& b){ return a <= b;}
bool Time_gt(Time& a, Time& b){ return a > b;}
bool Time_lt(Time& a, Time& b){ return a < b;}
""")
"""
)
cppyy.gbl.ns3.Time.__ge__ = cppyy.gbl.Time_ge
cppyy.gbl.ns3.Time.__eq__ = cppyy.gbl.Time_eq
cppyy.gbl.ns3.Time.__ne__ = cppyy.gbl.Time_ne
@ -500,7 +545,8 @@ def load_modules():
cppyy.gbl.ns3.Node.__del__ = Node_del
cppyy.cppdef("""
cppyy.cppdef(
"""
using namespace ns3;
std::tuple<bool, TypeId> LookupByNameFailSafe(std::string name)
{
@ -508,44 +554,49 @@ def load_modules():
bool ok = TypeId::LookupByNameFailSafe(name, &id);
return std::make_tuple(ok, id);
}
""")
"""
)
setattr(cppyy.gbl.ns3, "LookupByNameFailSafe", cppyy.gbl.LookupByNameFailSafe)
def CreateObject(className):
try:
try:
func = "CreateObject%s" % re.sub('[<|>]', '_', className)
func = "CreateObject%s" % re.sub("[<|>]", "_", className)
return getattr(cppyy.gbl, func)()
except AttributeError:
pass
try:
func = "Create%s" % re.sub('[<|>]', '_', className)
func = "Create%s" % re.sub("[<|>]", "_", className)
return getattr(cppyy.gbl, func)()
except AttributeError:
pass
raise AttributeError
except AttributeError:
try:
func = "CreateObject%s" % re.sub('[<|>]', '_', className)
cppyy.cppdef("""
func = "CreateObject%s" % re.sub("[<|>]", "_", className)
cppyy.cppdef(
"""
using namespace ns3;
Ptr<%s> %s(){
Ptr<%s> object = CreateObject<%s>();
return object;
}
""" % (className, func, className, className)
)
"""
% (className, func, className, className)
)
except Exception as e:
try:
func = "Create%s" % re.sub('[<|>]', '_', className)
cppyy.cppdef("""
func = "Create%s" % re.sub("[<|>]", "_", className)
cppyy.cppdef(
"""
using namespace ns3;
%s %s(){
%s object = %s();
return object;
}
""" % (className, func, className, className)
)
"""
% (className, func, className, className)
)
except Exception as e:
exit(-1)
return getattr(cppyy.gbl, func)()
@ -574,10 +625,12 @@ def load_modules():
{
return parentPtr->GetObject<%s>();
}
""" % (aggregatedType, aggregatedType, aggregatedType, aggregatedType)
"""
% (aggregatedType, aggregatedType, aggregatedType, aggregatedType)
)
return cppyy.gbl.getAggregatedObject(
parentObject, aggregatedObject if aggregatedIsClass else aggregatedObject.__class__
)
return cppyy.gbl.getAggregatedObject(parentObject,
aggregatedObject if aggregatedIsClass else aggregatedObject.__class__)
setattr(cppyy.gbl.ns3, "GetObject", GetObject)
return cppyy.gbl.ns3
@ -585,4 +638,4 @@ def load_modules():
# Load all modules and make them available via a built-in
ns = load_modules() # can be imported via 'from ns import ns'
builtins.__dict__['ns'] = ns # or be made widely available with 'from ns import *'
builtins.__dict__["ns"] = ns # or be made widely available with 'from ns import *'


@ -8,4 +8,4 @@ for variant in ["lib", "lib64"]:
continue
for lib in os.listdir(lib_dir):
if "libns3" in lib:
print(f"--exclude {lib}", end=' ')
print(f"--exclude {lib}", end=" ")


@ -4,7 +4,8 @@ import sys
try:
import ns3.ns
sys.modules['ns'] = ns3.ns
sys.modules["ns"] = ns3.ns
except ModuleNotFoundError as e:
print("Install the ns3 package with pip install ns3.", file=sys.stderr)
exit(-1)


@ -8,4 +8,4 @@ except ModuleNotFoundError as e:
print("Install the ns3 package with pip install ns3.", file=sys.stderr)
exit(-1)
from ns3.visualizer import start, register_plugin, set_bounds, add_initialization_hook
from ns3.visualizer import add_initialization_hook, register_plugin, set_bounds, start


@ -7,35 +7,41 @@
# ...
# </pages>
import pickle
import os
import codecs
import os
import pickle
def dump_pickles(out, dirname, filename, path):
with open(os.path.join(dirname, filename), 'r', encoding='utf-8') as f:
with open(os.path.join(dirname, filename), "r", encoding="utf-8") as f:
data = pickle.load(f)
with codecs.open(data['current_page_name'] + '.frag', mode='w', encoding='utf-8') as fragment_file:
fragment_file.write(data['body'])
with codecs.open(
data["current_page_name"] + ".frag", mode="w", encoding="utf-8"
) as fragment_file:
fragment_file.write(data["body"])
out.write(' <page url="%s">\n' % path)
out.write(' <fragment>%s.frag</fragment>\n' % data['current_page_name'])
if data['prev'] is not None:
out.write(' <prev url="%s">%s</prev>\n' %
(os.path.normpath(os.path.join(path, data['prev']['link'])),
data['prev']['title']))
if data['next'] is not None:
out.write(' <next url="%s">%s</next>\n' %
(os.path.normpath(os.path.join(path, data['next']['link'])),
data['next']['title']))
out.write(' </page>\n')
out.write(" <fragment>%s.frag</fragment>\n" % data["current_page_name"])
if data["prev"] is not None:
out.write(
' <prev url="%s">%s</prev>\n'
% (os.path.normpath(os.path.join(path, data["prev"]["link"])), data["prev"]["title"])
)
if data["next"] is not None:
out.write(
' <next url="%s">%s</next>\n'
% (os.path.normpath(os.path.join(path, data["next"]["link"])), data["next"]["title"])
)
out.write(" </page>\n")
if data['next'] is not None:
next_path = os.path.normpath(os.path.join(path, data['next']['link']))
next_filename = os.path.basename(next_path) + '.fpickle'
if data["next"] is not None:
next_path = os.path.normpath(os.path.join(path, data["next"]["link"]))
next_filename = os.path.basename(next_path) + ".fpickle"
dump_pickles(out, dirname, next_filename, next_path)
import sys
sys.stdout.write('<pages>\n')
dump_pickles(sys.stdout, os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1]), '/')
sys.stdout.write('</pages>')
sys.stdout.write("<pages>\n")
dump_pickles(sys.stdout, os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1]), "/")
sys.stdout.write("</pages>")


@ -11,206 +11,203 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'ns-3'
copyright = u'2015, ns-3 project'
project = "ns-3"
copyright = "2015, ns-3 project"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = 'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'ns3_html_theme'
html_theme = "ns3_html_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../..']
html_theme_path = ["../.."]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Contributing'
html_title = "Contributing"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y %H:%M'
html_last_updated_fmt = "%b %d, %Y %H:%M"
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'
htmlhelp_basename = "ns-3doc"
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'ns-3-contributing.tex', u'Contributing to ns-3',
u'ns-3 project', 'manual'),
("index", "ns-3-contributing.tex", "Contributing to ns-3", "ns-3 project", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = '../../ns3_html_theme/static/ns-3.png'
latex_logo = "../../ns3_html_theme/static/ns-3.png"
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
latex_preamble = '\\usepackage{amssymb}'
latex_preamble = "\\usepackage{amssymb}"
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-contributing', u'Contributing to ns-3',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-contributing", "Contributing to ns-3", ["ns-3 project"], 1)]


@ -11,206 +11,203 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'ns-3'
copyright = u'2018, ns-3 project'
project = "ns-3"
copyright = "2018, ns-3 project"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = 'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'ns3_html_theme'
html_theme = "ns3_html_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../..']
html_theme_path = ["../.."]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Installation guide'
html_title = "Installation guide"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y %H:%M'
html_last_updated_fmt = "%b %d, %Y %H:%M"
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'
htmlhelp_basename = "ns-3doc"
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'ns-3-installation.tex', u'ns-3 Installation Guide',
u'ns-3 project', 'manual'),
("index", "ns-3-installation.tex", "ns-3 Installation Guide", "ns-3 project", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = '../../ns3_html_theme/static/ns-3.png'
latex_logo = "../../ns3_html_theme/static/ns-3.png"
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
latex_preamble = '\\usepackage{amssymb}'
latex_preamble = "\\usepackage{amssymb}"
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-installation', u'ns-3 Installation',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-installation", "ns-3 Installation", ["ns-3 project"], 1)]


@ -20,21 +20,22 @@
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sys, os
import os
import sys
# To change default code-block format in Latex to footnotesize (8pt)
# Tip from https://stackoverflow.com/questions/9899283/how-do-you-change-the-code-example-font-size-in-latex-pdf-output-with-sphinx/9955928
# Note: sizes are \footnotesize (8pt), \small (9pt), and \normalsize (10pt).
#from sphinx.highlighting import PygmentsBridge
#from pygments.formatters.latex import LatexFormatter
# from sphinx.highlighting import PygmentsBridge
# from pygments.formatters.latex import LatexFormatter
#
#class CustomLatexFormatter(LatexFormatter):
# class CustomLatexFormatter(LatexFormatter):
# def __init__(self, **options):
# super(CustomLatexFormatter, self).__init__(**options)
# self.verboptions = r"formatcom=\footnotesize"
#
#PygmentsBridge.latex_formatter = CustomLatexFormatter
# PygmentsBridge.latex_formatter = CustomLatexFormatter
# -- General configuration ------------------------------------------------
@ -45,37 +46,37 @@ import sys, os
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'ns-3 project'
copyright = u'2006-2019, ns-3 project'
#author = u'test'
project = "ns-3 project"
copyright = "2006-2019, ns-3 project"
# author = u'test'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = u'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@ -96,7 +97,7 @@ language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@ -118,7 +119,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@ -135,7 +136,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'ns3_html_theme'
html_theme = "ns3_html_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@ -144,13 +145,13 @@ html_theme = 'ns3_html_theme'
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../..']
html_theme_path = ["../.."]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'est vtest'
html_title = 'Manual'
html_title = "Manual"
# A shorter title for the navigation bar. Default is the same as html_title.
#
@ -170,7 +171,7 @@ html_title = 'Manual'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
@ -182,7 +183,7 @@ html_static_path = ['_static']
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
html_last_updated_fmt = '%b %d, %Y %H:%M'
html_last_updated_fmt = "%b %d, %Y %H:%M"
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
@ -250,7 +251,7 @@ html_last_updated_fmt = '%b %d, %Y %H:%M'
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'
htmlhelp_basename = "ns-3doc"
# -- Options for LaTeX output ---------------------------------------------
@ -258,11 +259,9 @@ latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# VerbatimBorderColor: make the box around code samples blend into the background
@ -275,10 +274,10 @@ latex_elements = {
# See above to change the font size of verbatim code blocks
#
# 'preamble': '',
'preamble': u'''\\usepackage{amssymb}
"preamble": """\\usepackage{amssymb}
\\definecolor{VerbatimBorderColor}{rgb}{1,1,1}
\\renewcommand{\\sphinxcode}[1]{\\texttt{\\small{#1}}}
'''
"""
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@ -288,14 +287,13 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ns-3-manual.tex', u'ns-3 Manual',
u'ns-3 project', 'manual'),
("index", "ns-3-manual.tex", "ns-3 Manual", "ns-3 project", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
latex_logo = '../../ns3_html_theme/static/ns-3.png'
latex_logo = "../../ns3_html_theme/static/ns-3.png"
# If true, show page references after internal links.
#
@ -324,10 +322,7 @@ latex_logo = '../../ns3_html_theme/static/ns-3.png'
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-manual', u'ns-3 Manual',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-manual", "ns-3 Manual", ["ns-3 project"], 1)]
# If true, show URL addresses after external links.
#
@ -335,11 +330,11 @@ man_pages = [
# -- Options for texinfo output ---------------------------------------
#texinfo_documents = [
# texinfo_documents = [
# (master_doc, 'test', u'test Documentation',
# author, 'test', 'One line description of project.',
# 'Miscellaneous'),
#]
# ]
# Documents to append as an appendix to all manuals.
#


@ -20,21 +20,22 @@
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sys, os
import os
import sys
# To change default code-block format in Latex to footnotesize (8pt)
# Tip from https://stackoverflow.com/questions/9899283/how-do-you-change-the-code-example-font-size-in-latex-pdf-output-with-sphinx/9955928
# Note: sizes are \footnotesize (8pt), \small (9pt), and \normalsize (10pt).
#from sphinx.highlighting import PygmentsBridge
#from pygments.formatters.latex import LatexFormatter
# from sphinx.highlighting import PygmentsBridge
# from pygments.formatters.latex import LatexFormatter
#
#class CustomLatexFormatter(LatexFormatter):
# class CustomLatexFormatter(LatexFormatter):
# def __init__(self, **options):
# super(CustomLatexFormatter, self).__init__(**options)
# self.verboptions = r"formatcom=\footnotesize"
#
#PygmentsBridge.latex_formatter = CustomLatexFormatter
# PygmentsBridge.latex_formatter = CustomLatexFormatter
# -- General configuration ------------------------------------------------
@ -45,37 +46,37 @@ import sys, os
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'ns-3 project'
copyright = u'2006-2019, ns-3 project'
#author = u'test'
project = "ns-3 project"
copyright = "2006-2019, ns-3 project"
# author = u'test'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = u'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@ -96,7 +97,7 @@ language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@ -118,7 +119,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@ -135,7 +136,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'ns3_html_theme'
html_theme = "ns3_html_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@ -144,18 +145,18 @@ html_theme = 'ns3_html_theme'
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../..']
html_theme_path = ["../.."]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'est vtest'
html_title = 'Model Library'
html_title = "Model Library"
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
html_short_title = 'Models'
html_short_title = "Models"
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
@ -171,7 +172,7 @@ html_short_title = 'Models'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
@ -183,7 +184,7 @@ html_static_path = ['_static']
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
html_last_updated_fmt = '%b %d, %Y %H:%M'
html_last_updated_fmt = "%b %d, %Y %H:%M"
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
@ -251,7 +252,7 @@ html_last_updated_fmt = '%b %d, %Y %H:%M'
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'
htmlhelp_basename = "ns-3doc"
# -- Options for LaTeX output ---------------------------------------------
@ -259,11 +260,9 @@ latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# VerbatimBorderColor: make the box around code samples blend into the background
@ -276,10 +275,10 @@ latex_elements = {
# See above to change the font size of verbatim code blocks
#
# 'preamble': '',
'preamble': u'''\\usepackage{amssymb}
"preamble": """\\usepackage{amssymb}
\\definecolor{VerbatimBorderColor}{rgb}{1,1,1}
\\renewcommand{\\sphinxcode}[1]{\\texttt{\\small{#1}}}
'''
"""
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@ -289,14 +288,13 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ns-3-model-library.tex', u'ns-3 Model Library',
u'ns-3 project', 'manual'),
("index", "ns-3-model-library.tex", "ns-3 Model Library", "ns-3 project", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
latex_logo = '../../ns3_html_theme/static/ns-3.png'
latex_logo = "../../ns3_html_theme/static/ns-3.png"
# If true, show page references after internal links.
#
@ -325,10 +323,7 @@ latex_logo = '../../ns3_html_theme/static/ns-3.png'
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-model-library', u'ns-3 Model Library',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-model-library", "ns-3 Model Library", ["ns-3 project"], 1)]
# If true, show URL addresses after external links.
#
@ -336,11 +331,11 @@ man_pages = [
# -- Options for texinfo output ---------------------------------------
#texinfo_documents = [
# texinfo_documents = [
# (master_doc, 'test', u'test Documentation',
# author, 'test', 'One line description of project.',
# 'Miscellaneous'),
#]
# ]
# Documents to append as an appendix to all manuals.
#


@ -7,35 +7,41 @@
# ...
# </pages>
import pickle
import os
import codecs
import os
import pickle
def dump_pickles(out, dirname, filename, path):
with open(os.path.join(dirname, filename), 'r', encoding='utf-8') as f:
with open(os.path.join(dirname, filename), "r", encoding="utf-8") as f:
data = pickle.load(f)
with codecs.open(data['current_page_name'] + '.frag', mode='w', encoding='utf-8') as fragment_file:
fragment_file.write(data['body'])
with codecs.open(
data["current_page_name"] + ".frag", mode="w", encoding="utf-8"
) as fragment_file:
fragment_file.write(data["body"])
out.write(' <page url="%s">\n' % path)
out.write(' <fragment>%s.frag</fragment>\n' % data['current_page_name'])
if data['prev'] is not None:
out.write(' <prev url="%s">%s</prev>\n' %
(os.path.normpath(os.path.join(path, data['prev']['link'])),
data['prev']['title']))
if data['next'] is not None:
out.write(' <next url="%s">%s</next>\n' %
(os.path.normpath(os.path.join(path, data['next']['link'])),
data['next']['title']))
out.write(' </page>\n')
out.write(" <fragment>%s.frag</fragment>\n" % data["current_page_name"])
if data["prev"] is not None:
out.write(
' <prev url="%s">%s</prev>\n'
% (os.path.normpath(os.path.join(path, data["prev"]["link"])), data["prev"]["title"])
)
if data["next"] is not None:
out.write(
' <next url="%s">%s</next>\n'
% (os.path.normpath(os.path.join(path, data["next"]["link"])), data["next"]["title"])
)
out.write(" </page>\n")
if data['next'] is not None:
next_path = os.path.normpath(os.path.join(path, data['next']['link']))
next_filename = os.path.basename(next_path) + '.fpickle'
if data["next"] is not None:
next_path = os.path.normpath(os.path.join(path, data["next"]["link"]))
next_filename = os.path.basename(next_path) + ".fpickle"
dump_pickles(out, dirname, next_filename, next_path)
import sys
sys.stdout.write('<pages>\n')
dump_pickles(sys.stdout, os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1]), '/')
sys.stdout.write('</pages>')
sys.stdout.write("<pages>\n")
dump_pickles(sys.stdout, os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1]), "/")
sys.stdout.write("</pages>")


@ -24,15 +24,15 @@
# Tip from https://stackoverflow.com/questions/9899283/how-do-you-change-the-code-example-font-size-in-latex-pdf-output-with-sphinx/9955928
# Note: sizes are \footnotesize (8pt), \small (9pt), and \normalsize (10pt).
#from sphinx.highlighting import PygmentsBridge
#from pygments.formatters.latex import LatexFormatter
# from sphinx.highlighting import PygmentsBridge
# from pygments.formatters.latex import LatexFormatter
#
#class CustomLatexFormatter(LatexFormatter):
# class CustomLatexFormatter(LatexFormatter):
# def __init__(self, **options):
# super(CustomLatexFormatter, self).__init__(**options)
# self.verboptions = r"formatcom=\footnotesize"
#
#PygmentsBridge.latex_formatter = CustomLatexFormatter
# PygmentsBridge.latex_formatter = CustomLatexFormatter
# -- General configuration ------------------------------------------------
@ -43,37 +43,37 @@
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'ns-3 project'
copyright = u'2006-2019, ns-3 project'
#author = u'test'
project = "ns-3 project"
copyright = "2006-2019, ns-3 project"
# author = u'test'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = u'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@ -94,7 +94,7 @@ language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@ -116,7 +116,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@ -133,7 +133,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'ns3_html_theme'
html_theme = "ns3_html_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@ -142,13 +142,13 @@ html_theme = 'ns3_html_theme'
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../..']
html_theme_path = ["../.."]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'est vtest'
html_title = 'Tutorial'
html_title = "Tutorial"
# A shorter title for the navigation bar. Default is the same as html_title.
#
@ -168,7 +168,7 @@ html_title = 'Tutorial'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
@ -180,7 +180,7 @@ html_static_path = ['_static']
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
html_last_updated_fmt = '%b %d, %Y %H:%M'
html_last_updated_fmt = "%b %d, %Y %H:%M"
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
@ -248,7 +248,7 @@ html_last_updated_fmt = '%b %d, %Y %H:%M'
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'
htmlhelp_basename = "ns-3doc"
# -- Options for LaTeX output ---------------------------------------------
@ -256,11 +256,9 @@ latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# VerbatimBorderColor: make the box around code samples blend into the background
@ -273,10 +271,10 @@ latex_elements = {
# See above to change the font size of verbatim code blocks
#
# 'preamble': '',
'preamble': u'''\\usepackage{amssymb}
"preamble": """\\usepackage{amssymb}
\\definecolor{VerbatimBorderColor}{rgb}{1,1,1}
\\renewcommand{\\sphinxcode}[1]{\\texttt{\\small{#1}}}
'''
"""
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@ -286,14 +284,13 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ns-3-tutorial.tex', u'ns-3 Tutorial',
u'ns-3 project', 'manual'),
("index", "ns-3-tutorial.tex", "ns-3 Tutorial", "ns-3 project", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
latex_logo = '../../ns3_html_theme/static/ns-3.png'
latex_logo = "../../ns3_html_theme/static/ns-3.png"
# If true, show page references after internal links.
#
@ -322,10 +319,7 @@ latex_logo = '../../ns3_html_theme/static/ns-3.png'
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-tutorial', u'ns-3 Tutorial',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-tutorial", "ns-3 Tutorial", ["ns-3 project"], 1)]
# If true, show URL addresses after external links.
#
@ -333,11 +327,11 @@ man_pages = [
# -- Options for texinfo output ---------------------------------------
#texinfo_documents = [
# texinfo_documents = [
# (master_doc, 'test', u'test Documentation',
# author, 'test', 'One line description of project.',
# 'Miscellaneous'),
#]
# ]
# Documents to append as an appendix to all manuals.
#


@ -32,87 +32,92 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
def main(argv):
#
# Allow the user to override any of the defaults and the above Bind() at
# run-time, via command-line arguments
#
cmd = ns.core.CommandLine()
cmd.Parse(argv)
#
# Allow the user to override any of the defaults and the above Bind() at
# run-time, via command-line arguments
#
cmd = ns.core.CommandLine()
cmd.Parse(argv)
#
# But since this is a realtime script, don't allow the user to mess with
# that.
#
ns.core.GlobalValue.Bind("SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl"))
#
# But since this is a realtime script, don't allow the user to mess with
# that.
#
ns.core.GlobalValue.Bind(
"SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl")
)
#
# Explicitly create the nodes required by the topology (shown above).
#
print ("Create nodes.")
n = ns.network.NodeContainer()
n.Create(4)
#
# Explicitly create the nodes required by the topology (shown above).
#
print("Create nodes.")
n = ns.network.NodeContainer()
n.Create(4)
internet = ns.internet.InternetStackHelper()
internet.Install(n)
internet = ns.internet.InternetStackHelper()
internet.Install(n)
#
# Explicitly create the channels required by the topology (shown above).
#
print ("Create channels.")
csma = ns.csma.CsmaHelper()
csma.SetChannelAttribute("DataRate", ns.network.DataRateValue(ns.network.DataRate(5000000)))
csma.SetChannelAttribute("Delay", ns.core.TimeValue(ns.core.MilliSeconds(2)));
csma.SetDeviceAttribute("Mtu", ns.core.UintegerValue(1400))
d = csma.Install(n)
#
# Explicitly create the channels required by the topology (shown above).
#
print("Create channels.")
csma = ns.csma.CsmaHelper()
csma.SetChannelAttribute("DataRate", ns.network.DataRateValue(ns.network.DataRate(5000000)))
csma.SetChannelAttribute("Delay", ns.core.TimeValue(ns.core.MilliSeconds(2)))
csma.SetDeviceAttribute("Mtu", ns.core.UintegerValue(1400))
d = csma.Install(n)
#
# We've got the "hardware" in place. Now we need to add IP addresses.
#
print ("Assign IP Addresses.")
ipv4 = ns.internet.Ipv4AddressHelper()
ipv4.SetBase(ns.network.Ipv4Address("10.1.1.0"), ns.network.Ipv4Mask("255.255.255.0"))
i = ipv4.Assign(d)
#
# We've got the "hardware" in place. Now we need to add IP addresses.
#
print("Assign IP Addresses.")
ipv4 = ns.internet.Ipv4AddressHelper()
ipv4.SetBase(ns.network.Ipv4Address("10.1.1.0"), ns.network.Ipv4Mask("255.255.255.0"))
i = ipv4.Assign(d)
print ("Create Applications.")
print("Create Applications.")
#
# Create a UdpEchoServer application on node one.
#
port = 9 # well-known echo port number
server = ns.applications.UdpEchoServerHelper(port)
apps = server.Install(n.Get(1))
apps.Start(ns.core.Seconds(1.0))
apps.Stop(ns.core.Seconds(10.0))
#
# Create a UdpEchoServer application on node one.
#
port = 9 # well-known echo port number
server = ns.applications.UdpEchoServerHelper(port)
apps = server.Install(n.Get(1))
apps.Start(ns.core.Seconds(1.0))
apps.Stop(ns.core.Seconds(10.0))
#
# Create a UdpEchoClient application to send UDP datagrams from node zero to
# node one.
#
packetSize = 1024
maxPacketCount = 500
interPacketInterval = ns.core.Seconds(0.01)
client = ns.applications.UdpEchoClientHelper(i.GetAddress(1).ConvertTo(), port)
client.SetAttribute("MaxPackets", ns.core.UintegerValue(maxPacketCount))
client.SetAttribute("Interval", ns.core.TimeValue(interPacketInterval))
client.SetAttribute("PacketSize", ns.core.UintegerValue(packetSize))
apps = client.Install(n.Get(0))
apps.Start(ns.core.Seconds(2.0))
apps.Stop(ns.core.Seconds(10.0))
#
# Create a UdpEchoClient application to send UDP datagrams from node zero to
# node one.
#
packetSize = 1024
maxPacketCount = 500
interPacketInterval = ns.core.Seconds(0.01)
client = ns.applications.UdpEchoClientHelper(i.GetAddress(1).ConvertTo(), port)
client.SetAttribute("MaxPackets", ns.core.UintegerValue(maxPacketCount))
client.SetAttribute("Interval", ns.core.TimeValue(interPacketInterval))
client.SetAttribute("PacketSize", ns.core.UintegerValue(packetSize))
apps = client.Install(n.Get(0))
apps.Start(ns.core.Seconds(2.0))
apps.Stop(ns.core.Seconds(10.0))
ascii = ns.network.AsciiTraceHelper()
csma.EnableAsciiAll(ascii.CreateFileStream("realtime-udp-echo.tr"))
csma.EnablePcapAll("realtime-udp-echo", False)
ascii = ns.network.AsciiTraceHelper()
csma.EnableAsciiAll(ascii.CreateFileStream("realtime-udp-echo.tr"))
csma.EnablePcapAll("realtime-udp-echo", False)
#
# Now, do the actual simulation.
#
print ("Run Simulation.")
ns.core.Simulator.Stop(ns.Seconds(10))
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
print ("Done.")
#
# Now, do the actual simulation.
#
print("Run Simulation.")
ns.core.Simulator.Stop(ns.Seconds(10))
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
print("Done.")
if __name__ == '__main__':
import sys
main(sys.argv)
if __name__ == "__main__":
import sys
main(sys.argv)
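A quick back-of-the-envelope check of the echo client settings above (an editorial sketch, not part of the example): 500 packets spaced 10 ms apart and started at t = 2 s are all sent by t = 7 s, comfortably inside the 10 s stop time.

# Rough timing check for the UdpEchoClient attributes above (illustrative only).
max_packet_count = 500
inter_packet_interval_s = 0.01
client_start_s, client_stop_s = 2.0, 10.0

send_window_s = max_packet_count * inter_packet_interval_s   # 5.0 s of transmissions
assert client_start_s + send_window_s <= client_stop_s       # all packets fit before Stop()
print(f"last echo request goes out around t = {client_start_s + send_window_s:.1f} s")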

View File

@ -80,7 +80,7 @@ def main(argv):
print("Application")
packetSize = 1024
maxPacketCount = 5
interPacketInterval = ns.Seconds(1.)
interPacketInterval = ns.Seconds(1.0)
# ping = ns.PingHelper(i2.GetAddress(1, 1).ConvertTo())
ping = ns.PingHelper(i2.GetAddress(1, 1).ConvertTo())
@ -105,7 +105,7 @@ def main(argv):
ns.Simulator.Destroy()
if __name__ == '__main__':
if __name__ == "__main__":
import sys
main(sys.argv)

View File

@ -11,10 +11,26 @@ cpp_examples = [
("tcp-large-transfer", "True", "True"),
("tcp-star-server", "True", "True"),
("tcp-variants-comparison", "True", "True"),
("tcp-validation --firstTcpType=dctcp --linkRate=50Mbps --baseRtt=10ms --queueUseEcn=1 --stopTime=15s --validate=dctcp-10ms", "True", "True"),
("tcp-validation --firstTcpType=dctcp --linkRate=50Mbps --baseRtt=80ms --queueUseEcn=1 --stopTime=40s --validate=dctcp-80ms", "True", "True"),
("tcp-validation --firstTcpType=cubic --linkRate=50Mbps --baseRtt=50ms --queueUseEcn=0 --stopTime=20s --validate=cubic-50ms-no-ecn", "True", "True"),
("tcp-validation --firstTcpType=cubic --linkRate=50Mbps --baseRtt=50ms --queueUseEcn=1 --stopTime=20s --validate=cubic-50ms-ecn", "True", "True"),
(
"tcp-validation --firstTcpType=dctcp --linkRate=50Mbps --baseRtt=10ms --queueUseEcn=1 --stopTime=15s --validate=dctcp-10ms",
"True",
"True",
),
(
"tcp-validation --firstTcpType=dctcp --linkRate=50Mbps --baseRtt=80ms --queueUseEcn=1 --stopTime=40s --validate=dctcp-80ms",
"True",
"True",
),
(
"tcp-validation --firstTcpType=cubic --linkRate=50Mbps --baseRtt=50ms --queueUseEcn=0 --stopTime=20s --validate=cubic-50ms-no-ecn",
"True",
"True",
),
(
"tcp-validation --firstTcpType=cubic --linkRate=50Mbps --baseRtt=50ms --queueUseEcn=1 --stopTime=20s --validate=cubic-50ms-ecn",
"True",
"True",
),
]
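Each entry in cpp_examples above reads as (example name plus arguments, run condition, valgrind-run condition), with the conditions kept as strings so the test driver can evaluate them in its own context. The snippet below is a hedged sketch of how such tuples could be consumed; the eval-based handling is an assumption, not a quote of test.py.

# Hedged sketch of consuming (example, do_run, do_valgrind_run) entries.
examples = [
    ("tcp-star-server", "True", "True"),
    ("wifi-adhoc", "False", "True"),  # skipped in normal runs, still valgrind-eligible
]

for example, do_run, do_valgrind_run in examples:
    if eval(do_run):  # condition strings are evaluated by the driver, not compared literally
        print(f"would run: ./ns3 run '{example}'")
    if eval(do_valgrind_run):
        print(f"would also schedule a valgrind run of {example.split()[0]}")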
# A list of Python examples to run in order to ensure that they remain

View File

@ -45,8 +45,7 @@ stack = ns.internet.InternetStackHelper()
stack.Install(nodes)
address = ns.internet.Ipv4AddressHelper()
address.SetBase(ns.network.Ipv4Address("10.1.1.0"),
ns.network.Ipv4Mask("255.255.255.0"))
address.SetBase(ns.network.Ipv4Address("10.1.1.0"), ns.network.Ipv4Mask("255.255.255.0"))
interfaces = address.Assign(devices)
@ -68,4 +67,3 @@ clientApps.Stop(ns.core.Seconds(10.0))
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
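For orientation, SetBase("10.1.1.0", "255.255.255.0") makes the subsequent Assign(devices) hand out consecutive host addresses in that /24, so the first two assigned interfaces come out as 10.1.1.1 and 10.1.1.2. The snippet below recomputes that expectation in plain Python rather than through the bindings.

# Plain-Python recomputation of the addresses Assign() is expected to hand out
# (illustrative sketch, not the ns-3 code path).
import ipaddress

subnet = ipaddress.ip_network("10.1.1.0/255.255.255.0")
first_two_hosts = list(subnet.hosts())[:2]
print(first_two_hosts)  # [IPv4Address('10.1.1.1'), IPv4Address('10.1.1.2')]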

View File

@ -24,6 +24,7 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
import sys
from ctypes import c_bool, c_int
# // Default Network Topology
# //
@ -33,7 +34,7 @@ import sys
# // ================
# // LAN 10.1.2.0
from ctypes import c_int, c_bool
nCsma = c_int(3)
verbose = c_bool(True)
cmd = ns.CommandLine(__file__)
@ -82,9 +83,11 @@ serverApps = echoServer.Install(csmaNodes.Get(nCsma.value))
serverApps.Start(ns.core.Seconds(1.0))
serverApps.Stop(ns.core.Seconds(10.0))
echoClient = ns.applications.UdpEchoClientHelper(csmaInterfaces.GetAddress(nCsma.value).ConvertTo(), 9)
echoClient = ns.applications.UdpEchoClientHelper(
csmaInterfaces.GetAddress(nCsma.value).ConvertTo(), 9
)
echoClient.SetAttribute("MaxPackets", ns.core.UintegerValue(1))
echoClient.SetAttribute("Interval", ns.core.TimeValue(ns.core.Seconds (1.0)))
echoClient.SetAttribute("Interval", ns.core.TimeValue(ns.core.Seconds(1.0)))
echoClient.SetAttribute("PacketSize", ns.core.UintegerValue(1024))
clientApps = echoClient.Install(p2pNodes.Get(0))
@ -94,8 +97,7 @@ clientApps.Stop(ns.core.Seconds(10.0))
ns.internet.Ipv4GlobalRoutingHelper.PopulateRoutingTables()
pointToPoint.EnablePcapAll("second")
csma.EnablePcap ("second", csmaDevices.Get (1), True)
csma.EnablePcap("second", csmaDevices.Get(1), True)
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
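The ctypes import grouped at the top of this file exists so the script can hand writable scalars to the command-line parser. The following is a hedged sketch of the usual pattern in these cppyy-based examples; the AddValue names and help strings are illustrative, not copied from second.py.

# Hedged sketch of the ctypes/CommandLine pattern used by these examples.
import sys
from ctypes import c_bool, c_int

from ns import ns  # the cppyy-based bindings these examples rely on

nCsma = c_int(3)      # written back by cmd.Parse() if --nCsma=... is passed
verbose = c_bool(True)

cmd = ns.CommandLine(__file__)
cmd.AddValue("nCsma", "Number of extra CSMA nodes/devices", nCsma)
cmd.AddValue("verbose", "Tell echo applications to log if true", verbose)
cmd.Parse(sys.argv)

if verbose.value:     # ctypes wrappers are read through .value
    print(f"building a bus with {nCsma.value} extra CSMA nodes")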

View File

@ -24,6 +24,7 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
import sys
from ctypes import c_bool, c_int
# // Default Network Topology
# //
@ -36,7 +37,7 @@ import sys
# // ================
# // LAN 10.1.2.0
from ctypes import c_bool, c_int
nCsma = c_int(3)
verbose = c_bool(True)
nWifi = c_int(3)
@ -89,22 +90,40 @@ phy = ns.wifi.YansWifiPhyHelper()
phy.SetChannel(channel.Create())
mac = ns.wifi.WifiMacHelper()
ssid = ns.wifi.Ssid ("ns-3-ssid")
ssid = ns.wifi.Ssid("ns-3-ssid")
wifi = ns.wifi.WifiHelper()
mac.SetType ("ns3::StaWifiMac", "Ssid", ns.wifi.SsidValue(ssid), "ActiveProbing", ns.core.BooleanValue(False))
mac.SetType(
"ns3::StaWifiMac", "Ssid", ns.wifi.SsidValue(ssid), "ActiveProbing", ns.core.BooleanValue(False)
)
staDevices = wifi.Install(phy, mac, wifiStaNodes)
mac.SetType("ns3::ApWifiMac","Ssid", ns.wifi.SsidValue (ssid))
mac.SetType("ns3::ApWifiMac", "Ssid", ns.wifi.SsidValue(ssid))
apDevices = wifi.Install(phy, mac, wifiApNode)
mobility = ns.mobility.MobilityHelper()
mobility.SetPositionAllocator("ns3::GridPositionAllocator", "MinX", ns.core.DoubleValue(0.0),
"MinY", ns.core.DoubleValue (0.0), "DeltaX", ns.core.DoubleValue(5.0), "DeltaY", ns.core.DoubleValue(10.0),
"GridWidth", ns.core.UintegerValue(3), "LayoutType", ns.core.StringValue("RowFirst"))
mobility.SetPositionAllocator(
"ns3::GridPositionAllocator",
"MinX",
ns.core.DoubleValue(0.0),
"MinY",
ns.core.DoubleValue(0.0),
"DeltaX",
ns.core.DoubleValue(5.0),
"DeltaY",
ns.core.DoubleValue(10.0),
"GridWidth",
ns.core.UintegerValue(3),
"LayoutType",
ns.core.StringValue("RowFirst"),
)
mobility.SetMobilityModel ("ns3::RandomWalk2dMobilityModel", "Bounds", ns.mobility.RectangleValue(ns.mobility.Rectangle (-50, 50, -50, 50)))
mobility.SetMobilityModel(
"ns3::RandomWalk2dMobilityModel",
"Bounds",
ns.mobility.RectangleValue(ns.mobility.Rectangle(-50, 50, -50, 50)),
)
mobility.Install(wifiStaNodes)
mobility.SetMobilityModel("ns3::ConstantPositionMobilityModel")
@ -132,12 +151,14 @@ serverApps = echoServer.Install(csmaNodes.Get(nCsma.value))
serverApps.Start(ns.core.Seconds(1.0))
serverApps.Stop(ns.core.Seconds(10.0))
echoClient = ns.applications.UdpEchoClientHelper(csmaInterfaces.GetAddress(nCsma.value).ConvertTo(), 9)
echoClient = ns.applications.UdpEchoClientHelper(
csmaInterfaces.GetAddress(nCsma.value).ConvertTo(), 9
)
echoClient.SetAttribute("MaxPackets", ns.core.UintegerValue(1))
echoClient.SetAttribute("Interval", ns.core.TimeValue(ns.core.Seconds (1.0)))
echoClient.SetAttribute("Interval", ns.core.TimeValue(ns.core.Seconds(1.0)))
echoClient.SetAttribute("PacketSize", ns.core.UintegerValue(1024))
clientApps = echoClient.Install(wifiStaNodes.Get (nWifi.value - 1))
clientApps = echoClient.Install(wifiStaNodes.Get(nWifi.value - 1))
clientApps.Start(ns.core.Seconds(2.0))
clientApps.Stop(ns.core.Seconds(10.0))
@ -147,10 +168,9 @@ ns.core.Simulator.Stop(ns.core.Seconds(10.0))
if tracing.value:
phy.SetPcapDataLinkType(phy.DLT_IEEE802_11_RADIO)
pointToPoint.EnablePcapAll ("third")
phy.EnablePcap ("third", apDevices.Get (0))
csma.EnablePcap ("third", csmaDevices.Get (0), True)
pointToPoint.EnablePcapAll("third")
phy.EnablePcap("third", apDevices.Get(0))
csma.EnablePcap("third", csmaDevices.Get(0), True)
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
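A side note on the grid allocator parameters above: with MinX = MinY = 0, DeltaX = 5, DeltaY = 10, GridWidth = 3 and RowFirst layout, the stations land at (0, 0), (5, 0), (10, 0) and the next row starts at (0, 10). The snippet below recomputes that mapping independently of ns-3.

# Recompute the RowFirst grid positions for the allocator parameters used above
# (independent sketch, not ns-3 code).
min_x, min_y = 0.0, 0.0
delta_x, delta_y = 5.0, 10.0
grid_width = 3

def grid_position(i):
    row, col = divmod(i, grid_width)   # RowFirst: fill a row along x, then step down in y
    return (min_x + col * delta_x, min_y + row * delta_y)

print([grid_position(i) for i in range(4)])  # [(0.0, 0.0), (5.0, 0.0), (10.0, 0.0), (0.0, 10.0)]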

View File

@ -15,9 +15,9 @@ cpp_examples = [
("wifi-multirate --totalTime=0.3s --rateManager=ns3::MinstrelWifiManager", "True", "False"),
("wifi-multirate --totalTime=0.3s --rateManager=ns3::OnoeWifiManager", "True", "False"),
("wifi-multirate --totalTime=0.3s --rateManager=ns3::RraaWifiManager", "True", "False"),
("wifi-adhoc", "False", "True"), # Takes too long to run
("wifi-ap --verbose=0", "True", "True"), # Don't let it spew to stdout
("wifi-clear-channel-cmu", "False", "True"), # Requires specific hardware
("wifi-adhoc", "False", "True"), # Takes too long to run
("wifi-ap --verbose=0", "True", "True"), # Don't let it spew to stdout
("wifi-clear-channel-cmu", "False", "True"), # Requires specific hardware
("wifi-simple-adhoc", "True", "True"),
("wifi-simple-adhoc-grid", "True", "True"),
("wifi-simple-infra", "True", "True"),
@ -26,12 +26,36 @@ cpp_examples = [
("wifi-sleep", "True", "True"),
("wifi-blockack", "True", "True"),
("wifi-timing-attributes --simulationTime=1", "True", "True"),
("wifi-power-adaptation-distance --manager=ns3::ParfWifiManager --outputFileName=parf --steps=5 --stepsSize=10", "True", "True"),
("wifi-power-adaptation-distance --manager=ns3::AparfWifiManager --outputFileName=aparf --steps=5 --stepsSize=10", "True", "False"),
("wifi-power-adaptation-distance --manager=ns3::RrpaaWifiManager --outputFileName=rrpaa --steps=5 --stepsSize=10", "True", "False"),
("wifi-rate-adaptation-distance --standard=802.11a --staManager=ns3::MinstrelWifiManager --apManager=ns3::MinstrelWifiManager --outputFileName=minstrel --stepsSize=50 --stepsTime=0.1", "True", "False"),
("wifi-rate-adaptation-distance --standard=802.11a --staManager=ns3::MinstrelWifiManager --apManager=ns3::MinstrelWifiManager --outputFileName=minstrel --stepsSize=50 --stepsTime=0.1 --STA1_x=-200", "True", "False"),
("wifi-rate-adaptation-distance --staManager=ns3::MinstrelHtWifiManager --apManager=ns3::MinstrelHtWifiManager --outputFileName=minstrelHt --shortGuardInterval=true --channelWidth=40 --stepsSize=50 --stepsTime=0.1", "True", "False"),
(
"wifi-power-adaptation-distance --manager=ns3::ParfWifiManager --outputFileName=parf --steps=5 --stepsSize=10",
"True",
"True",
),
(
"wifi-power-adaptation-distance --manager=ns3::AparfWifiManager --outputFileName=aparf --steps=5 --stepsSize=10",
"True",
"False",
),
(
"wifi-power-adaptation-distance --manager=ns3::RrpaaWifiManager --outputFileName=rrpaa --steps=5 --stepsSize=10",
"True",
"False",
),
(
"wifi-rate-adaptation-distance --standard=802.11a --staManager=ns3::MinstrelWifiManager --apManager=ns3::MinstrelWifiManager --outputFileName=minstrel --stepsSize=50 --stepsTime=0.1",
"True",
"False",
),
(
"wifi-rate-adaptation-distance --standard=802.11a --staManager=ns3::MinstrelWifiManager --apManager=ns3::MinstrelWifiManager --outputFileName=minstrel --stepsSize=50 --stepsTime=0.1 --STA1_x=-200",
"True",
"False",
),
(
"wifi-rate-adaptation-distance --staManager=ns3::MinstrelHtWifiManager --apManager=ns3::MinstrelHtWifiManager --outputFileName=minstrelHt --shortGuardInterval=true --channelWidth=40 --stepsSize=50 --stepsTime=0.1",
"True",
"False",
),
("wifi-power-adaptation-interference --simuTime=5", "True", "False"),
("wifi-dsss-validation", "True", "True"),
("wifi-ofdm-validation", "True", "True"),
@ -40,37 +64,145 @@ cpp_examples = [
("wifi-ofdm-he-validation", "True", "True"),
("wifi-error-models-comparison", "True", "True"),
("wifi-80211n-mimo --simulationTime=0.1 --step=10", "True", "True"),
("wifi-ht-network --simulationTime=0.2 --frequency=5 --useRts=0 --minExpectedThroughput=5 --maxExpectedThroughput=135", "True", "True"),
("wifi-ht-network --simulationTime=0.2 --frequency=5 --useRts=1 --minExpectedThroughput=5 --maxExpectedThroughput=132", "True", "True"),
("wifi-ht-network --simulationTime=0.2 --frequency=2.4 --useRts=0 --minExpectedThroughput=5 --maxExpectedThroughput=132", "True", "True"),
("wifi-ht-network --simulationTime=0.2 --frequency=2.4 --useRts=1 --minExpectedThroughput=5 --maxExpectedThroughput=129", "True", "True"),
("wifi-vht-network --simulationTime=0.2 --useRts=0 --minExpectedThroughput=5 --maxExpectedThroughput=583", "True", "True"),
("wifi-vht-network --simulationTime=0.2 --useRts=1 --minExpectedThroughput=5 --maxExpectedThroughput=557", "True", "True"),
("wifi-he-network --simulationTime=0.25 --frequency=5 --useRts=0 --minExpectedThroughput=6 --maxExpectedThroughput=844", "True", "True"),
("wifi-he-network --simulationTime=0.3 --frequency=5 --useRts=0 --useExtendedBlockAck=1 --minExpectedThroughput=6 --maxExpectedThroughput=1033", "True", "True"),
("wifi-he-network --simulationTime=0.3 --frequency=5 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=745", "True", "True"),
("wifi-he-network --simulationTime=0.25 --frequency=2.4 --useRts=0 --minExpectedThroughput=6 --maxExpectedThroughput=238", "True", "True"),
("wifi-he-network --simulationTime=0.3 --frequency=2.4 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=223", "True", "True"),
("wifi-he-network --simulationTime=0.3 --udp=0 --downlink=1 --useRts=0 --nStations=4 --dlAckType=ACK-SU-FORMAT --enableUlOfdma=1 --enableBsrp=0 --mcs=4 --minExpectedThroughput=20 --maxExpectedThroughput=212", "True", "True"),
("wifi-he-network --simulationTime=0.3 --frequency=2.4 --udp=0 --downlink=1 --useRts=1 --nStations=5 --dlAckType=MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --minExpectedThroughput=27 --maxExpectedThroughput=50", "True", "True"),
("wifi-he-network --simulationTime=0.3 --udp=0 --downlink=1 --useRts=0 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=0 --mcs=6 --muSchedAccessReqInterval=50ms --minExpectedThroughput=31 --maxExpectedThroughput=290", "True", "True"),
("wifi-he-network --simulationTime=0.3 --udp=1 --downlink=0 --useRts=1 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --muSchedAccessReqInterval=50ms --minExpectedThroughput=46 --maxExpectedThroughput=327", "True", "True"),
("wifi-eht-network --simulationTime=0.1 --frequency=5 --useRts=0 --minExpectedThroughput=6 --maxExpectedThroughput=550", "True", "True"),
("wifi-eht-network --simulationTime=0.1 --frequency=5 --useRts=0 --useExtendedBlockAck=1 --frequency2=6 --minExpectedThroughput=12 --maxExpectedThroughput=550", "True", "True"),
("wifi-eht-network --simulationTime=0.1 --frequency=5 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=547", "True", "True"),
("wifi-eht-network --simulationTime=0.1 --frequency=2.4 --useRts=0 --useExtendedBlockAck=1 --frequency2=5 --minExpectedThroughput=12 --maxExpectedThroughput=500", "True", "True"),
("wifi-eht-network --simulationTime=0.1 --frequency=2.4 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=212", "True", "True"),
("wifi-eht-network --simulationTime=0.23 --udp=0 --downlink=1 --useRts=0 --nStations=4 --dlAckType=ACK-SU-FORMAT --enableUlOfdma=1 --enableBsrp=0 --mcs=4 --frequency2=6 --minExpectedThroughput=35 --maxExpectedThroughput=280", "True", "True"),
("wifi-eht-network --simulationTime=0.25 --frequency=2.4 --udp=0 --downlink=1 --useRts=0 --nStations=5 --dlAckType=MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --frequency2=5 --useExtendedBlockAck=1 --minExpectedThroughput=40 --maxExpectedThroughput=100", "True", "True"),
("wifi-eht-network --simulationTime=0.3 --udp=0 --downlink=1 --useRts=1 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=0 --mcs=6 --muSchedAccessReqInterval=50ms --frequency2=2.4 --minExpectedThroughput=50 --maxExpectedThroughput=140", "True", "True"),
("wifi-eht-network --simulationTime=0.2 --udp=1 --downlink=0 --useRts=0 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --muSchedAccessReqInterval=50ms --frequency2=6 --minExpectedThroughput=70 --maxExpectedThroughput=715", "True", "True"),
("wifi-simple-ht-hidden-stations --simulationTime=1 --enableRts=0 --nMpdus=32 --minExpectedThroughput=59 --maxExpectedThroughput=60", "True", "True"),
("wifi-simple-ht-hidden-stations --simulationTime=1 --enableRts=1 --nMpdus=32 --minExpectedThroughput=57 --maxExpectedThroughput=58", "True", "True"),
(
"wifi-ht-network --simulationTime=0.2 --frequency=5 --useRts=0 --minExpectedThroughput=5 --maxExpectedThroughput=135",
"True",
"True",
),
(
"wifi-ht-network --simulationTime=0.2 --frequency=5 --useRts=1 --minExpectedThroughput=5 --maxExpectedThroughput=132",
"True",
"True",
),
(
"wifi-ht-network --simulationTime=0.2 --frequency=2.4 --useRts=0 --minExpectedThroughput=5 --maxExpectedThroughput=132",
"True",
"True",
),
(
"wifi-ht-network --simulationTime=0.2 --frequency=2.4 --useRts=1 --minExpectedThroughput=5 --maxExpectedThroughput=129",
"True",
"True",
),
(
"wifi-vht-network --simulationTime=0.2 --useRts=0 --minExpectedThroughput=5 --maxExpectedThroughput=583",
"True",
"True",
),
(
"wifi-vht-network --simulationTime=0.2 --useRts=1 --minExpectedThroughput=5 --maxExpectedThroughput=557",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.25 --frequency=5 --useRts=0 --minExpectedThroughput=6 --maxExpectedThroughput=844",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.3 --frequency=5 --useRts=0 --useExtendedBlockAck=1 --minExpectedThroughput=6 --maxExpectedThroughput=1033",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.3 --frequency=5 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=745",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.25 --frequency=2.4 --useRts=0 --minExpectedThroughput=6 --maxExpectedThroughput=238",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.3 --frequency=2.4 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=223",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.3 --udp=0 --downlink=1 --useRts=0 --nStations=4 --dlAckType=ACK-SU-FORMAT --enableUlOfdma=1 --enableBsrp=0 --mcs=4 --minExpectedThroughput=20 --maxExpectedThroughput=212",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.3 --frequency=2.4 --udp=0 --downlink=1 --useRts=1 --nStations=5 --dlAckType=MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --minExpectedThroughput=27 --maxExpectedThroughput=50",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.3 --udp=0 --downlink=1 --useRts=0 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=0 --mcs=6 --muSchedAccessReqInterval=50ms --minExpectedThroughput=31 --maxExpectedThroughput=290",
"True",
"True",
),
(
"wifi-he-network --simulationTime=0.3 --udp=1 --downlink=0 --useRts=1 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --muSchedAccessReqInterval=50ms --minExpectedThroughput=46 --maxExpectedThroughput=327",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.1 --frequency=5 --useRts=0 --minExpectedThroughput=6 --maxExpectedThroughput=550",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.1 --frequency=5 --useRts=0 --useExtendedBlockAck=1 --frequency2=6 --minExpectedThroughput=12 --maxExpectedThroughput=550",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.1 --frequency=5 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=547",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.1 --frequency=2.4 --useRts=0 --useExtendedBlockAck=1 --frequency2=5 --minExpectedThroughput=12 --maxExpectedThroughput=500",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.1 --frequency=2.4 --useRts=1 --minExpectedThroughput=6 --maxExpectedThroughput=212",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.23 --udp=0 --downlink=1 --useRts=0 --nStations=4 --dlAckType=ACK-SU-FORMAT --enableUlOfdma=1 --enableBsrp=0 --mcs=4 --frequency2=6 --minExpectedThroughput=35 --maxExpectedThroughput=280",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.25 --frequency=2.4 --udp=0 --downlink=1 --useRts=0 --nStations=5 --dlAckType=MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --frequency2=5 --useExtendedBlockAck=1 --minExpectedThroughput=40 --maxExpectedThroughput=100",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.3 --udp=0 --downlink=1 --useRts=1 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=0 --mcs=6 --muSchedAccessReqInterval=50ms --frequency2=2.4 --minExpectedThroughput=50 --maxExpectedThroughput=140",
"True",
"True",
),
(
"wifi-eht-network --simulationTime=0.2 --udp=1 --downlink=0 --useRts=0 --nStations=5 --dlAckType=AGGR-MU-BAR --enableUlOfdma=1 --enableBsrp=1 --mcs=5 --muSchedAccessReqInterval=50ms --frequency2=6 --minExpectedThroughput=70 --maxExpectedThroughput=715",
"True",
"True",
),
(
"wifi-simple-ht-hidden-stations --simulationTime=1 --enableRts=0 --nMpdus=32 --minExpectedThroughput=59 --maxExpectedThroughput=60",
"True",
"True",
),
(
"wifi-simple-ht-hidden-stations --simulationTime=1 --enableRts=1 --nMpdus=32 --minExpectedThroughput=57 --maxExpectedThroughput=58",
"True",
"True",
),
("wifi-mixed-network --simulationTime=1", "True", "True"),
("wifi-aggregation --simulationTime=1 --verifyResults=1", "True", "True"),
("wifi-txop-aggregation --simulationTime=1 --verifyResults=1", "True", "True"),
("wifi-80211e-txop --simulationTime=1 --verifyResults=1", "True", "True"),
("wifi-multi-tos --simulationTime=1 --nWifi=16 --useRts=1 --useShortGuardInterval=1", "True", "True"),
(
"wifi-multi-tos --simulationTime=1 --nWifi=16 --useRts=1 --useShortGuardInterval=1",
"True",
"True",
),
("wifi-tcp", "True", "True"),
("wifi-hidden-terminal --wifiManager=Arf", "True", "True"),
("wifi-hidden-terminal --wifiManager=Aarf", "True", "True"),
@ -81,14 +213,42 @@ cpp_examples = [
("wifi-hidden-terminal --wifiManager=Cara", "True", "True"),
("wifi-hidden-terminal --wifiManager=Rraa", "True", "True"),
("wifi-hidden-terminal --wifiManager=Rrpaa", "True", "True"),
("wifi-spectrum-per-example --distance=52 --index=3 --wifiType=ns3::SpectrumWifiPhy --simulationTime=1", "True", "True"),
("wifi-spectrum-per-example --distance=24 --index=31 --wifiType=ns3::YansWifiPhy --simulationTime=1", "True", "False"),
("wifi-spectrum-per-interference --distance=24 --index=31 --simulationTime=1 --waveformPower=0.1", "True", "True"),
(
"wifi-spectrum-per-example --distance=52 --index=3 --wifiType=ns3::SpectrumWifiPhy --simulationTime=1",
"True",
"True",
),
(
"wifi-spectrum-per-example --distance=24 --index=31 --wifiType=ns3::YansWifiPhy --simulationTime=1",
"True",
"False",
),
(
"wifi-spectrum-per-interference --distance=24 --index=31 --simulationTime=1 --waveformPower=0.1",
"True",
"True",
),
("wifi-spectrum-saturation-example --simulationTime=1 --index=63", "True", "True"),
("wifi-backward-compatibility --apVersion=80211a --staVersion=80211n_5GHZ --simulationTime=1", "True", "True"),
("wifi-backward-compatibility --apVersion=80211a --staVersion=80211n_5GHZ --apRaa=Ideal --staRaa=Ideal --simulationTime=1", "True", "False"),
("wifi-backward-compatibility --apVersion=80211a --staVersion=80211ac --simulationTime=1", "True", "False"),
("wifi-backward-compatibility --apVersion=80211a --staVersion=80211ac --apRaa=Ideal --staRaa=Ideal --simulationTime=1", "True", "False"),
(
"wifi-backward-compatibility --apVersion=80211a --staVersion=80211n_5GHZ --simulationTime=1",
"True",
"True",
),
(
"wifi-backward-compatibility --apVersion=80211a --staVersion=80211n_5GHZ --apRaa=Ideal --staRaa=Ideal --simulationTime=1",
"True",
"False",
),
(
"wifi-backward-compatibility --apVersion=80211a --staVersion=80211ac --simulationTime=1",
"True",
"False",
),
(
"wifi-backward-compatibility --apVersion=80211a --staVersion=80211ac --apRaa=Ideal --staRaa=Ideal --simulationTime=1",
"True",
"False",
),
]
# A list of Python examples to run in order to ensure that they remain

View File

@ -70,12 +70,14 @@ except ModuleNotFoundError:
# std.cout << "CourseChange " << path << " x=" << position.x << ", y=" << position.y << ", z=" << position.z << std.endl;
# }
def main(argv):
#
# First, we initialize a few local variables that control some
# simulation parameters.
#
from ctypes import c_int, c_double
from ctypes import c_double, c_int
backboneNodes = c_int(10)
infraNodes = c_int(2)
lanNodes = c_int(2)
@ -106,8 +108,8 @@ def main(argv):
#
cmd.Parse(argv)
if (stopTime.value < 10):
print ("Use a simulation stop time >= 10 seconds")
if stopTime.value < 10:
print("Use a simulation stop time >= 10 seconds")
exit(1)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # /
# #
@ -128,8 +130,9 @@ def main(argv):
wifi = ns.wifi.WifiHelper()
mac = ns.wifi.WifiMacHelper()
mac.SetType("ns3::AdhocWifiMac")
wifi.SetRemoteStationManager("ns3::ConstantRateWifiManager",
"DataMode", ns.core.StringValue("OfdmRate54Mbps"))
wifi.SetRemoteStationManager(
"ns3::ConstantRateWifiManager", "DataMode", ns.core.StringValue("OfdmRate54Mbps")
)
wifiPhy = ns.wifi.YansWifiPhyHelper()
wifiPhy.SetPcapDataLinkType(wifiPhy.DLT_IEEE802_11_RADIO)
wifiChannel = ns.wifi.YansWifiChannelHelper.Default()
@ -138,11 +141,12 @@ def main(argv):
#
# Add the IPv4 protocol stack to the nodes in our container
#
print ("Enabling OLSR routing on all backbone nodes")
print("Enabling OLSR routing on all backbone nodes")
internet = ns.internet.InternetStackHelper()
olsr = ns.olsr.OlsrHelper()
internet.SetRoutingHelper(olsr); # has effect on the next Install ()
internet.Install(backbone);
internet.SetRoutingHelper(olsr)
# has effect on the next Install ()
internet.Install(backbone)
# re-initialize for non-olsr routing.
# internet.Reset()
#
@ -158,17 +162,30 @@ def main(argv):
# each of the nodes we just finished building.
#
mobility = ns.mobility.MobilityHelper()
mobility.SetPositionAllocator("ns3::GridPositionAllocator",
"MinX", ns.core.DoubleValue(20.0),
"MinY", ns.core.DoubleValue(20.0),
"DeltaX", ns.core.DoubleValue(20.0),
"DeltaY", ns.core.DoubleValue(20.0),
"GridWidth", ns.core.UintegerValue(5),
"LayoutType", ns.core.StringValue("RowFirst"))
mobility.SetMobilityModel("ns3::RandomDirection2dMobilityModel",
"Bounds", ns.mobility.RectangleValue(ns.mobility.Rectangle(-500, 500, -500, 500)),
"Speed", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=2]"),
"Pause", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=0.2]"))
mobility.SetPositionAllocator(
"ns3::GridPositionAllocator",
"MinX",
ns.core.DoubleValue(20.0),
"MinY",
ns.core.DoubleValue(20.0),
"DeltaX",
ns.core.DoubleValue(20.0),
"DeltaY",
ns.core.DoubleValue(20.0),
"GridWidth",
ns.core.UintegerValue(5),
"LayoutType",
ns.core.StringValue("RowFirst"),
)
mobility.SetMobilityModel(
"ns3::RandomDirection2dMobilityModel",
"Bounds",
ns.mobility.RectangleValue(ns.mobility.Rectangle(-500, 500, -500, 500)),
"Speed",
ns.core.StringValue("ns3::ConstantRandomVariable[Constant=2]"),
"Pause",
ns.core.StringValue("ns3::ConstantRandomVariable[Constant=0.2]"),
)
mobility.Install(backbone)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # /
@ -182,7 +199,7 @@ def main(argv):
ipAddrs.SetBase(ns.network.Ipv4Address("172.16.0.0"), ns.network.Ipv4Mask("255.255.255.0"))
for i in range(backboneNodes.value):
print ("Configuring local area network for backbone node ", i)
print("Configuring local area network for backbone node ", i)
#
# Create a container to manage the nodes of the LAN. We need
# two containers here; one with all of the new nodes, and one
@ -221,12 +238,12 @@ def main(argv):
mobilityLan = ns.mobility.MobilityHelper()
positionAlloc = ns.mobility.ListPositionAllocator()
for j in range(newLanNodes.GetN()):
positionAlloc.Add(ns.core.Vector(0.0, (j*10 + 10), 0.0))
positionAlloc.Add(ns.core.Vector(0.0, (j * 10 + 10), 0.0))
mobilityLan.SetPositionAllocator(positionAlloc)
mobilityLan.PushReferenceMobilityModel(backbone.Get(i))
mobilityLan.SetMobilityModel("ns3::ConstantPositionMobilityModel")
mobilityLan.Install(newLanNodes);
mobilityLan.Install(newLanNodes)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # /
# #
@ -239,7 +256,7 @@ def main(argv):
ipAddrs.SetBase(ns.network.Ipv4Address("10.0.0.0"), ns.network.Ipv4Mask("255.255.255.0"))
tempRef = [] # list of references to be held to prevent garbage collection
for i in range(backboneNodes.value):
print ("Configuring wireless network for backbone node ", i)
print("Configuring wireless network for backbone node ", i)
#
# Create a container to manage the nodes of the LAN. We need
# two containers here; one with all of the new nodes, and one
@ -252,18 +269,16 @@ def main(argv):
#
# Create another ad hoc network and devices
#
ssid = ns.wifi.Ssid('wifi-infra' + str(i))
ssid = ns.wifi.Ssid("wifi-infra" + str(i))
wifiInfra = ns.wifi.WifiHelper()
wifiPhy.SetChannel(wifiChannel.Create())
macInfra = ns.wifi.WifiMacHelper();
macInfra.SetType("ns3::StaWifiMac",
"Ssid", ns.wifi.SsidValue(ssid))
macInfra = ns.wifi.WifiMacHelper()
macInfra.SetType("ns3::StaWifiMac", "Ssid", ns.wifi.SsidValue(ssid))
# setup stas
staDevices = wifiInfra.Install(wifiPhy, macInfra, stas)
# setup ap.
macInfra.SetType("ns3::ApWifiMac",
"Ssid", ns.wifi.SsidValue(ssid))
macInfra.SetType("ns3::ApWifiMac", "Ssid", ns.wifi.SsidValue(ssid))
apDevices = wifiInfra.Install(wifiPhy, macInfra, backbone.Get(i))
# Collect all of these new devices
infraDevices = ns.network.NetDeviceContainer(apDevices, staDevices)
@ -298,10 +313,15 @@ def main(argv):
mobility.PushReferenceMobilityModel(backbone.Get(i))
mobility.SetPositionAllocator(subnetAlloc)
mobility.SetMobilityModel("ns3::RandomDirection2dMobilityModel",
"Bounds", ns.mobility.RectangleValue(ns.mobility.Rectangle(-10, 10, -10, 10)),
"Speed", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=3]"),
"Pause", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=0.4]"))
mobility.SetMobilityModel(
"ns3::RandomDirection2dMobilityModel",
"Bounds",
ns.mobility.RectangleValue(ns.mobility.Rectangle(-10, 10, -10, 10)),
"Speed",
ns.core.StringValue("ns3::ConstantRandomVariable[Constant=3]"),
"Pause",
ns.core.StringValue("ns3::ConstantRandomVariable[Constant=0.4]"),
)
mobility.Install(stas)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # /
@ -312,18 +332,25 @@ def main(argv):
# Create the OnOff application to send UDP datagrams of size
# 210 bytes at a rate of 448 Kb/s, between two nodes
print ("Create Applications.")
port = 9 # Discard port(RFC 863)
print("Create Applications.")
port = 9 # Discard port(RFC 863)
appSource = ns.network.NodeList.GetNode(backboneNodes.value)
lastNodeIndex = backboneNodes.value + backboneNodes.value*(lanNodes.value - 1) + backboneNodes.value*(infraNodes.value - 1) - 1
lastNodeIndex = (
backboneNodes.value
+ backboneNodes.value * (lanNodes.value - 1)
+ backboneNodes.value * (infraNodes.value - 1)
- 1
)
appSink = ns.network.NodeList.GetNode(lastNodeIndex)
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
Ipv4Address getIpv4AddressFromNode(Ptr<Node> node){
return node->GetObject<Ipv4>()->GetAddress(1,0).GetLocal();
}
""")
"""
)
# Let's fetch the IP address of the last node, which is on Ipv4Interface 1
remoteAddr = ns.cppyy.gbl.getIpv4AddressFromNode(appSink)
socketAddr = ns.network.InetSocketAddress(remoteAddr, port)
@ -333,8 +360,12 @@ def main(argv):
apps.Stop(ns.core.Seconds(stopTime.value - 1))
# Create a packet sink to receive these packets
sink = ns.applications.PacketSinkHelper("ns3::UdpSocketFactory",
ns.network.InetSocketAddress(ns.network.InetSocketAddress(ns.network.Ipv4Address.GetAny(), port)).ConvertTo())
sink = ns.applications.PacketSinkHelper(
"ns3::UdpSocketFactory",
ns.network.InetSocketAddress(
ns.network.InetSocketAddress(ns.network.Ipv4Address.GetAny(), port)
).ConvertTo(),
)
sinkContainer = ns.network.NodeContainer(appSink)
apps = sink.Install(sinkContainer)
apps.Start(ns.core.Seconds(3))
@ -345,16 +376,16 @@ def main(argv):
# #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # /
print ("Configure Tracing.")
print("Configure Tracing.")
csma = ns.csma.CsmaHelper()
#
# Let's set up some ns-2-like ascii traces, using another helper class
#
ascii = ns.network.AsciiTraceHelper();
stream = ascii.CreateFileStream("mixed-wireless.tr");
wifiPhy.EnableAsciiAll(stream);
csma.EnableAsciiAll(stream);
internet.EnableAsciiIpv4All(stream);
ascii = ns.network.AsciiTraceHelper()
stream = ascii.CreateFileStream("mixed-wireless.tr")
wifiPhy.EnableAsciiAll(stream)
csma.EnableAsciiAll(stream)
internet.EnableAsciiIpv4All(stream)
# Csma captures in non-promiscuous mode
csma.EnablePcapAll("mixed-wireless", False)
@ -362,11 +393,10 @@ def main(argv):
wifiPhy.EnablePcap("mixed-wireless", backboneDevices)
wifiPhy.EnablePcap("mixed-wireless", appSink.GetId(), 0)
# #ifdef ENABLE_FOR_TRACING_EXAMPLE
# Config.Connect("/NodeList/*/$MobilityModel/CourseChange",
# MakeCallback(&CourseChangeCallback))
# #endif
# #ifdef ENABLE_FOR_TRACING_EXAMPLE
# Config.Connect("/NodeList/*/$MobilityModel/CourseChange",
# MakeCallback(&CourseChangeCallback))
# #endif
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
@ -374,13 +404,13 @@ def main(argv):
# #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
print ("Run Simulation.")
print("Run Simulation.")
ns.core.Simulator.Stop(ns.core.Seconds(stopTime.value))
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
if __name__ == '__main__':
if __name__ == "__main__":
import sys
main(sys.argv)
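The reflowed lastNodeIndex expression is easier to sanity-check numerically: with the defaults of 10 backbone nodes, 2 LAN nodes and 2 infrastructure nodes per backbone node it selects global node index 29. The snippet below simply recomputes the formula with those defaults.

# Recompute lastNodeIndex with the mixed-wireless defaults (sketch only).
backbone_nodes, lan_nodes, infra_nodes = 10, 2, 2

last_node_index = (
    backbone_nodes                           # the backbone nodes created first
    + backbone_nodes * (lan_nodes - 1)       # extra LAN nodes added per backbone node
    + backbone_nodes * (infra_nodes - 1)     # extra infrastructure STAs per backbone node
    - 1                                      # convert the count into the last global index
)
print(last_node_index)   # 29 with these defaults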

View File

@ -77,7 +77,8 @@ except ModuleNotFoundError:
# std::cout << " start="<<start<<" duration="<<duration<<std::endl;
# }
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
using namespace ns3;
void AdvancePosition(Ptr<Node> node){
Ptr<MobilityModel> mob = node->GetObject<MobilityModel>();
@ -87,7 +88,9 @@ ns.cppyy.cppdef("""
return;
mob->SetPosition(pos);
Simulator::Schedule(Seconds(1.0), AdvancePosition, node);
}""")
}"""
)
def main(argv):
ns.core.CommandLine().Parse(argv)
@ -98,7 +101,7 @@ def main(argv):
mobility = ns.mobility.MobilityHelper()
stas = ns.network.NodeContainer()
ap = ns.network.NodeContainer()
#NetDeviceContainer staDevs;
# NetDeviceContainer staDevs;
packetSocket = ns.network.PacketSocketHelper()
stas.Create(2)
@ -116,15 +119,16 @@ def main(argv):
wifiMac = ns.wifi.WifiMacHelper()
# setup stas.
wifiMac.SetType("ns3::StaWifiMac",
"ActiveProbing",
ns.core.BooleanValue(True),
"Ssid",
ns.wifi.SsidValue(ssid))
wifiMac.SetType(
"ns3::StaWifiMac",
"ActiveProbing",
ns.core.BooleanValue(True),
"Ssid",
ns.wifi.SsidValue(ssid),
)
staDevs = wifi.Install(wifiPhy, wifiMac, stas)
# setup ap.
wifiMac.SetType("ns3::ApWifiMac",
"Ssid", ns.wifi.SsidValue(ssid))
wifiMac.SetType("ns3::ApWifiMac", "Ssid", ns.wifi.SsidValue(ssid))
wifi.Install(wifiPhy, wifiMac, ap)
# mobility.
@ -139,7 +143,7 @@ def main(argv):
socket.SetProtocol(1)
onoff = ns.applications.OnOffHelper("ns3::PacketSocketFactory", socket.ConvertTo())
onoff.SetConstantRate (ns.network.DataRate ("500kb/s"))
onoff.SetConstantRate(ns.network.DataRate("500kb/s"))
apps = onoff.Install(ns.network.NodeContainer(stas.Get(0)))
apps.Start(ns.core.Seconds(0.5))
@ -147,13 +151,12 @@ def main(argv):
ns.core.Simulator.Stop(ns.core.Seconds(44.0))
# Config::Connect("/NodeList/*/DeviceList/*/Tx", MakeCallback(&DevTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Rx", MakeCallback(&DevRxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxOk", MakeCallback(&PhyRxOkTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxError", MakeCallback(&PhyRxErrorTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/Tx", MakeCallback(&PhyTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/State", MakeCallback(&PhyStateTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Tx", MakeCallback(&DevTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Rx", MakeCallback(&DevRxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxOk", MakeCallback(&PhyRxOkTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxError", MakeCallback(&PhyRxErrorTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/Tx", MakeCallback(&PhyTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/State", MakeCallback(&PhyStateTrace));
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
@ -161,6 +164,5 @@ def main(argv):
return 0
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main(sys.argv))

ns3 (1158 changed lines)
File diff suppressed because it is too large

View File

@ -1,14 +1,15 @@
import cmake_build_extension
import setuptools
import sys
import sysconfig
import cmake_build_extension
import setuptools
setuptools.setup(
cmdclass=dict(build_ext=cmake_build_extension.BuildExtension),
packages=['ns', 'visualizer'],
packages=["ns", "visualizer"],
package_dir={
'ns': './build-support/pip-wheel/ns',
'visualizer': './build-support/pip-wheel/visualizer'
"ns": "./build-support/pip-wheel/ns",
"visualizer": "./build-support/pip-wheel/visualizer",
},
ext_modules=[
cmake_build_extension.CMakeExtension(
@ -28,8 +29,8 @@ setuptools.setup(
# https://catherineh.github.io/programming/2021/11/16/python-binary-distributions-whls-with-c17-cmake-auditwheel-and-manylinux
f"-DPython3_LIBRARY_DIRS={sysconfig.get_config_var('LIBDIR')}",
f"-DPython3_INCLUDE_DIRS={sysconfig.get_config_var('INCLUDEPY')}",
f"-DPython3_EXECUTABLE={sys.executable}"
]
f"-DPython3_EXECUTABLE={sys.executable}",
],
),
],
)
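As a hedged aside on this setup.py, the cmake_build_extension machinery is driven through the ordinary PEP 517 entry points, so producing the wheel is a standard frontend call. The sketch below assumes the third-party `build` package is installed and is not meant as the project's documented workflow.

# Sketch only: build the wheel described by this setup.py via PEP 517.
# Assumes `pip install build` was done and that this runs from the source tree root.
import subprocess
import sys

subprocess.check_call([sys.executable, "-m", "build", "--wheel"])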

View File

@ -11,205 +11,209 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'LENA'
copyright = u'2011-2012, CTTC'
project = "LENA"
copyright = "2011-2012, CTTC"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = 'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
#htmlhelp_basename = 'ns-3doc'
# htmlhelp_basename = 'ns-3doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('antenna', 'antenna.tex', u'Antenna Module Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
(
"antenna",
"antenna.tex",
"Antenna Module Documentation",
"Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-model-library', u'ns-3 Model Library',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-model-library", "ns-3 Model Library", ["ns-3 project"], 1)]

View File

@ -44,7 +44,6 @@ except ModuleNotFoundError:
def main(argv):
#
# Allow the user to override any of the defaults and the above Bind() at
# run-time, via command-line arguments
@ -55,14 +54,14 @@ def main(argv):
#
# Explicitly create the nodes required by the topology(shown above).
#
#print "Create nodes."
# print "Create nodes."
terminals = ns.network.NodeContainer()
terminals.Create(4)
csmaSwitch = ns.network.NodeContainer()
csmaSwitch.Create(1)
#print "Build Topology"
# print "Build Topology"
csma = ns.csma.CsmaHelper()
csma.SetChannelAttribute("DataRate", ns.network.DataRateValue(ns.network.DataRate(5000000)))
csma.SetChannelAttribute("Delay", ns.core.TimeValue(ns.core.MilliSeconds(2)))
@ -73,7 +72,9 @@ def main(argv):
switchDevices = ns.network.NetDeviceContainer()
for i in range(4):
link = csma.Install(ns.network.NodeContainer(ns.network.NodeContainer(terminals.Get(i)), csmaSwitch))
link = csma.Install(
ns.network.NodeContainer(ns.network.NodeContainer(terminals.Get(i)), csmaSwitch)
)
terminalDevices.Add(link.Get(0))
switchDevices.Add(link.Get(1))
@ -91,7 +92,7 @@ def main(argv):
# We've got the "hardware" in place. Now we need to add IP addresses.
#
#print "Assign IP Addresses."
# print "Assign IP Addresses."
ipv4 = ns.internet.Ipv4AddressHelper()
ipv4.SetBase(ns.network.Ipv4Address("10.1.1.0"), ns.network.Ipv4Mask("255.255.255.0"))
ipv4.Assign(terminalDevices)
@ -99,12 +100,12 @@ def main(argv):
#
# Create an OnOff application to send UDP datagrams from node zero to node 1.
#
#print "Create Applications."
port = 9 # Discard port(RFC 863)
# print "Create Applications."
port = 9 # Discard port(RFC 863)
inet_sock_address = ns.network.InetSocketAddress(ns.network.Ipv4Address("10.1.1.2"), port)
onoff = ns.applications.OnOffHelper("ns3::UdpSocketFactory", inet_sock_address.ConvertTo())
onoff.SetConstantRate (ns.network.DataRate ("500kb/s"))
onoff.SetConstantRate(ns.network.DataRate("500kb/s"))
app = onoff.Install(ns.network.NodeContainer(terminals.Get(0)))
# Start the application
@ -121,8 +122,7 @@ def main(argv):
# Create a similar flow from n3 to n0, starting at time 1.1 seconds
#
inet_address = ns.network.InetSocketAddress(ns.network.Ipv4Address("10.1.1.1"), port)
onoff.SetAttribute("Remote",
ns.network.AddressValue(inet_address.ConvertTo()))
onoff.SetAttribute("Remote", ns.network.AddressValue(inet_address.ConvertTo()))
app = onoff.Install(ns.network.NodeContainer(terminals.Get(3)))
app.Start(ns.core.Seconds(1.1))
app.Stop(ns.core.Seconds(10.0))
@ -134,9 +134,9 @@ def main(argv):
# Configure tracing of all enqueue, dequeue, and NetDevice receive events.
# Trace output will be sent to the file "csma-bridge.tr"
#
#print "Configure Tracing."
#ascii = ns.network.AsciiTraceHelper();
#csma.EnableAsciiAll(ascii.CreateFileStream ("csma-bridge.tr"));
# print "Configure Tracing."
# ascii = ns.network.AsciiTraceHelper();
# csma.EnableAsciiAll(ascii.CreateFileStream ("csma-bridge.tr"));
#
# Also configure some tcpdump traces; each interface will be traced.
@ -150,14 +150,13 @@ def main(argv):
#
# Now, do the actual simulation.
#
#print "Run Simulation."
# print "Run Simulation."
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
#print "Done."
# print "Done."
if __name__ == '__main__':
if __name__ == "__main__":
import sys
main(sys.argv)
main(sys.argv)

View File

@ -95,7 +95,7 @@ serverApps.Stop(ns.Seconds(5.0))
echoClient = ns.UdpEchoClientHelper(serverInterfaces.GetAddress(0).ConvertTo(), 9)
echoClient.SetAttribute("MaxPackets", ns.UintegerValue(1))
echoClient.SetAttribute("Interval", ns.TimeValue(ns.Seconds(1.)))
echoClient.SetAttribute("Interval", ns.TimeValue(ns.Seconds(1.0)))
echoClient.SetAttribute("PacketSize", ns.UintegerValue(1024))
clientApps = echoClient.Install(client.Get(0))

View File

@ -11,206 +11,209 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.imgmath',
'sphinxcontrib.seqdiag']
extensions = ["sphinx.ext.imgmath", "sphinxcontrib.seqdiag"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'buildings'
master_doc = "buildings"
# General information about the project.
project = u'LENA'
copyright = u'2011-2012, CTTC'
project = "LENA"
copyright = "2011-2012, CTTC"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'M2'
version = "M2"
# The full version, including alpha/beta/rc tags.
release = 'M2'
release = "M2"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
#htmlhelp_basename = 'ns-3doc'
# htmlhelp_basename = 'ns-3doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('buildings', 'buildings.tex', u'Buildings Module Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
(
"buildings",
"buildings.tex",
"Buildings Module Documentation",
"Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-model-library', u'ns-3 Model Library',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-model-library", "ns-3 Model Library", ["ns-3 project"], 1)]
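Aside (editorial, not part of the changeset): the pattern running through these hunks (single quotes normalized to double quotes, long calls split one argument per line with a trailing comma, imports regrouped alphabetically) is what an automatic code formatter plus an import sorter produce. A minimal sketch for re-running such tools over a checkout, assuming black and isort are installed and the whole tree should be processed (tool names, options, and paths are assumptions, not taken from this changeset):

# sketch: sort imports first, then reformat the code, over the current directory
import subprocess

for cmd in (["isort", "."], ["black", "."]):
    subprocess.run(cmd, check=True)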

View File

@ -47,8 +47,9 @@ internet.Install(csmaNodes.Get(1))
# Install Click on node A
clickinternet = ns.ClickInternetStackHelper()
clickinternet.SetClickFile(csmaNodes.Get(0),
clickConfigFolder + "/nsclick-lan-single-interface.click")
clickinternet.SetClickFile(
csmaNodes.Get(0), clickConfigFolder + "/nsclick-lan-single-interface.click"
)
clickinternet.SetRoutingTableElement(csmaNodes.Get(0), "rt")
clickinternet.Install(csmaNodes.Get(0))

View File

@ -9,8 +9,16 @@
cpp_examples = [
("nsclick-simple-lan --clickConfigFolder=../../src/click/examples", "NSCLICK == True", "False"),
("nsclick-raw-wlan --clickConfigFolder=../../src/click/examples", "NSCLICK == True", "False"),
("nsclick-udp-client-server-csma --clickConfigFolder=../../src/click/examples", "NSCLICK == True", "False"),
("nsclick-udp-client-server-wifi --clickConfigFolder=../../src/click/examples", "NSCLICK == True", "False"),
(
"nsclick-udp-client-server-csma --clickConfigFolder=../../src/click/examples",
"NSCLICK == True",
"False",
),
(
"nsclick-udp-client-server-wifi --clickConfigFolder=../../src/click/examples",
"NSCLICK == True",
"False",
),
("nsclick-routing --clickConfigFolder=../../src/click/examples", "NSCLICK == True", "False"),
("nsclick-defines --clickConfigFolder=../../src/click/examples", "NSCLICK == True", "False"),
]

View File

@ -22,10 +22,12 @@
# This is adapted from Gustavo Carneiro's ns-3 tutorial
import numpy as np
import matplotlib.pyplot as plt
import sys
import argparse
import sys
import matplotlib.pyplot as plt
import numpy as np
## Import ns-3
try:
from ns import ns
@ -39,9 +41,7 @@ except ModuleNotFoundError:
def main():
parser = argparse.ArgumentParser("sample-rng-plot")
parser.add_argument("--not-blocking",
action="store_true",
default=False)
parser.add_argument("--not-blocking", action="store_true", default=False)
args = parser.parse_args(sys.argv[1:])
# mu, var = 100, 225
@ -59,16 +59,16 @@ def main():
## Make a probability density histogram
density = 1
## Plot color
facecolor = 'g'
facecolor = "g"
## Plot alpha value (transparency)
alpha = 0.75
# We don't really need the plot results, we're just going to show it later.
# n, bins, patches = plt.hist(x, 50, density=1, facecolor='g', alpha=0.75)
n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)
n, bins, patches = plt.hist(x, 50, density=True, facecolor="g", alpha=0.75)
plt.title('ns-3 histogram')
plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
plt.title("ns-3 histogram")
plt.text(60, 0.025, r"$\mu=100,\ \sigma=15$")
plt.axis([40, 160, 0, 0.03])
plt.grid(True)
plt.show(block=not args.not_blocking)
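Editorial aside, not part of the diff: because the histogram above is drawn with density=True, it can be compared directly against the normal density it is meant to approximate. A minimal sketch that overlays the theoretical curve on the same axes, assuming the mu = 100, sigma = 15 values indicated by the plot annotation (it would go before the plt.show call if tried out):

# sketch: overlay the N(100, 15^2) probability density on the histogram axes
import numpy as np
import matplotlib.pyplot as plt

mu, sigma = 100, 15
xs = np.linspace(40, 160, 400)
pdf = np.exp(-((xs - mu) ** 2) / (2 * sigma**2)) / (sigma * np.sqrt(2 * np.pi))
plt.plot(xs, pdf, "r--", linewidth=1)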

View File

@ -34,17 +34,21 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
## Example function - triggered at a random time.
## \return None.
def RandomFunction():
print ("RandomFunction received event at", ns.core.Simulator.Now().GetSeconds(), "s")
print("RandomFunction received event at", ns.core.Simulator.Now().GetSeconds(), "s")
## Example function - triggered if an event is canceled (should not be called).
## \return None.
def CancelledEvent():
print ("I should never be called... ")
print("I should never be called... ")
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
#include "CPyCppyy/API.h"
using namespace ns3;
@ -106,7 +110,8 @@ ns.cppyy.cppdef("""
{
return MakeEvent(&CancelledFunctionCpp);
}
""")
"""
)
def main(argv):
@ -132,6 +137,8 @@ def main(argv):
ns.core.Simulator.Destroy()
if __name__ == '__main__':
if __name__ == "__main__":
import sys
main(sys.argv)

View File

@ -29,6 +29,7 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
def main(argv):
"""The main function in this Battery discharge example
@ -39,7 +40,7 @@ def main(argv):
ns.core.LogComponentEnable("GenericBatteryModel", ns.core.LOG_LEVEL_DEBUG)
node = ns.network.Node()
batteryHelper = ns.energy.GenericBatteryModelHelper()
batteryHelper = ns.energy.GenericBatteryModelHelper()
batteryModel = ns.CreateObject("GenericBatteryModel")
devicesEnergyModel = ns.energy.SimpleDeviceEnergyModel()
@ -47,14 +48,14 @@ def main(argv):
batteryModel.SetAttribute("MaxCapacity", ns.core.DoubleValue(7.0)) # Q
batteryModel.SetAttribute("NominalVoltage", ns.core.DoubleValue(1.18)) # Vnom
batteryModel.SetAttribute("NominalCapacity", ns.core.DoubleValue(6.25)) # QNom
batteryModel.SetAttribute("NominalCapacity", ns.core.DoubleValue(6.25)) # QNom
batteryModel.SetAttribute("ExponentialVoltage", ns.core.DoubleValue(1.28)) # Vexp
batteryModel.SetAttribute("ExponentialCapacity", ns.core.DoubleValue(1.3)) # Qexp
batteryModel.SetAttribute("ExponentialVoltage", ns.core.DoubleValue(1.28)) # Vexp
batteryModel.SetAttribute("ExponentialCapacity", ns.core.DoubleValue(1.3)) # Qexp
batteryModel.SetAttribute("InternalResistance", ns.core.DoubleValue(0.0046)) # R
batteryModel.SetAttribute("TypicalDischargeCurrent", ns.core.DoubleValue(1.3)) # i typical
batteryModel.SetAttribute("CutoffVoltage", ns.core.DoubleValue(1.0)) # End of charge.
batteryModel.SetAttribute("InternalResistance", ns.core.DoubleValue(0.0046)) # R
batteryModel.SetAttribute("TypicalDischargeCurrent", ns.core.DoubleValue(1.3)) # i typical
batteryModel.SetAttribute("CutoffVoltage", ns.core.DoubleValue(1.0)) # End of charge.
batteryModel.SetAttribute("BatteryType", ns.core.EnumValue(ns.NIMH_NICD)) # Battery type
@ -64,14 +65,12 @@ def main(argv):
devicesEnergyModel.SetCurrentA(6.5)
ns.core.Simulator.Stop(ns.core.Seconds(3600))
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
if __name__ == '__main__':
if __name__ == "__main__":
import sys
main(sys.argv)
main(sys.argv)
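Editorial aside, not part of the diff: a rough sanity check on the values above, assuming the MaxCapacity attribute is expressed in Ah, is that a constant 6.5 A drain nominally empties a 7.0 Ah cell in just over an hour, which lines up with stopping the simulation at 3600 s:

# sketch: nominal discharge time at constant current, ignoring cutoff voltage and rate effects
capacity_ah = 7.0  # MaxCapacity attribute set above
current_a = 6.5  # SetCurrentA(6.5) above
print(capacity_ah / current_a * 3600)  # roughly 3877 s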

View File

@ -1,18 +1,20 @@
from __future__ import division
import sys
import os
import sys
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
from xml.etree import ElementTree
def parse_time_ns(tm):
if tm.endswith('ns'):
if tm.endswith("ns"):
return float(tm[:-2])
raise ValueError(tm)
## FiveTuple
class FiveTuple(object):
## class variables
@ -28,17 +30,19 @@ class FiveTuple(object):
# destination port
## @var __slots_
# class variable list
__slots_ = ['sourceAddress', 'destinationAddress', 'protocol', 'sourcePort', 'destinationPort']
__slots_ = ["sourceAddress", "destinationAddress", "protocol", "sourcePort", "destinationPort"]
def __init__(self, el):
'''! The initializer.
"""! The initializer.
@param self The object pointer.
@param el The element.
'''
self.sourceAddress = el.get('sourceAddress')
self.destinationAddress = el.get('destinationAddress')
self.sourcePort = int(el.get('sourcePort'))
self.destinationPort = int(el.get('destinationPort'))
self.protocol = int(el.get('protocol'))
"""
self.sourceAddress = el.get("sourceAddress")
self.destinationAddress = el.get("destinationAddress")
self.sourcePort = int(el.get("sourcePort"))
self.destinationPort = int(el.get("destinationPort"))
self.protocol = int(el.get("protocol"))
## Histogram
class Histogram(object):
@ -47,17 +51,21 @@ class Histogram(object):
# histogram bins
## @var __slots_
# class variable list
__slots_ = 'bins', 'nbins', 'number_of_flows'
__slots_ = "bins", "nbins", "number_of_flows"
def __init__(self, el=None):
'''! The initializer.
"""! The initializer.
@param self The object pointer.
@param el The element.
'''
"""
self.bins = []
if el is not None:
#self.nbins = int(el.get('nBins'))
for bin in el.findall('bin'):
self.bins.append( (float(bin.get("start")), float(bin.get("width")), int(bin.get("count"))) )
# self.nbins = int(el.get('nBins'))
for bin in el.findall("bin"):
self.bins.append(
(float(bin.get("start")), float(bin.get("width")), int(bin.get("count")))
)
## Flow
class Flow(object):
@ -84,46 +92,63 @@ class Flow(object):
# receive duration
## @var __slots_
# class variable list
__slots_ = ['flowId', 'delayMean', 'packetLossRatio', 'rxBitrate', 'txBitrate',
'fiveTuple', 'packetSizeMean', 'probe_stats_unsorted',
'hopCount', 'flowInterruptionsHistogram', 'rx_duration']
__slots_ = [
"flowId",
"delayMean",
"packetLossRatio",
"rxBitrate",
"txBitrate",
"fiveTuple",
"packetSizeMean",
"probe_stats_unsorted",
"hopCount",
"flowInterruptionsHistogram",
"rx_duration",
]
def __init__(self, flow_el):
'''! The initializer.
"""! The initializer.
@param self The object pointer.
@param flow_el The element.
'''
self.flowId = int(flow_el.get('flowId'))
rxPackets = float(flow_el.get('rxPackets'))
txPackets = float(flow_el.get('txPackets'))
"""
self.flowId = int(flow_el.get("flowId"))
rxPackets = float(flow_el.get("rxPackets"))
txPackets = float(flow_el.get("txPackets"))
tx_duration = (parse_time_ns (flow_el.get('timeLastTxPacket')) - parse_time_ns(flow_el.get('timeFirstTxPacket')))*1e-9
rx_duration = (parse_time_ns (flow_el.get('timeLastRxPacket')) - parse_time_ns(flow_el.get('timeFirstRxPacket')))*1e-9
tx_duration = (
parse_time_ns(flow_el.get("timeLastTxPacket"))
- parse_time_ns(flow_el.get("timeFirstTxPacket"))
) * 1e-9
rx_duration = (
parse_time_ns(flow_el.get("timeLastRxPacket"))
- parse_time_ns(flow_el.get("timeFirstRxPacket"))
) * 1e-9
self.rx_duration = rx_duration
self.probe_stats_unsorted = []
if rxPackets:
self.hopCount = float(flow_el.get('timesForwarded')) / rxPackets + 1
self.hopCount = float(flow_el.get("timesForwarded")) / rxPackets + 1
else:
self.hopCount = -1000
if rxPackets:
self.delayMean = float(flow_el.get('delaySum')[:-2]) / rxPackets * 1e-9
self.packetSizeMean = float(flow_el.get('rxBytes')) / rxPackets
self.delayMean = float(flow_el.get("delaySum")[:-2]) / rxPackets * 1e-9
self.packetSizeMean = float(flow_el.get("rxBytes")) / rxPackets
else:
self.delayMean = None
self.packetSizeMean = None
if rx_duration > 0:
self.rxBitrate = float(flow_el.get('rxBytes'))*8 / rx_duration
self.rxBitrate = float(flow_el.get("rxBytes")) * 8 / rx_duration
else:
self.rxBitrate = None
if tx_duration > 0:
self.txBitrate = float(flow_el.get('txBytes'))*8 / tx_duration
self.txBitrate = float(flow_el.get("txBytes")) * 8 / tx_duration
else:
self.txBitrate = None
lost = float(flow_el.get('lostPackets'))
#print "rxBytes: %s; txPackets: %s; rxPackets: %s; lostPackets: %s" % (flow_el.get('rxBytes'), txPackets, rxPackets, lost)
lost = float(flow_el.get("lostPackets"))
# print "rxBytes: %s; txPackets: %s; rxPackets: %s; lostPackets: %s" % (flow_el.get('rxBytes'), txPackets, rxPackets, lost)
if rxPackets == 0:
self.packetLossRatio = None
else:
self.packetLossRatio = (lost / (rxPackets + lost))
self.packetLossRatio = lost / (rxPackets + lost)
interrupt_hist_elem = flow_el.find("flowInterruptionsHistogram")
if interrupt_hist_elem is None:
@ -131,6 +156,7 @@ class Flow(object):
else:
self.flowInterruptionsHistogram = Histogram(interrupt_hist_elem)
## ProbeFlowStats
class ProbeFlowStats(object):
## class variables
@ -140,7 +166,8 @@ class ProbeFlowStats(object):
# bytes
## @var __slots_
# class variable list
__slots_ = ['probeId', 'packets', 'bytes', 'delayFromFirstProbe']
__slots_ = ["probeId", "packets", "bytes", "delayFromFirstProbe"]
## Simulation
class Simulation(object):
@ -148,31 +175,33 @@ class Simulation(object):
## @var flows
# list of flows
def __init__(self, simulation_el):
'''! The initializer.
"""! The initializer.
@param self The object pointer.
@param simulation_el The element.
'''
"""
self.flows = []
FlowClassifier_el, = simulation_el.findall("Ipv4FlowClassifier")
(FlowClassifier_el,) = simulation_el.findall("Ipv4FlowClassifier")
flow_map = {}
for flow_el in simulation_el.findall("FlowStats/Flow"):
flow = Flow(flow_el)
flow_map[flow.flowId] = flow
self.flows.append(flow)
for flow_cls in FlowClassifier_el.findall("Flow"):
flowId = int(flow_cls.get('flowId'))
flowId = int(flow_cls.get("flowId"))
flow_map[flowId].fiveTuple = FiveTuple(flow_cls)
for probe_elem in simulation_el.findall("FlowProbes/FlowProbe"):
probeId = int(probe_elem.get('index'))
probeId = int(probe_elem.get("index"))
for stats in probe_elem.findall("FlowStats"):
flowId = int(stats.get('flowId'))
flowId = int(stats.get("flowId"))
s = ProbeFlowStats()
s.packets = int(stats.get('packets'))
s.bytes = float(stats.get('bytes'))
s.packets = int(stats.get("packets"))
s.bytes = float(stats.get("bytes"))
s.probeId = probeId
if s.packets > 0:
s.delayFromFirstProbe = parse_time_ns(stats.get('delayFromFirstProbeSum')) / float(s.packets)
s.delayFromFirstProbe = parse_time_ns(
stats.get("delayFromFirstProbeSum")
) / float(s.packets)
else:
s.delayFromFirstProbe = 0
flow_map[flowId].probe_stats_unsorted.append(s)
@ -190,38 +219,46 @@ def main(argv):
level += 1
if event == "end":
level -= 1
if level == 0 and elem.tag == 'FlowMonitor':
if level == 0 and elem.tag == "FlowMonitor":
sim = Simulation(elem)
sim_list.append(sim)
elem.clear() # won't need this any more
elem.clear() # won't need this any more
sys.stdout.write(".")
sys.stdout.flush()
print(" done.")
for sim in sim_list:
for flow in sim.flows:
t = flow.fiveTuple
proto = {6: 'TCP', 17: 'UDP'} [t.protocol]
print("FlowID: %i (%s %s/%s --> %s/%i)" % \
(flow.flowId, proto, t.sourceAddress, t.sourcePort, t.destinationAddress, t.destinationPort))
proto = {6: "TCP", 17: "UDP"}[t.protocol]
print(
"FlowID: %i (%s %s/%s --> %s/%i)"
% (
flow.flowId,
proto,
t.sourceAddress,
t.sourcePort,
t.destinationAddress,
t.destinationPort,
)
)
if flow.txBitrate is None:
print("\tTX bitrate: None")
else:
print("\tTX bitrate: %.2f kbit/s" % (flow.txBitrate*1e-3,))
print("\tTX bitrate: %.2f kbit/s" % (flow.txBitrate * 1e-3,))
if flow.rxBitrate is None:
print("\tRX bitrate: None")
else:
print("\tRX bitrate: %.2f kbit/s" % (flow.rxBitrate*1e-3,))
print("\tRX bitrate: %.2f kbit/s" % (flow.rxBitrate * 1e-3,))
if flow.delayMean is None:
print("\tMean Delay: None")
else:
print("\tMean Delay: %.2f ms" % (flow.delayMean*1e3,))
print("\tMean Delay: %.2f ms" % (flow.delayMean * 1e3,))
if flow.packetLossRatio is None:
print("\tPacket Loss Ratio: None")
else:
print("\tPacket Loss Ratio: %.2f %%" % (flow.packetLossRatio*100))
print("\tPacket Loss Ratio: %.2f %%" % (flow.packetLossRatio * 100))
if __name__ == '__main__':
if __name__ == "__main__":
main(sys.argv)
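Editorial aside, not part of the diff: the classes above can also be reused outside this script. A minimal sketch that loads a FlowMonitor XML dump and prints per-flow mean delays, assuming the dump was produced by monitor.SerializeToXmlFile so that its root element is <FlowMonitor> (the file name is an assumption):

# sketch: wrap the root element in Simulation and inspect the parsed flows
from xml.etree import ElementTree

root = ElementTree.parse("flowmon-results.xml").getroot()  # assumed file name
sim = Simulation(root)  # Simulation/Flow/FiveTuple as defined above
for flow in sim.flows:
    print(flow.flowId, flow.delayMean)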

View File

@ -17,6 +17,7 @@
# Authors: Gustavo Carneiro <gjc@inescporto.pt>
from __future__ import print_function
import sys
try:
@ -28,13 +29,13 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
DISTANCE = 20 # (m)
DISTANCE = 20 # (m)
NUM_NODES_SIDE = 3
def main(argv):
from ctypes import c_bool, c_char_p, c_int, create_string_buffer
from ctypes import c_int, c_bool, c_char_p, create_string_buffer
NumNodesSide = c_int(2)
Plot = c_bool(False)
BUFFLEN = 4096
@ -42,7 +43,11 @@ def main(argv):
Results = c_char_p(ResultsBuffer.raw)
cmd = ns.CommandLine(__file__)
cmd.AddValue("NumNodesSide", "Grid side number of nodes (total number of nodes will be this number squared)", NumNodesSide)
cmd.AddValue(
"NumNodesSide",
"Grid side number of nodes (total number of nodes will be this number squared)",
NumNodesSide,
)
cmd.AddValue("Results", "Write XML results to file", Results, BUFFLEN)
cmd.AddValue("Plot", "Plot the results using the matplotlib python module", Plot)
cmd.Parse(argv)
@ -53,8 +58,7 @@ def main(argv):
wifiChannel = ns.wifi.YansWifiChannelHelper.Default()
wifiPhy.SetChannel(wifiChannel.Create())
ssid = ns.wifi.Ssid("wifi-default")
wifiMac.SetType ("ns3::AdhocWifiMac",
"Ssid", ns.wifi.SsidValue(ssid))
wifiMac.SetType("ns3::AdhocWifiMac", "Ssid", ns.wifi.SsidValue(ssid))
internet = ns.internet.InternetStackHelper()
list_routing = ns.internet.Ipv4ListRoutingHelper()
@ -67,12 +71,16 @@ def main(argv):
ipv4Addresses = ns.internet.Ipv4AddressHelper()
ipv4Addresses.SetBase(ns.network.Ipv4Address("10.0.0.0"), ns.network.Ipv4Mask("255.255.255.0"))
port = 9 # Discard port(RFC 863)
port = 9 # Discard port(RFC 863)
inetAddress = ns.network.InetSocketAddress(ns.network.Ipv4Address("10.0.0.1"), port)
onOffHelper = ns.applications.OnOffHelper("ns3::UdpSocketFactory", inetAddress.ConvertTo())
onOffHelper.SetAttribute("DataRate", ns.network.DataRateValue(ns.network.DataRate("100kbps")))
onOffHelper.SetAttribute("OnTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=1]"))
onOffHelper.SetAttribute("OffTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=0]"))
onOffHelper.SetAttribute(
"OnTime", ns.core.StringValue("ns3::ConstantRandomVariable[Constant=1]")
)
onOffHelper.SetAttribute(
"OffTime", ns.core.StringValue("ns3::ConstantRandomVariable[Constant=0]")
)
addresses = []
nodes = []
@ -82,18 +90,17 @@ def main(argv):
else:
num_nodes_side = NumNodesSide.value
nodes = ns.NodeContainer(num_nodes_side*num_nodes_side)
nodes = ns.NodeContainer(num_nodes_side * num_nodes_side)
accumulator = 0
for xi in range(num_nodes_side):
for yi in range(num_nodes_side):
node = nodes.Get(accumulator)
accumulator += 1
container = ns.network.NodeContainer(node)
internet.Install(container)
mobility = ns.CreateObject("ConstantPositionMobilityModel")
mobility.SetPosition(ns.core.Vector(xi*DISTANCE, yi*DISTANCE, 0))
mobility.SetPosition(ns.core.Vector(xi * DISTANCE, yi * DISTANCE, 0))
node.AggregateObject(mobility)
device = wifi.Install(wifiPhy, wifiMac, node)
@ -102,17 +109,20 @@ def main(argv):
for i, node in [(i, nodes.Get(i)) for i in range(nodes.GetN())]:
destaddr = addresses[(len(addresses) - 1 - i) % len(addresses)]
#print (i, destaddr)
onOffHelper.SetAttribute("Remote", ns.network.AddressValue(ns.network.InetSocketAddress(destaddr, port).ConvertTo()))
# print (i, destaddr)
onOffHelper.SetAttribute(
"Remote",
ns.network.AddressValue(ns.network.InetSocketAddress(destaddr, port).ConvertTo()),
)
container = ns.network.NodeContainer(node)
app = onOffHelper.Install(container)
urv = ns.CreateObject("UniformRandomVariable")#ns.cppyy.gbl.get_rng()
urv = ns.CreateObject("UniformRandomVariable") # ns.cppyy.gbl.get_rng()
startDelay = ns.Seconds(urv.GetValue(20, 30))
app.Start(startDelay)
#internet.EnablePcapAll("wifi-olsr")
# internet.EnablePcapAll("wifi-olsr")
flowmon_helper = ns.flow_monitor.FlowMonitorHelper()
#flowmon_helper.SetMonitorAttribute("StartTime", ns.core.TimeValue(ns.core.Seconds(31)))
# flowmon_helper.SetMonitorAttribute("StartTime", ns.core.TimeValue(ns.core.Seconds(31)))
monitor = flowmon_helper.InstallAll()
monitor = flowmon_helper.GetMonitor()
monitor.SetAttribute("DelayBinWidth", ns.core.DoubleValue(0.001))
@ -123,33 +133,60 @@ def main(argv):
ns.core.Simulator.Run()
def print_stats(os, st):
print (" Tx Bytes: ", st.txBytes, file=os)
print (" Rx Bytes: ", st.rxBytes, file=os)
print (" Tx Packets: ", st.txPackets, file=os)
print (" Rx Packets: ", st.rxPackets, file=os)
print (" Lost Packets: ", st.lostPackets, file=os)
print(" Tx Bytes: ", st.txBytes, file=os)
print(" Rx Bytes: ", st.rxBytes, file=os)
print(" Tx Packets: ", st.txPackets, file=os)
print(" Rx Packets: ", st.rxPackets, file=os)
print(" Lost Packets: ", st.lostPackets, file=os)
if st.rxPackets > 0:
print (" Mean{Delay}: ", (st.delaySum.GetSeconds() / st.rxPackets), file=os)
print (" Mean{Jitter}: ", (st.jitterSum.GetSeconds() / (st.rxPackets-1)), file=os)
print (" Mean{Hop Count}: ", float(st.timesForwarded) / st.rxPackets + 1, file=os)
print(" Mean{Delay}: ", (st.delaySum.GetSeconds() / st.rxPackets), file=os)
print(" Mean{Jitter}: ", (st.jitterSum.GetSeconds() / (st.rxPackets - 1)), file=os)
print(" Mean{Hop Count}: ", float(st.timesForwarded) / st.rxPackets + 1, file=os)
if 0:
print ("Delay Histogram", file=os)
for i in range(st.delayHistogram.GetNBins () ):
print (" ",i,"(", st.delayHistogram.GetBinStart (i), "-", \
st.delayHistogram.GetBinEnd (i), "): ", st.delayHistogram.GetBinCount (i), file=os)
print ("Jitter Histogram", file=os)
for i in range(st.jitterHistogram.GetNBins () ):
print (" ",i,"(", st.jitterHistogram.GetBinStart (i), "-", \
st.jitterHistogram.GetBinEnd (i), "): ", st.jitterHistogram.GetBinCount (i), file=os)
print ("PacketSize Histogram", file=os)
for i in range(st.packetSizeHistogram.GetNBins () ):
print (" ",i,"(", st.packetSizeHistogram.GetBinStart (i), "-", \
st.packetSizeHistogram.GetBinEnd (i), "): ", st.packetSizeHistogram.GetBinCount (i), file=os)
print("Delay Histogram", file=os)
for i in range(st.delayHistogram.GetNBins()):
print(
" ",
i,
"(",
st.delayHistogram.GetBinStart(i),
"-",
st.delayHistogram.GetBinEnd(i),
"): ",
st.delayHistogram.GetBinCount(i),
file=os,
)
print("Jitter Histogram", file=os)
for i in range(st.jitterHistogram.GetNBins()):
print(
" ",
i,
"(",
st.jitterHistogram.GetBinStart(i),
"-",
st.jitterHistogram.GetBinEnd(i),
"): ",
st.jitterHistogram.GetBinCount(i),
file=os,
)
print("PacketSize Histogram", file=os)
for i in range(st.packetSizeHistogram.GetNBins()):
print(
" ",
i,
"(",
st.packetSizeHistogram.GetBinStart(i),
"-",
st.packetSizeHistogram.GetBinEnd(i),
"): ",
st.packetSizeHistogram.GetBinCount(i),
file=os,
)
for reason, drops in enumerate(st.packetsDropped):
print (" Packets dropped by reason %i: %i" % (reason, drops), file=os)
#for reason, drops in enumerate(st.bytesDropped):
print(" Packets dropped by reason %i: %i" % (reason, drops), file=os)
# for reason, drops in enumerate(st.bytesDropped):
# print "Bytes dropped by reason %i: %i" % (reason, drops)
monitor.CheckForLostPackets()
@ -158,17 +195,26 @@ def main(argv):
if Results.value != b"output.xml":
for flow_id, flow_stats in monitor.GetFlowStats():
t = classifier.FindFlow(flow_id)
proto = {6: 'TCP', 17: 'UDP'} [t.protocol]
print ("FlowID: %i (%s %s/%s --> %s/%i)" % \
(flow_id, proto, t.sourceAddress, t.sourcePort, t.destinationAddress, t.destinationPort))
proto = {6: "TCP", 17: "UDP"}[t.protocol]
print(
"FlowID: %i (%s %s/%s --> %s/%i)"
% (
flow_id,
proto,
t.sourceAddress,
t.sourcePort,
t.destinationAddress,
t.destinationPort,
)
)
print_stats(sys.stdout, flow_stats)
else:
res = monitor.SerializeToXmlFile(Results.value.decode("utf-8"), True, True)
print (res)
print(res)
if Plot.value:
import pylab
delays = []
for flow_id, flow_stats in monitor.GetFlowStats():
tupl = classifier.FindFlow(flow_id)
@ -183,6 +229,5 @@ def main(argv):
return 0
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main(sys.argv))

View File

@ -11,202 +11,208 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'lte'
master_doc = "lte"
# General information about the project.
project = u'LENA'
copyright = u'CTTC'
project = "LENA"
copyright = "CTTC"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'v10'
version = "v10"
# The full version, including alpha/beta/rc tags.
release = 'v10'
release = "v10"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
#htmlhelp_basename = 'ns-3doc'
# htmlhelp_basename = 'ns-3doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
# ('lte-testing', 'lte-doc-testing.tex', u'LTE Simulator Testing Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
# ('lte-design', 'lte-doc-design.tex', u'LTE Simulator Design Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
# ('lte-user', 'lte-doc-user.tex', u'LTE Simulator User Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
('lte', 'lena-lte-module-doc.tex', u'The LENA ns-3 LTE Module Documentation', u'Centre Tecnològic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
# ('lte-testing', 'lte-doc-testing.tex', u'LTE Simulator Testing Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
# ('lte-design', 'lte-doc-design.tex', u'LTE Simulator Design Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
# ('lte-user', 'lte-doc-user.tex', u'LTE Simulator User Documentation', u'Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)', 'manual'),
(
"lte",
"lena-lte-module-doc.tex",
"The LENA ns-3 LTE Module Documentation",
"Centre Tecnològic de Telecomunicacions de Catalunya (CTTC)",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# add page breaks in the pdf. Level 1 is for top-level sections, level 2 for subsections, and so on.
@ -217,7 +223,4 @@ pdf_break_level = 4
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-model-library', u'ns-3 Model Library',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-model-library", "ns-3 Model Library", ["ns-3 project"], 1)]

View File

@ -9,14 +9,34 @@
cpp_examples = [
("lena-cqi-threshold", "True", "True"),
("lena-dual-stripe", "True", "True"),
("lena-dual-stripe --simTime=0.0 --nApartmentsX=1 --homeEnbDeploymentRatio=0.5 --nMacroEnbSites=0 --macroUeDensity=0 --nBlocks=1", "True", "True"),
("lena-dual-stripe --epc=1 --simTime=0.0 --nApartmentsX=1 --homeEnbDeploymentRatio=0.5 --nMacroEnbSites=0 --macroUeDensity=0 --nBlocks=1", "True", "True"),
(
"lena-dual-stripe --simTime=0.0 --nApartmentsX=1 --homeEnbDeploymentRatio=0.5 --nMacroEnbSites=0 --macroUeDensity=0 --nBlocks=1",
"True",
"True",
),
(
"lena-dual-stripe --epc=1 --simTime=0.0 --nApartmentsX=1 --homeEnbDeploymentRatio=0.5 --nMacroEnbSites=0 --macroUeDensity=0 --nBlocks=1",
"True",
"True",
),
("lena-dual-stripe --simTime=0.01", "True", "True"),
("lena-dual-stripe --epc=1 --simTime=0.01", "True", "True"),
("lena-dual-stripe --epc=1 --useUdp=0 --simTime=0.01", "True", "True"),
("lena-dual-stripe --epc=1 --fadingTrace=../../src/lte/model/fading-traces/fading_trace_EPA_3kmph.fad --simTime=0.01", "True", "True"),
("lena-dual-stripe --nBlocks=1 --nMacroEnbSites=0 --macroUeDensity=0 --homeEnbDeploymentRatio=1 --homeEnbActivationRatio=1 --homeUesHomeEnbRatio=2 --macroEnbTxPowerDbm=0 --simTime=0.01", "True", "True"),
("lena-dual-stripe --nMacroEnbSites=0 --macroUeDensity=0 --nBlocks=1 --nApartmentsX=4 --nMacroEnbSitesX=0 --homeEnbDeploymentRatio=1 --homeEnbActivationRatio=1 --macroEnbTxPowerDbm=0 --epcDl=1 --epcUl=0 --epc=1 --numBearersPerUe=4 --homeUesHomeEnbRatio=15 --simTime=0.01", "True", "True"),
(
"lena-dual-stripe --epc=1 --fadingTrace=../../src/lte/model/fading-traces/fading_trace_EPA_3kmph.fad --simTime=0.01",
"True",
"True",
),
(
"lena-dual-stripe --nBlocks=1 --nMacroEnbSites=0 --macroUeDensity=0 --homeEnbDeploymentRatio=1 --homeEnbActivationRatio=1 --homeUesHomeEnbRatio=2 --macroEnbTxPowerDbm=0 --simTime=0.01",
"True",
"True",
),
(
"lena-dual-stripe --nMacroEnbSites=0 --macroUeDensity=0 --nBlocks=1 --nApartmentsX=4 --nMacroEnbSitesX=0 --homeEnbDeploymentRatio=1 --homeEnbActivationRatio=1 --macroEnbTxPowerDbm=0 --epcDl=1 --epcUl=0 --epc=1 --numBearersPerUe=4 --homeUesHomeEnbRatio=15 --simTime=0.01",
"True",
"True",
),
("lena-fading", "True", "True"),
("lena-gtpu-tunnel", "True", "True"),
("lena-intercell-interference --simTime=0.1", "True", "True"),
@ -30,23 +50,71 @@ cpp_examples = [
("lena-simple", "True", "True"),
("lena-simple-epc", "True", "True"),
("lena-x2-handover", "True", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TtaFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TdTbfqFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TdMtFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TdBetFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::RrFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::PssFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::PfFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::FdTbfqFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::FdMtFfMacScheduler", "args.valgrind", "True"),
("lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::FdBetFfMacScheduler", "args.valgrind", "True"),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TtaFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TdTbfqFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TdMtFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::TdBetFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::RrFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::PssFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::PfFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::FdTbfqFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::FdMtFfMacScheduler",
"args.valgrind",
"True",
),
(
"lena-simple-epc --simTime=1.1 --ns3::LteHelper::Scheduler=ns3::FdBetFfMacScheduler",
"args.valgrind",
"True",
),
("lena-ipv6-addr-conf", "True", "True"),
("lena-ipv6-ue-rh", "True", "True"),
("lena-ipv6-ue-ue", "True", "True"),
("lena-radio-link-failure --numberOfEnbs=1 --simTime=17", "True", "True"),
("lena-radio-link-failure --numberOfEnbs=2 --interSiteDistance=700 --simTime=17", "True", "True"),
(
"lena-radio-link-failure --numberOfEnbs=2 --interSiteDistance=700 --simTime=17",
"True",
"True",
),
("lena-radio-link-failure --numberOfEnbs=1 --useIdealRrc=0 --simTime=17", "True", "True"),
("lena-radio-link-failure --numberOfEnbs=2 --useIdealRrc=0 --interSiteDistance=700 --simTime=17", "True", "True"),
(
"lena-radio-link-failure --numberOfEnbs=2 --useIdealRrc=0 --interSiteDistance=700 --simTime=17",
"True",
"True",
),
]
# A list of Python examples to run in order to ensure that they remain

View File

@ -11,202 +11,208 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'mesh'
master_doc = "mesh"
# General information about the project.
project = u'ns-3'
copyright = u'ns-3 project'
project = "ns-3"
copyright = "ns-3 project"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = 'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
#htmlhelp_basename = 'ns-3doc'
# htmlhelp_basename = 'ns-3doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
# ('mesh-testing', 'mesh-doc-testing.tex', u'Mesh Wi-Fi Testing Documentation', u'ns-3 project', 'manual'),
# ('mesh-design', 'mesh-doc-design.tex', u'Mesh Wi-Fi Design Documentation', u'ns-3 project', 'manual'),
# ('mesh-user', 'mesh-doc-user.tex', u'Mesh Wi-Fi User Documentation', u'ns-3 project', 'manual'),
('mesh', 'mesh-module-doc.tex', u'The ns-3 Mesh Wi-Fi Module Documentation', u'ns-3 project', 'manual'),
# ('mesh-testing', 'mesh-doc-testing.tex', u'Mesh Wi-Fi Testing Documentation', u'ns-3 project', 'manual'),
# ('mesh-design', 'mesh-doc-design.tex', u'Mesh Wi-Fi Design Documentation', u'ns-3 project', 'manual'),
# ('mesh-user', 'mesh-doc-user.tex', u'Mesh Wi-Fi User Documentation', u'ns-3 project', 'manual'),
(
"mesh",
"mesh-module-doc.tex",
"The ns-3 Mesh Wi-Fi Module Documentation",
"ns-3 project",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# add page breaks in the pdf. Level 1 is for top-level sections, level 2 for subsections, and so on.
@ -217,7 +223,4 @@ pdf_break_level = 4
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-model-library', u'ns-3 Model Library',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-model-library", "ns-3 Model Library", ["ns-3 project"], 1)]

View File

@ -6,4 +6,4 @@
# on 'git describe --tags' command. Example, if the latest release was 3.108,
# and 'git describe --tags' reports "netanim-3.108-6-g8e7c0a9", then write the
# version string below as 'netanim-3.108.post6+ng8e7c0a9'
__required_netanim_version__ = 'netanim-3.109'
__required_netanim_version__ = "netanim-3.109"
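Editorial aside, not part of the diff: the comment above fully specifies how a 'git describe --tags' string maps onto the required version string. A minimal sketch of that conversion, using the example given in the comment (the helper name is hypothetical):

# sketch: turn "netanim-3.108-6-g8e7c0a9" into "netanim-3.108.post6+ng8e7c0a9"
import re


def describe_to_version(described: str) -> str:
    m = re.fullmatch(r"(netanim-\d+\.\d+)-(\d+)-g([0-9a-f]+)", described)
    if m is None:
        return described  # already a plain tag such as "netanim-3.109"
    tag, commits, sha = m.groups()
    return f"{tag}.post{commits}+ng{sha}"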

View File

@ -8,7 +8,7 @@
# See test.py for more information.
cpp_examples = [
("nix-simple", "True", "True"),
("nms-p2p-nix", "False", "True"), # Takes too long to run
("nms-p2p-nix", "False", "True"), # Takes too long to run
]
# A list of Python examples to run in order to ensure that they remain

View File

@ -67,7 +67,9 @@ ipv4.Assign(terminalDevices)
port = 9
onoff = ns.OnOffHelper("ns3::UdpSocketFactory", ns.InetSocketAddress(ns.Ipv4Address("10.1.1.2"), port).ConvertTo())
onoff = ns.OnOffHelper(
"ns3::UdpSocketFactory", ns.InetSocketAddress(ns.Ipv4Address("10.1.1.2"), port).ConvertTo()
)
onoff.SetConstantRate(ns.DataRate("500kb/s"))
app = onoff.Install(terminals.Get(0))
@ -75,12 +77,15 @@ app = onoff.Install(terminals.Get(0))
app.Start(ns.Seconds(1.0))
app.Stop(ns.Seconds(10.0))
sink = ns.PacketSinkHelper("ns3::UdpSocketFactory",
ns.InetSocketAddress(ns.Ipv4Address.GetAny(), port).ConvertTo())
sink = ns.PacketSinkHelper(
"ns3::UdpSocketFactory", ns.InetSocketAddress(ns.Ipv4Address.GetAny(), port).ConvertTo()
)
app = sink.Install(terminals.Get(1))
app.Start(ns.Seconds(0.0))
onoff.SetAttribute("Remote", ns.AddressValue(ns.InetSocketAddress(ns.Ipv4Address("10.1.1.1"), port).ConvertTo()))
onoff.SetAttribute(
"Remote", ns.AddressValue(ns.InetSocketAddress(ns.Ipv4Address("10.1.1.1"), port).ConvertTo())
)
app = onoff.Install(terminals.Get(3))
app.Start(ns.Seconds(1.1))
app.Stop(ns.Seconds(10.0))

View File

@ -10,7 +10,7 @@ cpp_examples = [
("adhoc-aloha-ideal-phy", "True", "True"),
("adhoc-aloha-ideal-phy-with-microwave-oven", "True", "True"),
("adhoc-aloha-ideal-phy-matrix-propagation-loss-model", "True", "True"),
("three-gpp-channel-example", "True", "True")
("three-gpp-channel-example", "True", "True"),
]
# A list of Python examples to run in order to ensure that they remain

View File

@ -19,42 +19,67 @@ You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""
import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
import numpy as np
from pathlib import Path
from itertools import product
from tqdm import tqdm
import joblib
import contextlib
import argparse as argp
import contextlib
from itertools import product
from pathlib import Path
import joblib
import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
from tqdm import tqdm
# Command line arguments
parser = argp.ArgumentParser(formatter_class=argp.ArgumentDefaultsHelpFormatter)
parser.add_argument("--num_search_grid_params", default=30,
help="Number of values for each parameter of the search grids")
parser.add_argument("--num_refinements", default=1,
help="Number of refinement local search runs to be carried out")
parser.add_argument("--ref_data_fname", default="two-ray-to-three-gpp-splm-calibration.csv",
help="Filename of the fit reference data, obtained from ns-3")
parser.add_argument("--fit_out_fname", default="two-ray-splm-fitted-params.txt",
help="Filename of the fit results")
parser.add_argument("--c_plus_plus_out_fname", default="two-ray-cplusplus-fitted-params.txt",
help="Filename of the fit results, encoded as a C++ data structure to be imported in ns-3")
parser.add_argument("--figs_folder", default="FiguresTwoRayThreeGppChCalibration/",
help="Output folder for the fit results figures")
parser.add_argument("--epsilon", default=1e-7,
help="Tolerance value for the preliminary tests")
parser.add_argument("--preliminary_fit_test", default=True,
help="Whether to run preliminary tests which check the correctness of the script functions")
parser.add_argument("--fit_ftr_to_threegpp", default=True,
help="Whether to run the calibration with respect to the 3GPP reference channel gains")
parser.add_argument("--output_ns3_table", default=True,
help="Whether to output the code for importing the calibration results in ns-3")
parser.add_argument("--plot_fit_results", default=False,
help="Whether to plot a comparison of the reference data ECDFs vs the fitted FTR distributions")
parser.add_argument(
"--num_search_grid_params",
default=30,
help="Number of values for each parameter of the search grids",
)
parser.add_argument(
"--num_refinements", default=1, help="Number of refinement local search runs to be carried out"
)
parser.add_argument(
"--ref_data_fname",
default="two-ray-to-three-gpp-splm-calibration.csv",
help="Filename of the fit reference data, obtained from ns-3",
)
parser.add_argument(
"--fit_out_fname", default="two-ray-splm-fitted-params.txt", help="Filename of the fit results"
)
parser.add_argument(
"--c_plus_plus_out_fname",
default="two-ray-cplusplus-fitted-params.txt",
help="Filename of the fit results, encoded as a C++ data structure to be imported in ns-3",
)
parser.add_argument(
"--figs_folder",
default="FiguresTwoRayThreeGppChCalibration/",
help="Output folder for the fit results figures",
)
parser.add_argument("--epsilon", default=1e-7, help="Tolerance value for the preliminary tests")
parser.add_argument(
"--preliminary_fit_test",
default=True,
help="Whether to run preliminary tests which check the correctness of the script functions",
)
parser.add_argument(
"--fit_ftr_to_threegpp",
default=True,
help="Whether to run the calibration with respect to the 3GPP reference channel gains",
)
parser.add_argument(
"--output_ns3_table",
default=True,
help="Whether to output the code for importing the calibration results in ns-3",
)
parser.add_argument(
"--plot_fit_results",
default=False,
help="Whether to plot a comparison of the reference data ECDFs vs the fitted FTR distributions",
)
args = parser.parse_args()
# Number of values for each parameter of the search grids
@ -84,10 +109,11 @@ plot_fit_results = bool(args.plot_fit_results)
@contextlib.contextmanager
def tqdm_joblib(tqdm_object):
"""
Context manager to patch joblib to report into tqdm progress bar given as argument.
Taken from: https://stackoverflow.com/questions/24983493/tracking-progress-of-joblib-parallel-execution
Context manager to patch joblib to report into tqdm progress bar given as argument.
Taken from: https://stackoverflow.com/questions/24983493/tracking-progress-of-joblib-parallel-execution
"""
class TqdmBatchCompletionCallback(joblib.parallel.BatchCompletionCallBack):
def __call__(self, *args, **kwargs):
tqdm_object.update(n=self.batch_size)
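Editorial aside, not part of the diff: for readers unfamiliar with the pattern being reformatted here, a minimal usage sketch of the tqdm_joblib context manager defined above, with a hypothetical worker function standing in for one calibration run:

# sketch: wrap joblib.Parallel so that completed batches advance a tqdm progress bar
# (assumes the tqdm_joblib context manager from above is in scope)
import joblib
from tqdm import tqdm


def fit_one(i):  # hypothetical stand-in for one unit of work
    return i * i


with tqdm_joblib(tqdm(total=100)):
    results = joblib.Parallel(n_jobs=-1)(joblib.delayed(fit_one)(i) for i in range(100))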
@ -114,13 +140,13 @@ class FtrParams:
# Parameter delta [0, 1]. Expresses how similar the amplitudes of the two dominant specular components are.
def __init__(self, m: float, sigma: float, k: float, delta: float):
'''! The initializer.
"""! The initializer.
@param self: the object pointer
@param m: Parameter m for the Gamma variable. Used both as the shape and rate parameters.
@param sigma: Parameter sigma. Used as the variance of the amplitudes of the normal diffuse components.
@param k: Parameter K. Expresses ratio between dominant specular components and diffuse components.
@param delta: Parameter delta [0, 1]. Expresses how similar the amplitudes of the two dominant specular components are.
'''
"""
self.m = m
self.sigma = sigma
@ -128,9 +154,9 @@ class FtrParams:
self.delta = delta
def __init__(self):
'''! The initializer with default values.
"""! The initializer with default values.
@param self: the object pointer
'''
"""
self.m = 1
self.sigma = 1.0
@ -138,37 +164,36 @@ class FtrParams:
self.delta = 0.0
def __str__(self):
'''! Returns a string representation of the FTR parameters.
"""! Returns a string representation of the FTR parameters.
@param self: the object pointer
@returns A string reporting the value of each of the FTR fading model parameters
'''
"""
return f'm: {self.m}, sigma: {self.sigma}, k: {self.k}, delta: {self.delta}'
return f"m: {self.m}, sigma: {self.sigma}, k: {self.k}, delta: {self.delta}"
def get_ftr_ecdf(params: FtrParams, n_samples: int, db=False):
'''! Returns the ECDF for the FTR fading model, for a given parameter grid.
@param params: The FTR parameters grid.
@param n_samples: The number of samples of the output ECDF
@param db: Whether to return the ECDF with the gain expressed in dB
@returns The ECDF for the FTR fading model
'''
"""! Returns the ECDF for the FTR fading model, for a given parameter grid.
@param params: The FTR parameters grid.
@param n_samples: The number of samples of the output ECDF
@param db: Whether to return the ECDF with the gain expressed in dB
@returns The ECDF for the FTR fading model
"""
assert (params.delta >= 0 and params.delta <= 1.0)
assert params.delta >= 0 and params.delta <= 1.0
# Compute the specular components amplitudes from the FTR parameters
cmn_sqrt_term = np.sqrt(1 - params.delta**2)
v1 = np.sqrt(params.sigma) * np.sqrt(params.k * (1 - cmn_sqrt_term))
v2 = np.sqrt(params.sigma) * np.sqrt(params.k * (1 + cmn_sqrt_term))
assert (abs((v1**2 + v2**2)/(2*params.sigma) - params.k) < 1e-5)
assert abs((v1**2 + v2**2) / (2 * params.sigma) - params.k) < 1e-5
if params.k > 0:
assert (abs((2*v1*v2)/(v1**2 + v2**2) - params.delta) < 1e-4)
assert abs((2 * v1 * v2) / (v1**2 + v2**2) - params.delta) < 1e-4
else:
assert (v1 == v2 == params.k)
assert v1 == v2 == params.k
sqrt_gamma = np.sqrt(np.random.gamma(
shape=params.m, scale=1/params.m, size=n_samples))
sqrt_gamma = np.sqrt(np.random.gamma(shape=params.m, scale=1 / params.m, size=n_samples))
# Sample the random phases of the specular components, which are uniformly distributed in [0, 2*PI]
phi1 = np.random.uniform(low=0, high=1.0, size=n_samples)
@ -182,75 +207,78 @@ def get_ftr_ecdf(params: FtrParams, n_samples: int, db=False):
compl_phi1 = np.vectorize(complex)(np.cos(phi1), np.sin(phi1))
compl_phi2 = np.vectorize(complex)(np.cos(phi2), np.sin(phi2))
compl_xy = np.vectorize(complex)(x, y)
h = np.multiply(sqrt_gamma, compl_phi1) * v1 + \
np.multiply(sqrt_gamma, compl_phi2) * v2 + compl_xy
h = (
np.multiply(sqrt_gamma, compl_phi1) * v1
+ np.multiply(sqrt_gamma, compl_phi2) * v2
+ compl_xy
)
# Compute the squared norms
power = np.square(np.absolute(h))
if db:
power = 10*np.log10(power)
power = 10 * np.log10(power)
return np.sort(power)
def compute_ftr_mean(params: FtrParams):
'''! Computes the mean of the FTR fading model, given a specific set of parameters.
@param params: The FTR fading model parameters.
'''
"""! Computes the mean of the FTR fading model, given a specific set of parameters.
@param params: The FTR fading model parameters.
"""
cmn_sqrt_term = np.sqrt(1 - params.delta**2)
v1 = np.sqrt(params.sigma) * np.sqrt(params.k * (1 - cmn_sqrt_term))
v2 = np.sqrt(params.sigma) * np.sqrt(params.k * (1 + cmn_sqrt_term))
mean = v1**2 + v2**2 + 2*params.sigma
mean = v1**2 + v2**2 + 2 * params.sigma
return mean
def compute_ftr_th_mean(params: FtrParams):
'''! Computes the mean of the FTR fading model using the formula reported in the corresponding paper,
given a specific set of parameters.
@param params: The FTR fading model parameters.
'''
"""! Computes the mean of the FTR fading model using the formula reported in the corresponding paper,
given a specific set of parameters.
@param params: The FTR fading model parameters.
"""
return 2 * params.sigma * (1 + params.k)
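The two mean computations above agree analytically. Assuming, as the docstrings state, that each diffuse component has variance sigma, that the Gamma variable has unit mean, and that the specular phases average out, the derivation is one line:

\[
v_1^2 + v_2^2 = \sigma k \left(1 - \sqrt{1-\delta^2}\right) + \sigma k \left(1 + \sqrt{1-\delta^2}\right) = 2\sigma k,
\qquad
\mathbb{E}\!\left[|h|^2\right] = v_1^2 + v_2^2 + 2\sigma = 2\sigma(1 + k).
\]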
def compute_anderson_darling_measure(ref_ecdf: list, target_ecdf: list) -> float:
'''! Computes the Anderson-Darling measure for the specified reference and targets distributions.
In particular, the Anderson-Darling measure is defined as:
\f$A^2 = -N -S\f$, where \f$S = \sum_{i=1}^N \frac{2i - 1}{N} \left[ ln F(Y_i) + ln F(Y_{N + 1 - i}) \right]\f$.
"""! Computes the Anderson-Darling measure for the specified reference and targets distributions.
In particular, the Anderson-Darling measure is defined as:
\f$A^2 = -N -S\f$, where \f$S = \sum_{i=1}^N \frac{2i - 1}{N} \left[ ln F(Y_i) + ln F(Y_{N + 1 - i}) \right]\f$.
See https://www.itl.nist.gov/div898/handbook/eda/section3/eda35e.htm for further details.
See https://www.itl.nist.gov/div898/handbook/eda/section3/eda35e.htm for further details.
@param ref_ecdf: The reference ECDF.
@param target_ecdf: The target ECDF we wish to match the reference distribution to.
@returns The Anderson-Darling measure for the specified reference and targets distributions.
'''
@param ref_ecdf: The reference ECDF.
@param target_ecdf: The target ECDF we wish to match the reference distribution to.
@returns The Anderson-Darling measure for the specified reference and target distributions.
"""
assert (len(ref_ecdf) == len(target_ecdf))
assert len(ref_ecdf) == len(target_ecdf)
n = len(ref_ecdf)
mult_factors = np.linspace(start=1, stop=n, num=n)*2 + 1
mult_factors = np.linspace(start=1, stop=n, num=n) * 2 + 1
ecdf_values = compute_ecdf_value(ref_ecdf, target_ecdf)
# First and last elements of the ECDF may lead to NaNs
with np.errstate(divide='ignore'):
with np.errstate(divide="ignore"):
log_a_plus_b = np.log(ecdf_values) + np.log(1 - np.flip(ecdf_values))
valid_idxs = np.isfinite(log_a_plus_b)
A_sq = - np.dot(mult_factors[valid_idxs], log_a_plus_b[valid_idxs])
A_sq = -np.dot(mult_factors[valid_idxs], log_a_plus_b[valid_idxs])
return A_sq
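A quick sanity-check sketch for the measure above (illustrative values only; it assumes numpy is imported as np, as elsewhere in the script, and that compute_ecdf_value, defined below, is available): two sorted samples drawn from the same distribution should yield a much smaller measure than samples from clearly different distributions.

rng = np.random.default_rng(42)
a = np.sort(rng.normal(size=1000))
b = np.sort(rng.normal(size=1000))
c = np.sort(rng.normal(loc=2.0, size=1000))
ad_same = compute_anderson_darling_measure(a, b)  # matched distributions: small A^2
ad_diff = compute_anderson_darling_measure(a, c)  # shifted distribution: much larger A^2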
def compute_ecdf_value(ecdf: list, data_points: float) -> np.ndarray:
'''! Given an ECDF and data points belonging to its domain, returns their associated ECDF value.
@param ecdf: The ECDF, represented as a sorted list of samples.
@param data_points: A list of data points belonging to the same domain as the samples.
@returns The ECDF value of the domain points of the specified ECDF
'''
"""! Given an ECDF and data points belonging to its domain, returns their associated EDCF value.
@param ecdf: The ECDF, represented as a sorted list of samples.
@param data_points: A list of data points belonging to the same domain as the samples.
@returns The ECDF value of the domain points of the specified ECDF
"""
ecdf_values = []
for point in data_points:
@ -261,36 +289,39 @@ def compute_ecdf_value(ecdf: list, data_points: float) -> np.ndarray:
def get_sigma_from_k(k: float) -> float:
'''! Computes the value for the FTR parameter sigma, given k, yielding a unit-mean fading process.
@param k: The K parameter of the FTR fading model, which represents the ratio of the average power
of the dominant components to the power of the remaining diffuse multipath.
@returns The value for the FTR parameter sigma, given k, yielding a unit-mean fading process.
'''
"""! Computes the value for the FTR parameter sigma, given k, yielding a unit-mean fading process.
@param k: The K parameter of the FTR fading model, which represents the ratio of the average power
of the dominant components to the power of the remaining diffuse multipath.
@returns The value for the FTR parameter sigma, given k, yielding a unit-mean fading process.
"""
return 1 / (2 + 2 * k)
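The closed form follows directly from the FTR mean derived above: imposing a unit-mean fading process gives

\[
2\sigma(1 + k) = 1 \;\Longrightarrow\; \sigma = \frac{1}{2(1 + k)} = \frac{1}{2 + 2k}.
\]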
def fit_ftr_to_reference(ref_data: pd.DataFrame, ref_params_combo: tuple, num_params: int, num_refinements: int) -> str:
'''! Estimate the FTR parameters yielding the closest ECDF to the reference one.
def fit_ftr_to_reference(
ref_data: pd.DataFrame, ref_params_combo: tuple, num_params: int, num_refinements: int
) -> str:
"""! Estimate the FTR parameters yielding the closest ECDF to the reference one.
Uses a global search to estimate the FTR parameters yielding the best fit to the reference ECDF.
Then, the search is refined by repeating the procedure in the neighborhood of the parameters
identified with the global search. Such a neighborhood is determined as the interval whose center
is the previous iteration best value, and the lower and upper bounds are the first lower and upper
values which were previously considered, respectively.
Uses a global search to estimate the FTR parameters yielding the best fit to the reference ECDF.
Then, the search is refined by repeating the procedure in the neighborhood of the parameters
identified with the global search. Such a neighborhood is determined as the interval whose center
is the previous iteration best value, and the lower and upper bounds are the first lower and upper
values which were previously considered, respectively.
@param ref_data: The reference data, represented as a DataFrame of samples.
@param ref_params_combo: The specific combination of simulation parameters corresponding
to the reference ECDF
@param num_params: The number of values of each parameter in the global and local search grids.
@param num_refinements: The number of local refinement search to be carried out after the global search.
@param ref_data: The reference data, represented as a DataFrame of samples.
@param ref_params_combo: The specific combination of simulation parameters corresponding
to the reference ECDF
@param num_params: The number of values of each parameter in the global and local search grids.
@param num_refinements: The number of local refinement searches to be carried out after the global search.
@returns An estimate of the FTR parameters yielding the closest ECDF to the reference one.
'''
@returns An estimate of the FTR parameters yielding the closest ECDF to the reference one.
"""
# Retrieve the reference ECDF
ref_ecdf = ref_data.query(
'scen == @ref_params_combo[0] and cond == @ref_params_combo[1] and fc == @ref_params_combo[2]')
"scen == @ref_params_combo[0] and cond == @ref_params_combo[1] and fc == @ref_params_combo[2]"
)
# Perform the fit
n_samples = len(ref_ecdf)
@ -303,21 +334,26 @@ def fit_ftr_to_reference(ref_data: pd.DataFrame, ref_params_combo: tuple, num_p
m_and_k_step = (m_and_k_ub - m_and_k_lb) / n_samples
# The delta parameter can range in [0, 1]
delta_step = 1/n_samples
delta_step = 1 / n_samples
# Define the coarse grid
coarse_search_grid = {
# m must be in [0, +inf]
'm': np.power(np.ones(num_params)*10, np.linspace(start=m_and_k_lb, stop=m_and_k_ub, endpoint=True, num=num_params)),
"m": np.power(
np.ones(num_params) * 10,
np.linspace(start=m_and_k_lb, stop=m_and_k_ub, endpoint=True, num=num_params),
),
# k must be in [0, +inf]
'k': np.power(np.ones(num_params)*10, np.linspace(start=m_and_k_lb, stop=m_and_k_ub, endpoint=True, num=num_params)),
"k": np.power(
np.ones(num_params) * 10,
np.linspace(start=m_and_k_lb, stop=m_and_k_ub, endpoint=True, num=num_params),
),
# delta must be in [0, 1]
'delta': np.linspace(start=0.0, stop=1.0, endpoint=True, num=num_params)
"delta": np.linspace(start=0.0, stop=1.0, endpoint=True, num=num_params)
# sigma determined from k, due to the unit-mean constraint
}
for element in product(*coarse_search_grid.values()):
# Create FTR params object
params = FtrParams()
params.m = element[0]
@ -329,36 +365,48 @@ def fit_ftr_to_reference(ref_data: pd.DataFrame, ref_params_combo: tuple, num_p
ftr_ecdf = get_ftr_ecdf(params, n_samples, db=True)
ad_meas = compute_anderson_darling_measure(ref_ecdf, ftr_ecdf)
if (ad_meas < best_ad):
if ad_meas < best_ad:
best_params = params
best_ad = ad_meas
for _ in range(num_refinements):
# Refine search in the neighborhood of the previously identified params
finer_search_grid = {
'm': np.power(np.ones(num_params)*10,
np.linspace(start=max(0, np.log10(best_params.m) - m_and_k_step),
stop=np.log10(best_params.m) +
m_and_k_step,
endpoint=True, num=num_params)),
'k': np.power(np.ones(num_params)*10,
np.linspace(start=max(0, np.log10(best_params.k) - m_and_k_step),
stop=np.log10(best_params.k) +
m_and_k_step,
endpoint=True, num=num_params)),
'delta': np.linspace(start=max(0, best_params.delta - delta_step),
stop=min(1, best_params.delta + delta_step),
endpoint=True, num=num_params)
"m": np.power(
np.ones(num_params) * 10,
np.linspace(
start=max(0, np.log10(best_params.m) - m_and_k_step),
stop=np.log10(best_params.m) + m_and_k_step,
endpoint=True,
num=num_params,
),
),
"k": np.power(
np.ones(num_params) * 10,
np.linspace(
start=max(0, np.log10(best_params.k) - m_and_k_step),
stop=np.log10(best_params.k) + m_and_k_step,
endpoint=True,
num=num_params,
),
),
"delta": np.linspace(
start=max(0, best_params.delta - delta_step),
stop=min(1, best_params.delta + delta_step),
endpoint=True,
num=num_params,
)
# sigma determined from k, due to the unit-mean constraint
}
m_and_k_step = (np.log10(best_params.m) + m_and_k_step -
max(0, np.log10(best_params.m) - m_and_k_step)) / n_samples
delta_step = (min(1, best_params.delta + 1/num_params) -
max(0, best_params.delta - 1/num_params)) / n_samples
m_and_k_step = (
np.log10(best_params.m) + m_and_k_step - max(0, np.log10(best_params.m) - m_and_k_step)
) / n_samples
delta_step = (
min(1, best_params.delta + 1 / num_params) - max(0, best_params.delta - 1 / num_params)
) / n_samples
for element in product(*finer_search_grid.values()):
# Create FTR params object
params = FtrParams()
params.m = element[0]
@ -370,19 +418,21 @@ def fit_ftr_to_reference(ref_data: pd.DataFrame, ref_params_combo: tuple, num_p
ftr_ecdf = get_ftr_ecdf(params, n_samples, db=True)
ad_meas = compute_anderson_darling_measure(ref_ecdf, ftr_ecdf)
if (ad_meas < best_ad):
if ad_meas < best_ad:
best_params = params
best_ad = ad_meas
out_str = f"{ref_params_combo[0]}\t{ref_params_combo[1]}\t{ref_params_combo[2]}" + \
f" \t{best_params.sigma}\t{best_params.k}\t{best_params.delta}\t{best_params.m}\n"
out_str = (
f"{ref_params_combo[0]}\t{ref_params_combo[1]}\t{ref_params_combo[2]}"
+ f" \t{best_params.sigma}\t{best_params.k}\t{best_params.delta}\t{best_params.m}\n"
)
return out_str
def append_ftr_params_to_cpp_string(text: str, params: FtrParams) -> str:
text += f'TwoRaySpectrumPropagationLossModel::FtrParams({np.format_float_scientific(params.m)}, {np.format_float_scientific(params.sigma)}, \
{np.format_float_scientific(params.k)}, {np.format_float_scientific(params.delta)})'
text += f"TwoRaySpectrumPropagationLossModel::FtrParams({np.format_float_scientific(params.m)}, {np.format_float_scientific(params.sigma)}, \
{np.format_float_scientific(params.k)}, {np.format_float_scientific(params.delta)})"
return text
@ -396,74 +446,71 @@ def print_cplusplus_map_from_fit_results(fit: pd.DataFrame, out_fname: str):
out_fname (str): The name of the file to print the C++ code to.
"""
out_str = '{'
out_str = "{"
for scen in set(fit['scen']):
out_str += f'{{\"{scen}\",\n{{'
for scen in set(fit["scen"]):
out_str += f'{{"{scen}",\n{{'
for cond in set(fit['cond']):
out_str += f'{{ChannelCondition::LosConditionValue::{cond}, \n'
for cond in set(fit["cond"]):
out_str += f"{{ChannelCondition::LosConditionValue::{cond}, \n"
# Print vector of carrier frequencies
freqs = np.sort(list(set(fit['fc'])))
freqs = np.sort(list(set(fit["fc"])))
out_str += "{{"
for fc in freqs:
out_str += f'{float(fc)}, '
out_str += f"{float(fc)}, "
out_str = out_str[0:-2]
out_str += '},\n{'
out_str += "},\n{"
# Load corresponding fit results
for fc in freqs:
fit_line = fit.query(
'scen == @scen and cond == @cond and fc == @fc')
assert(fit_line.reset_index().shape[0] == 1)
fit_line = fit.query("scen == @scen and cond == @cond and fc == @fc")
assert fit_line.reset_index().shape[0] == 1
params = FtrParams()
params.m = fit_line.iloc[0]['m']
params.k = fit_line.iloc[0]['k']
params.delta = fit_line.iloc[0]['delta']
params.sigma = fit_line.iloc[0]['sigma']
params.m = fit_line.iloc[0]["m"]
params.k = fit_line.iloc[0]["k"]
params.delta = fit_line.iloc[0]["delta"]
params.sigma = fit_line.iloc[0]["sigma"]
# Print vector of corresponding FTR parameters
out_str = append_ftr_params_to_cpp_string(out_str, params)
out_str += ', '
out_str += ", "
out_str = out_str[0:-2]
out_str += '}'
out_str += '}},\n'
out_str += "}"
out_str += "}},\n"
out_str = out_str[0:-2]
out_str += '}},\n'
out_str += "}},\n"
out_str = out_str[0:-2]
out_str += '}\n'
out_str += "}\n"
with open(out_fname, "w", encoding="utf-8") as f:
f.write(out_str)
if __name__ == '__main__':
if __name__ == "__main__":
#########################
## Data pre-processing ##
#########################
# Load reference data obtained from the ns-3 TR 38.901 implementation
df = pd.read_csv(ref_data_fname, sep='\t')
df = pd.read_csv(ref_data_fname, sep="\t")
# Linear gain --> gain in dB
df['gain'] = 10*np.log10(df['gain'])
df["gain"] = 10 * np.log10(df["gain"])
# Retrieve the possible parameters configurations
scenarios = set(df['scen'])
is_los = set(df['cond'])
frequencies = np.sort(list(set(df['fc'])))
scenarios = set(df["scen"])
is_los = set(df["cond"])
frequencies = np.sort(list(set(df["fc"])))
####################################################################################################
## Fit Fluctuating Two Ray model to the 3GPP TR 38.901 using the Anderson-Darling goodness-of-fit ##
####################################################################################################
if preliminary_fit_test:
params = FtrParams()
get_ftr_ecdf(params, 100)
@ -492,11 +539,19 @@ if __name__ == '__main__':
assert np.all(np.abs(mean_th_list - np.float64(1.0)) < epsilon)
if fit_ftr_to_threegpp:
# Parallel search for the different simulation parameters combination
with tqdm_joblib(tqdm(desc="Fitting FTR to the 3GPP fading model", total=(len(scenarios) * len(is_los) * len(frequencies)))) as progress_bar:
with tqdm_joblib(
tqdm(
desc="Fitting FTR to the 3GPP fading model",
total=(len(scenarios) * len(is_los) * len(frequencies)),
)
) as progress_bar:
res = joblib.Parallel(n_jobs=10)(
joblib.delayed(fit_ftr_to_reference)(df, params_comb, num_search_grid_params, num_refinements) for params_comb in product(scenarios, is_los, frequencies))
joblib.delayed(fit_ftr_to_reference)(
df, params_comb, num_search_grid_params, num_refinements
)
for params_comb in product(scenarios, is_los, frequencies)
)
with open(fit_out_fname, "w", encoding="utf-8") as f:
f.write("scen\tcond\tfc\tsigma\tk\tdelta\tm\n")
@ -504,64 +559,61 @@ if __name__ == '__main__':
f.write(line)
if output_ns3_table:
# Load the fit results
fit = pd.read_csv(fit_out_fname, delimiter='\t')
fit = pd.read_csv(fit_out_fname, delimiter="\t")
# Output the C++ data structure
print_cplusplus_map_from_fit_results(fit, c_plus_plus_out_fname)
if plot_fit_results:
# Set Seaborn defaults and setup output folder
sns.set(rc={'figure.figsize': (7, 5)})
sns.set(rc={"figure.figsize": (7, 5)})
sns.set_theme()
sns.set_style('darkgrid')
sns.set_style("darkgrid")
fit = pd.read_csv(fit_out_fname, delimiter='\t')
fit = pd.read_csv(fit_out_fname, delimiter="\t")
# Create folder if it does not exist
Path(figs_folder).mkdir(parents=True, exist_ok=True)
ad_measures = []
for params_comb in product(scenarios, is_los, frequencies):
data_query = 'scen == @params_comb[0] and cond == @params_comb[1] and fc == @params_comb[2]'
data_query = (
"scen == @params_comb[0] and cond == @params_comb[1] and fc == @params_comb[2]"
)
# Load corresponding reference data
ref_data = df.query(data_query)
# Create FTR params object
fit_line = fit.query(data_query)
assert(fit_line.reset_index().shape[0] == 1)
assert fit_line.reset_index().shape[0] == 1
params = FtrParams()
params.m = fit_line.iloc[0]['m']
params.k = fit_line.iloc[0]['k']
params.delta = fit_line.iloc[0]['delta']
params.sigma = fit_line.iloc[0]['sigma']
params.m = fit_line.iloc[0]["m"]
params.k = fit_line.iloc[0]["k"]
params.delta = fit_line.iloc[0]["delta"]
params.sigma = fit_line.iloc[0]["sigma"]
# Retrieve the corresponding FTR ECDF
ftr_ecdf = get_ftr_ecdf(params, len(ref_data), db=True)
# Compute the AD measure
ad_meas = compute_anderson_darling_measure(
np.sort(ref_data['gain']), ftr_ecdf)
ad_meas = compute_anderson_darling_measure(np.sort(ref_data["gain"]), ftr_ecdf)
ad_measures.append(np.sqrt(ad_meas))
sns.ecdfplot(data=ref_data, x='gain',
label='38.901 reference model')
sns.ecdfplot(
ftr_ecdf, label=f'Fitted FTR, sqrt(AD)={round(np.sqrt(ad_meas), 2)}')
plt.xlabel(
'End-to-end channel gain due to small scale fading [dB]')
sns.ecdfplot(data=ref_data, x="gain", label="38.901 reference model")
sns.ecdfplot(ftr_ecdf, label=f"Fitted FTR, sqrt(AD)={round(np.sqrt(ad_meas), 2)}")
plt.xlabel("End-to-end channel gain due to small scale fading [dB]")
plt.legend()
plt.savefig(
f'{figs_folder}{params_comb[0]}_{params_comb[1]}_{params_comb[2]/1e9}GHz_fit.png', dpi=500, bbox_inches='tight')
f"{figs_folder}{params_comb[0]}_{params_comb[1]}_{params_comb[2]/1e9}GHz_fit.png",
dpi=500,
bbox_inches="tight",
)
plt.clf()
# Plot ECDF of the scaled and normalized AD measures
sns.ecdfplot(ad_measures, label='AD measures')
plt.xlabel('Anderson-Darling goodness-of-fit')
sns.ecdfplot(ad_measures, label="AD measures")
plt.xlabel("Anderson-Darling goodness-of-fit")
plt.legend()
plt.savefig(f'{figs_folder}AD_measures.png',
dpi=500, bbox_inches='tight')
plt.savefig(f"{figs_folder}AD_measures.png", dpi=500, bbox_inches="tight")
plt.clf()
View File
@ -27,8 +27,8 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
def main(argv):
def main(argv):
ns.core.CommandLine().Parse(argv)
#
@ -36,7 +36,9 @@ def main(argv):
# interact in real-time and therefore we have to use the real-time simulator
# and take the time to calculate checksums.
#
ns.core.GlobalValue.Bind("SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl"))
ns.core.GlobalValue.Bind(
"SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl")
)
ns.core.GlobalValue.Bind("ChecksumEnabled", ns.core.BooleanValue(True))
#
@ -45,7 +47,7 @@ def main(argv):
# the right side.
#
nodes = ns.network.NodeContainer()
nodes.Create (2)
nodes.Create(2)
#
# Use a CsmaHelper to get a CSMA channel created, and the needed net
@ -64,24 +66,25 @@ def main(argv):
# for this configuration.
#
tapBridge = ns.tap_bridge.TapBridgeHelper()
tapBridge.SetAttribute ("Mode", ns.core.StringValue ("UseLocal"))
tapBridge.SetAttribute ("DeviceName", ns.core.StringValue ("tap-left"))
tapBridge.Install (nodes.Get (0), devices.Get (0))
tapBridge.SetAttribute("Mode", ns.core.StringValue("UseLocal"))
tapBridge.SetAttribute("DeviceName", ns.core.StringValue("tap-left"))
tapBridge.Install(nodes.Get(0), devices.Get(0))
#
# Connect the right side tap to the right side wifi device on the right-side
# ghost node.
#
tapBridge.SetAttribute ("DeviceName", ns.core.StringValue ("tap-right"))
tapBridge.Install (nodes.Get (1), devices.Get (1))
tapBridge.SetAttribute("DeviceName", ns.core.StringValue("tap-right"))
tapBridge.Install(nodes.Get(1), devices.Get(1))
#
# Run the simulation for ten minutes to give the user time to play around
#
ns.core.Simulator.Stop (ns.core.Seconds (600))
ns.core.Simulator.Run()#signal_check_frequency = -1
ns.core.Simulator.Stop(ns.core.Seconds(600))
ns.core.Simulator.Run() # signal_check_frequency = -1
ns.core.Simulator.Destroy()
return 0
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main(sys.argv))
View File
@ -17,6 +17,7 @@
#
import sys
try:
from ns import ns
except ModuleNotFoundError:
@ -26,8 +27,8 @@ except ModuleNotFoundError:
" or your PYTHONPATH might not be properly configured"
)
def main(argv):
def main(argv):
ns.core.CommandLine().Parse(argv)
#
@ -35,7 +36,9 @@ def main(argv):
# interact in real-time and therefore we have to use the real-time simulator
# and take the time to calculate checksums.
#
ns.core.GlobalValue.Bind("SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl"))
ns.core.GlobalValue.Bind(
"SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl")
)
ns.core.GlobalValue.Bind("ChecksumEnabled", ns.core.BooleanValue(True))
#
@ -44,20 +47,22 @@ def main(argv):
# the right side.
#
nodes = ns.network.NodeContainer()
nodes.Create (2);
nodes.Create(2)
#
# We're going to use 802.11 A so set up a wifi helper to reflect that.
#
wifi = ns.wifi.WifiHelper()
wifi.SetStandard (ns.wifi.WIFI_STANDARD_80211a);
wifi.SetRemoteStationManager ("ns3::ConstantRateWifiManager", "DataMode", ns.core.StringValue ("OfdmRate54Mbps"));
wifi.SetStandard(ns.wifi.WIFI_STANDARD_80211a)
wifi.SetRemoteStationManager(
"ns3::ConstantRateWifiManager", "DataMode", ns.core.StringValue("OfdmRate54Mbps")
)
#
# No reason for pesky access points, so we'll use an ad-hoc network.
#
wifiMac = ns.wifi.WifiMacHelper()
wifiMac.SetType ("ns3::AdhocWifiMac");
wifiMac.SetType("ns3::AdhocWifiMac")
#
# Configure the physical layer.
@ -80,7 +85,7 @@ def main(argv):
positionAlloc.Add(ns.core.Vector(0.0, 0.0, 0.0))
positionAlloc.Add(ns.core.Vector(5.0, 0.0, 0.0))
mobility.SetPositionAllocator(positionAlloc)
mobility.SetMobilityModel ("ns3::ConstantPositionMobilityModel")
mobility.SetMobilityModel("ns3::ConstantPositionMobilityModel")
mobility.Install(nodes)
#
@ -92,25 +97,25 @@ def main(argv):
# for this configuration.
#
tapBridge = ns.tap_bridge.TapBridgeHelper()
tapBridge.SetAttribute ("Mode", ns.core.StringValue ("UseLocal"));
tapBridge.SetAttribute ("DeviceName", ns.core.StringValue ("tap-left"));
tapBridge.Install (nodes.Get (0), devices.Get (0));
tapBridge.SetAttribute("Mode", ns.core.StringValue("UseLocal"))
tapBridge.SetAttribute("DeviceName", ns.core.StringValue("tap-left"))
tapBridge.Install(nodes.Get(0), devices.Get(0))
#
# Connect the right side tap to the right side wifi device on the right-side
# ghost node.
#
tapBridge.SetAttribute ("DeviceName", ns.core.StringValue ("tap-right"));
tapBridge.Install (nodes.Get (1), devices.Get (1));
tapBridge.SetAttribute("DeviceName", ns.core.StringValue("tap-right"))
tapBridge.Install(nodes.Get(1), devices.Get(1))
#
# Run the simulation for ten minutes to give the user time to play around
#
ns.core.Simulator.Stop (ns.core.Seconds (600));
ns.core.Simulator.Run()#signal_check_frequency = -1
ns.core.Simulator.Stop(ns.core.Seconds(600))
ns.core.Simulator.Run() # signal_check_frequency = -1
ns.core.Simulator.Destroy()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
if __name__ == "__main__":
sys.exit(main(sys.argv))
View File
@ -7,7 +7,7 @@
#
# See test.py for more information.
cpp_examples = [
("tap-wifi-dumbbell", "False", "True"), # Requires manual configuration
("tap-wifi-dumbbell", "False", "True"), # Requires manual configuration
]
# A list of Python examples to run in order to ensure that they remain
@ -17,6 +17,6 @@ cpp_examples = [
#
# See test.py for more information.
python_examples = [
("tap-csma-virtual-machine.py", "False"), # requires enable-sudo
("tap-wifi-virtual-machine.py", "False"), # requires enable-sudo
("tap-csma-virtual-machine.py", "False"), # requires enable-sudo
("tap-wifi-virtual-machine.py", "False"), # requires enable-sudo
]
View File
@ -21,7 +21,11 @@ cpp_examples = [
("adaptive-red-tests --testNumber=13", "True", "True"),
("adaptive-red-tests --testNumber=14", "True", "True"),
("adaptive-red-tests --testNumber=15", "True", "True"),
("codel-vs-pfifo-asymmetric --routerWanQueueDiscType=PfifoFast --simDuration=10", "True", "True"),
(
"codel-vs-pfifo-asymmetric --routerWanQueueDiscType=PfifoFast --simDuration=10",
"True",
"True",
),
("codel-vs-pfifo-asymmetric --routerWanQueueDiscType=CoDel --simDuration=10", "True", "False"),
("codel-vs-pfifo-basic-test --queueDiscType=PfifoFast --simDuration=10", "True", "False"),
("codel-vs-pfifo-basic-test --queueDiscType=CoDel --simDuration=10", "True", "False"),
View File
@ -2,14 +2,27 @@ from ns import *
# Some useful tricks for visualizer
# we need to check if the node has a mobility model, but we can't pass Ptr<MobilityModel> to python
ns.cppyy.cppdef("""using namespace ns3; bool hasMobilityModel(Ptr<Node> node){ return !(node->GetObject<MobilityModel>() == 0); };""")
ns.cppyy.cppdef("""using namespace ns3; Vector3D getNodePosition(Ptr<Node> node){ return node->GetObject<MobilityModel>()->GetPosition(); };""")
ns.cppyy.cppdef("""using namespace ns3; Ptr<Ipv4> getNodeIpv4(Ptr<Node> node){ return node->GetObject<Ipv4>(); };""")
ns.cppyy.cppdef("""using namespace ns3; Ptr<Ipv6> getNodeIpv6(Ptr<Node> node){ return node->GetObject<Ipv6>(); };""")
ns.cppyy.cppdef("""using namespace ns3; std::string getMobilityModelName(Ptr<Node> node){ return node->GetObject<MobilityModel>()->GetInstanceTypeId().GetName(); };""")
ns.cppyy.cppdef("""using namespace ns3; bool hasOlsr(Ptr<Node> node){ return !(node->GetObject<olsr::RoutingProtocol>() == 0); };""")
ns.cppyy.cppdef("""using namespace ns3; Ptr<olsr::RoutingProtocol> getNodeOlsr(Ptr<Node> node){ return node->GetObject<olsr::RoutingProtocol>(); };""")
ns.cppyy.cppdef(
"""using namespace ns3; bool hasMobilityModel(Ptr<Node> node){ return !(node->GetObject<MobilityModel>() == 0); };"""
)
ns.cppyy.cppdef(
"""using namespace ns3; Vector3D getNodePosition(Ptr<Node> node){ return node->GetObject<MobilityModel>()->GetPosition(); };"""
)
ns.cppyy.cppdef(
"""using namespace ns3; Ptr<Ipv4> getNodeIpv4(Ptr<Node> node){ return node->GetObject<Ipv4>(); };"""
)
ns.cppyy.cppdef(
"""using namespace ns3; Ptr<Ipv6> getNodeIpv6(Ptr<Node> node){ return node->GetObject<Ipv6>(); };"""
)
ns.cppyy.cppdef(
"""using namespace ns3; std::string getMobilityModelName(Ptr<Node> node){ return node->GetObject<MobilityModel>()->GetInstanceTypeId().GetName(); };"""
)
ns.cppyy.cppdef(
"""using namespace ns3; bool hasOlsr(Ptr<Node> node){ return !(node->GetObject<olsr::RoutingProtocol>() == 0); };"""
)
ns.cppyy.cppdef(
"""using namespace ns3; Ptr<olsr::RoutingProtocol> getNodeOlsr(Ptr<Node> node){ return node->GetObject<olsr::RoutingProtocol>(); };"""
)
from .core import start, register_plugin, set_bounds, add_initialization_hook
from .core import add_initialization_hook, register_plugin, set_bounds, start
View File
@ -1,8 +1,10 @@
from gi.repository import GObject
import os.path
import sys
PIXELS_PER_METER = 3.0 # pixels-per-meter, at 100% zoom level
from gi.repository import GObject
PIXELS_PER_METER = 3.0 # pixels-per-meter, at 100% zoom level
## PyVizObject class
class PyVizObject(GObject.GObject):
@ -16,6 +18,7 @@ class PyVizObject(GObject.GObject):
def tooltip_query(self, tooltip):
tooltip.set_text("TODO: tooltip for %r" % self)
## Link class
class Link(PyVizObject):
pass
@ -29,6 +32,7 @@ class InformationWindow(object):
def update(self):
raise NotImplementedError
## NetDeviceTraits class
class NetDeviceTraits(object):
## class variables
@ -47,6 +51,7 @@ class NetDeviceTraits(object):
self.is_wireless = is_wireless
self.is_virtual = is_virtual
netdevice_traits = {
ns.PointToPointNetDevice: NetDeviceTraits(is_wireless=False),
ns.CsmaNetDevice: NetDeviceTraits(is_wireless=False),
@ -60,36 +65,42 @@ netdevice_traits = {
ns.LteEnbNetDevice: NetDeviceTraits(is_wireless=True),
}
def lookup_netdevice_traits(class_type):
try:
return netdevice_traits[class_type]
except KeyError:
sys.stderr.write("WARNING: no NetDeviceTraits registered for device type %r; "
"I will assume this is a non-virtual wireless device, "
"but you should edit %r, variable 'netdevice_traits',"
" to make sure.\n" % (class_type.__name__, __file__))
sys.stderr.write(
"WARNING: no NetDeviceTraits registered for device type %r; "
"I will assume this is a non-virtual wireless device, "
"but you should edit %r, variable 'netdevice_traits',"
" to make sure.\n" % (class_type.__name__, __file__)
)
t = NetDeviceTraits(is_virtual=False, is_wireless=True)
netdevice_traits[class_type] = t
return t
def transform_distance_simulation_to_canvas(d):
return d*PIXELS_PER_METER
return d * PIXELS_PER_METER
def transform_point_simulation_to_canvas(x, y):
return x*PIXELS_PER_METER, y*PIXELS_PER_METER
return x * PIXELS_PER_METER, y * PIXELS_PER_METER
def transform_distance_canvas_to_simulation(d):
return d/PIXELS_PER_METER
return d / PIXELS_PER_METER
def transform_point_canvas_to_simulation(x, y):
return x/PIXELS_PER_METER, y/PIXELS_PER_METER
return x / PIXELS_PER_METER, y / PIXELS_PER_METER
plugins = []
plugin_modules = {}
def register_plugin(plugin_init_func, plugin_name=None, plugin_module=None):
"""
Register a plugin.
@ -102,18 +113,21 @@ def register_plugin(plugin_init_func, plugin_name=None, plugin_module=None):
if plugin_module is not None:
plugin_modules[plugin_name] = plugin_module
plugins_loaded = False
def load_plugins():
global plugins_loaded
if plugins_loaded:
return
plugins_loaded = True
plugins_dir = os.path.join(os.path.dirname(__file__), 'plugins')
plugins_dir = os.path.join(os.path.dirname(__file__), "plugins")
old_path = list(sys.path)
sys.path.insert(0, plugins_dir)
for filename in os.listdir(plugins_dir):
name, ext = os.path.splitext(filename)
if ext != '.py':
if ext != ".py":
continue
try:
plugin_module = __import__(name)
@ -125,7 +139,6 @@ def load_plugins():
except AttributeError:
print("Plugin %r has no 'register' function" % name, file=sys.stderr)
else:
#print("Plugin %r registered" % name, file=sys.stderr)
# print("Plugin %r registered" % name, file=sys.stderr)
register_plugin(plugin_func, name, plugin_module)
sys.path = old_path

File diff suppressed because it is too large
View File
import math
from gi.repository import GooCanvas, Gtk, Pango
from .base import PIXELS_PER_METER
from gi.repository import Pango
from gi.repository import Gtk
from gi.repository import GooCanvas
## Axes class
@ -28,16 +28,22 @@ class Axes(object):
"""
self.viz = viz
self.color = 0x8080C0FF
self.hlines = GooCanvas.CanvasPath(parent=viz.canvas.get_root_item(), stroke_color_rgba=self.color)
self.hlines = GooCanvas.CanvasPath(
parent=viz.canvas.get_root_item(), stroke_color_rgba=self.color
)
self.hlines.lower(None)
self.vlines = GooCanvas.CanvasPath(parent=viz.canvas.get_root_item(), stroke_color_rgba=self.color)
self.vlines = GooCanvas.CanvasPath(
parent=viz.canvas.get_root_item(), stroke_color_rgba=self.color
)
self.vlines.lower(None)
self.labels = []
hadj = self.viz.get_hadjustment()
vadj = self.viz.get_vadjustment()
def update(adj):
if self.visible:
self.update_view()
hadj.connect("value-changed", update)
vadj.connect("value-changed", update)
hadj.connect("changed", update)
@ -76,7 +82,7 @@ class Axes(object):
dx = xf - xi
size = dx
ndiv = 5
text_width = dx/ndiv/2
text_width = dx / ndiv / 2
def rint(x):
"""!
@ -85,26 +91,25 @@ class Axes(object):
@param x: x
@return x rounded up
"""
return math.floor(x+0.5)
return math.floor(x + 0.5)
dx_over_ndiv = dx / ndiv
for n in range(5): # iterate 5 times to find optimum division size
for n in range(5): # iterate 5 times to find optimum division size
# div: length of each division
# looking for approx. 'ndiv' divisions in a length 'dx'
tbe = math.log10(dx_over_ndiv)
# div: power of 10 closest to dx/ndiv
div = pow(10, rint(tbe))
# test if div/2 is closer to dx/ndiv
if math.fabs(div/2 - dx_over_ndiv) < math.fabs(div - dx_over_ndiv):
if math.fabs(div / 2 - dx_over_ndiv) < math.fabs(div - dx_over_ndiv):
div /= 2
elif math.fabs(div*2 - dx_over_ndiv) < math.fabs(div - dx_over_ndiv):
div *= 2 # test if div*2 is closer to dx/ndiv
x0 = div*math.ceil(xi / div) - div
elif math.fabs(div * 2 - dx_over_ndiv) < math.fabs(div - dx_over_ndiv):
div *= 2 # test if div*2 is closer to dx/ndiv
x0 = div * math.ceil(xi / div) - div
if n > 1:
ndiv = rint(size / text_width)
return x0, div
def update_view(self):
"""!
Update view function
@ -119,6 +124,7 @@ class Axes(object):
self.labels = []
for label in unused_labels:
label.set_property("visibility", GooCanvas.CanvasItemVisibility.HIDDEN)
def get_label():
"""!
Get label function
@ -129,7 +135,9 @@ class Axes(object):
try:
label = unused_labels.pop(0)
except IndexError:
label = GooCanvas.CanvasText(parent=self.viz.canvas.get_root_item(), stroke_color_rgba=self.color)
label = GooCanvas.CanvasText(
parent=self.viz.canvas.get_root_item(), stroke_color_rgba=self.color
)
else:
label.set_property("visibility", GooCanvas.CanvasItemVisibility.VISIBLE)
label.lower(None)
@ -139,31 +147,37 @@ class Axes(object):
hadj = self.viz.get_hadjustment()
vadj = self.viz.get_vadjustment()
zoom = self.viz.zoom.get_value()
offset = 10/zoom
offset = 10 / zoom
x1, y1 = self.viz.canvas.convert_from_pixels(hadj.get_value(), vadj.get_value())
x2, y2 = self.viz.canvas.convert_from_pixels(hadj.get_value() + hadj.get_page_size(), vadj.get_value() + vadj.get_page_size())
line_width = 5.0/self.viz.zoom.get_value()
x2, y2 = self.viz.canvas.convert_from_pixels(
hadj.get_value() + hadj.get_page_size(), vadj.get_value() + vadj.get_page_size()
)
line_width = 5.0 / self.viz.zoom.get_value()
# draw the horizontal axis
self.hlines.set_property("line-width", line_width)
yc = y2 - line_width/2
yc = y2 - line_width / 2
sim_x1 = x1/PIXELS_PER_METER
sim_x2 = x2/PIXELS_PER_METER
sim_x1 = x1 / PIXELS_PER_METER
sim_x2 = x2 / PIXELS_PER_METER
x0, xdiv = self._compute_divisions(sim_x1, sim_x2)
path = ["M %r %r L %r %r" % (x1, yc, x2, yc)]
x = x0
while x < sim_x2:
path.append("M %r %r L %r %r" % (PIXELS_PER_METER*x, yc - offset, PIXELS_PER_METER*x, yc))
path.append(
"M %r %r L %r %r" % (PIXELS_PER_METER * x, yc - offset, PIXELS_PER_METER * x, yc)
)
label = get_label()
label.set_properties(font=("Sans Serif %f" % int(12/zoom)),
text=("%G" % x),
fill_color_rgba=self.color,
alignment=Pango.Alignment.CENTER,
# anchor=Gtk.Widget.ANCHOR_S,
x=PIXELS_PER_METER*x,
y=(yc - offset))
label.set_properties(
font=("Sans Serif %f" % int(12 / zoom)),
text=("%G" % x),
fill_color_rgba=self.color,
alignment=Pango.Alignment.CENTER,
# anchor=Gtk.Widget.ANCHOR_S,
x=PIXELS_PER_METER * x,
y=(yc - offset),
)
x += xdiv
del x
@ -171,29 +185,30 @@ class Axes(object):
# draw the vertical axis
self.vlines.set_property("line-width", line_width)
xc = x1 + line_width/2
sim_y1 = y1/PIXELS_PER_METER
sim_y2 = y2/PIXELS_PER_METER
xc = x1 + line_width / 2
sim_y1 = y1 / PIXELS_PER_METER
sim_y2 = y2 / PIXELS_PER_METER
y0, ydiv = self._compute_divisions(sim_y1, sim_y2)
path = ["M %r %r L %r %r" % (xc, y1, xc, y2)]
y = y0
while y < sim_y2:
path.append("M %r %r L %r %r" % (xc, PIXELS_PER_METER*y, xc + offset, PIXELS_PER_METER*y))
path.append(
"M %r %r L %r %r" % (xc, PIXELS_PER_METER * y, xc + offset, PIXELS_PER_METER * y)
)
label = get_label()
label.set_properties(font=("Sans Serif %f" % int(12/zoom)),
text=("%G" % y),
fill_color_rgba=self.color,
alignment=Pango.Alignment.LEFT,
# anchor=Gtk.ANCHOR_W,
x=xc + offset,
y=PIXELS_PER_METER*y)
label.set_properties(
font=("Sans Serif %f" % int(12 / zoom)),
text=("%G" % y),
fill_color_rgba=self.color,
alignment=Pango.Alignment.LEFT,
# anchor=Gtk.ANCHOR_W,
x=xc + offset,
y=PIXELS_PER_METER * y,
)
y += ydiv
self.vlines.set_property("data", " ".join(path))
self.labels.extend(unused_labels)
File diff suppressed because it is too large
View File
@ -13,6 +13,7 @@ class StatisticsCollector(object):
"""
Collects interface statistics for all nodes.
"""
## @var node_statistics
# node statistics
## @var visualizer
@ -21,8 +22,16 @@ class StatisticsCollector(object):
## NetDevStats class
class NetDevStats(object):
## class members
__slots__ = ['rxPackets', 'rxBytes', 'txPackets', 'txBytes',
'rxPacketRate', 'rxBitRate', 'txPacketRate', 'txBitRate']
__slots__ = [
"rxPackets",
"rxBytes",
"txPackets",
"txBytes",
"rxPacketRate",
"rxBitRate",
"txPacketRate",
"txBitRate",
]
def __init__(self, visualizer):
"""!
@ -30,7 +39,7 @@ class StatisticsCollector(object):
@param self this object
@param visualizer visualizer object
"""
self.node_statistics = {} # nodeid -> list(raw statistics)
self.node_statistics = {} # nodeid -> list(raw statistics)
self.visualizer = visualizer
def simulation_periodic_update(self, viz):
@ -66,7 +75,7 @@ class StatisticsCollector(object):
if len(raw_stats_list) < NODE_STATISTICS_MEMORY:
return []
assert len(raw_stats_list) == NODE_STATISTICS_MEMORY
tx_packets1 = [] # transmitted packets, one value per interface
tx_packets1 = [] # transmitted packets, one value per interface
rx_packets1 = []
tx_bytes1 = []
rx_bytes1 = []
@ -78,7 +87,7 @@ class StatisticsCollector(object):
retval = []
k = self.visualizer.sample_period*(NODE_STATISTICS_MEMORY-1)
k = self.visualizer.sample_period * (NODE_STATISTICS_MEMORY - 1)
for iface, stats in enumerate(raw_stats_list[-1]):
outStat = self.NetDevStats()
outStat.txPackets = stats.transmittedPackets
@ -86,10 +95,10 @@ class StatisticsCollector(object):
outStat.rxPackets = stats.receivedPackets
outStat.rxBytes = stats.receivedBytes
outStat.txPacketRate = (stats.transmittedPackets - tx_packets1[iface])/k
outStat.rxPacketRate = (stats.receivedPackets - rx_packets1[iface])/k
outStat.txBitRate = (stats.transmittedBytes - tx_bytes1[iface])*8/k
outStat.rxBitRate = (stats.receivedBytes - rx_bytes1[iface])*8/k
outStat.txPacketRate = (stats.transmittedPackets - tx_packets1[iface]) / k
outStat.rxPacketRate = (stats.receivedPackets - rx_packets1[iface]) / k
outStat.txBitRate = (stats.transmittedBytes - tx_bytes1[iface]) * 8 / k
outStat.rxBitRate = (stats.receivedBytes - rx_bytes1[iface]) * 8 / k
retval.append(outStat)
return retval
@ -110,17 +119,14 @@ class ShowInterfaceStatistics(InformationWindow):
# table model
(
COLUMN_INTERFACE,
COLUMN_TX_PACKETS,
COLUMN_TX_BYTES,
COLUMN_TX_PACKET_RATE,
COLUMN_TX_BIT_RATE,
COLUMN_RX_PACKETS,
COLUMN_RX_BYTES,
COLUMN_RX_PACKET_RATE,
COLUMN_RX_BIT_RATE,
) = range(9)
def __init__(self, visualizer, node_index, statistics_collector):
@ -132,9 +138,11 @@ class ShowInterfaceStatistics(InformationWindow):
@param statistics_collector statistics collector class
"""
InformationWindow.__init__(self)
self.win = Gtk.Dialog(parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE))
self.win = Gtk.Dialog(
parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE),
)
self.win.connect("response", self._response_cb)
self.win.set_title("Statistics for node %i" % node_index)
self.visualizer = visualizer
@ -142,7 +150,7 @@ class ShowInterfaceStatistics(InformationWindow):
self.node_index = node_index
self.viz_node = visualizer.get_node(node_index)
self.table_model = Gtk.ListStore(*([str]*13))
self.table_model = Gtk.ListStore(*([str] * 13))
treeview = Gtk.TreeView(self.table_model)
treeview.show()
@ -193,23 +201,30 @@ class ShowInterfaceStatistics(InformationWindow):
interface_name = ns.Names.FindName(netdevice)
if not interface_name:
interface_name = "(interface %i)" % iface
self.table_model.set(tree_iter,
self.COLUMN_INTERFACE, interface_name,
self.COLUMN_TX_PACKETS, str(stats.txPackets),
self.COLUMN_TX_BYTES, str(stats.txBytes),
self.COLUMN_TX_PACKET_RATE, str(stats.txPacketRate),
self.COLUMN_TX_BIT_RATE, str(stats.txBitRate),
self.COLUMN_RX_PACKETS, str(stats.rxPackets),
self.COLUMN_RX_BYTES, str(stats.rxBytes),
self.COLUMN_RX_PACKET_RATE, str(stats.rxPacketRate),
self.COLUMN_RX_BIT_RATE, str(stats.rxBitRate)
)
self.table_model.set(
tree_iter,
self.COLUMN_INTERFACE,
interface_name,
self.COLUMN_TX_PACKETS,
str(stats.txPackets),
self.COLUMN_TX_BYTES,
str(stats.txBytes),
self.COLUMN_TX_PACKET_RATE,
str(stats.txPacketRate),
self.COLUMN_TX_BIT_RATE,
str(stats.txBitRate),
self.COLUMN_RX_PACKETS,
str(stats.rxPackets),
self.COLUMN_RX_BYTES,
str(stats.rxBytes),
self.COLUMN_RX_PACKET_RATE,
str(stats.rxPacketRate),
self.COLUMN_RX_BIT_RATE,
str(stats.rxBitRate),
)
def populate_node_menu(viz, node, menu, statistics_collector):
menu_item = Gtk.MenuItem("Show Interface Statistics")
menu_item.show()
View File
@ -5,6 +5,7 @@ try:
except ModuleNotFoundError:
from visualizer.base import InformationWindow
## ShowIpv4RoutingTable class
class ShowIpv4RoutingTable(InformationWindow):
## @var win
@ -41,9 +42,11 @@ class ShowIpv4RoutingTable(InformationWindow):
@param node_index the node index
"""
InformationWindow.__init__(self)
self.win = Gtk.Dialog(parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE))
self.win = Gtk.Dialog(
parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE),
)
self.win.connect("response", self._response_cb)
self.win.set_title("IPv4 routing table for node %i" % node_index)
self.visualizer = visualizer
@ -54,36 +57,34 @@ class ShowIpv4RoutingTable(InformationWindow):
treeview = Gtk.TreeView(self.table_model)
treeview.show()
sw = Gtk.ScrolledWindow()
sw.set_properties(hscrollbar_policy=Gtk.PolicyType.AUTOMATIC,
vscrollbar_policy=Gtk.PolicyType.AUTOMATIC)
sw.set_properties(
hscrollbar_policy=Gtk.PolicyType.AUTOMATIC, vscrollbar_policy=Gtk.PolicyType.AUTOMATIC
)
sw.show()
sw.add(treeview)
self.win.vbox.add(sw)
self.win.set_default_size(600, 300)
# Dest.
column = Gtk.TreeViewColumn('Destination', Gtk.CellRendererText(),
text=self.COLUMN_DESTINATION)
column = Gtk.TreeViewColumn(
"Destination", Gtk.CellRendererText(), text=self.COLUMN_DESTINATION
)
treeview.append_column(column)
# Next hop
column = Gtk.TreeViewColumn('Next hop', Gtk.CellRendererText(),
text=self.COLUMN_NEXT_HOP)
column = Gtk.TreeViewColumn("Next hop", Gtk.CellRendererText(), text=self.COLUMN_NEXT_HOP)
treeview.append_column(column)
# Interface
column = Gtk.TreeViewColumn('Interface', Gtk.CellRendererText(),
text=self.COLUMN_INTERFACE)
column = Gtk.TreeViewColumn("Interface", Gtk.CellRendererText(), text=self.COLUMN_INTERFACE)
treeview.append_column(column)
# Type
column = Gtk.TreeViewColumn('Type', Gtk.CellRendererText(),
text=self.COLUMN_TYPE)
column = Gtk.TreeViewColumn("Type", Gtk.CellRendererText(), text=self.COLUMN_TYPE)
treeview.append_column(column)
# Prio
column = Gtk.TreeViewColumn('Prio', Gtk.CellRendererText(),
text=self.COLUMN_PRIO)
column = Gtk.TreeViewColumn("Prio", Gtk.CellRendererText(), text=self.COLUMN_PRIO)
treeview.append_column(column)
self.visualizer.add_information_window(self)
@ -112,7 +113,7 @@ class ShowIpv4RoutingTable(InformationWindow):
if routing is None:
return
routing_protocols = [] # list of (protocol, type_string, priority)
routing_protocols = [] # list of (protocol, type_string, priority)
if isinstance(routing, ns.Ipv4StaticRouting):
ipv4_routing = routing_protocols.append((routing, "static", 0))
@ -134,17 +135,24 @@ class ShowIpv4RoutingTable(InformationWindow):
tree_iter = self.table_model.append()
netdevice = ipv4.GetNetDevice(route.GetInterface())
if netdevice is None:
interface_name = 'lo'
interface_name = "lo"
else:
interface_name = ns.Names.FindName(netdevice)
if not interface_name:
interface_name = "(interface %i)" % route.GetInterface()
self.table_model.set(tree_iter,
self.COLUMN_DESTINATION, str(route.GetDest()),
self.COLUMN_NEXT_HOP, str(route.GetGateway()),
self.COLUMN_INTERFACE, interface_name,
self.COLUMN_TYPE, type_string,
self.COLUMN_PRIO, prio)
self.table_model.set(
tree_iter,
self.COLUMN_DESTINATION,
str(route.GetDest()),
self.COLUMN_NEXT_HOP,
str(route.GetGateway()),
self.COLUMN_INTERFACE,
interface_name,
self.COLUMN_TYPE,
type_string,
self.COLUMN_PRIO,
prio,
)
def populate_node_menu(viz, node, menu):
@ -157,5 +165,6 @@ def populate_node_menu(viz, node, menu):
menu_item.connect("activate", _show_ipv4_routing_table)
menu.add(menu_item)
def register(viz):
viz.connect("populate-node-menu", populate_node_menu)
View File
@ -1,11 +1,11 @@
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import Gdk, Gtk
try:
from ns3.visualizer.base import InformationWindow
except ModuleNotFoundError:
from visualizer.base import InformationWindow
## ShowOlsrRoutingTable class
class ShowOlsrRoutingTable(InformationWindow):
## @var win
@ -31,10 +31,12 @@ class ShowOlsrRoutingTable(InformationWindow):
@param node_index the node index
"""
InformationWindow.__init__(self)
self.win = Gtk.Dialog(parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE))
self.win.set_default_size(Gdk.Screen.width()/2, Gdk.Screen.height()/2)
self.win = Gtk.Dialog(
parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE),
)
self.win.set_default_size(Gdk.Screen.width() / 2, Gdk.Screen.height() / 2)
self.win.connect("response", self._response_cb)
self.win.set_title("OLSR routing table for node %i" % node_index)
self.visualizer = visualizer
@ -45,30 +47,29 @@ class ShowOlsrRoutingTable(InformationWindow):
treeview = Gtk.TreeView(self.table_model)
treeview.show()
sw = Gtk.ScrolledWindow()
sw.set_properties(hscrollbar_policy=Gtk.PolicyType.AUTOMATIC,
vscrollbar_policy=Gtk.PolicyType.AUTOMATIC)
sw.set_properties(
hscrollbar_policy=Gtk.PolicyType.AUTOMATIC, vscrollbar_policy=Gtk.PolicyType.AUTOMATIC
)
sw.show()
sw.add(treeview)
self.win.vbox.add(sw)
# Dest.
column = Gtk.TreeViewColumn('Destination', Gtk.CellRendererText(),
text=self.COLUMN_DESTINATION)
column = Gtk.TreeViewColumn(
"Destination", Gtk.CellRendererText(), text=self.COLUMN_DESTINATION
)
treeview.append_column(column)
# Next hop
column = Gtk.TreeViewColumn('Next hop', Gtk.CellRendererText(),
text=self.COLUMN_NEXT_HOP)
column = Gtk.TreeViewColumn("Next hop", Gtk.CellRendererText(), text=self.COLUMN_NEXT_HOP)
treeview.append_column(column)
# Interface
column = Gtk.TreeViewColumn('Interface', Gtk.CellRendererText(),
text=self.COLUMN_INTERFACE)
column = Gtk.TreeViewColumn("Interface", Gtk.CellRendererText(), text=self.COLUMN_INTERFACE)
treeview.append_column(column)
# Num. Hops
column = Gtk.TreeViewColumn('Num. Hops', Gtk.CellRendererText(),
text=self.COLUMN_NUM_HOPS)
column = Gtk.TreeViewColumn("Num. Hops", Gtk.CellRendererText(), text=self.COLUMN_NUM_HOPS)
treeview.append_column(column)
self.visualizer.add_information_window(self)
@ -102,16 +103,22 @@ class ShowOlsrRoutingTable(InformationWindow):
tree_iter = self.table_model.append()
netdevice = ipv4.GetNetDevice(route.interface)
if netdevice is None:
interface_name = 'lo'
interface_name = "lo"
else:
interface_name = ns.Names.FindName(netdevice)
if not interface_name:
interface_name = "(interface %i)" % route.interface
self.table_model.set(tree_iter,
self.COLUMN_DESTINATION, str(route.destAddr),
self.COLUMN_NEXT_HOP, str(route.nextAddr),
self.COLUMN_INTERFACE, interface_name,
self.COLUMN_NUM_HOPS, route.distance)
self.table_model.set(
tree_iter,
self.COLUMN_DESTINATION,
str(route.destAddr),
self.COLUMN_NEXT_HOP,
str(route.nextAddr),
self.COLUMN_INTERFACE,
interface_name,
self.COLUMN_NUM_HOPS,
route.distance,
)
def populate_node_menu(viz, node, menu):
@ -129,5 +136,6 @@ def populate_node_menu(viz, node, menu):
menu_item.connect("activate", _show_ipv4_routing_table)
menu.add(menu_item)
def register(viz):
viz.connect("populate-node-menu", populate_node_menu)
View File
@ -1,5 +1,4 @@
from gi.repository import GObject
from gi.repository import Gtk
from gi.repository import GObject, Gtk
try:
from ns import ns
@ -15,7 +14,8 @@ try:
except ModuleNotFoundError:
from visualizer.base import InformationWindow
from kiwi.ui.objectlist import ObjectList, Column
from kiwi.ui.objectlist import Column, ObjectList
## ShowLastPackets class
class ShowLastPackets(InformationWindow):
@ -47,6 +47,7 @@ class ShowLastPackets(InformationWindow):
"""
PacketList class
"""
## @var table_model
# table model
(
@ -54,7 +55,7 @@ class ShowLastPackets(InformationWindow):
COLUMN_INTERFACE,
COLUMN_SIZE,
COLUMN_CONTENTS,
) = range(4)
) = range(4)
def __init__(self):
"""
@ -62,9 +63,11 @@ class ShowLastPackets(InformationWindow):
@param self this object
"""
super(ShowLastPackets.PacketList, self).__init__()
self.set_properties(hscrollbar_policy=Gtk.PolicyType.AUTOMATIC,
vscrollbar_policy=Gtk.PolicyType.AUTOMATIC)
self.table_model = Gtk.ListStore(*([str]*4))
self.set_properties(
hscrollbar_policy=Gtk.PolicyType.AUTOMATIC,
vscrollbar_policy=Gtk.PolicyType.AUTOMATIC,
)
self.table_model = Gtk.ListStore(*([str] * 4))
treeview = Gtk.TreeView(self.table_model)
treeview.show()
self.add(treeview)
@ -95,13 +98,17 @@ class ShowLastPackets(InformationWindow):
interface_name = ns.core.Names.FindName(sample.device)
if not interface_name:
interface_name = "(interface %i)" % sample.device.GetIfIndex()
self.table_model.set(tree_iter,
self.COLUMN_TIME, str(sample.time.GetSeconds()),
self.COLUMN_INTERFACE, interface_name,
self.COLUMN_SIZE, str(sample.packet.GetSize ()),
self.COLUMN_CONTENTS, str(sample.packet)
)
self.table_model.set(
tree_iter,
self.COLUMN_TIME,
str(sample.time.GetSeconds()),
self.COLUMN_INTERFACE,
interface_name,
self.COLUMN_SIZE,
str(sample.packet.GetSize()),
self.COLUMN_CONTENTS,
str(sample.packet),
)
def __init__(self, visualizer, node_index):
"""!
@ -111,9 +118,11 @@ class ShowLastPackets(InformationWindow):
@param node_index the node index
"""
InformationWindow.__init__(self)
self.win = Gtk.Dialog(parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE))
self.win = Gtk.Dialog(
parent=visualizer.window,
flags=Gtk.DialogFlags.DESTROY_WITH_PARENT,
buttons=("_Close", Gtk.ResponseType.CLOSE),
)
self.win.connect("response", self._response_cb)
self.win.set_title("Last packets for node %i" % node_index)
self.visualizer = visualizer
@ -122,9 +131,13 @@ class ShowLastPackets(InformationWindow):
def smart_expand(expander, vbox):
if expander.get_expanded():
vbox.set_child_packing(expander, expand=True, fill=True, padding=0, pack_type=Gtk.PACK_START)
vbox.set_child_packing(
expander, expand=True, fill=True, padding=0, pack_type=Gtk.PACK_START
)
else:
vbox.set_child_packing(expander, expand=False, fill=False, padding=0, pack_type=Gtk.PACK_START)
vbox.set_child_packing(
expander, expand=False, fill=False, padding=0, pack_type=Gtk.PACK_START
)
main_hbox = Gtk.HBox(False, 4)
main_hbox.show()
@ -157,7 +170,6 @@ class ShowLastPackets(InformationWindow):
main_vbox.pack_start(group, expand=False, fill=False)
group.connect_after("activate", smart_expand, main_vbox)
# Packet Filter
# - options
@ -176,17 +188,20 @@ class ShowLastPackets(InformationWindow):
sel_buttons_box.add(select_all_button)
sel_buttons_box.add(select_none_button)
self.packet_filter_widget = ObjectList([
Column('selected', title="Sel.", data_type=bool, editable=True),
Column('name', title="Header"),
], sortable=True)
self.packet_filter_widget = ObjectList(
[
Column("selected", title="Sel.", data_type=bool, editable=True),
Column("name", title="Header"),
],
sortable=True,
)
self.packet_filter_widget.show()
packet_filter_vbox.pack_start(self.packet_filter_widget, True, True, 4)
class TypeIdConfig(object):
__slots__ = ['name', 'selected', 'typeid']
__slots__ = ["name", "selected", "typeid"]
self.packet_filter_list = [] # list of TypeIdConfig instances
self.packet_filter_list = [] # list of TypeIdConfig instances
Header = ns.core.TypeId.LookupByName("ns3::Header")
Trailer = ns.core.TypeId.LookupByName("ns3::Trailer")
@ -216,15 +231,22 @@ class ShowLastPackets(InformationWindow):
def update_capture_options():
if self.op_AND_button.props.active:
self.packet_capture_options.mode = ns.visualizer.PyViz.PACKET_CAPTURE_FILTER_HEADERS_AND
self.packet_capture_options.mode = (
ns.visualizer.PyViz.PACKET_CAPTURE_FILTER_HEADERS_AND
)
else:
self.packet_capture_options.mode = ns.visualizer.PyViz.PACKET_CAPTURE_FILTER_HEADERS_OR
self.packet_capture_options.mode = (
ns.visualizer.PyViz.PACKET_CAPTURE_FILTER_HEADERS_OR
)
self.packet_capture_options.numLastPackets = 100
self.packet_capture_options.headers = [c.typeid for c in self.packet_filter_list if c.selected]
self.packet_capture_options.headers = [
c.typeid for c in self.packet_filter_list if c.selected
]
self.visualizer.simulation.lock.acquire()
try:
self.visualizer.simulation.sim_helper.SetPacketCaptureOptions(
self.node.GetId(), self.packet_capture_options)
self.node.GetId(), self.packet_capture_options
)
finally:
self.visualizer.simulation.lock.release()
@ -247,7 +269,9 @@ class ShowLastPackets(InformationWindow):
op_buttons_box.show()
packet_filter_vbox.pack_start(op_buttons_box, False, False, 4)
self.op_AND_button = GObject.new(Gtk.RadioButton, label="AND", visible=True)
self.op_OR_button = GObject.new(Gtk.RadioButton, label="OR", visible=True, group=self.op_AND_button)
self.op_OR_button = GObject.new(
Gtk.RadioButton, label="OR", visible=True, group=self.op_AND_button
)
op_buttons_box.add(self.op_AND_button)
op_buttons_box.add(self.op_OR_button)
self.op_OR_button.props.active = True
@ -256,6 +280,7 @@ class ShowLastPackets(InformationWindow):
def cell_edited(l, obj, attribute):
update_capture_options()
self.packet_filter_widget.connect("cell-edited", cell_edited)
update_capture_options()
@ -298,5 +323,6 @@ def populate_node_menu(viz, node, menu):
menu_item.connect("activate", _show_it)
menu.add(menu_item)
def register(viz):
viz.connect("populate-node-menu", populate_node_menu)
View File
@ -1,4 +1,5 @@
import math
try:
from ns import ns
except ModuleNotFoundError:
@ -14,6 +15,7 @@ try:
except ModuleNotFoundError:
from visualizer.base import Link, transform_distance_canvas_to_simulation
## WifiLink class
class WifiLink(Link):
## @var node1
@ -38,15 +40,19 @@ class WifiLink(Link):
super(WifiLink, self).__init__()
self.node1 = sta
self.dev = dev
self.node2 = None # ap
self.node2 = None # ap
self.canvas_item = GooCanvas.CanvasGroup(parent=parent_canvas_item)
self.invisible_line = GooCanvas.CanvasPolyline(parent=self.canvas_item,
line_width=25.0,
visibility=GooCanvas.CanvasItemVisibility.HIDDEN)
self.visible_line = GooCanvas.CanvasPolyline(parent=self.canvas_item,
line_width=1.0,
stroke_color_rgba=0xC00000FF,
line_dash=GooCanvas.CanvasLineDash.newv([2.0, 2.0 ]))
self.invisible_line = GooCanvas.CanvasPolyline(
parent=self.canvas_item,
line_width=25.0,
visibility=GooCanvas.CanvasItemVisibility.HIDDEN,
)
self.visible_line = GooCanvas.CanvasPolyline(
parent=self.canvas_item,
line_width=1.0,
stroke_color_rgba=0xC00000FF,
line_dash=GooCanvas.CanvasLineDash.newv([2.0, 2.0]),
)
# self.invisible_line.set_property("pointer-events", (GooCanvas.CanvasPointerEvents.STROKE_MASK
# |GooCanvas.CanvasPointerEvents.FILL_MASK
# |GooCanvas.CanvasPointerEvents.PAINTED_MASK))
@ -106,13 +112,17 @@ class WifiLink(Link):
pos2_x, pos2_y = self.node2.get_position()
dx = pos2_x - pos1_x
dy = pos2_y - pos1_y
d = transform_distance_canvas_to_simulation(math.sqrt(dx*dx + dy*dy))
d = transform_distance_canvas_to_simulation(math.sqrt(dx * dx + dy * dy))
mac = self.dev.GetMac()
tooltip.set_text(("WiFi link between STA Node %i and AP Node %i; distance=%.2f m.\n"
"SSID: %s\n"
"BSSID: %s")
% (self.node1.node_index, self.node2.node_index, d,
mac.GetSsid(), mac.GetBssid()))
tooltip.set_text(
(
"WiFi link between STA Node %i and AP Node %i; distance=%.2f m.\n"
"SSID: %s\n"
"BSSID: %s"
)
% (self.node1.node_index, self.node2.node_index, d, mac.GetSsid(), mac.GetBssid())
)
## WifiLinkMonitor class
class WifiLinkMonitor(object):
@ -125,8 +135,8 @@ class WifiLinkMonitor(object):
@param self The object pointer.
@param dummy_viz A dummy visualizer
"""
self.access_points = {} # bssid -> node
self.stations = [] # list of (sta_netdevice, viz_node, wifi_link)
self.access_points = {} # bssid -> node
self.stations = [] # list of (sta_netdevice, viz_node, wifi_link)
def scan_nodes(self, viz):
"""! Scan nodes function.
@ -134,7 +144,7 @@ class WifiLinkMonitor(object):
@param viz The visualizer object
@return none
"""
for (sta_netdevice, viz_node, wifi_link) in self.stations:
for sta_netdevice, viz_node, wifi_link in self.stations:
wifi_link.destroy()
self.access_points = {}
@ -153,8 +163,8 @@ class WifiLinkMonitor(object):
elif isinstance(wifi_mac, ns.wifi.ApWifiMac):
bssid = ns.network.Mac48Address.ConvertFrom(dev.GetAddress())
self.access_points[str(bssid)] = node
#print "APs: ", self.access_points
#print "STAs: ", self.stations
# print "APs: ", self.access_points
# print "STAs: ", self.stations
def simulation_periodic_update(self, viz):
"""! Simulation Periodic Update function.
@ -162,12 +172,12 @@ class WifiLinkMonitor(object):
@param viz The visualizer object
@return none
"""
for (sta_netdevice, viz_node, wifi_link) in self.stations:
for sta_netdevice, viz_node, wifi_link in self.stations:
if not sta_netdevice.IsLinkUp():
wifi_link.set_ap(None)
continue
bssid = str(sta_netdevice.GetMac().GetBssid())
if bssid == '00:00:00:00:00:00':
if bssid == "00:00:00:00:00:00":
wifi_link.set_ap(None)
continue
ap = self.access_points[bssid]
@ -179,7 +189,7 @@ class WifiLinkMonitor(object):
@param viz The visualizer object
@return none
"""
for (dummy_sta_netdevice, dummy_viz_node, wifi_link) in self.stations:
for dummy_sta_netdevice, dummy_viz_node, wifi_link in self.stations:
if wifi_link is not None:
wifi_link.update_points()


@ -1,8 +1,9 @@
from gi.repository import GObject, GooCanvas
import rsvg
#import cairo
# import cairo
import os.path
import rsvg
from gi.repository import GObject, GooCanvas
## SvgItem class
class SvgItem(GooCanvas.ItemSimple):
@ -35,38 +36,43 @@ class SvgItem(GooCanvas.ItemSimple):
## setup our custom properties
__gproperties__ = {
'x': (float, # property type
'X', # property nick name
'The x coordinate of a SVG image', # property description
-10e6, # property minimum value
10e6, # property maximum value
0, # property default value
GObject.PARAM_READWRITE), # property flags
'y': (float,
'Y',
'The y coordinate of a SVG image',
-10e6,
10e6,
0,
GObject.PARAM_READWRITE),
'width': (float,
'Width',
'The width of the SVG Image',
0,
10e6,
0,
GObject.PARAM_READWRITE),
'height': (float,
'Height',
'The width of the SVG Image',
0,
10e6,
0,
GObject.PARAM_READWRITE),
}
"x": (
float, # property type
"X", # property nick name
"The x coordinate of a SVG image", # property description
-10e6, # property minimum value
10e6, # property maximum value
0, # property default value
GObject.PARAM_READWRITE,
), # property flags
"y": (
float,
"Y",
"The y coordinate of a SVG image",
-10e6,
10e6,
0,
GObject.PARAM_READWRITE,
),
"width": (
float,
"Width",
"The width of the SVG Image",
0,
10e6,
0,
GObject.PARAM_READWRITE,
),
"height": (
float,
"Height",
"The width of the SVG Image",
0,
10e6,
0,
GObject.PARAM_READWRITE,
),
}
def __init__(self, x, y, rsvg_handle, **kwargs):
"""!
@ -97,26 +103,26 @@ class SvgItem(GooCanvas.ItemSimple):
@param value property value
@return exception if unknown property
"""
if pspec.name == 'x':
if pspec.name == "x":
self.x = value
# make sure we update the display
self.changed(True)
elif pspec.name == 'y':
elif pspec.name == "y":
self.y = value
# make sure we update the display
self.changed(True)
elif pspec.name == 'width':
elif pspec.name == "width":
self.custom_width = value
self._size_changed()
# make sure we update the display
self.changed(True)
elif pspec.name == 'height':
elif pspec.name == "height":
self.custom_height = value
self._size_changed()
@ -124,7 +130,7 @@ class SvgItem(GooCanvas.ItemSimple):
self.changed(True)
else:
raise AttributeError('unknown property %s' % pspec.name)
raise AttributeError("unknown property %s" % pspec.name)
def _size_changed(self):
"""!
@ -141,12 +147,12 @@ class SvgItem(GooCanvas.ItemSimple):
self.width = self.custom_width
self.sx = self.custom_width / self.handle.props.width
self.sy = self.sx
self.height = self.handle.props.height*self.sy
self.height = self.handle.props.height * self.sy
elif self.custom_width is None and self.custom_height is not None:
self.height = self.custom_height
self.sy = self.custom_height / self.handle.props.height
self.sx = self.sy
self.width = self.handle.props.width*self.sx
self.sx = self.sy
self.width = self.handle.props.width * self.sx
else:
self.width = self.custom_width
self.height = self.custom_height
@ -160,23 +166,23 @@ class SvgItem(GooCanvas.ItemSimple):
@param pspec property name
@return property value or exception if unknown property
"""
if pspec.name == 'x':
if pspec.name == "x":
return self.x
elif pspec.name == 'y':
elif pspec.name == "y":
return self.y
elif pspec.name == 'width':
elif pspec.name == "width":
self.width = self.handle.props.width
self.height = self.handle.props.height
return self.width
elif pspec.name == 'height':
elif pspec.name == "height":
return self.height
else:
raise AttributeError('unknown property %s' % pspec.name)
raise AttributeError("unknown property %s" % pspec.name)
def do_simple_paint(self, cr, bounds):
"""!
@ -212,7 +218,9 @@ class SvgItem(GooCanvas.ItemSimple):
@param is_pointer_event is the event a pointer event
@return true if at or false if not
"""
if ((x < self.x) or (x > self.x + self.width)) or ((y < self.y) or (y > self.y + self.height)):
if ((x < self.x) or (x > self.x + self.width)) or (
(y < self.y) or (y > self.y + self.height)
):
return False
else:
return True
@ -220,12 +228,12 @@ class SvgItem(GooCanvas.ItemSimple):
_rsvg_cache = dict()
def rsvg_handle_factory(base_file_name):
try:
return _rsvg_cache[base_file_name]
except KeyError:
full_path = os.path.join(os.path.dirname(__file__), 'resource', base_file_name)
full_path = os.path.join(os.path.dirname(__file__), "resource", base_file_name)
rsvg_handle = rsvg.Handle(full_path)
_rsvg_cache[base_file_name] = rsvg_handle
return rsvg_handle


@ -11,202 +11,208 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.imgmath']
extensions = ["sphinx.ext.imgmath"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'wifi'
master_doc = "wifi"
# General information about the project.
project = u'ns-3'
copyright = u'ns-3 project'
project = "ns-3"
copyright = "ns-3 project"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3-dev'
version = "ns-3-dev"
# The full version, including alpha/beta/rc tags.
release = 'ns-3-dev'
release = "ns-3-dev"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
#htmlhelp_basename = 'ns-3doc'
# htmlhelp_basename = 'ns-3doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
# ('wifi-testing', 'wifi-doc-testing.tex', u'Wi-Fi Testing Documentation', u'ns-3 project', 'manual'),
# ('wifi-design', 'wifi-doc-design.tex', u'Wi-Fi Design Documentation', u'ns-3 project', 'manual'),
# ('wifi-user', 'wifi-doc-user.tex', u'Wi-Fi User Documentation', u'ns-3 project', 'manual'),
('wifi', 'wifi-module-doc.tex', u'The ns-3 Wi-Fi Module Documentation', u'ns-3 project', 'manual'),
# ('wifi-testing', 'wifi-doc-testing.tex', u'Wi-Fi Testing Documentation', u'ns-3 project', 'manual'),
# ('wifi-design', 'wifi-doc-design.tex', u'Wi-Fi Design Documentation', u'ns-3 project', 'manual'),
# ('wifi-user', 'wifi-doc-user.tex', u'Wi-Fi User Documentation', u'ns-3 project', 'manual'),
(
"wifi",
"wifi-module-doc.tex",
"The ns-3 Wi-Fi Module Documentation",
"ns-3 project",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# add page breaks in the pdf. Level 1 is for top-level sections, level 2 for subsections, and so on.
@ -217,7 +223,4 @@ pdf_break_level = 4
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-model-library', u'ns-3 Model Library',
[u'ns-3 project'], 1)
]
man_pages = [("index", "ns-3-model-library", "ns-3 Model Library", ["ns-3 project"], 1)]


@ -16,113 +16,185 @@
#
# Authors: Hao Yin and Sebastien Deronne
#
import numpy as np
import math
import numpy as np
def bianchi_ax(data_rate, ack_rate, k, difs):
# Parameters for 11ax
nA = np.linspace(5, 50, 10)
CWmin = 15
CWmax = 1023
L_DATA = 1500 * 8 # data size in bits
L_ACK = 14 * 8 # ACK size in bits
#B = 1/(CWmin+1)
B=0
EP = L_DATA/(1-B)
T_GI = 800e-9 # guard interval in seconds
T_SYMBOL_ACK = 4e-6 # symbol duration in seconds (for ACK)
T_SYMBOL_DATA = 12.8e-6 + T_GI # symbol duration in seconds (for DATA)
T_PHY_ACK = 20e-6 # PHY preamble & header duration in seconds (for ACK)
T_PHY_DATA = 44e-6 # PHY preamble & header duration in seconds (for DATA)
L_SERVICE = 16 # service field length in bits
L_TAIL = 6 # tail length in bits
L_MAC = (30) * 8 # MAC header size in bits
L_APP_HDR = 8 * 8 # bits added by the upper layer(s)
L_DATA = 1500 * 8 # data size in bits
L_ACK = 14 * 8 # ACK size in bits
# B = 1/(CWmin+1)
B = 0
EP = L_DATA / (1 - B)
T_GI = 800e-9 # guard interval in seconds
T_SYMBOL_ACK = 4e-6 # symbol duration in seconds (for ACK)
T_SYMBOL_DATA = 12.8e-6 + T_GI # symbol duration in seconds (for DATA)
T_PHY_ACK = 20e-6 # PHY preamble & header duration in seconds (for ACK)
T_PHY_DATA = 44e-6 # PHY preamble & header duration in seconds (for DATA)
L_SERVICE = 16 # service field length in bits
L_TAIL = 6 # tail length in bits
L_MAC = (30) * 8 # MAC header size in bits
L_APP_HDR = 8 * 8 # bits added by the upper layer(s)
T_SIFS = 16e-6
T_DIFS = 34e-6
T_SLOT = 9e-6
delta = 1e-7
Aggregation_Type = 'A_MPDU' #A_MPDU or A_MSDU (HYBRID not fully supported)
Aggregation_Type = "A_MPDU" # A_MPDU or A_MSDU (HYBRID not fully supported)
K_MSDU = 1
K_MPDU = k
L_MPDU_HEADER = 4
L_MSDU_HEADER = 14 * 8
if (k <= 1):
Aggregation_Type = 'NONE'
if k <= 1:
Aggregation_Type = "NONE"
N_DBPS = data_rate * T_SYMBOL_DATA # number of data bits per OFDM symbol
N_DBPS = data_rate * T_SYMBOL_DATA # number of data bits per OFDM symbol
if (Aggregation_Type == 'NONE'):
N_SYMBOLS = math.ceil((L_SERVICE + (L_MAC + L_DATA + L_APP_HDR) + L_TAIL)/N_DBPS)
if Aggregation_Type == "NONE":
N_SYMBOLS = math.ceil((L_SERVICE + (L_MAC + L_DATA + L_APP_HDR) + L_TAIL) / N_DBPS)
T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
K_MPDU = 1
K_MSDU = 1
if (Aggregation_Type == 'A_MSDU'):
N_SYMBOLS = math.ceil((L_SERVICE + K_MPDU*(L_MAC + L_MPDU_HEADER + K_MSDU*(L_MSDU_HEADER + L_DATA + L_APP_HDR)) + L_TAIL)/N_DBPS)
if Aggregation_Type == "A_MSDU":
N_SYMBOLS = math.ceil(
(
L_SERVICE
+ K_MPDU * (L_MAC + L_MPDU_HEADER + K_MSDU * (L_MSDU_HEADER + L_DATA + L_APP_HDR))
+ L_TAIL
)
/ N_DBPS
)
T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
if (Aggregation_Type == 'A_MPDU'):
N_SYMBOLS = math.ceil((L_SERVICE + K_MPDU*(L_MAC + L_MPDU_HEADER + L_DATA + L_APP_HDR) + L_TAIL)/N_DBPS)
if Aggregation_Type == "A_MPDU":
N_SYMBOLS = math.ceil(
(L_SERVICE + K_MPDU * (L_MAC + L_MPDU_HEADER + L_DATA + L_APP_HDR) + L_TAIL) / N_DBPS
)
T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
#Calculate ACK Duration
N_DBPS = ack_rate * T_SYMBOL_ACK # number of data bits per OFDM symbol
N_SYMBOLS = math.ceil((L_SERVICE + L_ACK + L_TAIL)/N_DBPS)
# Calculate ACK Duration
N_DBPS = ack_rate * T_SYMBOL_ACK # number of data bits per OFDM symbol
N_SYMBOLS = math.ceil((L_SERVICE + L_ACK + L_TAIL) / N_DBPS)
T_ACK = T_PHY_ACK + (T_SYMBOL_ACK * N_SYMBOLS)
T_s = T_DATA + T_SIFS + T_ACK + T_DIFS
if difs == 1: #DIFS
if difs == 1: # DIFS
T_C = T_DATA + T_DIFS
else:
T_s = T_DATA + T_SIFS + T_ACK + T_DIFS + delta
T_C = T_DATA + T_DIFS + T_SIFS + T_ACK + delta
T_S = T_s/(1-B) + T_SLOT
T_S = T_s / (1 - B) + T_SLOT
S_bianchi = np.zeros(len(nA))
for j in range(len(nA)):
n = nA[j]*1
n = nA[j] * 1
W = CWmin + 1
m = math.log2((CWmax + 1)/(CWmin + 1))
m = math.log2((CWmax + 1) / (CWmin + 1))
tau1 = np.linspace(0, 0.1, 100000)
p = 1 - np.power((1 - tau1),(n - 1))
ps = p*0
p = 1 - np.power((1 - tau1), (n - 1))
ps = p * 0
for i in range(int(m)):
ps = ps + np.power(2*p, i)
ps = ps + np.power(2 * p, i)
taup = 2./(1 + W + p*W*ps)
taup = 2.0 / (1 + W + p * W * ps)
b = np.argmin(np.abs(tau1 - taup))
tau = taup[b]
Ptr = 1 - math.pow((1 - tau), int(n))
Ps = n*tau*math.pow((1 - tau), int(n-1))/Ptr
Ps = n * tau * math.pow((1 - tau), int(n - 1)) / Ptr
S_bianchi[j] = K_MSDU*K_MPDU*Ps*Ptr*EP/((1-Ptr)*T_SLOT+Ptr*Ps*T_S+Ptr*(1-Ps)*T_C)/1e6
S_bianchi[j] = (
K_MSDU
* K_MPDU
* Ps
* Ptr
* EP
/ ((1 - Ptr) * T_SLOT + Ptr * Ps * T_S + Ptr * (1 - Ps) * T_C)
/ 1e6
)
bianchi_result = S_bianchi
return bianchi_result
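For reference, the quantity accumulated in the loop above is Bianchi's saturation throughput. A sketch of the underlying expressions, written with the same symbols as the code (W = CWmin + 1, m = log2((CWmax+1)/(CWmin+1)) backoff stages, n contending stations, E[P] = EP):

    \tau = \frac{2}{1 + W + p W \sum_{i=0}^{m-1} (2p)^i},
    \qquad p = 1 - (1 - \tau)^{\,n-1}

    P_{tr} = 1 - (1 - \tau)^{n},
    \qquad P_{s} = \frac{n \tau (1 - \tau)^{\,n-1}}{P_{tr}}

    S = \frac{K_{MSDU}\, K_{MPDU}\, P_{s}\, P_{tr}\, E[P]}
             {(1 - P_{tr})\, T_{SLOT} + P_{tr} P_{s} T_{S} + P_{tr} (1 - P_{s}) T_{C}}

The code solves the fixed point for tau numerically (argmin over a grid of candidate values) and reports S in Mb/s, hence the final division by 1e6.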
def str_result(bianchi_result, mcs, bw):
str_bianchi = ' {' + '\"HeMcs{:d}'.format(mcs) + '_{:d}MHz\"'.format(bw) + ', {\n'
for i in range (len(bianchi_result)):
str_tmp = ' {' + '{:d}, {:.4f}'.format(5*(i+1), bianchi_result[i]) +'},\n'
str_bianchi = str_bianchi + str_tmp
str_bianchi = " {" + '"HeMcs{:d}'.format(mcs) + '_{:d}MHz"'.format(bw) + ", {\n"
for i in range(len(bianchi_result)):
str_tmp = " {" + "{:d}, {:.4f}".format(5 * (i + 1), bianchi_result[i]) + "},\n"
str_bianchi = str_bianchi + str_tmp
str_bianchi = str_bianchi + " }},\n"
print(str_bianchi)
return str_bianchi
# Settings for different MCS and mode
data_rates_20MHz = [8.603e6, 17.206e6, 25.8e6, 34.4e6, 51.5e6, 68.8e6, 77.4e6, 86e6, 103.2e6, 114.7e6, 129e6, 143.4e6]
data_rates_20MHz = [
8.603e6,
17.206e6,
25.8e6,
34.4e6,
51.5e6,
68.8e6,
77.4e6,
86e6,
103.2e6,
114.7e6,
129e6,
143.4e6,
]
ack_rates_20MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_40MHz = [17.2e6, 34.4e6, 51.5e6, 68.8e6, 103.2e6, 137.6e6, 154.9e6, 172.1e6, 206.5e6, 229.4e6, 258.1e6, 286.8e6]
data_rates_40MHz = [
17.2e6,
34.4e6,
51.5e6,
68.8e6,
103.2e6,
137.6e6,
154.9e6,
172.1e6,
206.5e6,
229.4e6,
258.1e6,
286.8e6,
]
ack_rates_40MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_80MHz = [36e6, 72.1e6, 108.1e6, 144.1e6, 216.2e6, 288.2e6, 324.3e6, 360.3e6, 432.4e6, 480.4e6, 540.4e6, 600.5e6]
data_rates_80MHz = [
36e6,
72.1e6,
108.1e6,
144.1e6,
216.2e6,
288.2e6,
324.3e6,
360.3e6,
432.4e6,
480.4e6,
540.4e6,
600.5e6,
]
ack_rates_80MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_160MHz = [72.1e6, 144.1e6, 216.2e6, 288.2e6, 432.4e6, 576.5e6, 648.5e6, 720.6e6, 864.7e6, 960.8e6, 1080.9e6, 1201e6]
data_rates_160MHz = [
72.1e6,
144.1e6,
216.2e6,
288.2e6,
432.4e6,
576.5e6,
648.5e6,
720.6e6,
864.7e6,
960.8e6,
1080.9e6,
1201e6,
]
ack_rates_160MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
# Generate results with frame aggregation disabled
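The generation loop itself is not visible in this hunk. A minimal sketch of how the tables above are typically fed into bianchi_ax/str_result (the aggregation size k and the DIFS flag below are illustrative choices, not taken from this diff):

    # Hypothetical driver; k <= 1 disables A-MPDU aggregation in bianchi_ax().
    for mcs, (rate, ack) in enumerate(zip(data_rates_20MHz, ack_rates_20MHz)):
        str_result(bianchi_ax(rate, ack, 1, 1), mcs, 20)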

File diff suppressed because it is too large.

test.py (734 lines changed)

File diff suppressed because it is too large.


@ -8,53 +8,51 @@ import os
def get_list_from_file(file_path, list_name):
'''Looks for a Python list called list_name in the file specified
"""Looks for a Python list called list_name in the file specified
by file_path and returns it.
If the file or list name aren't found, this function will return
an empty list.
'''
"""
# Read in the file if it exists.
if not os.path.exists(file_path):
return []
with open(file_path, "r", encoding="utf-8") as file_in:
# Look for the list.
list_string = ""
parsing_multiline_list = False
for line in file_in:
# Remove any comments.
if '#' in line:
(line, comment) = line.split('#', 1)
if "#" in line:
(line, comment) = line.split("#", 1)
# Parse the line.
if list_name in line or parsing_multiline_list:
list_string += line
# Handle multiline lists.
if ']' not in list_string:
if "]" not in list_string:
parsing_multiline_list = True
else:
# Evaluate the list once its end is reached.
# Make the split function only split it once.
return eval(list_string.split('=', 1)[1].strip())
return eval(list_string.split("=", 1)[1].strip())
# List name was not found
return []
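A minimal usage sketch (the .ns3rc fragment in the comment is hypothetical; only the variable name modules_enabled comes from the code further below):

    # Given a ~/.ns3rc containing e.g.:  modules_enabled = ['core', 'network', 'wifi']
    # this returns ['core', 'network', 'wifi'], or [] if the file or list is missing.
    enabled = get_list_from_file(os.path.expanduser("~/.ns3rc"), "modules_enabled")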
def get_bool_from_file(file_path, bool_name, value_if_missing):
'''Looks for a Python boolean variable called bool_name in the
"""Looks for a Python boolean variable called bool_name in the
file specified by file_path and returns its value.
If the file or boolean variable aren't found, this function will
return value_if_missing.
'''
"""
# Read in the file if it exists.
if not os.path.exists(file_path):
@ -63,16 +61,15 @@ def get_bool_from_file(file_path, bool_name, value_if_missing):
with open(file_path, "r", encoding="utf-8") as file_in:
# Look for the boolean variable.
for line in file_in:
# Remove any comments.
if '#' in line:
(line, comment) = line.split('#', 1)
if "#" in line:
(line, comment) = line.split("#", 1)
# Parse the line.
if bool_name in line:
# Evaluate the variable's line once it is found. Make
# the split function only split it once.
return eval(line.split('=', 1)[1].strip())
return eval(line.split("=", 1)[1].strip())
# Boolean variable was not found
return value_if_missing
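Analogously, a hedged usage sketch for the boolean variant:

    # Returns the value of examples_enabled from ~/.ns3rc, or False if it is absent.
    examples = get_bool_from_file(os.path.expanduser("~/.ns3rc"), "examples_enabled", False)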
@ -85,17 +82,17 @@ def get_bool_from_file(file_path, bool_name, value_if_missing):
def read_config_file():
# By default, all modules will be enabled, examples will be disabled,
# and tests will be disabled.
modules_enabled = ['all_modules']
modules_enabled = ["all_modules"]
examples_enabled = False
tests_enabled = False
tests_enabled = False
# See if the ns3 configuration file exists in the current working
# directory and then look for it in the ~ directory.
config_file_exists = False
dot_ns3rc_name = '.ns3rc'
dot_ns3rc_name = ".ns3rc"
dot_ns3rc_path = dot_ns3rc_name
if not os.path.exists(dot_ns3rc_path):
dot_ns3rc_path = os.path.expanduser('~/') + dot_ns3rc_name
dot_ns3rc_path = os.path.expanduser("~/") + dot_ns3rc_name
if not os.path.exists(dot_ns3rc_path):
# Return all of the default values if the .ns3rc file can't be found.
return (config_file_exists, modules_enabled, examples_enabled, tests_enabled)
@ -103,17 +100,17 @@ def read_config_file():
config_file_exists = True
# Read in the enabled modules.
modules_enabled = get_list_from_file(dot_ns3rc_path, 'modules_enabled')
modules_enabled = get_list_from_file(dot_ns3rc_path, "modules_enabled")
if not modules_enabled:
# Enable all modules if the modules_enabled line can't be found.
modules_enabled = ['all_modules']
modules_enabled = ["all_modules"]
# Read in whether examples should be enabled or not.
value_if_missing = False
examples_enabled = get_bool_from_file(dot_ns3rc_path, 'examples_enabled', value_if_missing)
examples_enabled = get_bool_from_file(dot_ns3rc_path, "examples_enabled", value_if_missing)
# Read in whether tests should be enabled or not.
value_if_missing = False
tests_enabled = get_bool_from_file(dot_ns3rc_path, 'tests_enabled', value_if_missing)
tests_enabled = get_bool_from_file(dot_ns3rc_path, "tests_enabled", value_if_missing)
return (config_file_exists, modules_enabled, examples_enabled, tests_enabled)
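Callers unpack the returned tuple in the same order; a small illustrative sketch:

    (found, modules, build_examples, build_tests) = read_config_file()
    if not found:
        print("No .ns3rc found; using defaults:", modules, build_examples, build_tests)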


@ -42,7 +42,6 @@ import re
import shutil
import subprocess
import sys
from typing import Callable, Dict, List, Tuple
###########################################################
@ -55,76 +54,76 @@ CLANG_FORMAT_VERSIONS = [
14,
]
CLANG_FORMAT_GUARD_ON = '// clang-format on'
CLANG_FORMAT_GUARD_OFF = '// clang-format off'
CLANG_FORMAT_GUARD_ON = "// clang-format on"
CLANG_FORMAT_GUARD_OFF = "// clang-format off"
DIRECTORIES_TO_SKIP = [
'__pycache__',
'.git',
'bindings',
'build',
'cmake-cache',
'testpy-output',
"__pycache__",
".git",
"bindings",
"build",
"cmake-cache",
"testpy-output",
]
# List of files entirely copied from elsewhere that should not be checked,
# in order to optimize the performance of this script
FILES_TO_SKIP = [
'valgrind.h',
"valgrind.h",
]
FILE_EXTENSIONS_TO_CHECK_FORMATTING = [
'.c',
'.cc',
'.h',
".c",
".cc",
".h",
]
FILE_EXTENSIONS_TO_CHECK_INCLUDE_PREFIXES = FILE_EXTENSIONS_TO_CHECK_FORMATTING
FILE_EXTENSIONS_TO_CHECK_WHITESPACE = [
'.c',
'.cc',
'.click',
'.cmake',
'.conf',
'.css',
'.dot',
'.gnuplot',
'.gp',
'.h',
'.html',
'.js',
'.json',
'.m',
'.md',
'.mob',
'.ns_params',
'.ns_movements',
'.params',
'.pl',
'.plt',
'.py',
'.rst',
'.seqdiag',
'.sh',
'.txt',
'.yml',
".c",
".cc",
".click",
".cmake",
".conf",
".css",
".dot",
".gnuplot",
".gp",
".h",
".html",
".js",
".json",
".m",
".md",
".mob",
".ns_params",
".ns_movements",
".params",
".pl",
".plt",
".py",
".rst",
".seqdiag",
".sh",
".txt",
".yml",
]
FILES_TO_CHECK_WHITESPACE = [
'Makefile',
'ns3',
"Makefile",
"ns3",
]
FILE_EXTENSIONS_TO_CHECK_TABS = [
'.c',
'.cc',
'.h',
'.md',
'.py',
'.rst',
'.sh',
'.yml',
".c",
".cc",
".h",
".md",
".py",
".rst",
".sh",
".yml",
]
TAB_SIZE = 4
@ -142,14 +141,16 @@ def should_analyze_directory(dirpath: str) -> bool:
_, directory = os.path.split(dirpath)
return not (directory in DIRECTORIES_TO_SKIP or
(directory.startswith('.') and directory != '.'))
return not (
directory in DIRECTORIES_TO_SKIP or (directory.startswith(".") and directory != ".")
)
def should_analyze_file(path: str,
files_to_check: List[str],
file_extensions_to_check: List[str],
) -> bool:
def should_analyze_file(
path: str,
files_to_check: List[str],
file_extensions_to_check: List[str],
) -> bool:
"""
Check whether a file should be analyzed.
@ -166,11 +167,12 @@ def should_analyze_file(path: str,
basename, extension = os.path.splitext(filename)
return (basename in files_to_check or
extension in file_extensions_to_check)
return basename in files_to_check or extension in file_extensions_to_check
def find_files_to_check_style(paths: List[str]) -> Tuple[List[str], List[str], List[str], List[str]]:
def find_files_to_check_style(
paths: List[str],
) -> Tuple[List[str], List[str], List[str], List[str]]:
"""
Find all files to be checked in a given list of paths.
@ -199,7 +201,7 @@ def find_files_to_check_style(paths: List[str]) -> Tuple[List[str], List[str], L
files_to_check.extend([os.path.join(dirpath, f) for f in filenames])
else:
raise ValueError(f'Error: {path} is not a file nor a directory')
raise ValueError(f"Error: {path} is not a file nor a directory")
files_to_check.sort()
@ -239,47 +241,48 @@ def find_clang_format_path() -> str:
# Find exact version
for version in CLANG_FORMAT_VERSIONS:
clang_format_path = shutil.which(f'clang-format-{version}')
clang_format_path = shutil.which(f"clang-format-{version}")
if clang_format_path:
return clang_format_path
# Find default version and check if it is supported
clang_format_path = shutil.which('clang-format')
clang_format_path = shutil.which("clang-format")
if clang_format_path:
process = subprocess.run(
[clang_format_path, '--version'],
[clang_format_path, "--version"],
capture_output=True,
text=True,
check=True,
)
version = process.stdout.strip().split(' ')[-1]
major_version = int(version.split('.')[0])
version = process.stdout.strip().split(" ")[-1]
major_version = int(version.split(".")[0])
if major_version in CLANG_FORMAT_VERSIONS:
return clang_format_path
# No supported version of clang-format found
raise RuntimeError(
f'Could not find any supported version of clang-format installed on this system. '
f'List of supported versions: {CLANG_FORMAT_VERSIONS}.'
f"Could not find any supported version of clang-format installed on this system. "
f"List of supported versions: {CLANG_FORMAT_VERSIONS}."
)
###########################################################
# CHECK STYLE MAIN FUNCTIONS
###########################################################
def check_style_clang_format(paths: List[str],
enable_check_include_prefixes: bool,
enable_check_formatting: bool,
enable_check_whitespace: bool,
enable_check_tabs: bool,
fix: bool,
verbose: bool,
n_jobs: int = 1,
) -> bool:
def check_style_clang_format(
paths: List[str],
enable_check_include_prefixes: bool,
enable_check_formatting: bool,
enable_check_whitespace: bool,
enable_check_tabs: bool,
fix: bool,
verbose: bool,
n_jobs: int = 1,
) -> bool:
"""
Check / fix the coding style of a list of files.
@ -294,10 +297,12 @@ def check_style_clang_format(paths: List[str],
@return Whether all files are compliant with all enabled style checks.
"""
(files_to_check_include_prefixes,
files_to_check_formatting,
files_to_check_whitespace,
files_to_check_tabs) = find_files_to_check_style(paths)
(
files_to_check_include_prefixes,
files_to_check_formatting,
files_to_check_whitespace,
files_to_check_tabs,
) = find_files_to_check_style(paths)
check_include_prefixes_successful = True
check_formatting_successful = True
@ -316,11 +321,11 @@ def check_style_clang_format(paths: List[str],
check_style_line_function=check_include_prefixes_line,
)
print('')
print("")
if enable_check_formatting:
check_formatting_successful = check_style_files(
'bad code formatting',
"bad code formatting",
check_formatting_file,
files_to_check_formatting,
fix,
@ -329,11 +334,11 @@ def check_style_clang_format(paths: List[str],
clang_format_path=find_clang_format_path(),
)
print('')
print("")
if enable_check_whitespace:
check_whitespace_successful = check_style_files(
'trailing whitespace',
"trailing whitespace",
check_manually_file,
files_to_check_whitespace,
fix,
@ -343,11 +348,11 @@ def check_style_clang_format(paths: List[str],
check_style_line_function=check_whitespace_line,
)
print('')
print("")
if enable_check_tabs:
check_tabs_successful = check_style_files(
'tabs',
"tabs",
check_manually_file,
files_to_check_tabs,
fix,
@ -357,22 +362,25 @@ def check_style_clang_format(paths: List[str],
check_style_line_function=check_tabs_line,
)
return all([
check_include_prefixes_successful,
check_formatting_successful,
check_whitespace_successful,
check_tabs_successful,
])
return all(
[
check_include_prefixes_successful,
check_formatting_successful,
check_whitespace_successful,
check_tabs_successful,
]
)
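A minimal call sketch matching the signature above (the paths and job count are illustrative):

    # Returns True only if every enabled check passes on every file under the paths.
    all_compliant = check_style_clang_format(
        ["src/core", "scratch"],
        enable_check_include_prefixes=True,
        enable_check_formatting=True,
        enable_check_whitespace=True,
        enable_check_tabs=True,
        fix=False,      # check mode; pass True to rewrite non-compliant files in place
        verbose=True,
        n_jobs=4,
    )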
def check_style_files(style_check_str: str,
check_style_file_function: Callable[..., Tuple[str, bool, List[str]]],
filenames: List[str],
fix: bool,
verbose: bool,
n_jobs: int,
**kwargs,
) -> bool:
def check_style_files(
style_check_str: str,
check_style_file_function: Callable[..., Tuple[str, bool, List[str]]],
filenames: List[str],
fix: bool,
verbose: bool,
n_jobs: int,
**kwargs,
) -> bool:
"""
Check / fix style of a list of files.
@ -399,7 +407,7 @@ def check_style_files(style_check_str: str,
*[arg if isinstance(arg, list) else itertools.repeat(arg) for arg in kwargs.values()],
)
for (filename, is_file_compliant, verbose_infos) in non_compliant_files_results:
for filename, is_file_compliant, verbose_infos in non_compliant_files_results:
if not is_file_compliant:
non_compliant_files.append(filename)
@ -408,22 +416,22 @@ def check_style_files(style_check_str: str,
# Output results
if not non_compliant_files:
print(f'- No files detected with {style_check_str}')
print(f"- No files detected with {style_check_str}")
return True
else:
n_non_compliant_files = len(non_compliant_files)
if fix:
print(f'- Fixed {style_check_str} in the files ({n_non_compliant_files}):')
print(f"- Fixed {style_check_str} in the files ({n_non_compliant_files}):")
else:
print(f'- Detected {style_check_str} in the files ({n_non_compliant_files}):')
print(f"- Detected {style_check_str} in the files ({n_non_compliant_files}):")
for f in non_compliant_files:
if verbose:
print(*[f' {l}' for l in files_verbose_infos[f]], sep='\n')
print(*[f" {l}" for l in files_verbose_infos[f]], sep="\n")
else:
print(f' - {f}')
print(f" - {f}")
# If all files were fixed, there are no more non-compliant files
return fix
@ -432,11 +440,12 @@ def check_style_files(style_check_str: str,
###########################################################
# CHECK STYLE FUNCTIONS
###########################################################
def check_formatting_file(filename: str,
fix: bool,
verbose: bool,
clang_format_path: str,
) -> Tuple[str, bool, List[str]]:
def check_formatting_file(
filename: str,
fix: bool,
verbose: bool,
clang_format_path: str,
) -> Tuple[str, bool, List[str]]:
"""
Check / fix the coding style of a file with clang-format.
@ -456,18 +465,18 @@ def check_formatting_file(filename: str,
[
clang_format_path,
filename,
'-style=file',
'--dry-run',
'--Werror',
"-style=file",
"--dry-run",
"--Werror",
# Optimization: In non-verbose mode, only one error is needed to check that the file is not compliant
f'--ferror-limit={0 if verbose else 1}',
f"--ferror-limit={0 if verbose else 1}",
],
check=False,
capture_output=True,
text=True,
)
is_file_compliant = (process.returncode == 0)
is_file_compliant = process.returncode == 0
if verbose:
verbose_infos = process.stderr.splitlines()
@ -478,8 +487,8 @@ def check_formatting_file(filename: str,
[
clang_format_path,
filename,
'-style=file',
'-i',
"-style=file",
"-i",
],
check=False,
stdout=subprocess.DEVNULL,
@ -489,12 +498,13 @@ def check_formatting_file(filename: str,
return (filename, is_file_compliant, verbose_infos)
def check_manually_file(filename: str,
fix: bool,
verbose: bool,
respect_clang_format_guards: bool,
check_style_line_function: Callable[[str, str, int], Tuple[bool, str, List[str]]],
) -> Tuple[str, bool, List[str]]:
def check_manually_file(
filename: str,
fix: bool,
verbose: bool,
respect_clang_format_guards: bool,
check_style_line_function: Callable[[str, str, int], Tuple[bool, str, List[str]]],
) -> Tuple[str, bool, List[str]]:
"""
Check / fix a file manually using a function to check / fix each line.
@ -512,11 +522,10 @@ def check_manually_file(filename: str,
verbose_infos: List[str] = []
clang_format_enabled = True
with open(filename, 'r', encoding='utf-8') as f:
with open(filename, "r", encoding="utf-8") as f:
file_lines = f.readlines()
for (i, line) in enumerate(file_lines):
for i, line in enumerate(file_lines):
# Check clang-format guards
if respect_clang_format_guards:
line_stripped = line.strip()
@ -526,12 +535,16 @@ def check_manually_file(filename: str,
elif line_stripped == CLANG_FORMAT_GUARD_OFF:
clang_format_enabled = False
if (not clang_format_enabled and
line_stripped not in (CLANG_FORMAT_GUARD_ON, CLANG_FORMAT_GUARD_OFF)):
if not clang_format_enabled and line_stripped not in (
CLANG_FORMAT_GUARD_ON,
CLANG_FORMAT_GUARD_OFF,
):
continue
# Check if the line is compliant with the style and fix it
(is_line_compliant, line_fixed, line_verbose_infos) = check_style_line_function(line, filename, i)
(is_line_compliant, line_fixed, line_verbose_infos) = check_style_line_function(
line, filename, i
)
if not is_line_compliant:
is_file_compliant = False
@ -544,16 +557,17 @@ def check_manually_file(filename: str,
# Update file with the fixed lines
if fix and not is_file_compliant:
with open(filename, 'w', encoding='utf-8') as f:
with open(filename, "w", encoding="utf-8") as f:
f.writelines(file_lines)
return (filename, is_file_compliant, verbose_infos)
def check_include_prefixes_line(line: str,
filename: str,
line_number: int,
) -> Tuple[bool, str, List[str]]:
def check_include_prefixes_line(
line: str,
filename: str,
line_number: int,
) -> Tuple[bool, str, List[str]]:
"""
Check / fix #include headers from the same module with the "ns3/" prefix in a line.
@ -580,24 +594,31 @@ def check_include_prefixes_line(line: str,
if os.path.exists(os.path.join(parent_path, header_file)):
is_line_compliant = False
line_fixed = line_stripped.replace(
f'ns3/{header_file}', header_file).replace('<', '"').replace('>', '"') + '\n'
line_fixed = (
line_stripped.replace(f"ns3/{header_file}", header_file)
.replace("<", '"')
.replace(">", '"')
+ "\n"
)
header_index = len('#include "')
verbose_infos.extend([
f'{filename}:{line_number + 1}:{header_index + 1}: error: #include headers from the same module with the "ns3/" prefix detected',
f' {line_stripped}',
f' {"":{header_index}}^',
])
verbose_infos.extend(
[
f'{filename}:{line_number + 1}:{header_index + 1}: error: #include headers from the same module with the "ns3/" prefix detected',
f" {line_stripped}",
f' {"":{header_index}}^',
]
)
return (is_line_compliant, line_fixed, verbose_infos)
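For illustration, a hedged example of the rewrite performed above; it only triggers when the named header really exists next to the file being checked:

    # Assuming src/foo/model/foo.h exists alongside the file under check:
    ok, fixed, _ = check_include_prefixes_line('#include <ns3/foo.h>\n', "src/foo/model/foo.cc", 0)
    # ok comes back False and fixed is '#include "foo.h"\n'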
def check_whitespace_line(line: str,
filename: str,
line_number: int,
) -> Tuple[bool, str, List[str]]:
def check_whitespace_line(
line: str,
filename: str,
line_number: int,
) -> Tuple[bool, str, List[str]]:
"""
Check / fix whitespace in a line.
@ -610,7 +631,7 @@ def check_whitespace_line(line: str,
"""
is_line_compliant = True
line_fixed = line.rstrip() + '\n'
line_fixed = line.rstrip() + "\n"
verbose_infos: List[str] = []
if line_fixed != line:
@ -618,18 +639,19 @@ def check_whitespace_line(line: str,
line_fixed_stripped_expanded = line_fixed.rstrip().expandtabs(TAB_SIZE)
verbose_infos = [
f'{filename}:{line_number + 1}:{len(line_fixed_stripped_expanded) + 1}: error: Trailing whitespace detected',
f' {line_fixed_stripped_expanded}',
f"{filename}:{line_number + 1}:{len(line_fixed_stripped_expanded) + 1}: error: Trailing whitespace detected",
f" {line_fixed_stripped_expanded}",
f' {"":{len(line_fixed_stripped_expanded)}}^',
]
return (is_line_compliant, line_fixed, verbose_infos)
def check_tabs_line(line: str,
filename: str,
line_number: int,
) -> Tuple[bool, str, List[str]]:
def check_tabs_line(
line: str,
filename: str,
line_number: int,
) -> Tuple[bool, str, List[str]]:
"""
Check / fix tabs in a line.
@ -645,15 +667,15 @@ def check_tabs_line(line: str,
line_fixed = line
verbose_infos: List[str] = []
tab_index = line.find('\t')
tab_index = line.find("\t")
if tab_index != -1:
is_line_compliant = False
line_fixed = line.expandtabs(TAB_SIZE)
verbose_infos = [
f'{filename}:{line_number + 1}:{tab_index + 1}: error: Tab detected',
f' {line.rstrip()}',
f"{filename}:{line_number + 1}:{tab_index + 1}: error: Tab detected",
f" {line.rstrip()}",
f' {"":{tab_index}}^',
]
@ -663,42 +685,71 @@ def check_tabs_line(line: str,
###########################################################
# MAIN
###########################################################
if __name__ == '__main__':
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Check and apply the ns-3 coding style recursively to all files in the given PATHs. '
'The script checks the formatting of the file with clang-format. '
description="Check and apply the ns-3 coding style recursively to all files in the given PATHs. "
"The script checks the formatting of the file with clang-format. "
'Additionally, it checks #include headers from the same module with the "ns3/" prefix, '
'the presence of trailing whitespace and tabs. '
"the presence of trailing whitespace and tabs. "
'Formatting, local #include "ns3/" prefixes and tabs checks respect clang-format guards. '
'When used in "check mode" (default), the script checks if all files are well '
'formatted and do not have trailing whitespace nor tabs. '
'If it detects non-formatted files, they will be printed and this process exits with a '
'non-zero code. When used in "fix mode", this script automatically fixes the files.')
"formatted and do not have trailing whitespace nor tabs. "
"If it detects non-formatted files, they will be printed and this process exits with a "
'non-zero code. When used in "fix mode", this script automatically fixes the files.'
)
parser.add_argument('paths', action='store', type=str, nargs='+',
help='List of paths to the files to check',)
parser.add_argument(
"paths",
action="store",
type=str,
nargs="+",
help="List of paths to the files to check",
)
parser.add_argument('--no-include-prefixes', action='store_true',
help='Do not check / fix #include headers from the same module with the "ns3/" prefix',)
parser.add_argument(
"--no-include-prefixes",
action="store_true",
help='Do not check / fix #include headers from the same module with the "ns3/" prefix',
)
parser.add_argument('--no-formatting', action='store_true',
help='Do not check / fix code formatting',)
parser.add_argument(
"--no-formatting",
action="store_true",
help="Do not check / fix code formatting",
)
parser.add_argument('--no-whitespace', action='store_true',
help='Do not check / fix trailing whitespace',)
parser.add_argument(
"--no-whitespace",
action="store_true",
help="Do not check / fix trailing whitespace",
)
parser.add_argument('--no-tabs', action='store_true',
help='Do not check / fix tabs',)
parser.add_argument(
"--no-tabs",
action="store_true",
help="Do not check / fix tabs",
)
parser.add_argument('--fix', action='store_true',
help='Fix coding style issues detected in the files',)
parser.add_argument(
"--fix",
action="store_true",
help="Fix coding style issues detected in the files",
)
parser.add_argument('-v', '--verbose', action='store_true',
help='Show the lines that are not well-formatted',)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Show the lines that are not well-formatted",
)
parser.add_argument('-j', '--jobs', type=int, default=max(1, os.cpu_count() - 1),
help='Number of parallel jobs',)
parser.add_argument(
"-j",
"--jobs",
type=int,
default=max(1, os.cpu_count() - 1),
help="Number of parallel jobs",
)
args = parser.parse_args()


@ -1,13 +1,12 @@
#! /usr/bin/env python3
import sys
import argparse
import os
import re
import shutil
import sys
from pathlib import Path
CMAKELISTS_TEMPLATE = '''\
CMAKELISTS_TEMPLATE = """\
check_include_file_cxx(stdint.h HAVE_STDINT_H)
if(HAVE_STDINT_H)
add_definitions(-DHAVE_STDINT_H)
@ -30,10 +29,10 @@ build_lib(
TEST_SOURCES test/{MODULE}-test-suite.cc
${{examples_as_tests_sources}}
)
'''
"""
MODEL_CC_TEMPLATE = '''\
MODEL_CC_TEMPLATE = """\
#include "{MODULE}.h"
namespace ns3
@ -42,10 +41,10 @@ namespace ns3
/* ... */
}}
'''
"""
MODEL_H_TEMPLATE = '''\
MODEL_H_TEMPLATE = """\
#ifndef {INCLUDE_GUARD}
#define {INCLUDE_GUARD}
@ -66,10 +65,10 @@ namespace ns3
}}
#endif /* {INCLUDE_GUARD} */
'''
"""
HELPER_CC_TEMPLATE = '''\
HELPER_CC_TEMPLATE = """\
#include "{MODULE}-helper.h"
namespace ns3
@ -78,10 +77,10 @@ namespace ns3
/* ... */
}}
'''
"""
HELPER_H_TEMPLATE = '''\
HELPER_H_TEMPLATE = """\
#ifndef {INCLUDE_GUARD}
#define {INCLUDE_GUARD}
@ -98,18 +97,18 @@ namespace ns3
}}
#endif /* {INCLUDE_GUARD} */
'''
"""
EXAMPLES_CMAKELISTS_TEMPLATE = '''\
EXAMPLES_CMAKELISTS_TEMPLATE = """\
build_lib_example(
NAME {MODULE}-example
SOURCE_FILES {MODULE}-example.cc
LIBRARIES_TO_LINK ${{lib{MODULE}}}
)
'''
"""
EXAMPLE_CC_TEMPLATE = '''\
EXAMPLE_CC_TEMPLATE = """\
#include "ns3/core-module.h"
#include "ns3/{MODULE}-helper.h"
@ -137,10 +136,10 @@ main(int argc, char* argv[])
Simulator::Destroy();
return 0;
}}
'''
"""
TEST_CC_TEMPLATE = '''\
TEST_CC_TEMPLATE = """\
// Include a header file from your module to test.
#include "ns3/{MODULE}.h"
@ -227,10 +226,10 @@ class {CAPITALIZED}TestSuite : public TestSuite
* Static variable for test initialization
*/
static {CAPITALIZED}TestSuite s{COMPOUND}TestSuite;
'''
"""
DOC_RST_TEMPLATE = '''Example Module Documentation
DOC_RST_TEMPLATE = """Example Module Documentation
----------------------------
.. include:: replace.txt
@ -328,18 +327,19 @@ Validation
Describe how the model has been tested/validated. What tests run in the
test suite? How much API and code is covered by the tests? Again,
references to outside published work may help here.
'''
"""
def create_file(path, template, **kwargs):
artifact_path = Path(path)
#open file for (w)rite and in (t)ext mode
# open file for (w)rite and in (t)ext mode
with artifact_path.open("wt", encoding="utf-8") as f:
f.write(template.format(**kwargs))
def make_cmakelists(moduledir, modname):
path = Path(moduledir, 'CMakeLists.txt')
path = Path(moduledir, "CMakeLists.txt")
macro = "build_lib"
create_file(path, CMAKELISTS_TEMPLATE, MODULE=modname)
@ -350,14 +350,12 @@ def make_model(moduledir, modname):
modelpath = Path(moduledir, "model")
modelpath.mkdir(parents=True)
srcfile_path = modelpath.joinpath(modname).with_suffix('.cc')
srcfile_path = modelpath.joinpath(modname).with_suffix(".cc")
create_file(srcfile_path, MODEL_CC_TEMPLATE, MODULE=modname)
hfile_path = modelpath.joinpath(modname).with_suffix('.h')
guard = "{}_H".format(modname.replace('-', '_').upper())
create_file(hfile_path, MODEL_H_TEMPLATE,
MODULE=modname,
INCLUDE_GUARD=guard)
hfile_path = modelpath.joinpath(modname).with_suffix(".h")
guard = "{}_H".format(modname.replace("-", "_").upper())
create_file(hfile_path, MODEL_H_TEMPLATE, MODULE=modname, INCLUDE_GUARD=guard)
return True
@ -366,11 +364,17 @@ def make_test(moduledir, modname):
testpath = Path(moduledir, "test")
testpath.mkdir(parents=True)
file_path = testpath.joinpath(modname+'-test-suite').with_suffix('.cc')
name_parts = modname.split('-')
create_file(file_path, TEST_CC_TEMPLATE, MODULE=modname,
CAPITALIZED=''.join([word.capitalize() for word in name_parts]),
COMPOUND=''.join([word.capitalize() if index > 0 else word for index, word in enumerate(name_parts)]))
file_path = testpath.joinpath(modname + "-test-suite").with_suffix(".cc")
name_parts = modname.split("-")
create_file(
file_path,
TEST_CC_TEMPLATE,
MODULE=modname,
CAPITALIZED="".join([word.capitalize() for word in name_parts]),
COMPOUND="".join(
[word.capitalize() if index > 0 else word for index, word in enumerate(name_parts)]
),
)
return True
@ -379,11 +383,11 @@ def make_helper(moduledir, modname):
helperpath = Path(moduledir, "helper")
helperpath.mkdir(parents=True)
srcfile_path = helperpath.joinpath(modname+'-helper').with_suffix('.cc')
srcfile_path = helperpath.joinpath(modname + "-helper").with_suffix(".cc")
create_file(srcfile_path, HELPER_CC_TEMPLATE, MODULE=modname)
h_file_path = helperpath.joinpath(modname+'-helper').with_suffix('.h')
guard = "{}_HELPER_H".format(modname.replace('-', '_').upper())
h_file_path = helperpath.joinpath(modname + "-helper").with_suffix(".h")
guard = "{}_HELPER_H".format(modname.replace("-", "_").upper())
create_file(h_file_path, HELPER_H_TEMPLATE, MODULE=modname, INCLUDE_GUARD=guard)
return True
@ -393,10 +397,10 @@ def make_examples(moduledir, modname):
examplespath = Path(moduledir, "examples")
examplespath.mkdir(parents=True)
cmakelistspath = Path(examplespath, 'CMakeLists.txt')
cmakelistspath = Path(examplespath, "CMakeLists.txt")
create_file(cmakelistspath, EXAMPLES_CMAKELISTS_TEMPLATE, MODULE=modname)
examplesfile_path = examplespath.joinpath(modname+'-example').with_suffix('.cc')
examplesfile_path = examplespath.joinpath(modname + "-example").with_suffix(".cc")
create_file(examplesfile_path, EXAMPLE_CC_TEMPLATE, MODULE=modname)
return True
@ -406,11 +410,11 @@ def make_doc(moduledir, modname):
docpath = Path(moduledir, "doc")
docpath.mkdir(parents=True)
#the module_dir template parameter must be a relative path
#instead of an absolute path
# the module_dir template parameter must be a relative path
# instead of an absolute path
mod_relpath = os.path.relpath(str(moduledir))
file_name = '{}.rst'.format(modname)
file_name = "{}.rst".format(modname)
file_path = Path(docpath, file_name)
create_file(file_path, DOC_RST_TEMPLATE, MODULE=modname, MODULE_DIR=mod_relpath)
@ -426,8 +430,7 @@ def make_module(modpath, modname):
print("Creating module {}".format(modulepath))
functions = (make_cmakelists, make_model, make_test,
make_helper, make_examples, make_doc)
functions = (make_cmakelists, make_model, make_test, make_helper, make_examples, make_doc)
try:
modulepath.mkdir(parents=True)
@ -447,6 +450,7 @@ def make_module(modpath, modname):
return True
def create_argument_parser():
description = """Generate scaffolding for ns-3 modules
@ -525,25 +529,36 @@ project directory.
formatter = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser(description=description,
epilog=epilog,
formatter_class=formatter)
parser = argparse.ArgumentParser(
description=description, epilog=epilog, formatter_class=formatter
)
parser.add_argument('--project', default='',
help=("Specify a relative path under the contrib directory "
"where the new modules will be generated. The path "
"will be created if it does not exist."))
parser.add_argument(
"--project",
default="",
help=(
"Specify a relative path under the contrib directory "
"where the new modules will be generated. The path "
"will be created if it does not exist."
),
)
parser.add_argument('modnames', nargs='+',
help=("One or more modules to generate. Module names "
"are limited to the following: letters, numbers, -, "
"_. Modules are generated under the contrib directory "
"except when the module name starts with src/. Modules "
"that start with src/ are generated under the src "
"directory."))
parser.add_argument(
"modnames",
nargs="+",
help=(
"One or more modules to generate. Module names "
"are limited to the following: letters, numbers, -, "
"_. Modules are generated under the contrib directory "
"except when the module name starts with src/. Modules "
"that start with src/ are generated under the src "
"directory."
),
)
return parser
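A small, hypothetical sketch of driving the parser above directly (main() normally does this with sys.argv):

    # Equivalent to: ./utils/create-module.py --project my-lab first-module second-module
    parser = create_argument_parser()
    args = parser.parse_args(["--project", "my-lab", "first-module", "second-module"])
    # args.project == "my-lab"; args.modnames == ["first-module", "second-module"]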
def main(argv):
parser = create_argument_parser()
@ -554,46 +569,47 @@ def main(argv):
base_path = Path.cwd()
src_path = base_path.joinpath('src')
contrib_path = base_path.joinpath('contrib')
src_path = base_path.joinpath("src")
contrib_path = base_path.joinpath("contrib")
for p in (src_path, contrib_path):
if not p.is_dir():
parser.error("Cannot find the directory '{}'.\nPlease run this "
"script from the top level of the ns3 directory".format(
p))
parser.error(
"Cannot find the directory '{}'.\nPlease run this "
"script from the top level of the ns3 directory".format(p)
)
#
# Error check the arguments
#
# Alphanumeric and '-' only
allowedRE = re.compile('^(\w|-)+$')
allowedRE = re.compile("^(\w|-)+$")
project_path = None
if project:
#project may be a path in the form a/b/c
#remove any leading or trailing path separators
# project may be a path in the form a/b/c
# remove any leading or trailing path separators
project_path = Path(project)
if project_path.is_absolute():
#remove leading separator
# remove leading separator
project_path = project_path.relative_to(os.sep)
if not all(allowedRE.match(part) for part in project_path.parts):
parser.error('Project path may only contain the characters [a-zA-Z0-9_-].')
parser.error("Project path may only contain the characters [a-zA-Z0-9_-].")
#
# Create each module, if it doesn't exist
#
modules = []
for name in modnames:
if name:
#remove any leading or trailing directory separators
# remove any leading or trailing directory separators
name = name.strip(os.sep)
if not name:
#skip empty modules
# skip empty modules
continue
name_path = Path(name)
@ -602,33 +618,41 @@ def main(argv):
print("Skipping {}: module name can not be a path".format(name))
continue
#default target directory is contrib
# default target directory is contrib
modpath = contrib_path
if name_path.parts[0] == 'src':
if name_path.parts[0] == "src":
if project:
parser.error("{}: Cannot specify src/ in a module name when --project option is used".format(name))
parser.error(
"{}: Cannot specify src/ in a module name when --project option is used".format(
name
)
)
modpath = src_path
#create a new path without the src part
name_path = name_path.relative_to('src')
# create a new path without the src part
name_path = name_path.relative_to("src")
elif name_path.parts[0] == 'contrib':
elif name_path.parts[0] == "contrib":
modpath = contrib_path
#create a new path without the contrib part
name_path = name_path.relative_to('contrib')
# create a new path without the contrib part
name_path = name_path.relative_to("contrib")
if project_path:
#if a project path was specified, that overrides other paths
#project paths are always relative to the contrib path
# if a project path was specified, that overrides other paths
# project paths are always relative to the contrib path
modpath = contrib_path.joinpath(project_path)
modname = name_path.parts[0]
if not allowedRE.match(modname):
print("Skipping {}: module name may only contain the characters [a-zA-Z0-9_-]".format(modname))
print(
"Skipping {}: module name may only contain the characters [a-zA-Z0-9_-]".format(
modname
)
)
continue
modules.append((modpath, modname))
@ -640,7 +664,8 @@ def main(argv):
return 0
if __name__ == '__main__':
if __name__ == "__main__":
return_value = 0
try:
return_value = main(sys.argv)

File diff suppressed because it is too large.


@ -19,6 +19,7 @@
# Author: Gustavo J. A. M. Carneiro <gjc@inescporto.pt>
import unittest
try:
from ns import ns
except ModuleNotFoundError:
@ -60,12 +61,14 @@ class TestSimulator(unittest.TestCase):
ns.Simulator.Destroy()
self._args_received = None
self._cb_time = None
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
EventImpl* pythonMakeEvent(void (*f)(std::vector<std::string>), std::vector<std::string> l)
{
return MakeEvent(f, l);
}
""")
"""
)
event = ns.cppyy.gbl.pythonMakeEvent(callback, sys.argv)
ns.Simulator.ScheduleNow(event)
ns.Simulator.Run()
@ -89,12 +92,14 @@ class TestSimulator(unittest.TestCase):
ns.Simulator.Destroy()
self._args_received = None
self._cb_time = None
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
EventImpl* pythonMakeEvent2(void (*f)(std::vector<std::string>), std::vector<std::string> l)
{
return MakeEvent(f, l);
}
""")
"""
)
event = ns.cppyy.gbl.pythonMakeEvent2(callback, sys.argv)
ns.Simulator.Schedule(ns.Seconds(123), event)
ns.Simulator.Run()
@@ -120,12 +125,14 @@ class TestSimulator(unittest.TestCase):
self._cb_time = None
ns.cppyy.cppdef("void null(){ return; }")
ns.Simulator.Schedule(ns.Seconds(123), ns.cppyy.gbl.null)
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
EventImpl* pythonMakeEvent3(void (*f)(std::vector<std::string>), std::vector<std::string> l)
{
return MakeEvent(f, l);
}
""")
"""
)
event = ns.cppyy.gbl.pythonMakeEvent3(callback, sys.argv)
ns.Simulator.ScheduleDestroy(event)
ns.Simulator.Run()
@@ -153,12 +160,14 @@ class TestSimulator(unittest.TestCase):
self._args_received = None
self._cb_time = None
self._context_received = None
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
EventImpl* pythonMakeEvent4(void (*f)(uint32_t, std::vector<std::string>), uint32_t context, std::vector<std::string> l)
{
return MakeEvent(f, context, l);
}
""")
"""
)
event = ns.cppyy.gbl.pythonMakeEvent4(callback, 54321, sys.argv)
ns.Simulator.ScheduleWithContext(54321, ns.Seconds(123), event)
ns.Simulator.Run()
@@ -210,20 +219,29 @@ class TestSimulator(unittest.TestCase):
def python_rx_callback(socket) -> None:
self._received_packet = socket.Recv(maxSize=UINT32_MAX, flags=0)
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
Callback<void,ns3::Ptr<ns3::Socket> > make_rx_callback_test_socket(void(*func)(Ptr<Socket>))
{
return MakeCallback(func);
}
""")
"""
)
sink = ns.network.Socket.CreateSocket(node, ns.core.TypeId.LookupByName("ns3::UdpSocketFactory"))
sink = ns.network.Socket.CreateSocket(
node, ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")
)
sink.Bind(ns.network.InetSocketAddress(ns.network.Ipv4Address.GetAny(), 80).ConvertTo())
sink.SetRecvCallback(ns.cppyy.gbl.make_rx_callback_test_socket(python_rx_callback))
source = ns.network.Socket.CreateSocket(node, ns.core.TypeId.LookupByName("ns3::UdpSocketFactory"))
source.SendTo(ns.network.Packet(19), 0,
ns.network.InetSocketAddress(ns.network.Ipv4Address("127.0.0.1"), 80).ConvertTo())
source = ns.network.Socket.CreateSocket(
node, ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")
)
source.SendTo(
ns.network.Packet(19),
0,
ns.network.InetSocketAddress(ns.network.Ipv4Address("127.0.0.1"), 80).ConvertTo(),
)
ns.Simulator.Run()
self.assertTrue(self._received_packet is not None)
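A condensed sketch of the receive-callback bridge used above (helper name and node setup are illustrative; assumes a built ns-3 with Python bindings): MakeCallback() turns the function pointer cppyy derives from the Python callable into the Callback type that Socket::SetRecvCallback expects.

from ns import ns

def on_rx(socket) -> None:
    packet = socket.Recv(maxSize=0xFFFFFFFF, flags=0)
    print("received %d bytes" % packet.GetSize())

# One-time C++ shim converting a Python callable into an ns3::Callback.
ns.cppyy.cppdef("""
    Callback<void,ns3::Ptr<ns3::Socket> > make_rx_callback_sketch(void(*func)(Ptr<Socket>))
    { return MakeCallback(func); }
""")

nodes = ns.network.NodeContainer()
nodes.Create(1)
ns.internet.InternetStackHelper().Install(nodes)  # provides the UdpSocketFactory
sink = ns.network.Socket.CreateSocket(
    nodes.Get(0), ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")
)
sink.Bind(ns.network.InetSocketAddress(ns.network.Ipv4Address.GetAny(), 80).ConvertTo())
sink.SetRecvCallback(ns.cppyy.gbl.make_rx_callback_sketch(on_rx))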
@@ -297,7 +315,7 @@ class TestSimulator(unittest.TestCase):
@param self this object
@return None
"""
from ctypes import c_bool, c_int, c_double, c_char_p, create_string_buffer
from ctypes import c_bool, c_char_p, c_double, c_int, create_string_buffer
test1 = c_bool(True)
test2 = c_int(42)
@@ -362,12 +380,12 @@ class TestSimulator(unittest.TestCase):
stack.Install(nodes)
address = ns.internet.Ipv4AddressHelper()
address.SetBase(ns.network.Ipv4Address("10.1.1.0"),
ns.network.Ipv4Mask("255.255.255.0"))
address.SetBase(ns.network.Ipv4Address("10.1.1.0"), ns.network.Ipv4Mask("255.255.255.0"))
interfaces = address.Assign(devices)
ns.cppyy.cppdef("""
ns.cppyy.cppdef(
"""
namespace ns3
{
Callback<void,Ptr<Socket> > make_rx_callback(void(*func)(Ptr<Socket>))
@@ -379,7 +397,8 @@ class TestSimulator(unittest.TestCase):
return MakeEvent(f, socket, packet, address);
}
}
""")
"""
)
## EchoServer application class
class EchoServer(ns.applications.Application):
@@ -399,9 +418,14 @@ class TestSimulator(unittest.TestCase):
## Listen port for the server
self.port = port
## Socket used by the server to listen to port
self.m_socket = ns.network.Socket.CreateSocket(node,
ns.core.TypeId.LookupByName("ns3::UdpSocketFactory"))
self.m_socket.Bind(ns.network.InetSocketAddress(ns.network.Ipv4Address.GetAny(), self.port).ConvertTo())
self.m_socket = ns.network.Socket.CreateSocket(
node, ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")
)
self.m_socket.Bind(
ns.network.InetSocketAddress(
ns.network.Ipv4Address.GetAny(), self.port
).ConvertTo()
)
self.m_socket.SetRecvCallback(ns.make_rx_callback(EchoServer._Receive))
EchoServer.socketToInstanceDict[self.m_socket] = self
@@ -422,13 +446,16 @@ class TestSimulator(unittest.TestCase):
self.m_socket.SendTo(packet, 0, address)
if EchoServer.LOGGING:
inetAddress = ns.InetSocketAddress.ConvertFrom(address)
print("At time +{s}s server sent {b} bytes from {ip} port {port}"
.format(s=ns.Simulator.Now().GetSeconds(),
b=packet.__deref__().GetSize(),
ip=inetAddress.GetIpv4(),
port=inetAddress.GetPort()),
file=sys.stderr,
flush=True)
print(
"At time +{s}s server sent {b} bytes from {ip} port {port}".format(
s=ns.Simulator.Now().GetSeconds(),
b=packet.__deref__().GetSize(),
ip=inetAddress.GetIpv4(),
port=inetAddress.GetPort(),
),
file=sys.stderr,
flush=True,
)
def Receive(self):
"""! Function to receive a packet from an address
@@ -439,13 +466,16 @@ class TestSimulator(unittest.TestCase):
packet = self.m_socket.RecvFrom(address)
if EchoServer.LOGGING:
inetAddress = ns.InetSocketAddress.ConvertFrom(address)
print("At time +{s}s server received {b} bytes from {ip} port {port}"
.format(s=ns.Simulator.Now().GetSeconds(),
b=packet.__deref__().GetSize(),
ip=inetAddress.GetIpv4(),
port=inetAddress.GetPort()),
file=sys.stderr,
flush=True)
print(
"At time +{s}s server received {b} bytes from {ip} port {port}".format(
s=ns.Simulator.Now().GetSeconds(),
b=packet.__deref__().GetSize(),
ip=inetAddress.GetIpv4(),
port=inetAddress.GetPort(),
),
file=sys.stderr,
flush=True,
)
event = ns.pythonMakeEventSend(EchoServer._Send, self.m_socket, packet, address)
ns.Simulator.Schedule(ns.Seconds(1), event)
@@ -493,5 +523,5 @@ class TestSimulator(unittest.TestCase):
ns.Simulator.Destroy()
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(verbosity=1, failfast=True)


@@ -17,42 +17,52 @@
#
from __future__ import print_function
import sys
import subprocess
import argparse
import os
import subprocess
import sys
def print_case_in_file(case_string, out):
for i in range(100):
print("-", end='', file=out)
print("-", end="", file=out)
print(file=out)
print("running test case " + case_string, end='\n\n', file=out)
print("running test case " + case_string, end="\n\n", file=out)
out.flush()
def print_failed_cases(failed_cases):
print("\nFailed Cases:")
for case in failed_cases:
print(case)
def print_cmds(cmds):
print('Commands to be executed:')
print("Commands to be executed:")
for cmd in cmds:
print(cmd.replace(sys.executable, ''))
print(cmd.replace(sys.executable, ""))
def set_workdir():
dir_files = [f for f in os.listdir('.') if os.path.exists(f)]
if not 'VERSION' in dir_files and not 'ns3' in dir_files:
if os.path.split(os.path.abspath('.'))[1] == 'tests' and os.path.split(os.path.abspath(os.pardir))[1] == 'utils':
os.chdir('../../')
dir_files = [f for f in os.listdir(".") if os.path.exists(f)]
if not "VERSION" in dir_files and not "ns3" in dir_files:
if (
os.path.split(os.path.abspath("."))[1] == "tests"
and os.path.split(os.path.abspath(os.pardir))[1] == "utils"
):
os.chdir("../../")
else:
print('Error: Invalid working directory')
print("Error: Invalid working directory")
sys.exit(1)
## TestBaseClass class
class TestBaseClass:
"""
Generic class for testing tools based on provided commands and test cases.
Generic class for testing tools based on provided commands and test cases.
"""
## @var my_env
# os environment
## @var mode
@@ -72,9 +82,9 @@ class TestBaseClass:
"""
self.my_env = os.environ
set_workdir()
self.my_env['LD_LIBRARY_PATH'] = os.getcwd() + "/build"
self.my_env["LD_LIBRARY_PATH"] = os.getcwd() + "/build"
self.mode = mode
self.outfile = 'test-port-'+self.mode+'.out'
self.outfile = "test-port-" + self.mode + ".out"
self.options = self.parseargs(argv, desc)
def parseargs(self, argv, desc):
@@ -86,15 +96,39 @@ class TestBaseClass:
@return command line arguments
"""
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-f', '--file', action='store', dest='out_file', default=self.outfile,
metavar="FILE",
help='File to be used for storing the command specific output (Default: '+self.outfile+')')
parser.add_argument('-c', action='store_true', dest='cmds', default=False,
help='List out all the commands being tested')
parser.add_argument('-m', action='store_true', dest='mute', default=False,
help='Sends only stderr output to FILE')
parser.add_argument('-x', '--customcmd', action='store', dest='custcmd', default=None,
help='Enter a comma-separated list of commands to override the existing ones. NOT APPLICABLE FOR TEST-PY SUITE.')
parser.add_argument(
"-f",
"--file",
action="store",
dest="out_file",
default=self.outfile,
metavar="FILE",
help="File to be used for storing the command specific output (Default: "
+ self.outfile
+ ")",
)
parser.add_argument(
"-c",
action="store_true",
dest="cmds",
default=False,
help="List out all the commands being tested",
)
parser.add_argument(
"-m",
action="store_true",
dest="mute",
default=False,
help="Sends only stderr output to FILE",
)
parser.add_argument(
"-x",
"--customcmd",
action="store",
dest="custcmd",
default=None,
help="Enter a comma-separated list of commands to override the existing ones. NOT APPLICABLE FOR TEST-PY SUITE.",
)
return parser.parse_args(argv)
def override_cmds(self):
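For reference, the options defined above can be exercised directly; a minimal sketch, assuming it runs from the top of an ns-3 tree (the constructor calls set_workdir()) and using illustrative desc/mode strings:

suite = TestBaseClass(["-m", "-f", "report.out"], "demo suite", "demo")
print(suite.options.mute)      # True: only stderr goes to the report file
print(suite.options.out_file)  # "report.out"
print(suite.options.custcmd)   # None: no -x override supplied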
@@ -115,38 +149,39 @@ class TestBaseClass:
if self.options.cmds:
print_cmds(cmds)
return
base_dir = os.sep.join(os.path.abspath(__file__).replace(os.path.pathsep, '/').split('/')[:-3])
base_dir = os.sep.join(
os.path.abspath(__file__).replace(os.path.pathsep, "/").split("/")[:-3]
)
final_return = 0
total_tests = len(cmds)
passed = 0
progress = 0.0
failed_cases = []
with open(self.options.out_file, 'w', encoding='utf-8') as out:
with open(self.options.out_file, "w", encoding="utf-8") as out:
outstream = out
with open(os.devnull, 'w', encoding='utf-8') as sink:
with open(os.devnull, "w", encoding="utf-8") as sink:
if self.options.mute:
outstream = sink
for cmd in cmds:
case_string = cmd.replace(sys.executable, '')
case_string = cmd.replace(sys.executable, "")
print("running test case: " + case_string)
print_case_in_file(case_string, out)
progress += 1
ret = subprocess.call(cmd,
shell=True,
env=self.my_env,
stdout=outstream,
stderr=out,
cwd=base_dir
)
ret = subprocess.call(
cmd, shell=True, env=self.my_env, stdout=outstream, stderr=out, cwd=base_dir
)
if not ret:
passed += 1
else:
final_return = 1
failed_cases.append(case_string)
print("[ %s out of %s ] test cases passed; Progress = %.2f%% \n" % (passed, total_tests, progress*100/total_tests))
print(
"[ %s out of %s ] test cases passed; Progress = %.2f%% \n"
% (passed, total_tests, progress * 100 / total_tests)
)
if final_return != 0:
print_failed_cases(failed_cases)
else:
print("\nAll cases passed")
print("Detailed output available in " + self.options.out_file, end='\n\n')
print("Detailed output available in " + self.options.out_file, end="\n\n")
return final_return
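The loop above reduces to the following sketch (simplified and illustrative; the real runtests() also tracks progress, honours -m, and runs each command from the detected ns-3 base directory):

import subprocess

def run_cases_sketch(cmds, out_path="test-report.out"):
    """Run each shell command, log its output, and collect the failures."""
    failed = []
    with open(out_path, "w", encoding="utf-8") as out:
        for cmd in cmds:
            # As in runtests(), a non-zero exit status marks the case as failed.
            if subprocess.call(cmd, shell=True, stdout=out, stderr=out):
                failed.append(cmd)
    return failed

print(run_cases_sketch(["true", "false"]))  # -> ["false"] on a POSIX shell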

File diff suppressed because it is too large


@@ -61,61 +61,67 @@
# write detailed test results into XML-FILE.xml
from __future__ import print_function
from TestBase import TestBaseClass
import sys
from TestBase import TestBaseClass
def main(argv):
"""
Prepares test cases and executes
Prepares test cases and executes
"""
test_cases = [
'',
'-h',
'--help',
'-b build/',
'--buildpath=build/',
'-c performance',
'--constrain=performance',
'-d',
'--duration',
'-e socket-options-ipv6',
'--example=socket-options-ipv6',
'-u',
'--update-data',
'-f EXTENSIVE',
'--fullness=EXTENSIVE',
'-g',
'--grind',
'-l',
'--list',
'-m',
'--multiple',
'-n',
'--no-build',
'-p first',
'--pyexample=first',
'-r',
'--retain',
'-s ns3-tcp-state',
'--suite=ns3-tcp-state',
'-t t_opt.txt',
'--text=t_opt.txt && rm t_opt.txt',
'-v',
'--verbose',
'-w t_opt.html && rm t_opt.html',
'--web=t_opt.html && rm t_opt.html',
'--html=t_opt.html && rm t_opt.html',
'-x t_opt.xml && rm t_opt.xml',
'--xml=t_opt.xml && rm t_opt.xml',
"",
"-h",
"--help",
"-b build/",
"--buildpath=build/",
"-c performance",
"--constrain=performance",
"-d",
"--duration",
"-e socket-options-ipv6",
"--example=socket-options-ipv6",
"-u",
"--update-data",
"-f EXTENSIVE",
"--fullness=EXTENSIVE",
"-g",
"--grind",
"-l",
"--list",
"-m",
"--multiple",
"-n",
"--no-build",
"-p first",
"--pyexample=first",
"-r",
"--retain",
"-s ns3-tcp-state",
"--suite=ns3-tcp-state",
"-t t_opt.txt",
"--text=t_opt.txt && rm t_opt.txt",
"-v",
"--verbose",
"-w t_opt.html && rm t_opt.html",
"--web=t_opt.html && rm t_opt.html",
"--html=t_opt.html && rm t_opt.html",
"-x t_opt.xml && rm t_opt.xml",
"--xml=t_opt.xml && rm t_opt.xml",
]
configure_string = sys.executable + ' ns3 configure --enable-tests --enable-examples'
clean_string = sys.executable + ' ns3 clean'
cmd_execute_list = ['%s && %s test.py %s && %s' % (configure_string, sys.executable, option, clean_string) for option in test_cases]
runner = TestBaseClass(argv[1:], "Test suite for the ns-3 unit test runner", 'test-py')
configure_string = sys.executable + " ns3 configure --enable-tests --enable-examples"
clean_string = sys.executable + " ns3 clean"
cmd_execute_list = [
"%s && %s test.py %s && %s" % (configure_string, sys.executable, option, clean_string)
for option in test_cases
]
runner = TestBaseClass(argv[1:], "Test suite for the ns-3 unit test runner", "test-py")
return runner.runtests(cmd_execute_list)
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main(sys.argv))
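Expanding one entry of cmd_execute_list makes the structure clearer; with the "-l" case and an illustrative interpreter path, the generated shell command chains configure, the test case, and clean:

import sys

configure_string = sys.executable + " ns3 configure --enable-tests --enable-examples"
clean_string = sys.executable + " ns3 clean"
# Prints, e.g.:
#   /usr/bin/python3 ns3 configure --enable-tests --enable-examples && /usr/bin/python3 test.py -l && /usr/bin/python3 ns3 clean
print("%s && %s test.py %s && %s" % (configure_string, sys.executable, "-l", clean_string))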