Commit

black formatted existing files
eileen-kuehn committed Oct 27, 2019
1 parent aae25f2 commit d1806dc
Showing 22 changed files with 486 additions and 312 deletions.
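The changes in this commit are the output of the black code formatter run over the existing source files (quote normalization, line wrapping, trailing commas). As an illustration only, not part of the commit itself, here is a minimal sketch of the same transformation using black's Python API; it assumes the black package is installed, and the source snippet is made up for the example:

    # Hypothetical sketch: reformat a source string the way black reformats the files below.
    # Requires the black formatter to be installed (pip install black).
    import black

    source = "extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']\n"
    formatted = black.format_str(source, mode=black.FileMode())
    print(formatted)
    # -> extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"]

In practice the same result is produced by invoking the black command-line tool on the repository's directories.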
81 changes: 49 additions & 32 deletions docs/conf.py
@@ -18,7 +18,8 @@
import os
import sys
import lapis
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))


# -- Project information -----------------------------------------------------
@@ -43,24 +44,24 @@
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.imgmath",
"sphinx.ext.viewcode",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ".rst"

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -72,7 +73,7 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
@@ -83,7 +84,7 @@
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@@ -94,7 +95,7 @@
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
@@ -110,7 +111,7 @@
# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'lapisdoc'
htmlhelp_basename = "lapisdoc"


# -- Options for LaTeX output ------------------------------------------------
@@ -119,15 +120,12 @@
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',

# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@@ -137,19 +135,21 @@
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'lapis.tex', 'lapis Documentation',
'Eileen Kuehn, Max Fischer', 'manual'),
(
master_doc,
"lapis.tex",
"lapis Documentation",
"Eileen Kuehn, Max Fischer",
"manual",
)
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'lapis', 'lapis Documentation',
[author], 1)
]
man_pages = [(master_doc, "lapis", "lapis Documentation", [author], 1)]


# -- Options for Texinfo output ----------------------------------------------
@@ -158,9 +158,15 @@
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'lapis', 'lapis Documentation',
author, 'lapis', 'One line description of project.',
'Miscellaneous'),
(
master_doc,
"lapis",
"lapis Documentation",
author,
"lapis",
"One line description of project.",
"Miscellaneous",
)
]


@@ -179,7 +185,7 @@
# epub_uid = ''

# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
epub_exclude_files = ["search.html"]


# -- Extension configuration -------------------------------------------------
@@ -188,8 +194,8 @@

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"python": ('https://docs.python.org/3', None),
"usim": ('https://usim.readthedocs.io/en/stable', None),
"python": ("https://docs.python.org/3", None),
"usim": ("https://usim.readthedocs.io/en/stable", None),
}

# -- Options for todo extension ----------------------------------------------
@@ -203,10 +209,21 @@
def run_apidoc(_):
"""Run the `apidoc` tool to generate `autodoc` documentation for all modules"""
from sphinx.apidoc import main
output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'source', 'api'))
source_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', "lapis"))
main(['--module-first', '--separate', '--output-dir=' + output_dir, source_dir, '--force'])

output_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), "source", "api")
)
source_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "lapis"))
main(
[
"--module-first",
"--separate",
"--output-dir=" + output_dir,
source_dir,
"--force",
]
)


def setup(app):
app.connect('builder-inited', run_apidoc)
app.connect("builder-inited", run_apidoc)
2 changes: 1 addition & 1 deletion lapis/__init__.py
@@ -1,3 +1,3 @@
"""Lapis is an adaptable, performant, and interactive scheduling (Lapis) simulator"""

__version__ = '0.2.0'
__version__ = "0.2.0"
105 changes: 69 additions & 36 deletions lapis/cli/simulate.py
@@ -13,19 +13,17 @@
from lapis.scheduler import CondorJobScheduler
from lapis.simulator import Simulator

from lapis.monitor import LoggingSocketHandler, LoggingUDPSocketHandler, \
SimulationTimeFilter
from lapis.monitor import (
LoggingSocketHandler,
LoggingUDPSocketHandler,
SimulationTimeFilter,
)

last_step = 0

job_import_mapper = {
"htcondor": htcondor_job_reader,
"swf": swf_job_reader
}
job_import_mapper = {"htcondor": htcondor_job_reader, "swf": swf_job_reader}

pool_import_mapper = {
"htcondor": htcondor_pool_reader
}
pool_import_mapper = {"htcondor": htcondor_pool_reader}


@click.group()
@@ -37,15 +35,16 @@
@click.pass_context
def cli(ctx, seed, until, log_tcp, log_file, log_telegraf):
ctx.ensure_object(dict)
ctx.obj['seed'] = seed
ctx.obj['until'] = until
ctx.obj["seed"] = seed
ctx.obj["until"] = until
monitoring_logger = logging.getLogger()
monitoring_logger.setLevel(logging.DEBUG)
time_filter = SimulationTimeFilter()
monitoring_logger.addFilter(time_filter)
if log_tcp:
socketHandler = LoggingSocketHandler(
'localhost', logging.handlers.DEFAULT_TCP_LOGGING_PORT)
"localhost", logging.handlers.DEFAULT_TCP_LOGGING_PORT
)
socketHandler.setFormatter(JsonFormatter())
monitoring_logger.addHandler(socketHandler)
if log_file:
@@ -54,85 +53,119 @@ def cli(ctx, seed, until, log_tcp, log_file, log_telegraf):
monitoring_logger.addHandler(streamHandler)
if log_telegraf:
telegrafHandler = LoggingUDPSocketHandler(
"localhost", logging.handlers.DEFAULT_UDP_LOGGING_PORT)
"localhost", logging.handlers.DEFAULT_UDP_LOGGING_PORT
)
telegrafHandler.setFormatter(LineProtocolFormatter(resolution=1))
monitoring_logger.addHandler(telegrafHandler)


@cli.command()
@click.option("--job-file", "job_file", type=(
click.File("r"), click.Choice(list(job_import_mapper.keys()))))
@click.option("--pool-file", "pool_file", type=(
click.File("r"), click.Choice(list(pool_import_mapper.keys()))), multiple=True)
@click.option(
"--job-file",
"job_file",
type=(click.File("r"), click.Choice(list(job_import_mapper.keys()))),
)
@click.option(
"--pool-file",
"pool_file",
type=(click.File("r"), click.Choice(list(pool_import_mapper.keys()))),
multiple=True,
)
@click.pass_context
def static(ctx, job_file, pool_file):
click.echo("starting static environment")
simulator = Simulator(seed=ctx.obj["seed"])
file, file_type = job_file
simulator.create_job_generator(
job_input=file, job_reader=job_import_mapper[file_type])
job_input=file, job_reader=job_import_mapper[file_type]
)
simulator.create_scheduler(scheduler_type=CondorJobScheduler)
for current_pool in pool_file:
pool_file, pool_file_type = current_pool
simulator.create_pools(
pool_input=pool_file,
pool_reader=pool_import_mapper[pool_file_type],
pool_type=StaticPool)
pool_type=StaticPool,
)
simulator.run(until=ctx.obj["until"])


@cli.command()
@click.option("--job-file", "job_file", type=(
click.File("r"), click.Choice(list(job_import_mapper.keys()))))
@click.option("--pool-file", "pool_file", type=(
click.File("r"), click.Choice(list(pool_import_mapper.keys()))), multiple=True)
@click.option(
"--job-file",
"job_file",
type=(click.File("r"), click.Choice(list(job_import_mapper.keys()))),
)
@click.option(
"--pool-file",
"pool_file",
type=(click.File("r"), click.Choice(list(pool_import_mapper.keys()))),
multiple=True,
)
@click.pass_context
def dynamic(ctx, job_file, pool_file):
click.echo("starting dynamic environment")
simulator = Simulator(seed=ctx.obj["seed"])
file, file_type = job_file
simulator.create_job_generator(
job_input=file, job_reader=job_import_mapper[file_type])
job_input=file, job_reader=job_import_mapper[file_type]
)
simulator.create_scheduler(scheduler_type=CondorJobScheduler)
for current_pool in pool_file:
file, file_type = current_pool
simulator.create_pools(
pool_input=file,
pool_reader=pool_import_mapper[file_type],
pool_type=Pool,
controller=SimulatedLinearController)
controller=SimulatedLinearController,
)
simulator.run(until=ctx.obj["until"])


@cli.command()
@click.option("--job-file", "job_file", type=(
click.File("r"), click.Choice(list(job_import_mapper.keys()))))
@click.option("--static-pool-file", "static_pool_file", type=(
click.File("r"), click.Choice(list(pool_import_mapper.keys()))), multiple=True)
@click.option("--dynamic-pool-file", "dynamic_pool_file", type=(
click.File("r"), click.Choice(list(pool_import_mapper.keys()))), multiple=True)
@click.option(
"--job-file",
"job_file",
type=(click.File("r"), click.Choice(list(job_import_mapper.keys()))),
)
@click.option(
"--static-pool-file",
"static_pool_file",
type=(click.File("r"), click.Choice(list(pool_import_mapper.keys()))),
multiple=True,
)
@click.option(
"--dynamic-pool-file",
"dynamic_pool_file",
type=(click.File("r"), click.Choice(list(pool_import_mapper.keys()))),
multiple=True,
)
@click.pass_context
def hybrid(ctx, job_file, static_pool_file, dynamic_pool_file):
click.echo("starting hybrid environment")
simulator = Simulator(seed=ctx.obj["seed"])
file, file_type = job_file
simulator.create_job_generator(
job_input=file, job_reader=job_import_mapper[file_type])
job_input=file, job_reader=job_import_mapper[file_type]
)
simulator.create_scheduler(scheduler_type=CondorJobScheduler)
for current_pool in static_pool_file:
file, file_type = current_pool
simulator.create_pools(
pool_input=file, pool_reader=pool_import_mapper[file_type],
pool_type=StaticPool)
pool_input=file,
pool_reader=pool_import_mapper[file_type],
pool_type=StaticPool,
)
for current_pool in dynamic_pool_file:
file, file_type = current_pool
simulator.create_pools(
pool_input=file,
pool_reader=pool_import_mapper[file_type],
pool_type=Pool,
controller=SimulatedLinearController)
controller=SimulatedLinearController,
)
simulator.run(until=ctx.obj["until"])


if __name__ == '__main__':
if __name__ == "__main__":
cli()
(The remaining 19 of the 22 changed files are not shown here.)
